GESTimeline * musicTL (void) { GESTimeline *timeline; timeline = ges_timeline_new_audio_video (); GESLayer *layer = ges_layer_new (); GESLayer *audiolayer1 = ges_layer_new (); GESLayer *audiolayer2 = ges_layer_new (); ges_timeline_add_layer (timeline, layer); ges_timeline_add_layer (timeline, audiolayer1); ges_timeline_add_layer (timeline, audiolayer2); g_object_set (layer, "priority", 0, NULL); g_object_set (audiolayer1, "priority", 1, NULL); g_object_set (audiolayer2, "priority", 2, NULL); ges_clip_from_rel_path ("hd/fluidsimulation.mp4", layer, 0, 20, 10, GES_TRACK_TYPE_VIDEO); ges_clip_from_rel_path ("audio/prof.ogg", audiolayer1, 0, 0, 10, GES_TRACK_TYPE_AUDIO); ges_clip_from_rel_path ("audio/vask.wav", audiolayer2, 2, 0, 7, GES_TRACK_TYPE_AUDIO); ges_timeline_commit (timeline); return timeline; }
GESTimeline * videoTransparencyTL (void) { GESTimeline *timeline = ges_timeline_new_video (); GESLayer *layer1 = ges_layer_new (); ges_timeline_add_layer (timeline, layer1); g_object_set (layer1, "priority", 0, NULL); ges_multi_clip_from_rel_path ("transparent/blender-cube/%04d.png", layer1, 0, 0, 10); //ges_clip_from_rel_path ("transparent/bokeeh-raw.mkv", layer1, 0, 0, 10, // GES_TRACK_TYPE_VIDEO); GESLayer *layer2 = ges_layer_new (); ges_timeline_add_layer (timeline, layer2); g_object_set (layer2, "priority", 1, NULL); ges_clip_from_rel_path ("hd/fluidsimulation.mp4", layer2, 0, 20, 10, GES_TRACK_TYPE_VIDEO); ges_timeline_commit (timeline); return timeline; }
GESTimeline * overlayTL (void) { GESTimeline *timeline; GESLayer *layer, *layer2; timeline = ges_timeline_new_video (); layer = ges_layer_new (); layer2 = ges_layer_new (); ges_layer_set_priority (layer2, 1); guint prio1 = ges_layer_get_priority (layer); guint prio2 = ges_layer_get_priority (layer2); g_object_set (layer, "auto-transition", TRUE, NULL); g_print ("prios %d %d\n", prio1, prio2); ges_timeline_add_layer (timeline, layer); ges_timeline_add_layer (timeline, layer2); ges_clip_unknown_from_rel_path ("image/PNG_transparency_demonstration_1.png", layer, 0, 0, 10); ges_clip_unknown_from_rel_path ("hd/fluidsimulation.mp4", layer2, 0, 0, 10); ges_timeline_commit (timeline); return timeline; }
GESTimeline * compTL (void) { GESTimeline *timeline; GESTrack *trackv; timeline = ges_timeline_new (); trackv = GES_TRACK (ges_video_track_new ()); ges_timeline_add_track (timeline, trackv); const gchar *assets[] = { "image/vieh.png", "image/PNG_transparency_demonstration_1.png", "image/Ice_Cream.png", "image/Fish.png" }; guint asset_count = 4; for (int i = 1; i <= asset_count; i++) { GESLayer *layer = ges_layer_new (); ges_timeline_add_layer (timeline, layer); g_object_set (layer, "priority", i - 1, NULL); GESClip *vieh = ges_clip_from_rel_path (assets[i - 1], layer, 0, 0, 10, GES_TRACK_TYPE_VIDEO); GESTrackElement *elem = ges_clip_find_track_element (vieh, trackv, G_TYPE_NONE); GESUriClipAsset *asset = GES_URI_CLIP_ASSET (ges_extractable_get_asset (GES_EXTRACTABLE (vieh))); guint width = ges_asset_get_width (asset); guint height = ges_asset_get_height (asset); g_print ("%s: %dx%d\n", assets[i - 1], width, height); ges_track_element_set_child_properties (elem, "posx", i * 100, "posy", i * 100, "width", i * 100 * width / height, "height", (i * 100) - 1, NULL); } GESLayer *backgroud_layer = ges_layer_new (); ges_timeline_add_layer (timeline, backgroud_layer); g_object_set (backgroud_layer, "priority", asset_count, NULL); ges_clip_from_rel_path ("image/wallpaper-2597248.jpg", backgroud_layer, 0, 0, 10, GES_TRACK_TYPE_VIDEO); ges_timeline_commit (timeline); return timeline; }
GESTimeline * testPatternTL (void) { GESTimeline *timeline; GESLayer *layer; GESTestClip *srca, *srcb; timeline = ges_timeline_new_audio_video (); layer = ges_layer_new (); g_object_set (layer, "auto-transition", TRUE, NULL); ges_timeline_add_layer (timeline, layer); srca = ges_test_clip_new (); srcb = ges_test_clip_new (); g_object_set (srca, "vpattern", GES_VIDEO_TEST_PATTERN_SMPTE, "duration", 3 * GST_SECOND, "start", 0, NULL); g_object_set (srcb, "vpattern", GES_VIDEO_TEST_PATTERN_CIRCULAR, "duration", 3 * GST_SECOND, "start", 2 * GST_SECOND, NULL); ges_test_clip_set_frequency (srcb, 800); ges_layer_add_clip (layer, GES_CLIP (srca)); ges_layer_add_clip (layer, GES_CLIP (srcb)); ges_timeline_commit (timeline); return timeline; }
GESTimeline * positionTestTL (void) { GESTimeline *timeline; GESTrack *trackv; GError **error = NULL; GESAsset *asset; GESClip *clip; timeline = ges_timeline_new (); trackv = GES_TRACK (ges_video_track_new ()); ges_timeline_add_track (timeline, trackv); GESLayer *layer = ges_layer_new (); ges_timeline_add_layer (timeline, layer); asset = GES_ASSET (ges_uri_clip_asset_request_sync (ges_renderer_get_absolute_path ("image/wallpaper720p.jpg"), error)); clip = ges_layer_add_asset (layer, asset, 0, 0, 2 * GST_SECOND, GES_TRACK_TYPE_VIDEO); GESTrackElement *elem = ges_clip_find_track_element (clip, trackv, G_TYPE_NONE); ges_track_element_set_child_properties (elem, "posx", 100, "width", 100, NULL); ges_timeline_commit (timeline); return timeline; }
/* Applies one video effect to each of two consecutive clips. */
GESTimeline *
effectTL (void)
{
  GESTimeline *timeline = ges_timeline_new_audio_video ();
  GESLayer *layer = ges_layer_new ();

  ges_timeline_add_layer (timeline, layer);

  GESClip *first = ges_clip_unknown_from_rel_path ("sd/trailer_400p.ogg",
      layer, 0, 0, 5);
  GESClip *second =
      ges_clip_unknown_from_rel_path ("sd/sintel_trailer-480p.ogv", layer, 5,
      5, 5);

  GESEffect *aging = ges_effect_new ("agingtv");
  ges_container_add (GES_CONTAINER (first), GES_TIMELINE_ELEMENT (aging));

  /* Other fun candidates (Frei0r): "frei0r-filter-pixeliz0r",
   * "frei0r-filter-flippo", "frei0r-filter-twolay0r". */
  GESEffect *ripple = ges_effect_new ("rippletv");
  ges_container_add (GES_CONTAINER (second), GES_TIMELINE_ELEMENT (ripple));

  ges_timeline_commit (timeline);
  return timeline;
}
/* Renders a two-clip crossfade demo: the same asset is placed twice on an
 * auto-transition layer with a 5 second overlap, then encoded to an mp4
 * at hard-coded developer paths. */
int
main (int argc, char **argv)
{
  GESTimeline *timeline;
  GESLayer *layer;
  /* NOTE(review): a NULL GError ** is handed to the asset request below,
   * so load failures are silently ignored — consider a real GError. */
  GError **error = NULL;
  GESAsset *asset;
  /* Hard-coded absolute paths; only usable on the original machine. */
  const gchar *url =
      "file:///home/bmonkey/workspace/ges/ges-renderer/data/sd/sintel_trailer-480p.mp4";
  const gchar *exportURL =
      "file:///home/bmonkey/workspace/ges/ges-renderer/transition.mp4";

  gst_init (&argc, &argv);
  ges_init ();

  timeline = ges_timeline_new_audio_video ();
  layer = ges_layer_new ();
  g_object_set (layer, "auto-transition", TRUE, NULL);
  ges_timeline_add_layer (timeline, layer);

  asset = GES_ASSET (ges_uri_clip_asset_request_sync (url, error));
  /* Clip 1 spans 0-10 s, clip 2 spans 5-15 s: the 5 s overlap is where
   * the auto-transition is generated. */
  ges_layer_add_asset (layer, asset, 0 * GST_SECOND, 0 * GST_SECOND,
      10 * GST_SECOND, GES_TRACK_TYPE_VIDEO);
  ges_layer_add_asset (layer, asset, 5 * GST_SECOND, 20 * GST_SECOND,
      10 * GST_SECOND, GES_TRACK_TYPE_VIDEO);
  ges_timeline_commit (timeline);

  /* NOTE(review): `duration` and `pipeline` are not declared in this
   * function — presumably file-scope globals shared with
   * ges_renderer_print_progress (); confirm they exist at file scope. */
  duration = ges_timeline_get_duration (timeline);
  pipeline = ges_pipeline_new ();
  ges_pipeline_set_timeline (pipeline, timeline);

  /* PAL-sized output: 720x576 @ 25 fps, AAC/H.264 in QuickTime. */
  GESRendererProfile pal = { 720, 576, 25, PROFILE_AAC_H264_QUICKTIME };
  GstCaps *settings = gst_caps_from_renderer_profile (&pal);
  GstEncodingProfile *profile = profile_get_encoding_profile (settings);
  ges_pipeline_set_render_settings (pipeline, exportURL, profile);
  ges_pipeline_set_mode (pipeline, GES_PIPELINE_MODE_RENDER);

  GMainLoop *mainloop;
  mainloop = g_main_loop_new (NULL, FALSE);

  /* Watch the bus for EOS/errors and print render progress every 100 ms. */
  GstBus *bus;
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  g_signal_connect (bus, "message", (GCallback) bus_message_cb, mainloop);
  g_timeout_add (100, (GSourceFunc) ges_renderer_print_progress, NULL);
  gst_bus_add_signal_watch (bus);

  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);
  g_main_loop_run (mainloop);
  g_main_loop_unref (mainloop);

  return 0;
}
/* GstValidate action handler: create a layer at the requested priority,
 * failing if no priority was given or a layer already occupies it. */
static gboolean
_add_layer (GstValidateScenario * scenario, GstValidateAction * action)
{
  GESTimeline *timeline = get_timeline (scenario);
  gint prio;
  gboolean ok = FALSE;

  if (gst_structure_get_int (action->structure, "priority", &prio)) {
    GESLayer *existing = _get_layer_by_priority (timeline, prio);

    if (existing == NULL) {
      GESLayer *created = ges_layer_new ();

      g_object_set (created, "priority", prio, NULL);
      ok = ges_timeline_add_layer (timeline, created);
    } else {
      GST_ERROR ("A layer with priority %d already exists, not creating a new one",
          prio);
      gst_object_unref (existing);
    }
  } else {
    GST_ERROR ("priority is needed when adding a layer");
  }

  g_object_unref (timeline);
  return ok;
}
GESTimeline * volumeTestTL (void) { GESTimeline *timeline; GESTrack *tracka; timeline = ges_timeline_new (); tracka = GES_TRACK (ges_audio_track_new ()); if (!ges_timeline_add_track (timeline, tracka)) { gst_object_unref (timeline); timeline = NULL; } GESLayer *layer1 = ges_layer_new (); GESLayer *layer2 = ges_layer_new (); ges_timeline_add_layer (timeline, layer1); ges_timeline_add_layer (timeline, layer2); g_object_set (layer1, "priority", 0, NULL); g_object_set (layer2, "priority", 1, NULL); GESClip *music1 = ges_clip_from_rel_path ("audio/02_Oliver_Huntemann_-_Rikarda.flac", layer1, 0, 0, 10, GES_TRACK_TYPE_AUDIO); ges_clip_from_rel_path ("audio/prof.ogg", layer2, 0, 0, 10, GES_TRACK_TYPE_AUDIO); GESTrackElement *elem = ges_clip_find_track_element (music1, tracka, G_TYPE_NONE); ges_track_element_set_child_properties (elem, "volume", 2.1, NULL); ges_timeline_commit (timeline); return timeline; }
GESTimeline * alphaTestTL (void) { GESTimeline *timeline; GESTrack *trackv; timeline = ges_timeline_new (); trackv = GES_TRACK (ges_video_track_new ()); if (!ges_timeline_add_track (timeline, trackv)) { gst_object_unref (timeline); timeline = NULL; } GESLayer *layer1 = ges_layer_new (); GESLayer *layer2 = ges_layer_new (); ges_timeline_add_layer (timeline, layer1); ges_timeline_add_layer (timeline, layer2); g_object_set (layer1, "priority", 0, NULL); g_object_set (layer2, "priority", 1, NULL); GESClip *png = ges_clip_from_rel_path ("image/Fish.png", layer1, 0, 0, 10, GES_TRACK_TYPE_VIDEO); GESTrackElement *elem = ges_clip_find_track_element (png, trackv, G_TYPE_NONE); ges_track_element_set_child_properties (elem, "alpha", 0.5, NULL); ges_clip_from_rel_path ("hd/fluidsimulation.mp4", layer2, 0, 20, 10, GES_TRACK_TYPE_VIDEO); ges_timeline_commit (timeline); return timeline; }
void ges_base_xml_formatter_add_layer (GESBaseXmlFormatter * self, GType extractable_type, guint priority, GstStructure * properties, const gchar * metadatas, GError ** error) { LayerEntry *entry; GESAsset *asset; GESLayer *layer; gboolean auto_transition = FALSE; GESBaseXmlFormatterPrivate *priv = _GET_PRIV (self); if (priv->check_only) return; if (extractable_type == G_TYPE_NONE) layer = ges_layer_new (); else { asset = ges_asset_request (extractable_type, NULL, error); if (asset == NULL) { if (error && *error == NULL) { g_set_error (error, G_MARKUP_ERROR, G_MARKUP_ERROR_INVALID_CONTENT, "Layer type %s could not be created'", g_type_name (extractable_type)); return; } } layer = GES_LAYER (ges_asset_extract (asset, error)); } ges_layer_set_priority (layer, priority); ges_timeline_add_layer (GES_FORMATTER (self)->timeline, layer); if (properties) { if (gst_structure_get_boolean (properties, "auto-transition", &auto_transition)) gst_structure_remove_field (properties, "auto-transition"); gst_structure_foreach (properties, (GstStructureForeachFunc) set_property_foreach, layer); } if (metadatas) ges_meta_container_add_metas_from_string (GES_META_CONTAINER (layer), metadatas); entry = g_slice_new0 (LayerEntry); entry->layer = gst_object_ref (layer); entry->auto_trans = auto_transition; g_hash_table_insert (priv->layers, GINT_TO_POINTER (priority), entry); }
/* Plays two HD clips back to back on a single video layer. */
GESTimeline *
hdTL (void)
{
  GESTimeline *timeline = ges_timeline_new_video ();
  GESLayer *layer = ges_layer_new ();

  ges_timeline_add_layer (timeline, layer);

  /* args: path, layer, start, in-point, duration (seconds) */
  ges_clip_unknown_from_rel_path ("hd/BlenderFluid.webm", layer, 0, 4, 5);
  ges_clip_unknown_from_rel_path ("hd/fluidsimulation.mp4", layer, 5, 7, 5);

  ges_timeline_commit (timeline);
  return timeline;
}
/* Crossfade demo: two clips overlapping by three seconds on a layer with
 * auto-transitions enabled. */
GESTimeline *
transitionTL (void)
{
  GESTimeline *timeline = ges_timeline_new_audio_video ();
  GESLayer *layer = ges_layer_new ();

  g_object_set (layer, "auto-transition", TRUE, NULL);
  ges_timeline_add_layer (timeline, layer);

  /* args: path, layer, start, in-point, duration (seconds) */
  ges_clip_unknown_from_rel_path ("sd/Mandelbox.mp4", layer, 0, 0, 10);
  ges_clip_unknown_from_rel_path ("sd/trailer_400p.ogg", layer, 7, 5, 10);

  ges_timeline_commit (timeline);
  return timeline;
}
/* Crossfades between two still images on an auto-transition layer. */
GESTimeline *
imageTL (void)
{
  GESTimeline *timeline = ges_timeline_new_video ();
  GESLayer *layer = ges_layer_new ();

  g_object_set (layer, "auto-transition", TRUE, NULL);
  ges_timeline_add_layer (timeline, layer);

  /* args: path, layer, start, in-point, duration (seconds) */
  ges_clip_unknown_from_rel_path ("image/LAMP_720_576.jpg", layer, 0, 0, 6);
  ges_clip_unknown_from_rel_path ("image/wallpaper-1946968.jpg", layer, 3, 0,
      6);

  ges_timeline_commit (timeline);
  return timeline;
}
/* Builds a preview pipeline whose timeline shows one media source plus a
 * text overlay on a single layer lasting `duration` seconds. */
GESPipeline *
make_timeline (char *path, float duration, char *text, guint32 color,
    gdouble xpos, gdouble ypos)
{
  GESPipeline *pipeline = ges_pipeline_new ();
  ges_pipeline_set_mode (pipeline, TIMELINE_MODE_PREVIEW_VIDEO);

  GESTimeline *timeline = ges_timeline_new ();
  ges_pipeline_add_timeline (pipeline, timeline);

  GESTrack *video_track = GES_TRACK (ges_video_track_new ());
  ges_timeline_add_track (timeline, video_track);
  GESTrack *audio_track = GES_TRACK (ges_audio_track_new ());
  ges_timeline_add_track (timeline, audio_track);

  GESLayer *layer = GES_LAYER (ges_layer_new ());
  g_object_set (layer, "priority", (gint32) 0, NULL);
  if (!ges_timeline_add_layer (timeline, layer))
    exit (-1);

  guint64 clip_duration = (guint64) (duration * GST_SECOND);
  GESClip *source = make_source (path, 0, clip_duration, 1);
  GESClip *overlay = make_overlay (text, 0, clip_duration, 0, color, xpos,
      ypos);
  ges_layer_add_clip (layer, source);
  ges_layer_add_clip (layer, overlay);

  return pipeline;
}
/* A one-minute sequence: four 15 second clips played back to back. */
GESTimeline *
minuteTL (void)
{
  GESTimeline *timeline = ges_timeline_new_audio_video ();
  GESLayer *layer = ges_layer_new ();

  ges_timeline_add_layer (timeline, layer);

  /* args: path, layer, start, in-point, duration (seconds) */
  ges_clip_unknown_from_rel_path
      ("sd/Black Ink and Water Test - A Place in Time Song.mp4", layer, 0, 0,
      15);
  ges_clip_unknown_from_rel_path ("sd/trailer_400p.ogg", layer, 15, 2, 15);
  ges_clip_unknown_from_rel_path ("sd/sintel_trailer-480p.mp4", layer, 30, 4,
      15);
  ges_clip_unknown_from_rel_path ("hd/fluidsimulation.mp4", layer, 45, 0, 15);

  ges_timeline_commit (timeline);
  return timeline;
}
void render_json (const char *filename) { JsonParser *parser; JsonNode *root; GError *error; parser = json_parser_new (); error = NULL; json_parser_load_from_file (parser, filename, &error); if (error) { g_print ("Parsing error `%s':\n %s\n", filename, error->message); g_error_free (error); g_object_unref (parser); exit (0); } root = json_parser_get_root (parser); JsonReader *reader = json_reader_new (root); GESTimeline *timeline; json_reader_read_member (reader, "composition"); // comp strings const char *name = getString (reader, "name"); //const char *src_dir = getString (reader, "src-dir"); //g_print ("Source Directory: %s\nName: %s\n", src_dir, name); // comp ints int width = getInt (reader, "width"); int height = getInt (reader, "height"); int fps = getInt (reader, "fps"); gboolean transparency = TRUE; if (is_in_members (reader, "transparency")) { transparency = getBool (reader, "transparency"); } gboolean absolute_paths = FALSE; if (is_in_members (reader, "absolute_paths")) { absolute_paths = getBool (reader, "absolute_paths"); } g_print ("Resolution: %dx%d, FPS: %d\n", width, height, fps); timeline = ges_timeline_new_audio_video (); int i; json_reader_read_member (reader, "layers"); for (i = 0; i < json_reader_count_elements (reader); i++) { json_reader_read_element (reader, i); GESLayer *layer = ges_layer_new (); g_object_set (layer, "priority", i, NULL); if (is_in_members (reader, "autotransition")) { gboolean autotransition = getBool (reader, "autotransition"); if (autotransition) g_print ("Auto Transitions on.\n"); g_object_set (layer, "auto-transition", autotransition, NULL); } ges_timeline_add_layer (timeline, layer); getClips (reader, layer, GES_TRACK_TYPE_UNKNOWN, absolute_paths); json_reader_end_element (reader); } json_reader_end_member (reader); ges_timeline_commit (timeline); const gchar *xges_path = g_strconcat ("file://", filename, ".xges", NULL); ges_timeline_save_xges (timeline, xges_path); //free(xges_path); // formats GESRendererProfile 
res = { width, height, fps, PROFILE_AAC_H264_QUICKTIME, NULL }; if (!transparency) { g_print ("Deactivating transparency\n"); res.format = "I420"; } json_reader_read_member (reader, "formats"); for (i = 0; i < json_reader_count_elements (reader); i++) { json_reader_read_element (reader, i); const char *format = json_reader_get_string_value (reader); json_reader_end_element (reader); g_print ("format: %s\n", format); EncodingProfile prof = PROFILE_AAC_H264_QUICKTIME; if (strcmp (format, "webm") == 0) { prof = PROFILE_VORBIS_VP8_WEBM; } else if (strcmp (format, "mkv") == 0) { prof = PROFILE_VORBIS_H264_MATROSKA; } else if (strcmp (format, "mp4") == 0) { prof = PROFILE_AAC_H264_QUICKTIME; } else if (strcmp (format, "ogg") == 0) { prof = PROFILE_VORBIS_THEORA_OGG; } res.profile = prof; ges_renderer_render (timeline, name, &res, absolute_paths); } json_reader_end_member (reader); json_reader_end_member (reader); g_object_unref (reader); g_object_unref (parser); }
/* Plays a video file with its sound kept, muted, or replaced.
 * CLI: -i in-point, -d duration, -m mute, -a audiofile. */
int
main (int argc, gchar ** argv)
{
  GError *err = NULL;
  GOptionContext *ctx;
  GESPipeline *pipeline;
  GESTimeline *timeline;
  GESTrack *tracka, *trackv;
  GESLayer *layer1, *layer2;
  GESUriClip *src;
  GMainLoop *mainloop;

  gint inpoint = 0, duration = 10;
  gboolean mute = FALSE;
  gchar *audiofile = NULL;
  GOptionEntry options[] = {
    {"inpoint", 'i', 0, G_OPTION_ARG_INT, &inpoint,
        "in-point in the file (in seconds, default:0s)", "seconds"},
    {"duration", 'd', 0, G_OPTION_ARG_INT, &duration,
        "duration to use from the file (in seconds, default:10s)", "seconds"},
    {"mute", 'm', 0, G_OPTION_ARG_NONE, &mute,
        "Whether to mute the audio from the file",},
    {"audiofile", 'a', 0, G_OPTION_ARG_FILENAME, &audiofile,
        "Use this audiofile instead of the original audio from the file",
        "audiofile"},
    {NULL}
  };

  ctx = g_option_context_new
      ("- Plays an video file with sound (origin/muted/replaced)");
  g_option_context_add_main_entries (ctx, options, NULL);
  g_option_context_add_group (ctx, gst_init_get_option_group ());

  if (!g_option_context_parse (ctx, &argc, &argv, &err)) {
    g_print ("Error initializing %s\n", err->message);
    exit (1);
  }

  if (argc == 1) {
    g_print ("%s", g_option_context_get_help (ctx, TRUE, NULL));
    exit (0);
  }
  g_option_context_free (ctx);

  ges_init ();

  /* Create an Audio/Video pipeline with two layers */
  pipeline = ges_pipeline_new ();
  timeline = ges_timeline_new ();
  tracka = GES_TRACK (ges_audio_track_new ());
  trackv = GES_TRACK (ges_video_track_new ());
  layer1 = ges_layer_new ();
  layer2 = ges_layer_new ();
  g_object_set (layer2, "priority", 1, NULL);

  if (!ges_timeline_add_layer (timeline, layer1) ||
      !ges_timeline_add_layer (timeline, layer2) ||
      !ges_timeline_add_track (timeline, tracka) ||
      !ges_timeline_add_track (timeline, trackv) ||
      !ges_pipeline_set_timeline (pipeline, timeline))
    return -1;

  if (1) {
    gchar *uri = gst_filename_to_uri (argv[1], NULL);
    /* Add the main audio/video file */
    src = ges_uri_clip_new (uri);
    g_free (uri);
    /* "start" is a guint64 property; the bare int literal 0 previously
     * passed for it pushes only 32 bits onto the varargs stack, which is
     * undefined behaviour for g_object_set () on 64-bit ABIs. */
    g_object_set (src, "start", (guint64) 0, "in-point", inpoint * GST_SECOND,
        "duration", duration * GST_SECOND, "mute", mute, NULL);
    ges_layer_add_clip (layer1, GES_CLIP (src));
  }

  /* Play the pipeline */
  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);

  mainloop = g_main_loop_new (NULL, FALSE);
  /* Quit one second after the clip should have finished. */
  g_timeout_add_seconds (duration + 1, (GSourceFunc) g_main_loop_quit,
      mainloop);
  g_main_loop_run (mainloop);

  return 0;
}
/* An image sequence test: previews a multifile pattern
 * (e.g. multifile:///foo/%04d.jpg) as a 5 second video clip. */
int
main (int argc, gchar ** argv)
{
  GError *err = NULL;
  GOptionContext *ctx;
  GESPipeline *pipeline;
  GESTimeline *timeline;
  GESAsset *asset;
  GESLayer *layer;
  GMainLoop *mainloop;
  GESTrack *track;
  gint duration = 10;
  gchar *filepattern = NULL;
  GOptionEntry options[] = {
    {"duration", 'd', 0, G_OPTION_ARG_INT, &duration,
        "duration to use from the file (in seconds, default:10s)", "seconds"},
    {"pattern-url", 'u', 0, G_OPTION_ARG_FILENAME, &filepattern,
        "Pattern of the files. i.e. multifile:///foo/%04d.jpg",
        "pattern-url"},
    {NULL}
  };

  ctx = g_option_context_new ("- Plays an image sequence");
  g_option_context_add_main_entries (ctx, options, NULL);
  g_option_context_add_group (ctx, gst_init_get_option_group ());

  if (!g_option_context_parse (ctx, &argc, &argv, &err)) {
    g_print ("Error initializing %s\n", err->message);
    exit (1);
  }

  if (filepattern == NULL) {
    g_print ("%s", g_option_context_get_help (ctx, TRUE, NULL));
    exit (0);
  }
  g_option_context_free (ctx);

  gst_init (&argc, &argv);
  ges_init ();

  timeline = ges_timeline_new ();
  track = GES_TRACK (ges_video_track_new ());
  ges_timeline_add_track (timeline, track);

  layer = ges_layer_new ();
  if (!ges_timeline_add_layer (timeline, layer))
    return -1;

  /* The request used to ignore failures and hand a NULL asset to
   * ges_layer_add_asset (); report the error instead. */
  asset = GES_ASSET (ges_uri_clip_asset_request_sync (filepattern, &err));
  if (asset == NULL) {
    g_print ("Could not load pattern %s: %s\n", filepattern,
        err ? err->message : "unknown error");
    g_clear_error (&err);
    return -1;
  }

  ges_layer_add_asset (layer, asset, 0, 0, 5 * GST_SECOND,
      GES_TRACK_TYPE_VIDEO);

  pipeline = ges_pipeline_new ();
  if (!ges_pipeline_set_timeline (pipeline, timeline))
    return -1;

  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);

  mainloop = g_main_loop_new (NULL, FALSE);
  /* Stop the preview after 4 seconds. */
  g_timeout_add_seconds (4, (GSourceFunc) g_main_loop_quit, mainloop);
  g_main_loop_run (mainloop);

  return 0;
}
static gboolean load_pitivi_file_from_uri (GESFormatter * self, GESTimeline * timeline, const gchar * uri, GError ** error) { xmlDocPtr doc; GESLayer *layer; GESPitiviFormatterPrivate *priv = GES_PITIVI_FORMATTER (self)->priv; gboolean ret = TRUE; gint *prio = malloc (sizeof (gint)); *prio = 0; layer = ges_layer_new (); g_object_set (layer, "auto-transition", TRUE, NULL); g_hash_table_insert (priv->layers_table, prio, layer); g_object_set (layer, "priority", (gint32) 0, NULL); if (!ges_timeline_add_layer (timeline, layer)) { GST_ERROR ("Couldn't add layer"); return FALSE; } if (!(doc = xmlParseFile (uri))) { GST_ERROR ("The xptv file for uri %s was badly formed or did not exist", uri); return FALSE; } priv->xpathCtx = xmlXPathNewContext (doc); if (self->project) parse_metadatas (self); if (!create_tracks (self)) { GST_ERROR ("Couldn't create tracks"); return FALSE; } list_sources (self); if (!parse_clips (self)) { GST_ERROR ("Couldn't find clips markup in the xptv file"); return FALSE; } if (!parse_track_elements (self)) { GST_ERROR ("Couldn't find track objects markup in the xptv file"); return FALSE; } /* If there are no clips to load we should emit * 'project-loaded' signal. */ if (!g_hash_table_size (priv->clips_table) && GES_FORMATTER (self)->project) { ges_project_set_loaded (GES_FORMATTER (self)->project, GES_FORMATTER (self)); } else { if (!make_clips (self)) { GST_ERROR ("Couldn't deserialise the project properly"); return FALSE; } } xmlXPathFreeContext (priv->xpathCtx); xmlFreeDoc (doc); return ret; }
/* Builds GESUriClips (and their effects) for one pitivi factory-ref list,
 * placing each clip on a layer looked up — or created — by its serialized
 * priority.
 *
 * @reflist: list of track-element ids indexing priv->track_elements_table.
 * @source_table: parsed <source> properties; must contain "filename".
 *
 * The a_avail/v_avail flags remember that the previous source is still
 * waiting for its matching audio/video counterpart, so a source that never
 * gets one is marked audio-only or video-only at the end. */
static void
make_source (GESFormatter * self, GList * reflist, GHashTable * source_table)
{
  GHashTable *props_table, *effect_table;
  gchar **prio_array;
  GESLayer *layer;
  GESPitiviFormatterPrivate *priv = GES_PITIVI_FORMATTER (self)->priv;
  gchar *fac_ref = NULL, *media_type = NULL, *filename = NULL, *prio_str;
  GList *tmp = NULL, *keys, *tmp_key;
  GESUriClip *src = NULL;
  gint prio;
  gboolean a_avail = FALSE, v_avail = FALSE, video;
  GHashTable *trackelement_table = priv->track_elements_table;

  for (tmp = reflist; tmp; tmp = tmp->next) {

    /* Get the layer */
    props_table =
        g_hash_table_lookup (trackelement_table, (gchar *) tmp->data);
    prio_str = (gchar *) g_hash_table_lookup (props_table, "priority");
    /* Priority is serialized as "(int)N": split on ')' and parse N. */
    prio_array = g_strsplit (prio_str, ")", 0);
    prio = (gint) g_ascii_strtod (prio_array[1], NULL);
    g_strfreev (prio_array);

    /* If we do not have any layer with this priority, create it */
    if (!(layer = g_hash_table_lookup (priv->layers_table, &prio))) {
      layer = ges_layer_new ();
      g_object_set (layer, "auto-transition", TRUE, "priority", prio, NULL);
      ges_timeline_add_layer (self->timeline, layer);
      /* NOTE(review): g_memdup copies sizeof (guint64) bytes out of a gint
       * — this reads past `prio`; sizeof (gint) looks intended. Confirm. */
      g_hash_table_insert (priv->layers_table,
          g_memdup (&prio, sizeof (guint64)), layer);
    }

    fac_ref = (gchar *) g_hash_table_lookup (props_table, "fac_ref");
    media_type = (gchar *) g_hash_table_lookup (props_table, "media_type");
    if (!g_strcmp0 (media_type, "pitivi.stream.VideoStream"))
      video = TRUE;
    else
      video = FALSE;

    /* FIXME I am sure we could reimplement this whole part
     * in a simpler way */

    /* Refs that are not "effect" describe a plain source stream. */
    if (g_strcmp0 (fac_ref, (gchar *) "effect")) {
      /* FIXME this is a hack to get a ref to the formatter when receiving
       * child-added */
      g_hash_table_insert (props_table, (gchar *) "current-formatter", self);

      if (a_avail && (!video)) {
        /* The pending source just got its audio stream: it is complete. */
        a_avail = FALSE;
      } else if (v_avail && (video)) {
        /* The pending source just got its video stream: it is complete. */
        v_avail = FALSE;
      } else {
        /* If we only have audio or only video in the previous source,
         * set it has such */
        if (a_avail) {
          ges_clip_set_supported_formats (GES_CLIP (src),
              GES_TRACK_TYPE_VIDEO);
        } else if (v_avail) {
          ges_clip_set_supported_formats (GES_CLIP (src),
              GES_TRACK_TYPE_AUDIO);
        }

        filename = (gchar *) g_hash_table_lookup (source_table, "filename");
        src = ges_uri_clip_new (filename);

        /* Remember which stream type we are still waiting for. */
        if (!video) {
          v_avail = TRUE;
          a_avail = FALSE;
        } else {
          a_avail = TRUE;
          v_avail = FALSE;
        }

        set_properties (G_OBJECT (src), props_table);
        ges_layer_add_clip (layer, GES_CLIP (src));

        g_signal_connect (src, "child-added",
            G_CALLBACK (track_element_added_cb), props_table);

        priv->sources_to_load = g_list_prepend (priv->sources_to_load, src);
      }
    } else {
      /* "effect" refs attach a GESEffect to the most recent source clip. */
      GESEffect *effect;
      gchar *active = (gchar *) g_hash_table_lookup (props_table, "active");

      effect = ges_effect_new ((gchar *)
          g_hash_table_lookup (props_table, (gchar *) "effect_name"));
      ges_track_element_set_track_type (GES_TRACK_ELEMENT (effect),
          (video ? GES_TRACK_TYPE_VIDEO : GES_TRACK_TYPE_AUDIO));
      effect_table =
          g_hash_table_lookup (props_table, (gchar *) "effect_props");

      ges_container_add (GES_CONTAINER (src), GES_TIMELINE_ELEMENT (effect));

      if (!g_strcmp0 (active, (gchar *) "(bool)False"))
        ges_track_element_set_active (GES_TRACK_ELEMENT (effect), FALSE);

      /* Set effect properties */
      keys = g_hash_table_get_keys (effect_table);
      for (tmp_key = keys; tmp_key; tmp_key = tmp_key->next) {
        GstStructure *structure;
        const GValue *value;
        GParamSpec *spec;
        GstCaps *caps;
        gchar *prop_val;

        prop_val = (gchar *) g_hash_table_lookup (effect_table,
            (gchar *) tmp_key->data);

        if (g_strstr_len (prop_val, -1, "(GEnum)")) {
          /* Enum values are serialized as "(GEnum)N". */
          gchar **val = g_strsplit (prop_val, ")", 2);

          ges_track_element_set_child_properties (GES_TRACK_ELEMENT (effect),
              (gchar *) tmp_key->data, atoi (val[1]), NULL);
          g_strfreev (val);
        } else if (ges_track_element_lookup_child (GES_TRACK_ELEMENT (effect),
                /* NOTE(review): this passes the outer loop's `tmp->data`
                 * (a track-element id) where the rest of this loop uses
                 * `tmp_key->data` (the property name) — looks like it
                 * should be tmp_key->data; confirm against upstream. */
                (gchar *) tmp->data, NULL, &spec)) {
          /* Round-trip the serialized value through a GstStructure to
           * obtain a GValue of the pspec's type. */
          gchar *caps_str = g_strdup_printf ("structure1, property1=%s;",
              prop_val);

          caps = gst_caps_from_string (caps_str);
          g_free (caps_str);
          structure = gst_caps_get_structure (caps, 0);
          value = gst_structure_get_value (structure, "property1");
          ges_track_element_set_child_property_by_pspec (GES_TRACK_ELEMENT
              (effect), spec, (GValue *) value);
          gst_caps_unref (caps);
        }
      }
    }
  }

  /* Mark the final source if it ended up with only one stream type. */
  if (a_avail) {
    ges_clip_set_supported_formats (GES_CLIP (src), GES_TRACK_TYPE_VIDEO);
  } else if (v_avail) {
    ges_clip_set_supported_formats (GES_CLIP (src), GES_TRACK_TYPE_AUDIO);
  }
}
/* Plays each audio file given on the command line for one second, back to
 * back, on a single-layer audio timeline. */
int
main (int argc, gchar ** argv)
{
  GESPipeline *pipeline;
  GESTimeline *timeline;
  GESTrack *audio_track;
  GESLayer *layer;
  GMainLoop *loop;
  GstClockTime position = 0;
  guint idx;

  if (argc < 2) {
    g_print ("Usage: %s <list of audio files>\n", argv[0]);
    return -1;
  }

  /* Bring up GStreamer (parses env vars and command line) and GES. */
  gst_init (&argc, &argv);
  ges_init ();

  /* One timeline, one audio track, one layer of clips. */
  timeline = ges_timeline_new ();
  audio_track = GES_TRACK (ges_audio_track_new ());
  layer = ges_layer_new ();

  if (!ges_timeline_add_layer (timeline, layer))
    return -1;
  if (!ges_timeline_add_track (timeline, audio_track))
    return -1;

  /* Queue every file as a one-second clip, each starting where the
   * previous one ends. */
  for (idx = 1; idx < argc; idx++, position += GST_SECOND) {
    gchar *uri = gst_filename_to_uri (argv[idx], NULL);
    GESUriClip *clip = ges_uri_clip_new (uri);

    g_assert (clip);
    g_free (uri);

    g_object_set (clip, "start", position, "duration", GST_SECOND, NULL);
    ges_layer_add_clip (layer, (GESClip *) clip);
  }

  /* Wrap the timeline in a pipeline so it can be played. */
  pipeline = ges_pipeline_new ();
  if (!ges_pipeline_add_timeline (pipeline, timeline))
    return -1;

  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);

  /* GES requires a running GMainLoop; quit after one second per clip. */
  loop = g_main_loop_new (NULL, FALSE);
  g_timeout_add_seconds (argc - 1, (GSourceFunc) g_main_loop_quit, loop);
  g_main_loop_run (loop);

  return 0;
}