/* Build a two-clip timeline where each clip carries a video effect
 * (agingtv on the first, rippletv on the second).  Returns the
 * committed timeline; ownership passes to the caller. */
GESTimeline *
effectTL (void)
{
  GESTimeline *tl = ges_timeline_new_audio_video ();
  GESLayer *base_layer = ges_layer_new ();

  ges_timeline_add_layer (tl, base_layer);

  GESClip *first =
      ges_clip_unknown_from_rel_path ("sd/trailer_400p.ogg", base_layer, 0, 0, 5);
  GESClip *second =
      ges_clip_unknown_from_rel_path ("sd/sintel_trailer-480p.ogv", base_layer, 5, 5, 5);

  GESEffect *aging = ges_effect_new ("agingtv");
  ges_container_add (GES_CONTAINER (first), GES_TIMELINE_ELEMENT (aging));

  /* some cool Frei0r plugins to try instead:
   * "frei0r-filter-pixeliz0r", "frei0r-filter-flippo", "frei0r-filter-twolay0r" */
  GESEffect *ripple = ges_effect_new ("rippletv");
  ges_container_add (GES_CONTAINER (second), GES_TIMELINE_ELEMENT (ripple));

  ges_timeline_commit (tl);
  return tl;
}
GESTimeline * musicTL (void) { GESTimeline *timeline; timeline = ges_timeline_new_audio_video (); GESLayer *layer = ges_layer_new (); GESLayer *audiolayer1 = ges_layer_new (); GESLayer *audiolayer2 = ges_layer_new (); ges_timeline_add_layer (timeline, layer); ges_timeline_add_layer (timeline, audiolayer1); ges_timeline_add_layer (timeline, audiolayer2); g_object_set (layer, "priority", 0, NULL); g_object_set (audiolayer1, "priority", 1, NULL); g_object_set (audiolayer2, "priority", 2, NULL); ges_clip_from_rel_path ("hd/fluidsimulation.mp4", layer, 0, 20, 10, GES_TRACK_TYPE_VIDEO); ges_clip_from_rel_path ("audio/prof.ogg", audiolayer1, 0, 0, 10, GES_TRACK_TYPE_AUDIO); ges_clip_from_rel_path ("audio/vask.wav", audiolayer2, 2, 0, 7, GES_TRACK_TYPE_AUDIO); ges_timeline_commit (timeline); return timeline; }
GESTimeline * testPatternTL (void) { GESTimeline *timeline; GESLayer *layer; GESTestClip *srca, *srcb; timeline = ges_timeline_new_audio_video (); layer = ges_layer_new (); g_object_set (layer, "auto-transition", TRUE, NULL); ges_timeline_add_layer (timeline, layer); srca = ges_test_clip_new (); srcb = ges_test_clip_new (); g_object_set (srca, "vpattern", GES_VIDEO_TEST_PATTERN_SMPTE, "duration", 3 * GST_SECOND, "start", 0, NULL); g_object_set (srcb, "vpattern", GES_VIDEO_TEST_PATTERN_CIRCULAR, "duration", 3 * GST_SECOND, "start", 2 * GST_SECOND, NULL); ges_test_clip_set_frequency (srcb, 800); ges_layer_add_clip (layer, GES_CLIP (srca)); ges_layer_add_clip (layer, GES_CLIP (srcb)); ges_timeline_commit (timeline); return timeline; }
int main (int argc, char **argv) { GESTimeline *timeline; GESLayer *layer; GError **error = NULL; GESAsset *asset; const gchar *url = "file:///home/bmonkey/workspace/ges/ges-renderer/data/sd/sintel_trailer-480p.mp4"; const gchar *exportURL = "file:///home/bmonkey/workspace/ges/ges-renderer/transition.mp4"; gst_init (&argc, &argv); ges_init (); timeline = ges_timeline_new_audio_video (); layer = ges_layer_new (); g_object_set (layer, "auto-transition", TRUE, NULL); ges_timeline_add_layer (timeline, layer); asset = GES_ASSET (ges_uri_clip_asset_request_sync (url, error)); ges_layer_add_asset (layer, asset, 0 * GST_SECOND, 0 * GST_SECOND, 10 * GST_SECOND, GES_TRACK_TYPE_VIDEO); ges_layer_add_asset (layer, asset, 5 * GST_SECOND, 20 * GST_SECOND, 10 * GST_SECOND, GES_TRACK_TYPE_VIDEO); ges_timeline_commit (timeline); duration = ges_timeline_get_duration (timeline); pipeline = ges_pipeline_new (); ges_pipeline_set_timeline (pipeline, timeline); GESRendererProfile pal = { 720, 576, 25, PROFILE_AAC_H264_QUICKTIME }; GstCaps *settings = gst_caps_from_renderer_profile (&pal); GstEncodingProfile *profile = profile_get_encoding_profile (settings); ges_pipeline_set_render_settings (pipeline, exportURL, profile); ges_pipeline_set_mode (pipeline, GES_PIPELINE_MODE_RENDER); GMainLoop *mainloop; mainloop = g_main_loop_new (NULL, FALSE); GstBus *bus; bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline)); g_signal_connect (bus, "message", (GCallback) bus_message_cb, mainloop); g_timeout_add (100, (GSourceFunc) ges_renderer_print_progress, NULL); gst_bus_add_signal_watch (bus); gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING); g_main_loop_run (mainloop); g_main_loop_unref (mainloop); return 0; }
int main (int argc, char **argv) { GMainLoop *mainloop = NULL; GESTimeline *timeline; GESLayer *layer = NULL; GstBus *bus = NULL; guint i; if (argc < 3) { g_print ("Usage: %s <output uri> <list of files>\n", argv[0]); return -1; } gst_init (&argc, &argv); ges_init (); timeline = ges_timeline_new_audio_video (); layer = (GESLayer *) ges_simple_layer_new (); if (!ges_timeline_add_layer (timeline, layer)) return -1; output_uri = argv[1]; assetsCount = argc - 2; for (i = 2; i < argc; i++) { ges_asset_request_async (GES_TYPE_URI_CLIP, argv[i], NULL, (GAsyncReadyCallback) asset_loaded_cb, mainloop); } /* In order to view our timeline, let's grab a convenience pipeline to put * our timeline in. */ pipeline = ges_pipeline_new (); /* Add the timeline to that pipeline */ if (!ges_pipeline_add_timeline (pipeline, timeline)) return -1; mainloop = g_main_loop_new (NULL, FALSE); bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline)); gst_bus_add_signal_watch (bus); g_signal_connect (bus, "message", G_CALLBACK (bus_message_cb), mainloop); g_main_loop_run (mainloop); return 0; }
/* Two overlapping clips on one auto-transition layer: the 3 s overlap
 * (seconds 7-10) becomes a crossfade.  Returns the committed timeline;
 * ownership passes to the caller. */
GESTimeline *
transitionTL (void)
{
  GESTimeline *tl = ges_timeline_new_audio_video ();
  GESLayer *fade_layer = ges_layer_new ();

  g_object_set (fade_layer, "auto-transition", TRUE, NULL);
  ges_timeline_add_layer (tl, fade_layer);

  ges_clip_unknown_from_rel_path ("sd/Mandelbox.mp4", fade_layer, 0, 0, 10);
  ges_clip_unknown_from_rel_path ("sd/trailer_400p.ogg", fade_layer, 7, 5, 10);

  ges_timeline_commit (tl);
  return tl;
}
/* Editor model constructor: registers the QML role names, builds the GES
 * timeline + simple layer + pipeline, installs a bus watch, works around a
 * gst-dsp caps issue, and sets up the preview video sink.
 * NOTE(review): uses the pre-1.0 GES API (GESTimelineLayer,
 * ges_timeline_pipeline_*, GstXOverlay) — this code targets GStreamer 0.10. */
QDeclarativeVideoEditor::QDeclarativeVideoEditor(QObject *parent) :
    QAbstractListModel(parent), m_position(0), m_positionTimer(this),
    m_rendering(false), m_size(0), m_width(0), m_height(0), m_fpsn(0), m_fpsd(0)
{
    // Role ids exposed to QML delegates for each clip in the model.
    QHash<int, QByteArray> roles;
    roles.insert( 33 , "uri" );
    roles.insert( 34 , "fileName" );
    roles.insert( 35 , "inPoint" );
    roles.insert( 36 , "duration" );
    setRoleNames(roles);

    connect(&m_positionTimer, SIGNAL(timeout()), SLOT(updatePosition()));

    m_timeline = ges_timeline_new_audio_video();
    m_timelineLayer = (GESTimelineLayer*) ges_simple_timeline_layer_new();
    ges_timeline_add_layer(m_timeline, m_timelineLayer);
    m_pipeline = ges_timeline_pipeline_new();

    // Route bus messages to bus_call with this object as user data; drop the
    // extra bus reference returned by gst_pipeline_get_bus().
    GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (m_pipeline));
    gst_bus_add_watch (bus, bus_call, this);
    gst_object_unref (bus);

    /*
     * gst-dsp encoders seems to not proxy downstream caps correctly, this can make
     * GES fail to render some projects. We override the default getcaps on our own
     */
    g_signal_connect(m_pipeline, "element-added", (GCallback) gstcapstricks_pipeline_element_added, NULL);

    ges_timeline_pipeline_add_timeline (m_pipeline, m_timeline);

    // OMAP XV sink for preview; render rectangle is 512x288 offset x=171
    // (presumably positioning within the device screen — confirm).
    m_vsink = gst_element_factory_make ("omapxvsink", "previewvsink");
    ges_timeline_pipeline_preview_set_video_sink (m_pipeline, m_vsink);
    gst_x_overlay_set_render_rectangle (GST_X_OVERLAY (m_vsink), 171, 0, 512, 288);

    ges_timeline_pipeline_set_mode (m_pipeline, TIMELINE_MODE_PREVIEW);
    // PAUSED pre-rolls the preview without starting playback.
    gst_element_set_state ((GstElement*) m_pipeline, GST_STATE_PAUSED);

    m_duration = GST_CLOCK_TIME_NONE;
    m_progress = 0.0;
}
/* One-minute reel: four 15 s clips laid back to back on a single layer.
 * Returns the committed timeline; ownership passes to the caller. */
GESTimeline *
minuteTL (void)
{
  GESTimeline *tl = ges_timeline_new_audio_video ();
  GESLayer *reel = ges_layer_new ();

  ges_timeline_add_layer (tl, reel);

  /* Arguments after the layer are presumably start, in-point and duration
   * in seconds — confirm against ges_clip_unknown_from_rel_path(). */
  ges_clip_unknown_from_rel_path
      ("sd/Black Ink and Water Test - A Place in Time Song.mp4", reel, 0, 0, 15);
  ges_clip_unknown_from_rel_path ("sd/trailer_400p.ogg", reel, 15, 2, 15);
  ges_clip_unknown_from_rel_path ("sd/sintel_trailer-480p.mp4", reel, 30, 4, 15);
  ges_clip_unknown_from_rel_path ("hd/fluidsimulation.mp4", reel, 45, 0, 15);

  ges_timeline_commit (tl);
  return tl;
}
/* Model constructor: builds the GES timeline with one simple layer, hooks
 * the layer's add/move/remove signals to the model callbacks, and registers
 * the role names exposed to QML.  NOTE(review): pre-1.0 GES API. */
Timeline::Timeline(QObject *parent) : QAbstractListModel(parent)
{
    timeline = ges_timeline_new_audio_video();
    layer = ges_simple_timeline_layer_new();
    ges_timeline_add_layer(timeline, GES_TIMELINE_LAYER(layer));

    // Keep the model in sync with the layer's contents.
    g_signal_connect(G_OBJECT(layer), "object-added",
                     G_CALLBACK(layer_object_added_cb), this);
    g_signal_connect(G_OBJECT(layer), "object-moved",
                     G_CALLBACK(layer_object_moved_cb), this);
    g_signal_connect(G_OBJECT(layer), "object-removed",
                     G_CALLBACK(layer_object_removed_cb), this);

    // Role ids exposed to QML delegates.
    QHash<int, QByteArray> roles;
    roles.insert(thumb_uri_role, "thumb_uri");
    roles.insert(media_uri_role, "media_uri");
    roles.insert(inpoint_role, "in_point");
    roles.insert(outpoint_role, "out_point");
    roles.insert(duration_role, "duration");
    roles.insert(duration_only_role, "duration_only");
    setRoleNames(roles);

    row_count = 0;
}
/* A simple timeline with 3 audio/video sources */ int main (int argc, gchar ** argv) { GESAsset *src_asset; GESTimelinePipeline *pipeline; GESTimeline *timeline; GESClip *source; GESTimelineLayer *layer; GMainLoop *mainloop; /* Initialize GStreamer (this will parse environment variables and commandline * arguments. */ gst_init (&argc, &argv); /* Initialize the GStreamer Editing Services */ ges_init (); /* Setup of a A/V timeline */ /* This is our main GESTimeline */ timeline = ges_timeline_new_audio_video (); /* We are only going to be doing one layer of clips */ layer = ges_timeline_layer_new (); /* Add the tracks and the layer to the timeline */ if (!ges_timeline_add_layer (timeline, layer)) return -1; /* We create a simple asset able to extract GESTestClip */ src_asset = ges_asset_request (GES_TYPE_TEST_CLIP, NULL, NULL); /* Add sources to our layer */ ges_timeline_layer_add_asset (layer, src_asset, 0, 0, GST_SECOND, 1, GES_TRACK_TYPE_UNKNOWN); source = ges_timeline_layer_add_asset (layer, src_asset, GST_SECOND, 0, GST_SECOND, 1, GES_TRACK_TYPE_UNKNOWN); g_object_set (source, "freq", 480.0, "vpattern", 2, NULL); ges_timeline_layer_add_asset (layer, src_asset, 2 * GST_SECOND, 0, GST_SECOND, 1, GES_TRACK_TYPE_UNKNOWN); /* In order to view our timeline, let's grab a convenience pipeline to put * our timeline in. */ pipeline = ges_timeline_pipeline_new (); /* Add the timeline to that pipeline */ if (!ges_timeline_pipeline_add_timeline (pipeline, timeline)) return -1; /* The following is standard usage of a GStreamer pipeline (note how you haven't * had to care about GStreamer so far ?). * * We set the pipeline to playing ... */ gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING); /* .. and we start a GMainLoop. GES **REQUIRES** a GMainLoop to be running in * order to function properly ! 
*/ mainloop = g_main_loop_new (NULL, FALSE); /* Simple code to have the mainloop shutdown after 4s */ g_timeout_add_seconds (4, (GSourceFunc) g_main_loop_quit, mainloop); g_main_loop_run (mainloop); return 0; }
void render_json (const char *filename) { JsonParser *parser; JsonNode *root; GError *error; parser = json_parser_new (); error = NULL; json_parser_load_from_file (parser, filename, &error); if (error) { g_print ("Parsing error `%s':\n %s\n", filename, error->message); g_error_free (error); g_object_unref (parser); exit (0); } root = json_parser_get_root (parser); JsonReader *reader = json_reader_new (root); GESTimeline *timeline; json_reader_read_member (reader, "composition"); // comp strings const char *name = getString (reader, "name"); //const char *src_dir = getString (reader, "src-dir"); //g_print ("Source Directory: %s\nName: %s\n", src_dir, name); // comp ints int width = getInt (reader, "width"); int height = getInt (reader, "height"); int fps = getInt (reader, "fps"); gboolean transparency = TRUE; if (is_in_members (reader, "transparency")) { transparency = getBool (reader, "transparency"); } gboolean absolute_paths = FALSE; if (is_in_members (reader, "absolute_paths")) { absolute_paths = getBool (reader, "absolute_paths"); } g_print ("Resolution: %dx%d, FPS: %d\n", width, height, fps); timeline = ges_timeline_new_audio_video (); int i; json_reader_read_member (reader, "layers"); for (i = 0; i < json_reader_count_elements (reader); i++) { json_reader_read_element (reader, i); GESLayer *layer = ges_layer_new (); g_object_set (layer, "priority", i, NULL); if (is_in_members (reader, "autotransition")) { gboolean autotransition = getBool (reader, "autotransition"); if (autotransition) g_print ("Auto Transitions on.\n"); g_object_set (layer, "auto-transition", autotransition, NULL); } ges_timeline_add_layer (timeline, layer); getClips (reader, layer, GES_TRACK_TYPE_UNKNOWN, absolute_paths); json_reader_end_element (reader); } json_reader_end_member (reader); ges_timeline_commit (timeline); const gchar *xges_path = g_strconcat ("file://", filename, ".xges", NULL); ges_timeline_save_xges (timeline, xges_path); //free(xges_path); // formats GESRendererProfile 
res = { width, height, fps, PROFILE_AAC_H264_QUICKTIME, NULL }; if (!transparency) { g_print ("Deactivating transparency\n"); res.format = "I420"; } json_reader_read_member (reader, "formats"); for (i = 0; i < json_reader_count_elements (reader); i++) { json_reader_read_element (reader, i); const char *format = json_reader_get_string_value (reader); json_reader_end_element (reader); g_print ("format: %s\n", format); EncodingProfile prof = PROFILE_AAC_H264_QUICKTIME; if (strcmp (format, "webm") == 0) { prof = PROFILE_VORBIS_VP8_WEBM; } else if (strcmp (format, "mkv") == 0) { prof = PROFILE_VORBIS_H264_MATROSKA; } else if (strcmp (format, "mp4") == 0) { prof = PROFILE_AAC_H264_QUICKTIME; } else if (strcmp (format, "ogg") == 0) { prof = PROFILE_VORBIS_THEORA_OGG; } res.profile = prof; ges_renderer_render (timeline, name, &res, absolute_paths); } json_reader_end_member (reader); json_reader_end_member (reader); g_object_unref (reader); g_object_unref (parser); }