/* Build a GESTimelinePipeline, either by loading a saved project from
 * @load_path or by constructing a fresh timeline from the remaining
 * command-line arguments.  If @save_path is given, the timeline is saved
 * before playback (in case GES crashes or hangs later).
 *
 * Returns: (transfer full): the new pipeline, or NULL on error. */
static GESTimelinePipeline *
create_pipeline (gchar * load_path, gchar * save_path, int argc, char **argv,
    gchar * audio, gchar * video)
{
  GESTimelinePipeline *pipeline = NULL;
  GESTimeline *timeline = NULL;

  /* Timeline creation */
  if (load_path) {
    gchar *uri;

    g_printf ("Loading project from : %s\n", load_path);

    if (!(uri = ensure_uri (load_path))) {
      /* was g_error(): g_error() aborts the process, which made the
       * "goto failure" cleanup below unreachable dead code */
      g_printerr ("couldn't create uri for '%s'\n", load_path);
      goto failure;
    }
    g_printf ("reading from '%s' (arguments ignored)\n", load_path);
    if (!(timeline = ges_timeline_new_from_uri (uri))) {
      g_printerr ("failed to create timeline from file '%s'\n", load_path);
      g_free (uri);             /* was leaked on this error path */
      goto failure;
    }
    g_printf ("loaded project successfully\n");
    g_free (uri);
  } else {
    /* Normal timeline creation */
    if (!(timeline = create_timeline (argc, argv, audio, video)))
      goto failure;
  }

  /* save project if path is given. we do this now in case GES crashes or
   * hangs during playback. */
  if (save_path) {
    gchar *uri;

    if (!(uri = ensure_uri (save_path))) {
      /* fixed: the format string was missing its closing quote ("'%s") */
      g_printerr ("couldn't create uri for '%s'\n", save_path);
      goto failure;
    }
    /* check the result instead of silently ignoring a failed save */
    if (!ges_timeline_save_to_uri (timeline, uri)) {
      g_printerr ("failed to save project to '%s'\n", save_path);
      g_free (uri);
      goto failure;
    }
    g_free (uri);
  }

  /* In order to view our timeline, let's grab a convenience pipeline to put
   * our timeline in. */
  pipeline = ges_timeline_pipeline_new ();

  /* Add the timeline to that pipeline */
  if (!ges_timeline_pipeline_add_timeline (pipeline, timeline))
    goto failure;

  return pipeline;

failure:
  {
    if (timeline)
      g_object_unref (timeline);
    if (pipeline)
      g_object_unref (pipeline);
    return NULL;
  }
}
/*
 * QDeclarativeVideoEditor: a list model of timeline clips backed by a
 * GStreamer Editing Services timeline.  The constructor registers the QML
 * role names, builds the GES timeline/layer/pipeline, installs a bus watch,
 * configures the preview video sink and leaves the pipeline paused.
 */
QDeclarativeVideoEditor::QDeclarativeVideoEditor(QObject *parent) :
    QAbstractListModel(parent), m_position(0), m_positionTimer(this), m_rendering(false),
    m_size(0), m_width(0), m_height(0), m_fpsn(0), m_fpsd(0)
{
    // Role ids exposed to QML delegates (arbitrary magic numbers; they only
    // need to be unique and stable within this model).
    QHash<int, QByteArray> roles;
    roles.insert( 33 , "uri" );
    roles.insert( 34 , "fileName" );
    roles.insert( 35 , "inPoint" );
    roles.insert( 36 , "duration" );
    setRoleNames(roles);

    // Periodically polls playback position via updatePosition().
    connect(&m_positionTimer, SIGNAL(timeout()), SLOT(updatePosition()));

    // One audio+video timeline with a single "simple" layer.
    m_timeline = ges_timeline_new_audio_video();
    m_timelineLayer = (GESTimelineLayer*) ges_simple_timeline_layer_new();
    ges_timeline_add_layer(m_timeline, m_timelineLayer);
    m_pipeline = ges_timeline_pipeline_new();

    // bus_call() receives pipeline messages with `this` as user data; the bus
    // reference returned by gst_pipeline_get_bus() is released immediately.
    GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (m_pipeline));
    gst_bus_add_watch (bus, bus_call, this);
    gst_object_unref (bus);

    /*
     * gst-dsp encoders seems to not proxy downstream caps correctly, this can make
     * GES fail to render some projects. We override the default getcaps on our own
     */
    g_signal_connect(m_pipeline, "element-added", (GCallback) gstcapstricks_pipeline_element_added, NULL);

    ges_timeline_pipeline_add_timeline (m_pipeline, m_timeline);

    // Hardware XV sink — presumably targets OMAP devices (e.g. Nokia N9);
    // NOTE(review): confirm availability before running on other platforms.
    m_vsink = gst_element_factory_make ("omapxvsink", "previewvsink");
    ges_timeline_pipeline_preview_set_video_sink (m_pipeline, m_vsink);

    // Fixed 512x288 preview rectangle, offset 171px from the left; assumes a
    // specific landscape screen layout — TODO confirm against the UI design.
    gst_x_overlay_set_render_rectangle (GST_X_OVERLAY (m_vsink),
                                        171, 0, 512, 288);

    ges_timeline_pipeline_set_mode (m_pipeline, TIMELINE_MODE_PREVIEW);
    // Start paused so the preview is prerolled but not playing.
    gst_element_set_state ((GstElement*) m_pipeline, GST_STATE_PAUSED);

    m_duration = GST_CLOCK_TIME_NONE;
    m_progress = 0.0;
}
/* Assemble a preview pipeline whose timeline plays the clip at @path with a
 * text overlay drawn on top, both lasting @duration seconds.
 *
 * Returns: the new pipeline (preview-video mode), ready to be set PLAYING.
 * Exits the process if the layer cannot be added to the timeline. */
GESTimelinePipeline *
make_timeline (char *path, float duration, char *text, guint32 color,
    gdouble xpos, gdouble ypos)
{
  GESTimelinePipeline *preview;
  GESTimeline *tl;
  GESTrack *video_track, *audio_track;
  GESTimelineLayer *base_layer;
  GESTimelineObject *clip, *caption;
  guint64 duration_ns;

  /* Pipeline first, video-only preview, then hand it the timeline. */
  preview = ges_timeline_pipeline_new ();
  ges_timeline_pipeline_set_mode (preview, TIMELINE_MODE_PREVIEW_VIDEO);

  tl = ges_timeline_new ();
  ges_timeline_pipeline_add_timeline (preview, tl);

  /* Raw video track is added before the raw audio track, as before. */
  video_track = ges_track_video_raw_new ();
  ges_timeline_add_track (tl, video_track);
  audio_track = ges_track_audio_raw_new ();
  ges_timeline_add_track (tl, audio_track);

  /* Single layer at the highest priority (0). */
  base_layer = GES_TIMELINE_LAYER (ges_timeline_layer_new ());
  g_object_set (base_layer, "priority", (gint32) 0, NULL);
  if (!ges_timeline_add_layer (tl, base_layer))
    exit (-1);

  /* Convert the float duration (seconds) into GStreamer nanoseconds. */
  duration_ns = (guint64) (duration * GST_SECOND);

  clip = make_source (path, 0, duration_ns, 1);
  caption = make_overlay (text, 0, duration_ns, 0, color, xpos, ypos);

  ges_timeline_layer_add_object (base_layer, clip);
  ges_timeline_layer_add_object (base_layer, caption);

  return preview;
}
static GESTimelinePipeline * create_timeline (void) { GESTimelinePipeline *pipeline; GESTimelineLayer *layer; GESTrack *tracka, *trackv; GESTimeline *timeline; GESTimelineObject *src; timeline = ges_timeline_new (); tracka = ges_track_audio_raw_new (); trackv = ges_track_video_raw_new (); layer = (GESTimelineLayer *) ges_simple_timeline_layer_new (); /* Add the tracks and the layer to the timeline */ if (!ges_timeline_add_layer (timeline, layer) || !ges_timeline_add_track (timeline, tracka) || !ges_timeline_add_track (timeline, trackv)) return NULL; /* Add the main audio/video file */ src = GES_TIMELINE_OBJECT (ges_timeline_test_source_new ()); g_object_set (src, "vpattern", GES_VIDEO_TEST_PATTERN_SNOW, "duration", 10 * GST_SECOND, NULL); ges_simple_timeline_layer_add_object ((GESSimpleTimelineLayer *) layer, GES_TIMELINE_OBJECT (src), 0); pipeline = ges_timeline_pipeline_new (); if (!ges_timeline_pipeline_add_timeline (pipeline, timeline)) return NULL; return pipeline; }
/* Load a PiTiVi-formatted project from @uri into a fresh timeline and
 * preview it (video only) until bus_message_cb quits the main loop.
 * Blocks until playback ends. */
void
load_project (gchar * uri)
{
  GESFormatter *formatter;
  GESTimeline *timeline;
  GMainLoop *mainloop;
  GESTimelinePipeline *pipeline;
  GstBus *bus;

  formatter = GES_FORMATTER (ges_pitivi_formatter_new ());
  timeline = ges_timeline_new ();
  pipeline = ges_timeline_pipeline_new ();
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  mainloop = g_main_loop_new (NULL, FALSE);

  ges_timeline_pipeline_add_timeline (pipeline, timeline);

  /* the result was silently ignored: warn instead of previewing an empty
   * timeline without any diagnostic */
  if (!ges_formatter_load_from_uri (formatter, timeline, uri))
    g_printerr ("failed to load project from '%s'\n", uri);

  ges_timeline_pipeline_set_mode (pipeline, TIMELINE_MODE_PREVIEW_VIDEO);
  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);
  gst_bus_add_signal_watch (bus);
  g_signal_connect (bus, "message", G_CALLBACK (bus_message_cb), mainloop);
  g_main_loop_run (mainloop);

  /* cleanup the original leaked: gst_pipeline_get_bus() returns a reference
   * we own, and the formatter / pipeline / mainloop were never released */
  gst_bus_remove_signal_watch (bus);
  gst_object_unref (bus);
  g_object_unref (formatter);
  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_NULL);
  g_object_unref (pipeline);
  g_main_loop_unref (mainloop);
}
int main (int argc, gchar ** argv) { GESTimelinePipeline *pipeline; GESTimeline *timeline; GESTrack *tracka; GESTimelineLayer *layer; GMainLoop *mainloop; guint i; if (argc < 2) { g_print ("Usage: %s <list of audio files>\n", argv[0]); return -1; } /* Initialize GStreamer (this will parse environment variables and commandline * arguments. */ gst_init (&argc, &argv); /* Initialize the GStreamer Editing Services */ ges_init (); /* Setup of an audio timeline */ /* This is our main GESTimeline */ timeline = ges_timeline_new (); tracka = ges_track_audio_raw_new (); /* We are only going to be doing one layer of timeline objects */ layer = (GESTimelineLayer *) ges_simple_timeline_layer_new (); /* Add the tracks and the layer to the timeline */ if (!ges_timeline_add_layer (timeline, layer)) return -1; if (!ges_timeline_add_track (timeline, tracka)) return -1; /* Here we've finished initializing our timeline, we're * ready to start using it... by solely working with the layer ! */ for (i = 1; i < argc; i++) { gchar *uri = gst_filename_to_uri (argv[i], NULL); GESTimelineFileSource *src = ges_timeline_filesource_new (uri); g_assert (src); g_free (uri); g_object_set (src, "duration", GST_SECOND, NULL); /* Since we're using a GESSimpleTimelineLayer, objects will be automatically * appended to the end of the layer */ ges_timeline_layer_add_object (layer, (GESTimelineObject *) src); } /* In order to view our timeline, let's grab a convenience pipeline to put * our timeline in. */ pipeline = ges_timeline_pipeline_new (); /* Add the timeline to that pipeline */ if (!ges_timeline_pipeline_add_timeline (pipeline, timeline)) return -1; /* The following is standard usage of a GStreamer pipeline (note how you haven't * had to care about GStreamer so far ?). * * We set the pipeline to playing ... */ gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING); /* .. and we start a GMainLoop. GES **REQUIRES** a GMainLoop to be running in * order to function properly ! 
*/ mainloop = g_main_loop_new (NULL, FALSE); /* Simple code to have the mainloop shutdown after 4s */ g_timeout_add_seconds (argc - 1, (GSourceFunc) g_main_loop_quit, mainloop); g_main_loop_run (mainloop); return 0; }
/* A simple timeline with 3 audio/video sources */ int main (int argc, gchar ** argv) { GESAsset *src_asset; GESTimelinePipeline *pipeline; GESTimeline *timeline; GESClip *source; GESTimelineLayer *layer; GMainLoop *mainloop; /* Initialize GStreamer (this will parse environment variables and commandline * arguments. */ gst_init (&argc, &argv); /* Initialize the GStreamer Editing Services */ ges_init (); /* Setup of a A/V timeline */ /* This is our main GESTimeline */ timeline = ges_timeline_new_audio_video (); /* We are only going to be doing one layer of clips */ layer = ges_timeline_layer_new (); /* Add the tracks and the layer to the timeline */ if (!ges_timeline_add_layer (timeline, layer)) return -1; /* We create a simple asset able to extract GESTestClip */ src_asset = ges_asset_request (GES_TYPE_TEST_CLIP, NULL, NULL); /* Add sources to our layer */ ges_timeline_layer_add_asset (layer, src_asset, 0, 0, GST_SECOND, 1, GES_TRACK_TYPE_UNKNOWN); source = ges_timeline_layer_add_asset (layer, src_asset, GST_SECOND, 0, GST_SECOND, 1, GES_TRACK_TYPE_UNKNOWN); g_object_set (source, "freq", 480.0, "vpattern", 2, NULL); ges_timeline_layer_add_asset (layer, src_asset, 2 * GST_SECOND, 0, GST_SECOND, 1, GES_TRACK_TYPE_UNKNOWN); /* In order to view our timeline, let's grab a convenience pipeline to put * our timeline in. */ pipeline = ges_timeline_pipeline_new (); /* Add the timeline to that pipeline */ if (!ges_timeline_pipeline_add_timeline (pipeline, timeline)) return -1; /* The following is standard usage of a GStreamer pipeline (note how you haven't * had to care about GStreamer so far ?). * * We set the pipeline to playing ... */ gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING); /* .. and we start a GMainLoop. GES **REQUIRES** a GMainLoop to be running in * order to function properly ! 
*/ mainloop = g_main_loop_new (NULL, FALSE); /* Simple code to have the mainloop shutdown after 4s */ g_timeout_add_seconds (4, (GSourceFunc) g_main_loop_quit, mainloop); g_main_loop_run (mainloop); return 0; }