Code Example #1
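This constructor comes from a Qt/QML video editor. It exposes the edit list as a QAbstractListModel with uri, fileName, inPoint and duration roles, then builds the GES side: an audio/video timeline with a single GESSimpleTimelineLayer inside a GESTimelinePipeline, a bus watch, a caps workaround for gst-dsp encoders, and an omapxvsink preview sink, before the pipeline is set to PAUSED.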
QDeclarativeVideoEditor::QDeclarativeVideoEditor(QObject *parent) :
    QAbstractListModel(parent), m_position(0), m_positionTimer(this), m_rendering(false), m_size(0),
    m_width(0), m_height(0), m_fpsn(0), m_fpsd(0)
{
    QHash<int, QByteArray> roles;
    roles.insert( 33 , "uri" );
    roles.insert( 34 , "fileName" );
    roles.insert( 35 , "inPoint" );
    roles.insert( 36 , "duration" );
    setRoleNames(roles);

    connect(&m_positionTimer, SIGNAL(timeout()), SLOT(updatePosition()));

    m_timeline = ges_timeline_new_audio_video();
    m_timelineLayer = (GESTimelineLayer*) ges_simple_timeline_layer_new();
    ges_timeline_add_layer(m_timeline, m_timelineLayer);
    m_pipeline = ges_timeline_pipeline_new();

    GstBus *bus = gst_pipeline_get_bus (GST_PIPELINE (m_pipeline));
    gst_bus_add_watch (bus, bus_call, this);
    gst_object_unref (bus);

    /*
     * gst-dsp encoders seem not to proxy downstream caps correctly, which can
     * make GES fail to render some projects, so we override the default
     * getcaps with our own.
     */
    g_signal_connect(m_pipeline, "element-added", (GCallback) gstcapstricks_pipeline_element_added, NULL);

    ges_timeline_pipeline_add_timeline (m_pipeline, m_timeline);

    m_vsink = gst_element_factory_make ("omapxvsink", "previewvsink");
    ges_timeline_pipeline_preview_set_video_sink (m_pipeline, m_vsink);
    gst_x_overlay_set_render_rectangle (GST_X_OVERLAY (m_vsink),
                                        171, 0,
                                        512, 288);

    ges_timeline_pipeline_set_mode (m_pipeline, TIMELINE_MODE_PREVIEW);
    gst_element_set_state ((GstElement*) m_pipeline, GST_STATE_PAUSED);
    m_duration = GST_CLOCK_TIME_NONE;
    m_progress = 0.0;
}
Code Example #2
File: thumbnails.c Project: matasbbb/GES
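create_timeline() assembles a self-contained preview pipeline: a new GESTimeline receives a raw audio track, a raw video track and a GESSimpleTimelineLayer, a 10-second SNOW test source is appended to the layer, and the timeline is wrapped in a GESTimelinePipeline which is returned (or NULL if any step fails).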
static GESTimelinePipeline *
create_timeline (void)
{
  GESTimelinePipeline *pipeline;
  GESTimelineLayer *layer;
  GESTrack *tracka, *trackv;
  GESTimeline *timeline;
  GESTimelineObject *src;

  timeline = ges_timeline_new ();

  tracka = ges_track_audio_raw_new ();
  trackv = ges_track_video_raw_new ();

  layer = (GESTimelineLayer *) ges_simple_timeline_layer_new ();

  /* Add the tracks and the layer to the timeline */
  if (!ges_timeline_add_layer (timeline, layer) ||
      !ges_timeline_add_track (timeline, tracka) ||
      !ges_timeline_add_track (timeline, trackv))
    return NULL;

  /* Add the main audio/video file */
  src = GES_TIMELINE_OBJECT (ges_timeline_test_source_new ());
  g_object_set (src,
      "vpattern", GES_VIDEO_TEST_PATTERN_SNOW,
      "duration", 10 * GST_SECOND, NULL);

  ges_simple_timeline_layer_add_object ((GESSimpleTimelineLayer *) layer,
      GES_TIMELINE_OBJECT (src), 0);

  pipeline = ges_timeline_pipeline_new ();

  if (!ges_timeline_pipeline_add_timeline (pipeline, timeline))
    return NULL;

  return pipeline;
}
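create_timeline() only builds and returns the pipeline; the snippet does not show how it is driven. The following is a minimal sketch, not taken from thumbnails.c, of how the returned pipeline could be previewed if placed in the same file: it mirrors the GMainLoop pattern of Code Example #4, and the 10-second timeout is an arbitrary choice matching the test source's duration.

#include <ges/ges.h>

/* Minimal driver sketch (not part of thumbnails.c); assumes it sits in the
 * same file as the static create_timeline() above */
int
main (int argc, gchar ** argv)
{
  GESTimelinePipeline *pipeline;
  GMainLoop *mainloop;

  /* Standard GStreamer/GES initialization, as in the other examples */
  gst_init (&argc, &argv);
  ges_init ();

  if (!(pipeline = create_timeline ()))
    return -1;

  /* Preview the timeline and quit once the 10 s test source has played */
  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);

  mainloop = g_main_loop_new (NULL, FALSE);
  g_timeout_add_seconds (10, (GSourceFunc) g_main_loop_quit, mainloop);
  g_main_loop_run (mainloop);

  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_NULL);
  gst_object_unref (pipeline);

  return 0;
}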
Code Example #3
File: timeline.cpp Project: emdash/QT-GES-Demo
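This Timeline model is the QML-facing counterpart of the layer: it creates an audio/video timeline with a GESSimpleTimelineLayer, connects to the layer's object-added, object-moved and object-removed signals so that changes on the GES side can be pushed back into the QAbstractListModel, and exposes thumbnail URI, media URI, in/out points and durations as role names.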
Timeline::
Timeline(QObject *parent) : QAbstractListModel(parent)
{
  timeline = ges_timeline_new_audio_video();
  layer = ges_simple_timeline_layer_new();
  ges_timeline_add_layer(timeline, GES_TIMELINE_LAYER(layer));

  g_signal_connect(G_OBJECT(layer), "object-added",
    G_CALLBACK(layer_object_added_cb), this);
  g_signal_connect(G_OBJECT(layer), "object-moved",
    G_CALLBACK(layer_object_moved_cb), this);
  g_signal_connect(G_OBJECT(layer), "object-removed",
    G_CALLBACK(layer_object_removed_cb), this);

  QHash <int, QByteArray> rolenames;
  rolenames[thumb_uri_role] = "thumb_uri";
  rolenames[media_uri_role] = "media_uri";
  rolenames[inpoint_role] = "in_point";
  rolenames[outpoint_role] = "out_point";
  rolenames[duration_role] = "duration";
  rolenames[duration_only_role] = "duration_only";
  setRoleNames(rolenames);
  row_count = 0;
}
Code Example #4
File: test3.c Project: matasbbb/GES
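test3.c is a complete, minimal GES program: it initializes GStreamer and GES, builds an audio-only timeline with a single GESSimpleTimelineLayer, appends every file given on the command line as a one-second GESTimelineFileSource, wraps the timeline in a GESTimelinePipeline and plays it under a GMainLoop.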
int
main (int argc, gchar ** argv)
{
  GESTimelinePipeline *pipeline;
  GESTimeline *timeline;
  GESTrack *tracka;
  GESTimelineLayer *layer;
  GMainLoop *mainloop;
  guint i;

  if (argc < 2) {
    g_print ("Usage: %s <list of audio files>\n", argv[0]);
    return -1;
  }

  /* Initialize GStreamer (this will parse environment variables and
   * command-line arguments). */
  gst_init (&argc, &argv);

  /* Initialize the GStreamer Editing Services */
  ges_init ();

  /* Setup of an audio timeline */

  /* This is our main GESTimeline */
  timeline = ges_timeline_new ();

  tracka = ges_track_audio_raw_new ();

  /* We are only going to be doing one layer of timeline objects */
  layer = (GESTimelineLayer *) ges_simple_timeline_layer_new ();

  /* Add the tracks and the layer to the timeline */
  if (!ges_timeline_add_layer (timeline, layer))
    return -1;
  if (!ges_timeline_add_track (timeline, tracka))
    return -1;

  /* Here we've finished initializing our timeline; we're ready to start
   * using it... by working solely with the layer! */

  for (i = 1; i < argc; i++) {
    gchar *uri = gst_filename_to_uri (argv[i], NULL);
    GESTimelineFileSource *src = ges_timeline_filesource_new (uri);

    g_assert (src);
    g_free (uri);

    g_object_set (src, "duration", GST_SECOND, NULL);
    /* Since we're using a GESSimpleTimelineLayer, objects will be automatically
     * appended to the end of the layer */
    ges_timeline_layer_add_object (layer, (GESTimelineObject *) src);
  }

  /* In order to view our timeline, let's grab a convenience pipeline to put
   * our timeline in. */
  pipeline = ges_timeline_pipeline_new ();

  /* Add the timeline to that pipeline */
  if (!ges_timeline_pipeline_add_timeline (pipeline, timeline))
    return -1;

  /* The following is standard usage of a GStreamer pipeline (note how you
   * haven't had to care about GStreamer so far?).
   *
   * We set the pipeline to playing ... */
  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);

  /* ... and we start a GMainLoop. GES **REQUIRES** a GMainLoop to be running
   * in order to function properly! */
  mainloop = g_main_loop_new (NULL, FALSE);

  /* Simple code to have the mainloop shut down after (argc - 1) seconds,
   * i.e. one second per clip added above */
  g_timeout_add_seconds (argc - 1, (GSourceFunc) g_main_loop_quit, mainloop);
  g_main_loop_run (mainloop);

  return 0;
}
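Since test3.c is the only self-contained program in this collection, it can be compiled directly against the GStreamer 0.10 and GES development headers and run with a list of audio files, as its usage message indicates. Note that all of these listings use the pre-1.0 GES API; in GES 1.x several of the types seen here were renamed (for instance GESTimelinePipeline became GESPipeline and GESTimelineLayer became GESLayer).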
Code Example #5
File: ges-launch.c Project: matasbbb/GES
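This is the timeline-construction core of the ges-launch example tool. It creates a timeline with optional raw audio and video tracks, then consumes the remaining command-line arguments in groups of three: either +pattern <nick> <duration>, +transition <nick> <duration>, +title <text> <duration>, or <uri-or-path> <in-point> <duration> for a regular clip. Each resulting GESTimelineObject is appended to a single GESSimpleTimelineLayer; on any error the timeline is unreffed and NULL is returned.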
static GESTimeline *
create_timeline (int nbargs, gchar ** argv, gchar * audio, gchar * video)
{
  GESTimelineLayer *layer;
  GESTrack *tracka = NULL, *trackv = NULL;
  GESTimeline *timeline;
  guint i;

  timeline = ges_timeline_new ();

  if (audio)
    tracka = ges_track_audio_raw_new ();
  if (video)
    trackv = ges_track_video_raw_new ();

  /* We are only going to be doing one layer of timeline objects */
  layer = (GESTimelineLayer *) ges_simple_timeline_layer_new ();

  /* Add the tracks and the layer to the timeline */
  if (!ges_timeline_add_layer (timeline, layer) ||
      !(!audio || ges_timeline_add_track (timeline, tracka)) ||
      !(!video || ges_timeline_add_track (timeline, trackv)))
    goto build_failure;

  /* Here we've finished initializing our timeline; we're ready to start
   * using it... by working solely with the layer! */

  for (i = 0; i < nbargs / 3; i++) {
    GESTimelineObject *obj;

    char *source = argv[i * 3];
    char *arg0 = argv[(i * 3) + 1];
    guint64 duration = str_to_time (argv[(i * 3) + 2]);

    if (!g_strcmp0 ("+pattern", source)) {
      obj = GES_TIMELINE_OBJECT (ges_timeline_test_source_new_for_nick (arg0));
      if (!obj) {
        g_error ("%s is an invalid pattern name!\n", arg0);
        goto build_failure;
      }

      g_object_set (G_OBJECT (obj), "duration", duration, NULL);

      g_printf ("Adding <pattern:%s> duration %" GST_TIME_FORMAT "\n", arg0,
          GST_TIME_ARGS (duration));
    }

    else if (!g_strcmp0 ("+transition", source)) {
      if (duration <= 0) {
        g_error ("durations must be greater than 0");
        goto build_failure;
      }

      obj =
          GES_TIMELINE_OBJECT (ges_timeline_standard_transition_new_for_nick
          (arg0));

      if (!obj) {
        g_error ("invalid transition type\n");
        goto build_failure;
      }

      g_object_set (G_OBJECT (obj), "duration", duration, NULL);

      g_printf ("Adding <transition:%s> duration %" GST_TIME_FORMAT "\n", arg0,
          GST_TIME_ARGS (duration));

    }

    else if (!g_strcmp0 ("+title", source)) {
      obj = GES_TIMELINE_OBJECT (ges_timeline_title_source_new ());

      g_object_set (obj, "duration", duration, "text", arg0, NULL);

      g_printf ("Adding <title:%s> duration %" GST_TIME_FORMAT "\n", arg0,
          GST_TIME_ARGS (duration));
    }

    else {
      gchar *uri;
      guint64 inpoint;

      if (!(uri = ensure_uri (source))) {
        GST_ERROR ("couldn't create uri for '%s'", source);
        goto build_failure;
      }

      inpoint = str_to_time (argv[i * 3 + 1]);
      obj = GES_TIMELINE_OBJECT (ges_timeline_filesource_new (uri));
      g_object_set (obj,
          "in-point", (guint64) inpoint, "duration", (guint64) duration, NULL);

      g_printf ("Adding clip %s inpoint:%" GST_TIME_FORMAT " duration:%"
          GST_TIME_FORMAT "\n", uri, GST_TIME_ARGS (inpoint),
          GST_TIME_ARGS (duration));

      g_free (uri);
    }

    /* Since we're using a GESSimpleTimelineLayer, objects will be automatically
     * appended to the end of the layer */
    ges_timeline_layer_add_object (layer, obj);
  }

  return timeline;

build_failure:
  {
    g_object_unref (timeline);
    return NULL;
  }
}