static gboolean
_load (GESFormatter * self, GESTimeline * timeline, const gchar * string,
    GError ** error)
{
  guint i;
  GList *tmp;
  GError *err;
  GESStructureParser *parser = _parse_structures (string);

  err = ges_structure_parser_get_error (parser);

  if (err) {
    if (error)
      *error = err;

    return FALSE;
  }

  g_object_set (timeline, "auto-transition", TRUE, NULL);
  if (!(ges_timeline_add_track (timeline, GES_TRACK (ges_video_track_new ()))))
    goto fail;

  if (!(ges_timeline_add_track (timeline, GES_TRACK (ges_audio_track_new ()))))
    goto fail;

  /* The timeline is now set up; walk the parsed structures and
   * dispatch each one to its matching handler. */
  for (tmp = parser->structures; tmp; tmp = tmp->next) {
    const gchar *name = gst_structure_get_name (tmp->data);
    if (g_str_has_prefix (name, "set-")) {
      EXEC (_set_child_property, tmp->data, &err);
      continue;
    }

    for (i = 0; i < G_N_ELEMENTS (timeline_parsing_options); i++) {
      if (gst_structure_has_name (tmp->data,
              timeline_parsing_options[i].long_name)
          || (strlen (name) == 1 &&
              *name == timeline_parsing_options[i].short_name)) {
        EXEC (((ActionFromStructureFunc) timeline_parsing_options[i].arg_data),
            tmp->data, &err);
      }
    }
  }

  gst_object_unref (parser);

  return TRUE;

fail:
  gst_object_unref (parser);
  if (err) {
    if (error)
      *error = err;
  }

  return FALSE;
}
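
The EXEC macro and the timeline_parsing_options table used in _load () are
defined elsewhere in the formatter and are not part of this excerpt. As a
hedged illustration only, the dispatch macro presumably does something along
these lines; the handler signature is inferred from the ActionFromStructureFunc
cast above, and the body below is an assumption, not the real implementation.

/* Hypothetical sketch: run one handler on a parsed structure and jump to
 * the fail label in _load () when it reports an error.  Note that
 * 'timeline' comes from the enclosing _load () scope. */
typedef gboolean (*ActionFromStructureFunc) (GESTimeline * timeline,
    GstStructure * structure, GError ** error);

#define EXEC(func, structure, error) G_STMT_START {                     \
  if (!((ActionFromStructureFunc) (func)) (timeline, (structure),       \
          (error))) {                                                   \
    GST_ERROR ("Could not execute %" GST_PTR_FORMAT, (structure));      \
    goto fail;                                                          \
  }                                                                     \
} G_STMT_END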
Example #2
GESTimeline *
positionTestTL (void)
{
  GESTimeline *timeline;
  GESTrack *trackv;
  GError **error = NULL;
  GESAsset *asset;
  GESClip *clip;

  timeline = ges_timeline_new ();
  trackv = GES_TRACK (ges_video_track_new ());
  ges_timeline_add_track (timeline, trackv);

  GESLayer *layer = ges_layer_new ();
  ges_timeline_add_layer (timeline, layer);

  asset =
      GES_ASSET (ges_uri_clip_asset_request_sync (ges_renderer_get_absolute_path
          ("image/wallpaper720p.jpg"), error));

  clip =
      ges_layer_add_asset (layer, asset, 0, 0, 2 * GST_SECOND,
      GES_TRACK_TYPE_VIDEO);

  GESTrackElement *elem =
      ges_clip_find_track_element (clip, trackv, G_TYPE_NONE);

  ges_track_element_set_child_properties (elem, "posx", 100, "width", 100,
      NULL);

  ges_timeline_commit (timeline);

  return timeline;
}
Example #3
/**
 * ges_audio_track_new:
 *
 * Creates a new #GESAudioTrack of type #GES_TRACK_TYPE_AUDIO and with generic
 * raw audio caps ("audio/x-raw");
 *
 * Returns: (transfer floating): A new #GESTrack
 */
GESAudioTrack *
ges_audio_track_new (void)
{
  GESAudioTrack *ret;
  GstCaps *caps = gst_caps_from_string (DEFAULT_CAPS);
  GstCaps *restriction_caps = gst_caps_from_string (DEFAULT_RESTRICTION_CAPS);

  ret = g_object_new (GES_TYPE_AUDIO_TRACK, "caps", caps,
      "track-type", GES_TRACK_TYPE_AUDIO, NULL);

  ges_track_set_create_element_for_gap_func (GES_TRACK (ret),
      create_element_for_raw_audio_gap);

  ges_track_set_restriction_caps (GES_TRACK (ret), restriction_caps);

  gst_caps_unref (caps);
  gst_caps_unref (restriction_caps);

  return ret;
}
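
DEFAULT_CAPS and DEFAULT_RESTRICTION_CAPS above are private defines from the
same source file and are not shown here. Going by the doc comment, DEFAULT_CAPS
is the generic "audio/x-raw"; the restriction caps pin down the concrete format
the track works in. The values below are only an illustrative guess, not the
library's exact defaults.

/* Illustrative sketch of the two defines used by ges_audio_track_new () */
#define DEFAULT_CAPS "audio/x-raw"
#define DEFAULT_RESTRICTION_CAPS \
  "audio/x-raw, format=(string)S32LE, channels=(int)2, rate=(int)44100"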
GESTimeline *
ges_timeline_new_audio_video (void)
{
  GESTrack *tracka, *trackv;
  GESTimeline *timeline;

  /* This is our main GESTimeline */
  timeline = ges_timeline_new ();

  tracka = GES_TRACK (ges_audio_track_new ());
  trackv = GES_TRACK (ges_video_track_new ());

  if (!ges_timeline_add_track (timeline, trackv) ||
      !ges_timeline_add_track (timeline, tracka)) {
    gst_object_unref (timeline);
    timeline = NULL;
  }

  return timeline;
}
Example #5
static gboolean
create_tracks (GESFormatter * self)
{
  GESPitiviFormatterPrivate *priv = GES_PITIVI_FORMATTER (self)->priv;
  GList *tracks = NULL;

  tracks = ges_timeline_get_tracks (self->timeline);

  GST_DEBUG ("Creating tracks, current number of tracks %d",
      g_list_length (tracks));

  if (tracks) {
    GList *tmp = NULL;
    GESTrack *track;
    for (tmp = tracks; tmp; tmp = tmp->next) {
      track = tmp->data;
      if (track->type == GES_TRACK_TYPE_AUDIO) {
        priv->tracka = track;
      } else {
        priv->trackv = track;
      }
    }
    g_list_foreach (tracks, (GFunc) gst_object_unref, NULL);
    g_list_free (tracks);
    return TRUE;
  }

  priv->tracka = GES_TRACK (ges_audio_track_new ());
  priv->trackv = GES_TRACK (ges_video_track_new ());

  if (!ges_timeline_add_track (self->timeline, priv->trackv)) {
    return FALSE;
  }

  if (!ges_timeline_add_track (self->timeline, priv->tracka)) {
    return FALSE;
  }

  return TRUE;
}
Example #6
GESTimeline *
compTL (void)
{
  GESTimeline *timeline;
  GESTrack *trackv;

  timeline = ges_timeline_new ();
  trackv = GES_TRACK (ges_video_track_new ());
  ges_timeline_add_track (timeline, trackv);

  const gchar *assets[] = { "image/vieh.png",
    "image/PNG_transparency_demonstration_1.png",
    "image/Ice_Cream.png",
    "image/Fish.png"
  };

  guint asset_count = G_N_ELEMENTS (assets);

  for (guint i = 1; i <= asset_count; i++) {
    GESLayer *layer = ges_layer_new ();
    ges_timeline_add_layer (timeline, layer);
    g_object_set (layer, "priority", i - 1, NULL);

    GESClip *vieh = ges_clip_from_rel_path (assets[i - 1], layer, 0, 0, 10,
        GES_TRACK_TYPE_VIDEO);

    GESTrackElement *elem =
        ges_clip_find_track_element (vieh, trackv, G_TYPE_NONE);

    GESUriClipAsset *asset =
        GES_URI_CLIP_ASSET (ges_extractable_get_asset (GES_EXTRACTABLE (vieh)));

    guint width = ges_asset_get_width (asset);
    guint height = ges_asset_get_height (asset);

    g_print ("%s: %dx%d\n", assets[i - 1], width, height);

    ges_track_element_set_child_properties (elem,
        "posx", i * 100, "posy", i * 100,
        "width", i * 100 * width / height, "height", (i * 100) - 1, NULL);
  }

  GESLayer *background_layer = ges_layer_new ();
  ges_timeline_add_layer (timeline, background_layer);
  g_object_set (background_layer, "priority", asset_count, NULL);
  ges_clip_from_rel_path ("image/wallpaper-2597248.jpg", background_layer, 0, 0,
      10, GES_TRACK_TYPE_VIDEO);

  ges_timeline_commit (timeline);

  return timeline;
}
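
ges_clip_from_rel_path () and ges_renderer_get_absolute_path (), used in
compTL () above and in volumeTestTL () and alphaTestTL () further down, are
helpers from the surrounding test project rather than GES API. A rough
reconstruction of what such a helper could look like is sketched below; the
signature is inferred from the call sites, the times are taken to be in
seconds, and the body is an assumption.

/* Hypothetical helper: resolve a path relative to the project's asset
 * directory, request a GESUriClipAsset for it synchronously and add the
 * resulting clip to @layer.  @start, @inpoint and @duration are seconds. */
static GESClip *
ges_clip_from_rel_path (const gchar * rel_path, GESLayer * layer, gint start,
    gint inpoint, gint duration, GESTrackType type)
{
  GError *error = NULL;
  GESAsset *asset = GES_ASSET (ges_uri_clip_asset_request_sync
      (ges_renderer_get_absolute_path (rel_path), &error));

  if (error != NULL) {
    g_clear_error (&error);
    return NULL;
  }

  return ges_layer_add_asset (layer, asset, start * GST_SECOND,
      inpoint * GST_SECOND, duration * GST_SECOND, type);
}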
GESPipeline *
make_timeline (char *path, float duration, char *text, guint32 color,
    gdouble xpos, gdouble ypos)
{
  GESTimeline *timeline;
  GESTrack *trackv, *tracka;
  GESLayer *layer1;
  GESClip *srca;
  GESClip *overlay;
  GESPipeline *pipeline;
  guint64 aduration;

  pipeline = ges_pipeline_new ();

  ges_pipeline_set_mode (pipeline, TIMELINE_MODE_PREVIEW_VIDEO);

  timeline = ges_timeline_new ();
  ges_pipeline_add_timeline (pipeline, timeline);

  trackv = GES_TRACK (ges_video_track_new ());
  ges_timeline_add_track (timeline, trackv);

  tracka = GES_TRACK (ges_audio_track_new ());
  ges_timeline_add_track (timeline, tracka);

  layer1 = GES_LAYER (ges_layer_new ());
  g_object_set (layer1, "priority", (gint32) 0, NULL);

  if (!ges_timeline_add_layer (timeline, layer1))
    exit (-1);

  aduration = (guint64) (duration * GST_SECOND);
  srca = make_source (path, 0, aduration, 1);
  overlay = make_overlay (text, 0, aduration, 0, color, xpos, ypos);
  ges_layer_add_clip (layer1, srca);
  ges_layer_add_clip (layer1, overlay);

  return pipeline;
}
Example #8
static GESPipeline *
create_timeline (void)
{
  GESPipeline *pipeline;
  GESLayer *layer;
  GESTrack *tracka, *trackv;
  GESTimeline *timeline;
  GESClip *src;

  timeline = ges_timeline_new ();

  tracka = GES_TRACK (ges_audio_track_new ());
  trackv = GES_TRACK (ges_video_track_new ());

  layer = (GESLayer *) ges_simple_layer_new ();

  /* Add the tracks and the layer to the timeline */
  if (!ges_timeline_add_layer (timeline, layer) ||
      !ges_timeline_add_track (timeline, tracka) ||
      !ges_timeline_add_track (timeline, trackv))
    return NULL;

  /* Add the main audio/video file */
  src = GES_CLIP (ges_test_clip_new ());
  g_object_set (src,
      "vpattern", GES_VIDEO_TEST_PATTERN_SNOW,
      "duration", 10 * GST_SECOND, NULL);

  ges_simple_layer_add_object ((GESSimpleLayer *) layer, GES_CLIP (src), 0);

  pipeline = ges_pipeline_new ();

  if (!ges_pipeline_add_timeline (pipeline, timeline))
    return NULL;

  return pipeline;
}
static void
ges_track_set_property (GObject * object, guint property_id,
    const GValue * value, GParamSpec * pspec)
{
  GESTrack *track = GES_TRACK (object);

  switch (property_id) {
    case ARG_CAPS:
      ges_track_set_caps (track, gst_value_get_caps (value));
      break;
    case ARG_TYPE:
      track->type = g_value_get_flags (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
  }
}
Example #10
static void
asset_created_cb (GObject * source, GAsyncResult * res, gpointer udata)
{
  GList *tracks, *tmp;
  GESAsset *asset;
  GESLayer *layer;
  GESUriClip *tlfs;

  GError *error = NULL;

  asset = ges_asset_request_finish (res, &error);
  ASSERT_OBJECT_REFCOUNT (asset, "1 for us + 1 for the cache + 1 taken "
      "by g_simple_async_result_complete_in_idle", 3);
  fail_unless (error == NULL);
  fail_if (asset == NULL);
  fail_if (g_strcmp0 (ges_asset_get_id (asset), av_uri));

  layer = GES_LAYER (g_async_result_get_user_data (res));
  tlfs = GES_URI_CLIP (ges_layer_add_asset (layer,
          asset, 0, 0, GST_CLOCK_TIME_NONE, GES_TRACK_TYPE_UNKNOWN));
  fail_unless (GES_IS_URI_CLIP (tlfs));
  fail_if (g_strcmp0 (ges_uri_clip_get_uri (tlfs), av_uri));
  assert_equals_uint64 (_DURATION (tlfs), GST_SECOND);

  fail_unless (ges_clip_get_supported_formats
      (GES_CLIP (tlfs)) & GES_TRACK_TYPE_VIDEO);
  fail_unless (ges_clip_get_supported_formats
      (GES_CLIP (tlfs)) & GES_TRACK_TYPE_AUDIO);

  tracks = ges_timeline_get_tracks (ges_layer_get_timeline (layer));
  for (tmp = tracks; tmp; tmp = tmp->next) {
    GList *trackelements = ges_track_get_elements (GES_TRACK (tmp->data));

    assert_equals_int (g_list_length (trackelements), 1);
    fail_unless (GES_IS_VIDEO_URI_SOURCE (trackelements->data)
        || GES_IS_AUDIO_URI_SOURCE (trackelements->data));
    g_list_free_full (trackelements, gst_object_unref);
  }
  g_list_free_full (tracks, gst_object_unref);

  gst_object_unref (asset);
  g_main_loop_quit (mainloop);
}
static void
ges_track_get_property (GObject * object, guint property_id,
    GValue * value, GParamSpec * pspec)
{
  GESTrack *track = GES_TRACK (object);

  switch (property_id) {
    case ARG_CAPS:
      gst_value_set_caps (value, track->priv->caps);
      break;
    case ARG_TYPE:
      g_value_set_flags (value, track->type);
      break;
    case ARG_DURATION:
      g_value_set_uint64 (value, track->priv->duration);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
  }
}
Example #12
GESTimeline *
volumeTestTL (void)
{
  GESTimeline *timeline;

  GESTrack *tracka;
  timeline = ges_timeline_new ();

  tracka = GES_TRACK (ges_audio_track_new ());

  if (!ges_timeline_add_track (timeline, tracka)) {
    gst_object_unref (timeline);
    return NULL;
  }

  GESLayer *layer1 = ges_layer_new ();
  GESLayer *layer2 = ges_layer_new ();

  ges_timeline_add_layer (timeline, layer1);
  ges_timeline_add_layer (timeline, layer2);

  g_object_set (layer1, "priority", 0, NULL);
  g_object_set (layer2, "priority", 1, NULL);

  GESClip *music1 =
      ges_clip_from_rel_path ("audio/02_Oliver_Huntemann_-_Rikarda.flac",
      layer1, 0, 0, 10,
      GES_TRACK_TYPE_AUDIO);
  ges_clip_from_rel_path ("audio/prof.ogg", layer2, 0, 0, 10,
      GES_TRACK_TYPE_AUDIO);

  GESTrackElement *elem =
      ges_clip_find_track_element (music1, tracka, G_TYPE_NONE);

  ges_track_element_set_child_properties (elem, "volume", 2.1, NULL);

  ges_timeline_commit (timeline);

  return timeline;
}
Example #13
GESTimeline *
alphaTestTL (void)
{
  GESTimeline *timeline;

  GESTrack *trackv;
  timeline = ges_timeline_new ();

  trackv = GES_TRACK (ges_video_track_new ());

  if (!ges_timeline_add_track (timeline, trackv)) {
    gst_object_unref (timeline);
    return NULL;
  }

  GESLayer *layer1 = ges_layer_new ();
  GESLayer *layer2 = ges_layer_new ();

  ges_timeline_add_layer (timeline, layer1);
  ges_timeline_add_layer (timeline, layer2);

  g_object_set (layer1, "priority", 0, NULL);
  g_object_set (layer2, "priority", 1, NULL);

  GESClip *png = ges_clip_from_rel_path ("image/Fish.png", layer1, 0, 0, 10,
      GES_TRACK_TYPE_VIDEO);

  GESTrackElement *elem =
      ges_clip_find_track_element (png, trackv, G_TYPE_NONE);

  ges_track_element_set_child_properties (elem, "alpha", 0.5, NULL);

  ges_clip_from_rel_path ("hd/fluidsimulation.mp4", layer2, 0, 20, 10,
      GES_TRACK_TYPE_VIDEO);

  ges_timeline_commit (timeline);

  return timeline;
}
Example #14
int
main (int argc, gchar ** argv)
{
  GError *err = NULL;
  GOptionContext *ctx;
  GESPipeline *pipeline;
  GESTimeline *timeline;
  GESTrack *tracka, *trackv;
  GESLayer *layer1, *layer2;
  GESUriClip *src;
  GMainLoop *mainloop;

  gint inpoint = 0, duration = 10;
  gboolean mute = FALSE;
  gchar *audiofile = NULL;
  GOptionEntry options[] = {
    {"inpoint", 'i', 0, G_OPTION_ARG_INT, &inpoint,
        "in-point in the file (in seconds, default:0s)", "seconds"},
    {"duration", 'd', 0, G_OPTION_ARG_INT, &duration,
        "duration to use from the file (in seconds, default:10s)", "seconds"},
    {"mute", 'm', 0, G_OPTION_ARG_NONE, &mute,
        "Whether to mute the audio from the file",},
    {"audiofile", 'a', 0, G_OPTION_ARG_FILENAME, &audiofile,
          "Use this audiofile instead of the original audio from the file",
        "audiofile"},
    {NULL}
  };

  ctx =
      g_option_context_new
      ("- Plays an video file with sound (origin/muted/replaced)");
  g_option_context_add_main_entries (ctx, options, NULL);
  g_option_context_add_group (ctx, gst_init_get_option_group ());

  if (!g_option_context_parse (ctx, &argc, &argv, &err)) {
    g_print ("Error initializing %s\n", err->message);
    exit (1);
  }

  if (argc == 1) {
    g_print ("%s", g_option_context_get_help (ctx, TRUE, NULL));
    exit (0);
  }
  g_option_context_free (ctx);

  ges_init ();

  /* Create an Audio/Video pipeline with two layers */
  pipeline = ges_pipeline_new ();

  timeline = ges_timeline_new ();

  tracka = GES_TRACK (ges_audio_track_new ());
  trackv = GES_TRACK (ges_video_track_new ());

  layer1 = ges_layer_new ();
  layer2 = ges_layer_new ();
  g_object_set (layer2, "priority", 1, NULL);

  if (!ges_timeline_add_layer (timeline, layer1) ||
      !ges_timeline_add_layer (timeline, layer2) ||
      !ges_timeline_add_track (timeline, tracka) ||
      !ges_timeline_add_track (timeline, trackv) ||
      !ges_pipeline_set_timeline (pipeline, timeline))
    return -1;

  {
    gchar *uri = gst_filename_to_uri (argv[1], NULL);
    /* Add the main audio/video file */
    src = ges_uri_clip_new (uri);
    g_free (uri);
    g_object_set (src, "start", 0, "in-point", inpoint * GST_SECOND,
        "duration", duration * GST_SECOND, "mute", mute, NULL);
    ges_layer_add_clip (layer1, GES_CLIP (src));
  }

  /* Play the pipeline */
  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);
  mainloop = g_main_loop_new (NULL, FALSE);
  g_timeout_add_seconds (duration + 1, (GSourceFunc) g_main_loop_quit,
      mainloop);
  g_main_loop_run (mainloop);

  return 0;
}
/* An image sequence test */
int
main (int argc, gchar ** argv)
{
  GError *err = NULL;
  GOptionContext *ctx;
  GESPipeline *pipeline;
  GESTimeline *timeline;
  GESAsset *asset;
  GESLayer *layer;
  GMainLoop *mainloop;
  GESTrack *track;

  gint duration = 10;
  gchar *filepattern = NULL;

  GOptionEntry options[] = {
    {"duration", 'd', 0, G_OPTION_ARG_INT, &duration,
        "duration to use from the file (in seconds, default:10s)", "seconds"},
    {"pattern-url", 'u', 0, G_OPTION_ARG_FILENAME, &filepattern,
          "Pattern of the files. i.e. multifile:///foo/%04d.jpg",
        "pattern-url"},
    {NULL}
  };

  ctx = g_option_context_new ("- Plays an image sequence");
  g_option_context_add_main_entries (ctx, options, NULL);
  g_option_context_add_group (ctx, gst_init_get_option_group ());

  if (!g_option_context_parse (ctx, &argc, &argv, &err)) {
    g_print ("Error initializing %s\n", err->message);
    exit (1);
  }

  if (filepattern == NULL) {
    g_print ("%s", g_option_context_get_help (ctx, TRUE, NULL));
    exit (0);
  }
  g_option_context_free (ctx);

  gst_init (&argc, &argv);
  ges_init ();

  timeline = ges_timeline_new ();
  track = GES_TRACK (ges_video_track_new ());
  ges_timeline_add_track (timeline, track);

  layer = ges_layer_new ();
  if (!ges_timeline_add_layer (timeline, layer))
    return -1;

  asset = GES_ASSET (ges_uri_clip_asset_request_sync (filepattern, &err));

  ges_layer_add_asset (layer, asset, 0, 0, 5 * GST_SECOND,
      GES_TRACK_TYPE_VIDEO);

  pipeline = ges_pipeline_new ();

  if (!ges_pipeline_set_timeline (pipeline, timeline))
    return -1;

  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);

  mainloop = g_main_loop_new (NULL, FALSE);

  g_timeout_add_seconds (4, (GSourceFunc) g_main_loop_quit, mainloop);
  g_main_loop_run (mainloop);

  return 0;
}
/**
 * ges_pipeline_set_mode:
 * @pipeline: a #GESPipeline
 * @mode: the #GESPipelineFlags to use
 *
 * Switches the @pipeline to the specified @mode. The default mode when
 * creating a #GESPipeline is #GES_PIPELINE_MODE_PREVIEW.
 *
 * Note: The @pipeline will be set to #GST_STATE_NULL during this call due to
 * the internal changes that happen. The caller will therefore have to 
 * set the @pipeline to the requested state after calling this method.
 *
 * Returns: %TRUE if the mode was properly set, else %FALSE.
 **/
gboolean
ges_pipeline_set_mode (GESPipeline * pipeline, GESPipelineFlags mode)
{

  GList *tmp;
  g_return_val_if_fail (GES_IS_PIPELINE (pipeline), FALSE);

  GST_DEBUG_OBJECT (pipeline, "current mode : %d, mode : %d",
      pipeline->priv->mode, mode);

  /* fast-path, nothing to change */
  if (mode == pipeline->priv->mode)
    return TRUE;

  /* FIXME: It would be nice if we are only (de)activating preview
   * modes to not set the whole pipeline to NULL, but instead just
   * do the proper (un)linking to playsink. */

  /* Switch pipeline to NULL since we're changing the configuration */
  gst_element_set_state (GST_ELEMENT_CAST (pipeline), GST_STATE_NULL);


  if (pipeline->priv->timeline) {
    gboolean disabled =
        ! !(mode & (GES_PIPELINE_MODE_RENDER | GES_PIPELINE_MODE_SMART_RENDER));

    for (tmp = pipeline->priv->timeline->tracks; tmp; tmp = tmp->next)
      track_disable_last_gap (GES_TRACK (tmp->data), disabled);
  }

  /* remove no-longer needed components */
  if (pipeline->priv->mode & GES_PIPELINE_MODE_PREVIEW &&
      !(mode & GES_PIPELINE_MODE_PREVIEW)) {
    /* Disable playsink */
    GST_DEBUG ("Disabling playsink");
    gst_object_ref (pipeline->priv->playsink);
    gst_bin_remove (GST_BIN_CAST (pipeline), pipeline->priv->playsink);
  }
  if ((pipeline->priv->mode &
          (GES_PIPELINE_MODE_RENDER | GES_PIPELINE_MODE_SMART_RENDER)) &&
      !(mode & (GES_PIPELINE_MODE_RENDER | GES_PIPELINE_MODE_SMART_RENDER))) {
    GList *tmp;
    GstCaps *caps;

    for (tmp = pipeline->priv->timeline->tracks; tmp; tmp = tmp->next) {
      GESTrackType type = GES_TRACK (tmp->data)->type;

      if (type == GES_TRACK_TYPE_AUDIO)
        caps = gst_caps_new_empty_simple ("audio/x-raw");
      else if (type == GES_TRACK_TYPE_VIDEO)
        caps = gst_caps_new_empty_simple ("video/x-raw");
      else
        continue;

      ges_track_set_caps (GES_TRACK (tmp->data), caps);
      gst_caps_unref (caps);
    }

    /* Disable render bin */
    GST_DEBUG ("Disabling rendering bin");
    gst_object_ref (pipeline->priv->encodebin);
    gst_object_ref (pipeline->priv->urisink);
    gst_bin_remove_many (GST_BIN_CAST (pipeline),
        pipeline->priv->encodebin, pipeline->priv->urisink, NULL);
  }

  /* Add new elements */
  if (!(pipeline->priv->mode & GES_PIPELINE_MODE_PREVIEW) &&
      (mode & GES_PIPELINE_MODE_PREVIEW)) {
    /* Add playsink */
    GST_DEBUG ("Adding playsink");
    if (!gst_bin_add (GST_BIN_CAST (pipeline), pipeline->priv->playsink)) {
      GST_ERROR_OBJECT (pipeline, "Couldn't add playsink");
      return FALSE;
    }
  }
  if (!(pipeline->priv->mode &
          (GES_PIPELINE_MODE_RENDER | GES_PIPELINE_MODE_SMART_RENDER)) &&
      (mode & (GES_PIPELINE_MODE_RENDER | GES_PIPELINE_MODE_SMART_RENDER))) {
    /* Adding render bin */
    GST_DEBUG ("Adding render bin");

    if (G_UNLIKELY (pipeline->priv->urisink == NULL)) {
      GST_ERROR_OBJECT (pipeline, "Output URI not set !");
      return FALSE;
    }
    if (!gst_bin_add (GST_BIN_CAST (pipeline), pipeline->priv->encodebin)) {
      GST_ERROR_OBJECT (pipeline, "Couldn't add encodebin");
      return FALSE;
    }
    if (!gst_bin_add (GST_BIN_CAST (pipeline), pipeline->priv->urisink)) {
      GST_ERROR_OBJECT (pipeline, "Couldn't add URI sink");
      return FALSE;
    }
    g_object_set (pipeline->priv->encodebin, "avoid-reencoding",
        !(!(mode & GES_PIPELINE_MODE_SMART_RENDER)), NULL);

    gst_element_link_pads_full (pipeline->priv->encodebin, "src",
        pipeline->priv->urisink, "sink", GST_PAD_LINK_CHECK_NOTHING);
  }

  /* FIXUPS */
  /* FIXME
   * If we are rendering, set playsink to sync=False,
   * If we are NOT rendering, set playsink to sync=TRUE */

  pipeline->priv->mode = mode;

  return TRUE;
}
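
As the documentation above notes, ges_pipeline_set_mode () drops the pipeline
to GST_STATE_NULL while reconfiguring, so the caller has to restore the state
afterwards. A minimal usage sketch, assuming the render settings (output URI
and encoding profile) were already configured, e.g. with
ges_pipeline_set_render_settings ():

/* Switch a pipeline into render mode and bring it back up again,
 * as required by the note in the documentation above. */
static gboolean
start_rendering (GESPipeline * pipeline)
{
  /* This internally sets the pipeline to GST_STATE_NULL ... */
  if (!ges_pipeline_set_mode (pipeline, GES_PIPELINE_MODE_RENDER))
    return FALSE;

  /* ... so it is up to us to restart it afterwards */
  return gst_element_set_state (GST_ELEMENT (pipeline),
      GST_STATE_PLAYING) != GST_STATE_CHANGE_FAILURE;
}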
Example #17
int
main (int argc, gchar ** argv)
{
  GESPipeline *pipeline;
  GESTimeline *timeline;
  GESTrack *tracka;
  GESLayer *layer;
  GMainLoop *mainloop;
  GstClockTime offset = 0;
  guint i;

  if (argc < 2) {
    g_print ("Usage: %s <list of audio files>\n", argv[0]);
    return -1;
  }

  /* Initialize GStreamer (this will parse environment variables and
   * command line arguments). */
  gst_init (&argc, &argv);

  /* Initialize the GStreamer Editing Services */
  ges_init ();

  /* Setup of an audio timeline */

  /* This is our main GESTimeline */
  timeline = ges_timeline_new ();

  tracka = GES_TRACK (ges_audio_track_new ());

  /* We are only going to be doing one layer of clips */
  layer = ges_layer_new ();

  /* Add the tracks and the layer to the timeline */
  if (!ges_timeline_add_layer (timeline, layer))
    return -1;
  if (!ges_timeline_add_track (timeline, tracka))
    return -1;

  /* Here we've finished initializing our timeline, we're
   * ready to start using it by working solely with the layer! */

  for (i = 1; i < argc; i++, offset += GST_SECOND) {
    gchar *uri = gst_filename_to_uri (argv[i], NULL);
    GESUriClip *src = ges_uri_clip_new (uri);

    g_assert (src);
    g_free (uri);

    g_object_set (src, "start", offset, "duration", GST_SECOND, NULL);

    ges_layer_add_clip (layer, (GESClip *) src);
  }

  /* In order to listen to our timeline, let's grab a convenience pipeline
   * to put our timeline in. */
  pipeline = ges_pipeline_new ();

  /* Add the timeline to that pipeline */
  if (!ges_pipeline_add_timeline (pipeline, timeline))
    return -1;

  /* The following is standard usage of a GStreamer pipeline (note how you
   * haven't had to care about GStreamer so far?).
   *
   * We set the pipeline to playing ... */
  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);

  /* ... and we start a GMainLoop. GES **REQUIRES** a GMainLoop to be running
   * in order to function properly! */
  mainloop = g_main_loop_new (NULL, FALSE);

  /* Simple code to have the mainloop shut down once all clips have played
   * (one second per input file) */
  g_timeout_add_seconds (argc - 1, (GSourceFunc) g_main_loop_quit, mainloop);
  g_main_loop_run (mainloop);

  return 0;
}