Example No. 1
GESTimeline *
positionTestTL (void)
{
  GESTimeline *timeline;
  GESTrack *trackv;
  GError *error = NULL;
  GESAsset *asset;
  GESClip *clip;

  timeline = ges_timeline_new ();
  trackv = GES_TRACK (ges_video_track_new ());
  ges_timeline_add_track (timeline, trackv);

  GESLayer *layer = ges_layer_new ();
  ges_timeline_add_layer (timeline, layer);

  /* Request the image asset synchronously */
  asset =
      GES_ASSET (ges_uri_clip_asset_request_sync (ges_renderer_get_absolute_path
          ("image/wallpaper720p.jpg"), &error));

  clip =
      ges_layer_add_asset (layer, asset, 0, 0, 2 * GST_SECOND,
      GES_TRACK_TYPE_VIDEO);

  GESTrackElement *elem =
      ges_clip_find_track_element (clip, trackv, G_TYPE_NONE);

  ges_track_element_set_child_properties (elem, "posx", 100, "width", 100,
      NULL);

  ges_timeline_commit (timeline);

  return timeline;
}
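A minimal preview sketch for the timeline returned above, assuming ges_init() has already been called and a GMainLoop drives playback; the preview_timeline() helper and its name are illustrative, not part of the original example.

/* Hypothetical helper: preview a GES timeline with a GESPipeline. */
static void
preview_timeline (GESTimeline * timeline)
{
  GESPipeline *pipeline = ges_pipeline_new ();

  /* Attach the timeline and switch the pipeline to preview mode. */
  ges_pipeline_set_timeline (pipeline, timeline);
  ges_pipeline_set_mode (pipeline, GES_PIPELINE_MODE_PREVIEW);

  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);
  /* ... run a GMainLoop here until playback finishes ... */
  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_NULL);

  gst_object_unref (pipeline);
}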
Example No. 2
/**
 * ges_title_clip_set_ypos:
 * @self: the #GESTitleClip* to set
 * @position: The vertical position @self is being set to
 *
 * Sets the vertical position of the text.
 *
 * Deprecated: use ges_track_element_get/set_children_properties on the
 * underlying GESTrackElement instead
 */
void
ges_title_clip_set_ypos (GESTitleClip * self, gdouble position)
{
  GSList *tmp;

  GST_DEBUG_OBJECT (self, "ypos:%f", position);

  for (tmp = self->priv->track_titles; tmp; tmp = tmp->next) {
    ges_track_element_set_child_properties (tmp->data, "ypos", position, NULL);
  }
}
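The deprecation note above points at the children-properties API on the underlying track element; a hedged sketch of the replacement, assuming the title source is fetched from the clip first (the titleclip variable is hypothetical):

/* Look up the title source inside the clip (returns a new reference). */
GESTrackElement *source =
    ges_clip_find_track_element (GES_CLIP (titleclip), NULL,
    GES_TYPE_TITLE_SOURCE);

/* Equivalent of the deprecated ges_title_clip_set_ypos (titleclip, 0.5): */
ges_track_element_set_child_properties (source, "ypos", 0.5, NULL);
gst_object_unref (source);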
Example No. 3
/**
 * ges_title_clip_set_color:
 * @self: the #GESTitleClip* to set
 * @color: The color @self is being set to
 *
 * Sets the color of the text.
 *
 * Deprecated: use ges_track_element_get/set_children_properties on the
 * underlying GESTrackElement instead
 */
void
ges_title_clip_set_color (GESTitleClip * self, guint32 color)
{
  GSList *tmp;

  GST_DEBUG_OBJECT (self, "color:%d", color);

  for (tmp = self->priv->track_titles; tmp; tmp = tmp->next) {
    ges_track_element_set_child_properties (tmp->data, "color", color, NULL);
  }
}
Example No. 4
/**
 * ges_title_clip_set_text:
 * @self: the #GESTitleClip* to set text on
 * @text: the text to render. An internal copy of this text will be
 * made.
 *
 * Sets the text this clip will render.
 *
 * Deprecated: use ges_track_element_get/set_children_properties on the
 * underlying GESTrackElement instead
 */
void
ges_title_clip_set_text (GESTitleClip * self, const gchar * text)
{
  GSList *tmp;

  GST_DEBUG_OBJECT (self, "text:%s", text);

  for (tmp = self->priv->track_titles; tmp; tmp = tmp->next) {
    ges_track_element_set_child_properties (tmp->data, "text", text, NULL);
  }
}
Example No. 5
/**
 * ges_title_clip_set_background:
 * @self: the #GESTitleClip* to set
 * @background: The background color @self is being set to
 *
 * Sets the background of the text.
 *
 * Deprecated: use ges_track_element_get/set_children_properties on the
 * underlying GESTrackElement instead
 */
void
ges_title_clip_set_background (GESTitleClip * self, guint32 background)
{
  GSList *tmp;

  GST_DEBUG_OBJECT (self, "background:%d", background);

  for (tmp = self->priv->track_titles; tmp; tmp = tmp->next) {
    ges_track_element_set_child_properties (tmp->data,
        "foreground-color", background, NULL);
  }
}
Example No. 6
/**
 * ges_title_clip_set_valignment:
 * @self: the #GESTitleClip* to set vertical alignment of text on
 * @valign: #GESTextVAlign
 *
 * Sets the vertical alignment of the text.
 *
 * Deprecated: use ges_track_element_get/set_children_properties on the
 * underlying GESTrackElement instead
 */
void
ges_title_clip_set_valignment (GESTitleClip * self, GESTextVAlign valign)
{
  GSList *tmp;

  GST_DEBUG_OBJECT (self, "valign:%d", valign);

  for (tmp = self->priv->track_titles; tmp; tmp = tmp->next) {
    ges_track_element_set_child_properties (tmp->data,
        "valignment", valign, NULL);
  }
}
Example No. 7
/**
 * ges_title_clip_set_font_desc:
 * @self: the #GESTitleClip*
 * @font_desc: the pango font description
 *
 * Sets the pango font description of the text.
 *
 * Deprecated: use ges_track_element_get/set_children_properties on the
 * underlying GESTrackElement instead
 */
void
ges_title_clip_set_font_desc (GESTitleClip * self, const gchar * font_desc)
{
  GSList *tmp;

  GST_DEBUG_OBJECT (self, "font_desc:%s", font_desc);

  for (tmp = self->priv->track_titles; tmp; tmp = tmp->next) {
    ges_track_element_set_child_properties (tmp->data,
        "font-desc", font_desc, NULL);
  }
}
Example No. 8
GESTimeline *
compTL (void)
{
  GESTimeline *timeline;
  GESTrack *trackv;

  timeline = ges_timeline_new ();
  trackv = GES_TRACK (ges_video_track_new ());
  ges_timeline_add_track (timeline, trackv);

  const gchar *assets[] = { "image/vieh.png",
    "image/PNG_transparency_demonstration_1.png",
    "image/Ice_Cream.png",
    "image/Fish.png"
  };

  guint asset_count = G_N_ELEMENTS (assets);

  for (int i = 1; i <= asset_count; i++) {
    GESLayer *layer = ges_layer_new ();
    ges_timeline_add_layer (timeline, layer);
    g_object_set (layer, "priority", i - 1, NULL);

    GESClip *vieh = ges_clip_from_rel_path (assets[i - 1], layer, 0, 0, 10,
        GES_TRACK_TYPE_VIDEO);

    GESTrackElement *elem =
        ges_clip_find_track_element (vieh, trackv, G_TYPE_NONE);

    GESUriClipAsset *asset =
        GES_URI_CLIP_ASSET (ges_extractable_get_asset (GES_EXTRACTABLE (vieh)));

    guint width = ges_asset_get_width (asset);
    guint height = ges_asset_get_height (asset);

    g_print ("%s: %dx%d\n", assets[i - 1], width, height);

    ges_track_element_set_child_properties (elem,
        "posx", i * 100, "posy", i * 100,
        "width", i * 100 * width / height, "height", (i * 100) - 1, NULL);
  }

  GESLayer *background_layer = ges_layer_new ();
  ges_timeline_add_layer (timeline, background_layer);
  g_object_set (background_layer, "priority", asset_count, NULL);
  ges_clip_from_rel_path ("image/wallpaper-2597248.jpg", background_layer, 0, 0,
      10, GES_TRACK_TYPE_VIDEO);

  ges_timeline_commit (timeline);

  return timeline;
}
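The loop above creates every layer by hand and assigns its "priority" explicitly; a possible simplification, assuming the GES version in use provides ges_timeline_append_layer(), is to let the timeline pick the next lowest priority itself:

/* Appends and returns a new layer below the existing ones, replacing the
 * manual ges_layer_new() / ges_timeline_add_layer() / "priority" sequence. */
GESLayer *layer = ges_timeline_append_layer (timeline);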
Example No. 9
GESTimeline *
volumeTestTL (void)
{
  GESTimeline *timeline;

  GESTrack *tracka;
  timeline = ges_timeline_new ();

  tracka = GES_TRACK (ges_audio_track_new ());

  if (!ges_timeline_add_track (timeline, tracka)) {
    gst_object_unref (timeline);
    return NULL;
  }

  GESLayer *layer1 = ges_layer_new ();
  GESLayer *layer2 = ges_layer_new ();

  ges_timeline_add_layer (timeline, layer1);
  ges_timeline_add_layer (timeline, layer2);

  g_object_set (layer1, "priority", 0, NULL);
  g_object_set (layer2, "priority", 1, NULL);

  GESClip *music1 =
      ges_clip_from_rel_path ("audio/02_Oliver_Huntemann_-_Rikarda.flac",
      layer1, 0, 0, 10,
      GES_TRACK_TYPE_AUDIO);
  ges_clip_from_rel_path ("audio/prof.ogg", layer2, 0, 0, 10,
      GES_TRACK_TYPE_AUDIO);

  GESTrackElement *elem =
      ges_clip_find_track_element (music1, tracka, G_TYPE_NONE);

  ges_track_element_set_child_properties (elem, "volume", 2.1, NULL);

  ges_timeline_commit (timeline);

  return timeline;
}
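Note that values passed through the varargs of ges_track_element_set_child_properties() must match the type of the underlying GParamSpec ("volume" is a gdouble, hence 2.1 rather than an integer literal). When the type has to be explicit, the single-property GValue setter is an alternative; a short sketch, reusing elem from above:

GValue vol = G_VALUE_INIT;

g_value_init (&vol, G_TYPE_DOUBLE);
g_value_set_double (&vol, 2.1);
/* Same effect as the varargs call above, but with an explicit value type. */
ges_track_element_set_child_property (elem, "volume", &vol);
g_value_unset (&vol);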
Example No. 10
GESTimeline *
alphaTestTL (void)
{
  GESTimeline *timeline;

  GESTrack *trackv;
  timeline = ges_timeline_new ();

  trackv = GES_TRACK (ges_video_track_new ());

  if (!ges_timeline_add_track (timeline, trackv)) {
    gst_object_unref (timeline);
    return NULL;
  }

  GESLayer *layer1 = ges_layer_new ();
  GESLayer *layer2 = ges_layer_new ();

  ges_timeline_add_layer (timeline, layer1);
  ges_timeline_add_layer (timeline, layer2);

  g_object_set (layer1, "priority", 0, NULL);
  g_object_set (layer2, "priority", 1, NULL);

  GESClip *png = ges_clip_from_rel_path ("image/Fish.png", layer1, 0, 0, 10,
      GES_TRACK_TYPE_VIDEO);

  GESTrackElement *elem =
      ges_clip_find_track_element (png, trackv, G_TYPE_NONE);

  ges_track_element_set_child_properties (elem, "alpha", 0.5, NULL);

  ges_clip_from_rel_path ("hd/fluidsimulation.mp4", layer2, 0, 20, 10,
      GES_TRACK_TYPE_VIDEO);

  ges_timeline_commit (timeline);

  return timeline;
}
Example No. 11
static void
make_source (GESFormatter * self, GList * reflist, GHashTable * source_table)
{
  GHashTable *props_table, *effect_table;
  gchar **prio_array;
  GESLayer *layer;
  GESPitiviFormatterPrivate *priv = GES_PITIVI_FORMATTER (self)->priv;

  gchar *fac_ref = NULL, *media_type = NULL, *filename = NULL, *prio_str;
  GList *tmp = NULL, *keys, *tmp_key;
  GESUriClip *src = NULL;
  gint prio;
  gboolean a_avail = FALSE, v_avail = FALSE, video;
  GHashTable *trackelement_table = priv->track_elements_table;

  for (tmp = reflist; tmp; tmp = tmp->next) {

    /* Get the layer */
    props_table = g_hash_table_lookup (trackelement_table, (gchar *) tmp->data);
    prio_str = (gchar *) g_hash_table_lookup (props_table, "priority");
    prio_array = g_strsplit (prio_str, ")", 0);
    prio = (gint) g_ascii_strtod (prio_array[1], NULL);
    g_strfreev (prio_array);

    /* If we do not have any layer with this priority, create it */
    if (!(layer = g_hash_table_lookup (priv->layers_table, &prio))) {
      layer = ges_layer_new ();
      g_object_set (layer, "auto-transition", TRUE, "priority", prio, NULL);
      ges_timeline_add_layer (self->timeline, layer);
      g_hash_table_insert (priv->layers_table, g_memdup (&prio,
              sizeof (gint)), layer);
    }

    fac_ref = (gchar *) g_hash_table_lookup (props_table, "fac_ref");
    media_type = (gchar *) g_hash_table_lookup (props_table, "media_type");

    if (!g_strcmp0 (media_type, "pitivi.stream.VideoStream"))
      video = TRUE;
    else
      video = FALSE;

    /* FIXME I am sure we could reimplement this whole part
     * in a simpler way */

    if (g_strcmp0 (fac_ref, (gchar *) "effect")) {
      /* FIXME this is a hack to get a ref to the formatter when receiving
       * child-added */
      g_hash_table_insert (props_table, (gchar *) "current-formatter", self);
      if (a_avail && (!video)) {
        a_avail = FALSE;
      } else if (v_avail && (video)) {
        v_avail = FALSE;
      } else {

        /* If we only have audio or only video in the previous source,
         * set it as such */
        if (a_avail) {
          ges_clip_set_supported_formats (GES_CLIP (src), GES_TRACK_TYPE_VIDEO);
        } else if (v_avail) {
          ges_clip_set_supported_formats (GES_CLIP (src), GES_TRACK_TYPE_AUDIO);
        }

        filename = (gchar *) g_hash_table_lookup (source_table, "filename");

        src = ges_uri_clip_new (filename);

        if (!video) {
          v_avail = TRUE;
          a_avail = FALSE;
        } else {
          a_avail = TRUE;
          v_avail = FALSE;
        }

        set_properties (G_OBJECT (src), props_table);
        ges_layer_add_clip (layer, GES_CLIP (src));

        g_signal_connect (src, "child-added",
            G_CALLBACK (track_element_added_cb), props_table);

        priv->sources_to_load = g_list_prepend (priv->sources_to_load, src);
      }

    } else {
      GESEffect *effect;
      gchar *active = (gchar *) g_hash_table_lookup (props_table, "active");

      effect = ges_effect_new ((gchar *)
          g_hash_table_lookup (props_table, (gchar *) "effect_name"));
      ges_track_element_set_track_type (GES_TRACK_ELEMENT (effect),
          (video ? GES_TRACK_TYPE_VIDEO : GES_TRACK_TYPE_AUDIO));
      effect_table =
          g_hash_table_lookup (props_table, (gchar *) "effect_props");

      ges_container_add (GES_CONTAINER (src), GES_TIMELINE_ELEMENT (effect));

      if (!g_strcmp0 (active, (gchar *) "(bool)False"))
        ges_track_element_set_active (GES_TRACK_ELEMENT (effect), FALSE);

      /* Set effect properties */
      keys = g_hash_table_get_keys (effect_table);
      for (tmp_key = keys; tmp_key; tmp_key = tmp_key->next) {
        GstStructure *structure;
        const GValue *value;
        GParamSpec *spec;
        GstCaps *caps;
        gchar *prop_val;

        prop_val = (gchar *) g_hash_table_lookup (effect_table,
            (gchar *) tmp_key->data);

        if (g_strstr_len (prop_val, -1, "(GEnum)")) {
          gchar **val = g_strsplit (prop_val, ")", 2);

          ges_track_element_set_child_properties (GES_TRACK_ELEMENT (effect),
              (gchar *) tmp_key->data, atoi (val[1]), NULL);
          g_strfreev (val);

        } else if (ges_track_element_lookup_child (GES_TRACK_ELEMENT (effect),
                (gchar *) tmp_key->data, NULL, &spec)) {
          gchar *caps_str = g_strdup_printf ("structure1, property1=%s;",
              prop_val);

          caps = gst_caps_from_string (caps_str);
          g_free (caps_str);
          structure = gst_caps_get_structure (caps, 0);
          value = gst_structure_get_value (structure, "property1");

          ges_track_element_set_child_property_by_pspec (GES_TRACK_ELEMENT
              (effect), spec, (GValue *) value);
          gst_caps_unref (caps);
        }
      }
    }
  }

  if (a_avail) {
    ges_clip_set_supported_formats (GES_CLIP (src), GES_TRACK_TYPE_VIDEO);
  } else if (v_avail) {
    ges_clip_set_supported_formats (GES_CLIP (src), GES_TRACK_TYPE_AUDIO);
  }
}
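The caps-string round trip at the end is just a way to deserialize prop_val into a GValue of the right type; a possibly simpler sketch of that step, using the pspec already obtained from ges_track_element_lookup_child() (an assumed equivalent, not taken from the original source):

GValue v = G_VALUE_INIT;

/* Let GStreamer parse the string straight into a value of the pspec's type. */
g_value_init (&v, G_PARAM_SPEC_VALUE_TYPE (spec));
if (gst_value_deserialize (&v, prop_val))
  ges_track_element_set_child_property_by_pspec (GES_TRACK_ELEMENT (effect),
      spec, &v);
g_value_unset (&v);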
Example No. 12
void
getClips (JsonReader * reader, GESLayer * layer, GESTrackType type,
    gboolean absolute_paths)
{
  int i;
  json_reader_read_member (reader, "clips");

  g_print ("= clips =\n");

  for (i = 0; i < json_reader_count_elements (reader); i++) {
    json_reader_read_element (reader, i);
    const char *src = getString (reader, "src");
    int start = getInt (reader, "start");
    int in = getInt (reader, "in");
    int dur = getInt (reader, "dur");
    g_print ("Clip: %s (start: %d, in: %d, dur: %d)\n", src, start, in, dur);

    GESClip *clip;

    if (is_in_members (reader, "multi") && getBool (reader, "multi")) {
      g_print ("multi on.\n");
      clip =
          ges_multi_clip_from_path (src, layer, start, in, dur, absolute_paths);
    } else {
      const char *path;
      if (absolute_paths == TRUE) {
        path = src;
      } else {
        path = ges_renderer_get_absolute_path (src);
      }
      clip = ges_clip_from_path (path, layer, start, in, dur, type);
    }

    GESTimeline *tl = ges_layer_get_timeline (layer);
    GList *tracks = ges_timeline_get_tracks (tl);
    GESTrack *trackv = g_list_first (tracks)->data;
    GESTrack *tracka = g_list_last (tracks)->data;

    if (is_in_members (reader, "volume")) {
      double volume = getDouble (reader, "volume");
      GESTrackElement *audioElement =
          ges_clip_find_track_element (clip, tracka, G_TYPE_NONE);
      if (audioElement != NULL) {
        ges_track_element_set_child_properties (audioElement, "volume", volume,
            NULL);
      }
    }

    GESTrackElement *videoElement =
        ges_clip_find_track_element (clip, trackv, G_TYPE_NONE);

    if (videoElement != NULL) {
      if (is_in_members (reader, "x")) {
        int x = getInt (reader, "x");
        ges_track_element_set_child_properties (videoElement, "posx", x, NULL);
      }
      if (is_in_members (reader, "y")) {
        int y = getInt (reader, "y");
        ges_track_element_set_child_properties (videoElement, "posy", y, NULL);
      }
      if (is_in_members (reader, "alpha")) {
        gdouble alpha = getDouble (reader, "alpha");
        ges_track_element_set_child_properties (videoElement, "alpha", alpha,
            NULL);
      }

      if (is_in_members (reader, "size")) {
        gdouble size = getDouble (reader, "size");
        GESUriClipAsset *asset =
            GES_URI_CLIP_ASSET (ges_extractable_get_asset (GES_EXTRACTABLE
                (clip)));
        guint width = ges_asset_get_width (asset);
        guint height = ges_asset_get_height (asset);

        if (width != 0 && height != 0) {
          double dw = width * size;
          double dh = height * size;
          g_print ("%dx%d => * %f => %dx%d\n", width, height, size, (int) dw,
              (int) dh);
          ges_track_element_set_child_properties (videoElement,
              "width", (int) dw, "height", (int) dh, NULL);
        }
      }

      if (is_in_members (reader, "effect")) {
        const char *effect_str = getString (reader, "effect");
        if (strcmp (effect_str, "") != 0) {
          g_print ("Using effect %s", effect_str);
          GESEffect *effect = ges_effect_new (effect_str);
          ges_container_add (GES_CONTAINER (clip),
              GES_TIMELINE_ELEMENT (effect));
        }
      }
    }

    json_reader_end_element (reader);
  }
  json_reader_end_member (reader);
}
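For reference, a hedged sketch of the input this parser expects and how a JsonReader could be fed to it with json-glib; the file names and values are made up, and only the members the code actually reads ("src", "start", "in", "dur", plus the optional "multi", "volume", "x", "y", "alpha", "size" and "effect") are shown.

#include <json-glib/json-glib.h>

static const gchar *example_json =
    "{ \"clips\": ["
    "  { \"src\": \"image/Fish.png\", \"start\": 0, \"in\": 0, \"dur\": 5,"
    "    \"x\": 100, \"y\": 50, \"alpha\": 0.5, \"size\": 0.5 },"
    "  { \"src\": \"audio/prof.ogg\", \"start\": 0, \"in\": 0, \"dur\": 5,"
    "    \"volume\": 1.5 }"
    "] }";

/* Parse the example document and hand its reader to getClips(); the layer
 * and track type are whatever the real caller would use. */
static void
load_example_clips (GESLayer * layer)
{
  JsonParser *parser = json_parser_new ();
  JsonReader *reader;

  json_parser_load_from_data (parser, example_json, -1, NULL);
  reader = json_reader_new (json_parser_get_root (parser));

  getClips (reader, layer, GES_TRACK_TYPE_UNKNOWN, FALSE);

  g_object_unref (reader);
  g_object_unref (parser);
}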