Code example #1
0
GESTimeline *
effectTL (void)
{
  /* Build a two-clip audio/video timeline, attach one video effect to
   * each clip, commit it and hand it back to the caller. */
  GESTimeline *tl;
  GESLayer *lyr;
  GESClip *first_clip, *second_clip;
  GESEffect *aging_fx, *ripple_fx;

  tl = ges_timeline_new_audio_video ();
  lyr = ges_layer_new ();
  ges_timeline_add_layer (tl, lyr);

  /* First clip plays 0s-5s; second plays 5s-10s with a 5s inpoint. */
  first_clip =
      ges_clip_unknown_from_rel_path ("sd/trailer_400p.ogg", lyr, 0, 0, 5);
  second_clip =
      ges_clip_unknown_from_rel_path ("sd/sintel_trailer-480p.ogv", lyr, 5,
      5, 5);

  aging_fx = ges_effect_new ("agingtv");
  ges_container_add (GES_CONTAINER (first_clip),
      GES_TIMELINE_ELEMENT (aging_fx));

  ripple_fx = ges_effect_new ("rippletv");
  /* some cool Frei0r plugins:
   * "frei0r-filter-pixeliz0r", "frei0r-filter-flippo",
   * "frei0r-filter-twolay0r" */
  ges_container_add (GES_CONTAINER (second_clip),
      GES_TIMELINE_ELEMENT (ripple_fx));

  ges_timeline_commit (tl);

  return tl;
}
Code example #2
0
static void
_add_track_element (GESFormatter * self, GESClip * clip,
    GESTrackElement * trackelement, const gchar * track_id,
    GstStructure * children_properties, GstStructure * properties)
{
  /* Attach @trackelement to @clip and restore its serialized properties.
   * When @track_id does not resolve to a known track the element is
   * dropped (ownership of @trackelement is consumed either way). */
  GESBaseXmlFormatterPrivate *priv = _GET_PRIV (self);
  GESTrack *target = g_hash_table_lookup (priv->tracks, track_id);

  if (!target) {
    GST_WARNING_OBJECT (self, "No track with id %s, can not add trackelement",
        track_id);
    gst_object_unref (trackelement);
    return;
  }

  GST_DEBUG_OBJECT (self, "Adding track_element: %" GST_PTR_FORMAT
      " To : %" GST_PTR_FORMAT, trackelement, clip);

  ges_container_add (GES_CONTAINER (clip), GES_TIMELINE_ELEMENT (trackelement));

  /* Restore the child (element-level) properties first... */
  gst_structure_foreach (children_properties,
      (GstStructureForeachFunc) _set_child_property, trackelement);

  /* ...then the track element's own properties, when serialized. */
  if (properties != NULL)
    gst_structure_foreach (properties,
        (GstStructureForeachFunc) set_property_foreach, trackelement);
}
Code example #3
0
static void
_add_all_groups (GESFormatter * self)
{
  /* Resolve every pending group's children (by container id) and add
   * them to their group.  Fixes vs. previous version: use the already
   * computed @pgroup instead of re-casting tmp->data; pass @self (a
   * real GObject) to GST_DEBUG_OBJECT instead of the plain-struct
   * PendingGroup pointer; skip children whose id is unknown rather
   * than handing NULL to ges_container_add; set the group's timeline
   * once per group (it is invariant over the child loop). */
  GList *tmp;
  GESBaseXmlFormatterPrivate *priv = GES_BASE_XML_FORMATTER (self)->priv;

  for (tmp = priv->groups; tmp; tmp = tmp->next) {
    GList *lchild;
    PendingGroup *pgroup = tmp->data;

    ges_timeline_element_set_timeline (GES_TIMELINE_ELEMENT (pgroup->group),
        self->timeline);

    for (lchild = pgroup->pending_children; lchild; lchild = lchild->next) {
      GESTimelineElement *child =
          g_hash_table_lookup (priv->containers, lchild->data);

      if (child == NULL) {
        GST_WARNING_OBJECT (self,
            "Can not find container with id %s, not adding to group",
            (const gchar *) lchild->data);
        continue;
      }

      GST_DEBUG_OBJECT (self, "Adding %s child %" GST_PTR_FORMAT " %s",
          (const gchar *) lchild->data, child,
          GES_TIMELINE_ELEMENT_NAME (child));
      ges_container_add (GES_CONTAINER (pgroup->group), child);
    }
  }
}
Code example #4
0
static GESTimelineElement *
_paste (GESTimelineElement * element, GESTimelineElement * ref,
    GstClockTime paste_position)
{
  /* Deep-paste a container: copy @element (shallow), then paste each
   * previously recorded child at its offset from @paste_position.
   *
   * The new container's timeline depends only on @ref, so it is set
   * once up front — the previous version re-set it on every child
   * iteration and never set it at all for child-less containers.
   * Indentation also normalized to the file's 2-space style. */
  GList *tmp;
  GESContainer *ncontainer =
      GES_CONTAINER (ges_timeline_element_copy (element, FALSE));
  GESContainer *self = GES_CONTAINER (element);

  ges_timeline_element_set_timeline (GES_TIMELINE_ELEMENT (ncontainer),
      GES_TIMELINE_ELEMENT_TIMELINE (ref));

  for (tmp = self->priv->copied_children; tmp; tmp = tmp->next) {
    ChildMapping *map = tmp->data;
    GESTimelineElement *nchild =
        ges_timeline_element_paste (map->child,
        paste_position - map->start_offset);

    ges_container_add (ncontainer, nchild);
  }

  return GES_TIMELINE_ELEMENT (ncontainer);
}
Code example #5
0
File: ges-pitivi-formatter.c  Project: cfoch/ges
static void
make_source (GESFormatter * self, GList * reflist, GHashTable * source_table)
{
  /* Build GESUriClips (and their effects) for one source entry of a
   * PiTiVi project.  @reflist holds track-element ids referencing
   * entries in priv->track_elements_table; audio and video halves of
   * the same source are folded into a single clip via a_avail/v_avail.
   *
   * Fixes vs. previous version:
   *  - g_memdup copied sizeof (guint64) (8) bytes from the 4-byte gint
   *    @prio — an out-of-bounds read (UB); now copies sizeof (prio).
   *    (Lookups use &prio, so the table hashes gint-sized keys.)
   *  - ges_track_element_lookup_child was given tmp->data (the outer
   *    reflist entry) instead of tmp_key->data (the property name
   *    being iterated), so property lookup could never match.
   *  - the keys list from g_hash_table_get_keys was leaked.
   *  - effect_table is now checked before use. */
  GHashTable *props_table, *effect_table;
  gchar **prio_array;
  GESLayer *layer;
  GESPitiviFormatterPrivate *priv = GES_PITIVI_FORMATTER (self)->priv;

  gchar *fac_ref = NULL, *media_type = NULL, *filename = NULL, *prio_str;
  GList *tmp = NULL, *keys, *tmp_key;
  GESUriClip *src = NULL;
  gint prio;
  gboolean a_avail = FALSE, v_avail = FALSE, video;
  GHashTable *trackelement_table = priv->track_elements_table;

  for (tmp = reflist; tmp; tmp = tmp->next) {

    /* Get the layer: priority is serialized as "(int)N" — split on ')' */
    props_table = g_hash_table_lookup (trackelement_table, (gchar *) tmp->data);
    prio_str = (gchar *) g_hash_table_lookup (props_table, "priority");
    prio_array = g_strsplit (prio_str, ")", 0);
    prio = (gint) g_ascii_strtod (prio_array[1], NULL);
    g_strfreev (prio_array);

    /* If we do not have any layer with this priority, create it */
    if (!(layer = g_hash_table_lookup (priv->layers_table, &prio))) {
      layer = ges_layer_new ();
      g_object_set (layer, "auto-transition", TRUE, "priority", prio, NULL);
      ges_timeline_add_layer (self->timeline, layer);
      /* Copy exactly the key's size; sizeof (guint64) read past @prio. */
      g_hash_table_insert (priv->layers_table, g_memdup (&prio,
              sizeof (prio)), layer);
    }

    fac_ref = (gchar *) g_hash_table_lookup (props_table, "fac_ref");
    media_type = (gchar *) g_hash_table_lookup (props_table, "media_type");

    if (!g_strcmp0 (media_type, "pitivi.stream.VideoStream"))
      video = TRUE;
    else
      video = FALSE;

    /* FIXME I am sure we could reimplement this whole part
     * in a simpler way */

    if (g_strcmp0 (fac_ref, (gchar *) "effect")) {
      /* FIXME this is a hack to get a ref to the formatter when receiving
       * child-added */
      g_hash_table_insert (props_table, (gchar *) "current-formatter", self);
      if (a_avail && (!video)) {
        a_avail = FALSE;
      } else if (v_avail && (video)) {
        v_avail = FALSE;
      } else {

        /* If we only have audio or only video in the previous source,
         * set it has such */
        if (a_avail) {
          ges_clip_set_supported_formats (GES_CLIP (src), GES_TRACK_TYPE_VIDEO);
        } else if (v_avail) {
          ges_clip_set_supported_formats (GES_CLIP (src), GES_TRACK_TYPE_AUDIO);
        }

        filename = (gchar *) g_hash_table_lookup (source_table, "filename");

        src = ges_uri_clip_new (filename);

        if (!video) {
          v_avail = TRUE;
          a_avail = FALSE;
        } else {
          a_avail = TRUE;
          v_avail = FALSE;
        }

        set_properties (G_OBJECT (src), props_table);
        ges_layer_add_clip (layer, GES_CLIP (src));

        g_signal_connect (src, "child-added",
            G_CALLBACK (track_element_added_cb), props_table);

        priv->sources_to_load = g_list_prepend (priv->sources_to_load, src);
      }

    } else {
      GESEffect *effect;
      gchar *active = (gchar *) g_hash_table_lookup (props_table, "active");

      effect = ges_effect_new ((gchar *)
          g_hash_table_lookup (props_table, (gchar *) "effect_name"));
      ges_track_element_set_track_type (GES_TRACK_ELEMENT (effect),
          (video ? GES_TRACK_TYPE_VIDEO : GES_TRACK_TYPE_AUDIO));
      effect_table =
          g_hash_table_lookup (props_table, (gchar *) "effect_props");

      ges_container_add (GES_CONTAINER (src), GES_TIMELINE_ELEMENT (effect));

      if (!g_strcmp0 (active, (gchar *) "(bool)False"))
        ges_track_element_set_active (GES_TRACK_ELEMENT (effect), FALSE);

      /* Set effect properties (skip if none were serialized) */
      if (effect_table == NULL)
        continue;

      keys = g_hash_table_get_keys (effect_table);
      for (tmp_key = keys; tmp_key; tmp_key = tmp_key->next) {
        GstStructure *structure;
        const GValue *value;
        GParamSpec *spec;
        GstCaps *caps;
        gchar *prop_val;

        prop_val = (gchar *) g_hash_table_lookup (effect_table,
            (gchar *) tmp_key->data);

        if (g_strstr_len (prop_val, -1, "(GEnum)")) {
          gchar **val = g_strsplit (prop_val, ")", 2);

          ges_track_element_set_child_properties (GES_TRACK_ELEMENT (effect),
              (gchar *) tmp_key->data, atoi (val[1]), NULL);
          g_strfreev (val);

          /* Look up the property currently being iterated (tmp_key),
           * not the outer reflist entry (tmp). */
        } else if (ges_track_element_lookup_child (GES_TRACK_ELEMENT (effect),
                (gchar *) tmp_key->data, NULL, &spec)) {
          /* Round-trip the serialized value through a GstStructure to
           * coerce it into the property's GValue type. */
          gchar *caps_str = g_strdup_printf ("structure1, property1=%s;",
              prop_val);

          caps = gst_caps_from_string (caps_str);
          g_free (caps_str);
          structure = gst_caps_get_structure (caps, 0);
          value = gst_structure_get_value (structure, "property1");

          ges_track_element_set_child_property_by_pspec (GES_TRACK_ELEMENT
              (effect), spec, (GValue *) value);
          gst_caps_unref (caps);
        }
      }
      /* g_hash_table_get_keys() list is owned by the caller. */
      g_list_free (keys);
    }
  }

  /* Flag the trailing source if it ended up single-stream. */
  if (a_avail) {
    ges_clip_set_supported_formats (GES_CLIP (src), GES_TRACK_TYPE_VIDEO);
  } else if (v_avail) {
    ges_clip_set_supported_formats (GES_CLIP (src), GES_TRACK_TYPE_AUDIO);
  }
}
Code example #6
0
File: ges-json.c  Project: lubosz/ges-renderer
void
getClips (JsonReader * reader, GESLayer * layer, GESTrackType type,
    gboolean absolute_paths)
{
  /* Read the "clips" array from @reader and add each entry as a clip on
   * @layer, applying optional per-clip volume/position/alpha/size and a
   * named effect.
   *
   * Fixes vs. previous version:
   *  - ges_timeline_get_tracks() returns a (transfer full) list; it was
   *    re-queried AND leaked on every loop iteration.  It is invariant,
   *    so fetch it once and free it at the end.
   *  - ges_clip_find_track_element() returns a new ref; the audio and
   *    video elements were leaked. */
  int i;

  json_reader_read_member (reader, "clips");

  g_print ("= clips =\n");

  /* ges_layer_get_timeline() is (transfer none): no unref needed. */
  GESTimeline *tl = ges_layer_get_timeline (layer);
  GList *tracks = ges_timeline_get_tracks (tl);
  /* NOTE(review): assumes a two-track (video-first) timeline as created
   * by ges_timeline_new_audio_video() — confirm for other layouts. */
  GESTrack *trackv = g_list_first (tracks)->data;
  GESTrack *tracka = g_list_last (tracks)->data;

  for (i = 0; i < json_reader_count_elements (reader); i++) {
    json_reader_read_element (reader, i);
    const char *src = getString (reader, "src");
    int start = getInt (reader, "start");
    int in = getInt (reader, "in");
    int dur = getInt (reader, "dur");
    g_print ("Clip: %s (start: %d, in: %d, dur: %d)\n", src, start, in, dur);

    GESClip *clip;

    if (is_in_members (reader, "multi") && getBool (reader, "multi")) {
      g_print ("multi on.\n");
      clip =
          ges_multi_clip_from_path (src, layer, start, in, dur, absolute_paths);
    } else {
      const char *path;
      if (absolute_paths == TRUE) {
        path = src;
      } else {
        path = ges_renderer_get_absolute_path (src);
      }
      clip = ges_clip_from_path (path, layer, start, in, dur, type);
    }

    if (is_in_members (reader, "volume")) {
      double volume = getDouble (reader, "volume");
      GESTrackElement *audioElement =
          ges_clip_find_track_element (clip, tracka, G_TYPE_NONE);
      if (audioElement != NULL) {
        ges_track_element_set_child_properties (audioElement, "volume", volume,
            NULL);
        gst_object_unref (audioElement);
      }
    }

    GESTrackElement *videoElement =
        ges_clip_find_track_element (clip, trackv, G_TYPE_NONE);

    if (videoElement != NULL) {
      if (is_in_members (reader, "x")) {
        int x = getInt (reader, "x");
        ges_track_element_set_child_properties (videoElement, "posx", x, NULL);
      }
      if (is_in_members (reader, "y")) {
        int y = getInt (reader, "y");
        ges_track_element_set_child_properties (videoElement, "posy", y, NULL);
      }
      if (is_in_members (reader, "alpha")) {
        gdouble alpha = getDouble (reader, "alpha");
        ges_track_element_set_child_properties (videoElement, "alpha", alpha,
            NULL);
      }

      if (is_in_members (reader, "size")) {
        /* Scale the clip relative to the asset's natural resolution. */
        gdouble size = getDouble (reader, "size");
        GESUriClipAsset *asset =
            GES_URI_CLIP_ASSET (ges_extractable_get_asset (GES_EXTRACTABLE
                (clip)));
        guint width = ges_asset_get_width (asset);
        guint height = ges_asset_get_height (asset);

        if (width != 0 && height != 0) {
          double dw = width * size;
          double dh = height * size;
          g_print ("%dx%d => * %f => %dx%d\n", width, height, size, (int) dw,
              (int) dh);
          ges_track_element_set_child_properties (videoElement,
              "width", (int) dw, "height", (int) dh, NULL);
        }
      }

      if (is_in_members (reader, "effect")) {
        const char *effect_str = getString (reader, "effect");
        if (strcmp (effect_str, "") != 0) {
          g_print ("Using effect %s", effect_str);
          GESEffect *effect = ges_effect_new (effect_str);
          /* ges_container_add sinks the effect's floating ref. */
          ges_container_add (GES_CONTAINER (clip),
              GES_TIMELINE_ELEMENT (effect));
        }
      }

      gst_object_unref (videoElement);
    }

    json_reader_end_element (reader);
  }

  /* The track list and its refs belong to us. */
  g_list_free_full (tracks, gst_object_unref);
  json_reader_end_member (reader);
}