/* GESExtractable vmethod: called when an asset is attached to the element.
 * FIXME That should go into #GESTrackElement, but some work is needed to
 * make sure it works properly. */
static void
extractable_set_asset (GESExtractable * self, GESAsset * asset)
{
  GESTrackElement *track_element = GES_TRACK_ELEMENT (self);

  /* Only adopt the asset's track type while ours is still undecided */
  if (ges_track_element_get_track_type (track_element) !=
      GES_TRACK_TYPE_UNKNOWN)
    return;

  ges_track_element_set_track_type (track_element,
      ges_track_element_asset_get_track_type (GES_TRACK_ELEMENT_ASSET
          (asset)));
}
/* GObject::dispose implementation: release the resources held by the track
 * element.  dispose may run more than once, so every release below is
 * guarded by a NULL/validity check. */
static void
ges_track_element_dispose (GObject * object)
{
  GESTrackElement *element = GES_TRACK_ELEMENT (object);
  GESTrackElementPrivate *priv = element->priv;

  /* Drop the property-name -> control-binding table */
  if (priv->bindings_hashtable)
    g_hash_table_destroy (priv->bindings_hashtable);

  if (priv->nleobject) {
    GstState cstate;

    if (priv->track != NULL) {
      /* NOTE(review): g_error() logs fatally and aborts the process, so the
       * state shutdown just below can never execute -- confirm whether it
       * was meant to live outside this branch. */
      g_error ("%p Still in %p, this means that you forgot"
          " to remove it from the GESTrack it is contained in. You always need"
          " to remove a GESTrackElement from its track before dropping the last"
          " reference\n"
          "This problem may also be caused by a refcounting bug in"
          " the application or GES itself.", object, priv->track);
      gst_element_get_state (priv->nleobject, &cstate, NULL, 0);
      if (cstate != GST_STATE_NULL)
        gst_element_set_state (priv->nleobject, GST_STATE_NULL);
    }

    /* Break the back-pointer stored on the nleobject before unreffing it */
    g_object_set_qdata (G_OBJECT (priv->nleobject),
        NLE_OBJECT_TRACK_ELEMENT_QUARK, NULL);
    gst_object_unref (priv->nleobject);
    priv->nleobject = NULL;
  }

  G_OBJECT_CLASS (ges_track_element_parent_class)->dispose (object);
}
/* GstValidate action handler: sets a child property on a track element
 * named by the action structure's "element-name" field.
 *
 * Fix: get_timeline() hands back an owned reference (the success path
 * unrefs it), so the early-failure paths must release it too instead of
 * leaking it through g_return_val_if_fail().
 *
 * Returns: TRUE on success, FALSE when the timeline or element cannot be
 * resolved. */
static gboolean
_set_child_property (GstValidateScenario * scenario, GstValidateAction * action)
{
  const GValue *value;
  GESTimeline *timeline;
  GESTimelineElement *element;
  const gchar *property_name, *element_name;

  element_name = gst_structure_get_string (action->structure, "element-name");

  timeline = get_timeline (scenario);
  g_return_val_if_fail (timeline, FALSE);

  element = ges_timeline_get_element (timeline, element_name);
  if (!GES_IS_TRACK_ELEMENT (element)) {
    GST_ERROR ("Could not find a track element named %s", element_name);
    /* Was leaked here before: timeline is an owned reference */
    g_object_unref (timeline);
    return FALSE;
  }
  /* NOTE(review): if ges_timeline_get_element() is (transfer full), element
   * should be unreffed below as well -- confirm the annotation. */

  property_name = gst_structure_get_string (action->structure, "property");
  value = gst_structure_get_value (action->structure, "value");

  GST_DEBUG ("%s Setting %s property to %p", element->name, property_name,
      value);
  ges_track_element_set_child_property (GES_TRACK_ELEMENT (element),
      property_name, (GValue *) value);

  g_object_unref (timeline);
  return TRUE;
}
/**
 * ges_video_test_source_get_pattern:
 * @source: a #GESVideoTestSource
 *
 * Get the video pattern used by the @source.
 *
 * Returns: The video pattern used by the @source.
 */
GESVideoTestPattern
ges_video_test_source_get_pattern (GESVideoTestSource * source)
{
  /* Zero-filled GValue; ges_track_element_get_child_property() is expected
   * to initialize it to the property's type before filling it.  An enum
   * GValue holds no allocation, so skipping g_value_unset() is harmless. */
  GValue val = { 0 };

  ges_track_element_get_child_property (GES_TRACK_ELEMENT (source), "pattern",
      &val);
  return g_value_get_enum (&val);
}
/**
 * ges_video_test_source_set_pattern:
 * @self: a #GESVideoTestSource
 * @pattern: a #GESVideoTestPattern
 *
 * Sets the source to use the given @pattern.
 */
void
ges_video_test_source_set_pattern (GESVideoTestSource * self,
    GESVideoTestPattern pattern)
{
  GValue val = { 0 };
  GstElement *child =
      ges_track_element_get_element (GES_TRACK_ELEMENT (self));

  /* Always cache the requested pattern on the instance */
  self->priv->pattern = pattern;

  /* Nothing more to do until a backing element exists */
  if (child == NULL)
    return;

  g_value_init (&val, GES_VIDEO_TEST_PATTERN_TYPE);
  g_value_set_enum (&val, pattern);
  ges_track_element_set_child_property (GES_TRACK_ELEMENT (self),
      "pattern", &val);
}
/**
 * ges_title_source_get_valignment:
 * @source: a #GESTitleSource
 *
 * Get the vertical alignment used by @source.
 *
 * Returns: The vertical alignment used by @source.
 */
GESTextVAlign
ges_title_source_get_valignment (GESTitleSource * source)
{
  GESTextVAlign alignment;

  /* Read the "valignment" child property of the underlying element */
  ges_track_element_get_child_properties (GES_TRACK_ELEMENT (source),
      "valignment", &alignment, NULL);

  return alignment;
}
/**
 * ges_title_source_get_font_desc:
 * @source: a #GESTitleSource
 *
 * Get the pango font description used by @source.
 *
 * Returns: (transfer none): The pango font description used by this
 * @source.
 */
const gchar *
ges_title_source_get_font_desc (GESTitleSource * source)
{
  gchar *font_desc;

  /* NOTE(review): g_object_get()-style child-property reads return a
   * newly-allocated copy for string properties, so this string would in
   * fact be owned by the caller -- the (transfer none) annotation above
   * looks wrong and this likely leaks.  Confirm against the GObject
   * property-access documentation before changing the contract. */
  ges_track_element_get_child_properties (GES_TRACK_ELEMENT (source),
      "font-desc", &font_desc, NULL);

  return font_desc;
}
/**
 * ges_title_source_get_text:
 * @source: a #GESTitleSource
 *
 * Get the text currently set on the @source.
 *
 * Returns: (transfer none): The text currently set on the @source.
 */
const gchar *
ges_title_source_get_text (GESTitleSource * source)
{
  gchar *text;

  /* NOTE(review): as with get_font_desc(), a string fetched through
   * g_object_get()-style accessors is a caller-owned copy, so the
   * (transfer none) annotation above is suspect and this likely leaks.
   * Confirm before changing the public contract. */
  ges_track_element_get_child_properties (GES_TRACK_ELEMENT (source),
      "text", &text, NULL);

  return text;
}
/**
 * ges_title_source_get_ypos:
 * @source: a #GESTitleSource
 *
 * Get the vertical position used by @source.
 *
 * Returns: The vertical position used by @source.
 */
/* NOTE: the const qualifier on a by-value return is meaningless in C; it is
 * kept only to match the declaration in the public header. */
const gdouble
ges_title_source_get_ypos (GESTitleSource * source)
{
  gdouble vertical_pos;

  /* Read the "ypos" child property of the underlying element */
  ges_track_element_get_child_properties (GES_TRACK_ELEMENT (source),
      "ypos", &vertical_pos, NULL);

  return vertical_pos;
}
/**
 * ges_title_source_get_background_color:
 * @source: a #GESTitleSource
 *
 * Get the background used by @source.
 *
 * Returns: The background used by @source.
 */
const guint32
ges_title_source_get_background_color (GESTitleSource * source)
{
  guint32 color;

  /* Reading "foreground-color" in a *background* getter looks surprising:
   * presumably the title's background is produced by a solid-color source
   * element whose own "foreground-color" IS the title background.
   * NOTE(review): confirm against the matching set_background() path before
   * assuming this is a copy/paste bug. */
  ges_track_element_get_child_properties (GES_TRACK_ELEMENT (source),
      "foreground-color", &color, NULL);

  return color;
}
/**
 * ges_video_test_source_set_pattern:
 * @self: a #GESVideoTestSource
 * @pattern: a #GESVideoTestPattern
 *
 * Sets the source to use the given @pattern.
 */
void
ges_video_test_source_set_pattern (GESVideoTestSource * self,
    GESVideoTestPattern pattern)
{
  GstElement *child;

  /* Cache the pattern so it can be applied once an element exists */
  self->priv->pattern = pattern;

  child = ges_track_element_get_element (GES_TRACK_ELEMENT (self));
  if (child != NULL)
    g_object_set (child, "pattern", (gint) pattern, NULL);
}
/* GESClip vmethod: create the audio or video transition track element for
 * @type, honouring the clip's supported-formats restriction.
 *
 * Returns: (transfer floating) (nullable): the new track element, or NULL
 * when the track type is not handled or not among the supported formats. */
static GESTrackElement *
_create_track_element (GESClip * clip, GESTrackType type)
{
  GESTransitionClip *transition = (GESTransitionClip *) clip;
  GESTrackElement *created = NULL;
  GESTrackType formats;

  GST_DEBUG ("Creating a GESTransition");

  formats = ges_clip_get_supported_formats (clip);

  switch (type) {
    case GES_TRACK_TYPE_VIDEO:
      if (formats == GES_TRACK_TYPE_UNKNOWN ||
          (formats & GES_TRACK_TYPE_VIDEO)) {
        GESVideoTransition *trans = ges_video_transition_new ();

        /* Propagate the transition type configured on the clip */
        ges_video_transition_set_transition_type (trans, transition->vtype);
        created = GES_TRACK_ELEMENT (trans);
      } else {
        GST_DEBUG ("Not creating transition as video track not on"
            " supportedformats");
      }
      break;

    case GES_TRACK_TYPE_AUDIO:
      if (formats == GES_TRACK_TYPE_UNKNOWN ||
          (formats & GES_TRACK_TYPE_AUDIO))
        created = GES_TRACK_ELEMENT (ges_audio_transition_new ());
      else
        GST_DEBUG ("Not creating transition as audio track"
            " not on supportedformats");
      break;

    default:
      GST_WARNING ("Transitions don't handle this track type");
      break;
  }

  return created;
}
/* GESTimelineElement vmethod: apply a new start position.
 * When no gnlobject exists yet the value is stored as pending.
 * Returns FALSE when the value is already set on the gnlobject. */
static gboolean
_set_start (GESTimelineElement * element, GstClockTime start)
{
  GESTrackElement *self = GES_TRACK_ELEMENT (element);

  if (self->priv->gnlobject == NULL) {
    /* No backing object yet: remember the value for later */
    self->priv->pending_start = start;
    return TRUE;
  }

  if (G_UNLIKELY (start == _START (self)))
    return FALSE;

  g_object_set (self->priv->gnlobject, "start", start, NULL);
  return TRUE;
}
/* GObject::set_property implementation for GESTrackElement */
static void
ges_track_element_set_property (GObject * object, guint property_id,
    const GValue * value, GParamSpec * pspec)
{
  GESTrackElement *self = GES_TRACK_ELEMENT (object);

  switch (property_id) {
    case PROP_ACTIVE:
      /* Whether the element actually outputs data in its track */
      ges_track_element_set_active (self, g_value_get_boolean (value));
      break;
    case PROP_TRACK_TYPE:
      self->priv->track_type = g_value_get_flags (value);
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, property_id, pspec);
  }
}
/* GESTimelineElement vmethod: apply a new in-point and re-anchor any
 * control bindings to it.  When no gnlobject exists yet the value is
 * stored as pending.  Returns FALSE when the value is unchanged. */
static gboolean
_set_inpoint (GESTimelineElement * element, GstClockTime inpoint)
{
  GESTrackElement *self = GES_TRACK_ELEMENT (element);

  if (self->priv->gnlobject == NULL) {
    self->priv->pending_inpoint = inpoint;
  } else {
    /* Unchanged value: report no-op without touching the bindings */
    if (G_UNLIKELY (inpoint == _INPOINT (self)))
      return FALSE;

    g_object_set (self->priv->gnlobject, "inpoint", inpoint, NULL);
  }

  /* Keep keyframes aligned with the new in-point */
  _update_control_bindings (element, inpoint, GST_CLOCK_TIME_NONE);
  return TRUE;
}
/* GESTimelineElement vmethod: apply a new duration, clamped against the
 * element's max-duration, then update control bindings to the new extent.
 * Returns FALSE when the value is unchanged on the gnlobject. */
static gboolean
_set_duration (GESTimelineElement * element, GstClockTime duration)
{
  GESTrackElement *object = GES_TRACK_ELEMENT (element);
  GESTrackElementPrivate *priv = object->priv;

  /* NOTE(review): the guard compares @duration against
   * inpoint + maxduration, but the clamp assigns maxduration - inpoint.
   * The asymmetry looks suspicious -- confirm the intended invariant
   * before touching it. */
  if (GST_CLOCK_TIME_IS_VALID (_MAXDURATION (element)) &&
      duration > _INPOINT (object) + _MAXDURATION (element))
    duration = _MAXDURATION (element) - _INPOINT (object);

  if (priv->gnlobject != NULL) {
    /* Unchanged value: report no-op */
    if (G_UNLIKELY (duration == _DURATION (object)))
      return FALSE;

    g_object_set (priv->gnlobject, "duration", duration, NULL);
  } else
    priv->pending_duration = duration;

  /* Drop/re-anchor keyframes outside [inpoint, inpoint + duration] */
  _update_control_bindings (element,
      ges_timeline_element_get_inpoint (element), duration);

  return TRUE;
}
/* GESTimelineElement vmethod: apply a new priority, enforcing the minimum
 * NLE priority floor.  When no gnlobject exists yet the value is stored as
 * pending.  Returns FALSE when the value is unchanged on the gnlobject.
 *
 * Fix: the INFO message used %d for guint32 values; use unsigned
 * specifiers to match the argument types. */
static gboolean
_set_priority (GESTimelineElement * element, guint32 priority)
{
  GESTrackElement *object = GES_TRACK_ELEMENT (element);

  if (priority < MIN_GNL_PRIO) {
    GST_INFO_OBJECT (element, "Priority (%u) < MIN_GNL_PRIO, setting it to %u",
        priority, MIN_GNL_PRIO);
    priority = MIN_GNL_PRIO;
  }

  GST_DEBUG ("object:%p, priority:%" G_GUINT32_FORMAT, object, priority);

  if (object->priv->gnlobject != NULL) {
    /* Unchanged value: report no-op */
    if (G_UNLIKELY (priority == _PRIORITY (object)))
      return FALSE;

    g_object_set (object->priv->gnlobject, "priority", priority, NULL);
  } else
    object->priv->pending_priority = priority;

  return TRUE;
}
/* Re-anchor the keyframes of every controlled child property so the first
 * keyframe sits exactly at @inpoint and, when @duration is valid, the last
 * one at @inpoint + @duration (values interpolated at those positions);
 * keyframes outside the range are removed.  A @duration of 0 clears all
 * keyframes.
 *
 * Fixes: the control-source reference obtained through g_object_get() and
 * the list containers returned by gst_timed_value_control_source_get_all()
 * (transfer container) were never released, leaking on every call and on
 * both early 'continue' paths. */
static void
_update_control_bindings (GESTimelineElement * element, GstClockTime inpoint,
    GstClockTime duration)
{
  GParamSpec **specs;
  guint n, n_specs;
  GstControlBinding *binding;
  GstTimedValueControlSource *source;
  GESTrackElement *self = GES_TRACK_ELEMENT (element);

  specs = ges_track_element_list_children_properties (self, &n_specs);
  for (n = 0; n < n_specs; ++n) {
    GList *values, *tmp;
    GstTimedValue *last, *first, *prev = NULL, *next = NULL;
    gfloat value_at_pos;

    binding = ges_track_element_get_control_binding (self, specs[n]->name);
    if (!binding)
      continue;

    /* g_object_get() on an object property returns a new reference */
    g_object_get (binding, "control_source", &source, NULL);

    if (duration == 0) {
      /* The element has no extent any more: drop every keyframe */
      gst_timed_value_control_source_unset_all
          (GST_TIMED_VALUE_CONTROL_SOURCE (source));
      gst_object_unref (source);
      continue;
    }

    values =
        gst_timed_value_control_source_get_all
        (GST_TIMED_VALUE_CONTROL_SOURCE (source));
    if (values == NULL) {
      gst_object_unref (source);
      continue;
    }

    /* Find the first keyframe strictly past the new in-point... */
    first = values->data;
    for (tmp = values->next; tmp; tmp = tmp->next) {
      next = tmp->data;

      if (next->timestamp > inpoint)
        break;
    }

    /* ...and replace the first keyframe with an interpolated one exactly
     * at the in-point */
    value_at_pos = interpolate_values_for_position (first, next, inpoint);
    gst_timed_value_control_source_unset (source, first->timestamp);
    gst_timed_value_control_source_set (source, inpoint, value_at_pos);

    g_list_free (values);
    values =
        gst_timed_value_control_source_get_all
        (GST_TIMED_VALUE_CONTROL_SOURCE (source));

    if (duration != GST_CLOCK_TIME_NONE) {
      /* Same treatment at the other end: re-anchor the last keyframe at
       * inpoint + duration */
      last = g_list_last (values)->data;
      for (tmp = g_list_last (values)->prev; tmp; tmp = tmp->prev) {
        prev = tmp->data;

        if (prev->timestamp < duration + inpoint)
          break;
      }

      value_at_pos =
          interpolate_values_for_position (prev, last, duration + inpoint);
      gst_timed_value_control_source_unset (source, last->timestamp);
      gst_timed_value_control_source_set (source, duration + inpoint,
          value_at_pos);

      g_list_free (values);
      values =
          gst_timed_value_control_source_get_all
          (GST_TIMED_VALUE_CONTROL_SOURCE (source));
    }

    /* Finally drop any keyframe left outside the new range */
    for (tmp = values; tmp; tmp = tmp->next) {
      GstTimedValue *value = tmp->data;

      if (value->timestamp < inpoint)
        gst_timed_value_control_source_unset (source, value->timestamp);
      else if (duration != GST_CLOCK_TIME_NONE
          && value->timestamp > duration + inpoint)
        gst_timed_value_control_source_unset (source, value->timestamp);
    }

    g_list_free (values);
    gst_object_unref (source);
  }
  g_free (specs);
}
/* Deserialize one track element from the XML being loaded: request its
 * asset, extract the element, and either attach it to its owning clip
 * immediately or queue it on the clip's pending entry.  Only effect-type
 * elements are currently supported; anything else is skipped with a log. */
void
ges_base_xml_formatter_add_track_element (GESBaseXmlFormatter * self,
    GType track_element_type, const gchar * asset_id, const gchar * track_id,
    const gchar * timeline_obj_id, GstStructure * children_properties,
    GstStructure * properties, const gchar * metadatas, GError ** error)
{
  GESTrackElement *trackelement;

  GError *err = NULL;
  GESAsset *asset = NULL;
  GESBaseXmlFormatterPrivate *priv = _GET_PRIV (self);

  /* Validation-only pass: nothing is instantiated */
  if (priv->check_only)
    return;

  if (g_type_is_a (track_element_type, GES_TYPE_TRACK_ELEMENT) == FALSE) {
    GST_DEBUG_OBJECT (self, "%s is not a TrackElement, can not create it",
        g_type_name (track_element_type));
    goto out;
  }

  /* Only GESBaseEffect subclasses are handled for now */
  if (g_type_is_a (track_element_type, GES_TYPE_BASE_EFFECT) == FALSE) {
    GST_FIXME_OBJECT (self, "%s currently not supported",
        g_type_name (track_element_type));
    goto out;
  }

  asset = ges_asset_request (track_element_type, asset_id, &err);
  if (asset == NULL) {
    GST_DEBUG_OBJECT (self, "Can not create trackelement %s", asset_id);
    GST_FIXME_OBJECT (self, "Check if missing plugins etc %s",
        err ? err->message : "");
    goto out;
  }

  trackelement = GES_TRACK_ELEMENT (ges_asset_extract (asset, NULL));
  if (trackelement) {
    GESClip *clip;

    if (metadatas)
      ges_meta_container_add_metas_from_string (GES_META_CONTAINER
          (trackelement), metadatas);

    /* If the owning clip is already built, attach right away; otherwise
     * the element must wait on the clip's pending entry */
    clip = g_hash_table_lookup (priv->containers, timeline_obj_id);
    if (clip) {
      _add_track_element (GES_FORMATTER (self), clip, trackelement, track_id,
          children_properties, properties);
    } else {
      PendingEffects *peffect;
      PendingClip *pend =
          g_hash_table_lookup (priv->clipid_pendings, timeline_obj_id);
      if (pend == NULL) {
        /* NOTE(review): trackelement is not released on this path -- looks
         * like a leak; confirm the ownership returned by
         * ges_asset_extract(). */
        GST_WARNING_OBJECT (self, "No Clip with id: %s can not "
            "add TrackElement", timeline_obj_id);
        goto out;
      }

      /* Queue the element (with copies of its property structures) until
       * the clip shows up */
      peffect = g_slice_new0 (PendingEffects);
      peffect->trackelement = trackelement;
      peffect->track_id = g_strdup (track_id);
      peffect->properties = properties ? gst_structure_copy (properties) : NULL;
      peffect->children_properties = children_properties ?
          gst_structure_copy (children_properties) : NULL;

      pend->effects = g_list_append (pend->effects, peffect);
    }
    priv->current_track_element = trackelement;
  }

  ges_project_add_asset (GES_FORMATTER (self)->project, asset);

out:
  if (asset)
    gst_object_unref (asset);
  if (err)
    g_error_free (err);

  return;
}
/* Build GES clips (and their effects) for one source of a pitivi project:
 * walks @reflist (track-element ids referencing the same source), creates
 * layers on demand, instantiates one GESUriClip shared by the audio and
 * video streams, and attaches/configures effects.  The a_avail/v_avail
 * flags mean "the audio (resp. video) counterpart of the current clip is
 * still awaited"; when a clip ends up with only one stream, its supported
 * formats are restricted to the other stream's type. */
static void
make_source (GESFormatter * self, GList * reflist, GHashTable * source_table)
{
  GHashTable *props_table, *effect_table;
  gchar **prio_array;
  GESLayer *layer;
  GESPitiviFormatterPrivate *priv = GES_PITIVI_FORMATTER (self)->priv;
  gchar *fac_ref = NULL, *media_type = NULL, *filename = NULL, *prio_str;
  GList *tmp = NULL, *keys, *tmp_key;
  GESUriClip *src = NULL;
  gint prio;
  gboolean a_avail = FALSE, v_avail = FALSE, video;
  GHashTable *trackelement_table = priv->track_elements_table;

  for (tmp = reflist; tmp; tmp = tmp->next) {

    /* Get the layer */
    props_table = g_hash_table_lookup (trackelement_table, (gchar *) tmp->data);
    /* Priority is serialized as "(type)value"; keep only the value part */
    prio_str = (gchar *) g_hash_table_lookup (props_table, "priority");
    prio_array = g_strsplit (prio_str, ")", 0);
    prio = (gint) g_ascii_strtod (prio_array[1], NULL);
    g_strfreev (prio_array);

    /* If we do not have any layer with this priority, create it */
    if (!(layer = g_hash_table_lookup (priv->layers_table, &prio))) {
      layer = ges_layer_new ();
      g_object_set (layer, "auto-transition", TRUE, "priority", prio, NULL);
      ges_timeline_add_layer (self->timeline, layer);
      /* NOTE(review): g_memdup() copies sizeof (guint64) == 8 bytes out of
       * a gint (4 bytes) -- this over-reads the stack variable; the key
       * size should presumably match the lookup key's type. */
      g_hash_table_insert (priv->layers_table, g_memdup (&prio,
              sizeof (guint64)), layer);
    }

    fac_ref = (gchar *) g_hash_table_lookup (props_table, "fac_ref");
    media_type = (gchar *) g_hash_table_lookup (props_table, "media_type");

    if (!g_strcmp0 (media_type, "pitivi.stream.VideoStream"))
      video = TRUE;
    else
      video = FALSE;

    /* FIXME I am sure we could reimplement this whole part
     * in a simpler way */

    /* Non-"effect" references describe the clip's streams themselves */
    if (g_strcmp0 (fac_ref, (gchar *) "effect")) {
      /* FIXME this is a hack to get a ref to the formatter when receiving
       * child-added */
      g_hash_table_insert (props_table, (gchar *) "current-formatter", self);
      if (a_avail && (!video)) {
        /* This audio stream completes the clip created for its video half */
        a_avail = FALSE;
      } else if (v_avail && (video)) {
        /* This video stream completes the clip created for its audio half */
        v_avail = FALSE;
      } else {

        /* If we only have audio or only video in the previous source,
         * set it has such */
        if (a_avail) {
          ges_clip_set_supported_formats (GES_CLIP (src), GES_TRACK_TYPE_VIDEO);
        } else if (v_avail) {
          ges_clip_set_supported_formats (GES_CLIP (src), GES_TRACK_TYPE_AUDIO);
        }

        /* Start a new clip for this source file */
        filename = (gchar *) g_hash_table_lookup (source_table, "filename");

        src = ges_uri_clip_new (filename);

        /* Mark which counterpart stream we are still waiting for */
        if (!video) {
          v_avail = TRUE;
          a_avail = FALSE;
        } else {
          a_avail = TRUE;
          v_avail = FALSE;
        }

        set_properties (G_OBJECT (src), props_table);
        ges_layer_add_clip (layer, GES_CLIP (src));

        g_signal_connect (src, "child-added",
            G_CALLBACK (track_element_added_cb), props_table);

        priv->sources_to_load = g_list_prepend (priv->sources_to_load, src);
      }

    } else {
      /* "effect" references attach a GESEffect to the current clip */
      GESEffect *effect;
      gchar *active = (gchar *) g_hash_table_lookup (props_table, "active");

      effect = ges_effect_new ((gchar *)
          g_hash_table_lookup (props_table, (gchar *) "effect_name"));
      ges_track_element_set_track_type (GES_TRACK_ELEMENT (effect),
          (video ? GES_TRACK_TYPE_VIDEO : GES_TRACK_TYPE_AUDIO));
      effect_table =
          g_hash_table_lookup (props_table, (gchar *) "effect_props");

      ges_container_add (GES_CONTAINER (src), GES_TIMELINE_ELEMENT (effect));

      if (!g_strcmp0 (active, (gchar *) "(bool)False"))
        ges_track_element_set_active (GES_TRACK_ELEMENT (effect), FALSE);

      /* Set effect properties */
      keys = g_hash_table_get_keys (effect_table);
      for (tmp_key = keys; tmp_key; tmp_key = tmp_key->next) {
        GstStructure *structure;
        const GValue *value;
        GParamSpec *spec;
        GstCaps *caps;
        gchar *prop_val;

        prop_val = (gchar *) g_hash_table_lookup (effect_table,
            (gchar *) tmp_key->data);

        if (g_strstr_len (prop_val, -1, "(GEnum)")) {
          /* Enum values are serialized as "(GEnum)N" */
          gchar **val = g_strsplit (prop_val, ")", 2);

          ges_track_element_set_child_properties (GES_TRACK_ELEMENT (effect),
              (gchar *) tmp_key->data, atoi (val[1]), NULL);
          g_strfreev (val);

        /* NOTE(review): tmp->data here is the reflist id, not the property
         * key being iterated -- tmp_key->data was probably intended. */
        } else if (ges_track_element_lookup_child (GES_TRACK_ELEMENT (effect),
                (gchar *) tmp->data, NULL, &spec)) {
          /* Round-trip the serialized value through a GstStructure so it is
           * parsed with the property's expected type */
          gchar *caps_str = g_strdup_printf ("structure1, property1=%s;",
              prop_val);

          caps = gst_caps_from_string (caps_str);
          g_free (caps_str);
          structure = gst_caps_get_structure (caps, 0);
          value = gst_structure_get_value (structure, "property1");
          ges_track_element_set_child_property_by_pspec (GES_TRACK_ELEMENT
              (effect), spec, (GValue *) value);
          gst_caps_unref (caps);
        }
      }
    }
  }

  /* Restrict the last clip too if its counterpart stream never showed up */
  if (a_avail) {
    ges_clip_set_supported_formats (GES_CLIP (src), GES_TRACK_TYPE_VIDEO);
  } else if (v_avail) {
    ges_clip_set_supported_formats (GES_CLIP (src), GES_TRACK_TYPE_AUDIO);
  }
}
g_object_set_qdata (G_OBJECT (priv->nleobject), NLE_OBJECT_TRACK_ELEMENT_QUARK, NULL); gst_object_unref (priv->nleobject); priv->nleobject = NULL; } G_OBJECT_CLASS (ges_track_element_parent_class)->dispose (object); } static void ges_track_element_constructed (GObject * gobject) { GESTrackElementClass *class; GstElement *nleobject; gchar *tmp; GESTrackElement *object = GES_TRACK_ELEMENT (gobject); GST_DEBUG_OBJECT (object, "Creating NleObject"); class = GES_TRACK_ELEMENT_GET_CLASS (object); g_assert (class->create_gnl_object); nleobject = class->create_gnl_object (object); if (G_UNLIKELY (nleobject == NULL)) { GST_ERROR_OBJECT (object, "Could not create NleObject"); return; } tmp = g_strdup_printf ("%s:%s", G_OBJECT_TYPE_NAME (object), GST_OBJECT_NAME (nleobject));