Example #1
static GstElement *
ges_effect_create_element (GESTrackElement * object)
{
  GstElement *effect;
  gchar *bin_desc;

  GError *error = NULL;
  GESEffect *self = GES_EFFECT (object);
  GESTrack *track = ges_track_element_get_track (object);
  const gchar *wanted_categories[] = { "Effect", NULL };

  if (!track) {
    GST_WARNING
        ("The object %p should be in a Track for the element to be created",
        object);
    return NULL;
  }

  if (track->type == GES_TRACK_TYPE_VIDEO) {
    bin_desc = g_strconcat ("videoconvert name=pre_video_convert ! ",
        self->priv->bin_description, " ! videoconvert name=post_video_convert",
        NULL);
  } else if (track->type == GES_TRACK_TYPE_AUDIO) {
    bin_desc =
        g_strconcat ("audioconvert ! audioresample !",
        self->priv->bin_description, NULL);
  } else {
    GST_DEBUG ("Track type not supported");
    return NULL;
  }

  effect = gst_parse_bin_from_description (bin_desc, TRUE, &error);

  g_free (bin_desc);

  if (error != NULL) {
    GST_ERROR ("An error occurred while creating the GstElement: %s",
        error->message);
    g_error_free (error);
    return NULL;
  }

  GST_DEBUG ("Created effect %p", effect);

  ges_track_element_add_children_props (object, effect, wanted_categories,
      NULL, NULL);

  return effect;
}
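A minimal usage sketch for context (assuming ges_init () has already been called and that clip is a GESClip sitting on a layer of a timeline with a video track; "agingtv" is just an illustrative bin description). Adding the effect to the clip is what places it in a track and triggers ges_effect_create_element () above:

GESEffect *effect = ges_effect_new ("agingtv");

/* Once the effect ends up in a video track, the vmethod above builds
 * "videoconvert ! agingtv ! videoconvert" for it */
if (!ges_container_add (GES_CONTAINER (clip), GES_TIMELINE_ELEMENT (effect)))
  GST_ERROR ("Could not add the effect to the clip");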
Example #2
/**
 * ges_test_clip_set_mute:
 * @self: the #GESTestClip on which to mute or unmute the audio track
 * @mute: %TRUE to mute the audio track, %FALSE to unmute it
 *
 * Sets whether the audio track of this clip is muted or not.
 *
 */
void
ges_test_clip_set_mute (GESTestClip * self, gboolean mute)
{
  GList *tmp;

  GST_DEBUG ("self:%p, mute:%d", self, mute);

  self->priv->mute = mute;

  /* Go over tracked objects, and update 'active' status on all audio objects */
  for (tmp = GES_CONTAINER_CHILDREN (self); tmp; tmp = tmp->next) {
    GESTrackElement *trackelement = (GESTrackElement *) tmp->data;

    if (ges_track_element_get_track (trackelement)->type ==
        GES_TRACK_TYPE_AUDIO)
      ges_track_element_set_active (trackelement, !mute);
  }
}
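A minimal usage sketch (assuming gst_init () and ges_init () have already been called; the 5-second duration is arbitrary):

GESTimeline *timeline = ges_timeline_new_audio_video ();
GESLayer *layer = ges_timeline_append_layer (timeline);
GESTestClip *clip = ges_test_clip_new ();

ges_timeline_element_set_duration (GES_TIMELINE_ELEMENT (clip), 5 * GST_SECOND);
ges_layer_add_clip (layer, GES_CLIP (clip));

/* Deactivates every audio track element of the clip, as implemented above */
ges_test_clip_set_mute (clip, TRUE);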
/* GESSource VMethod */
static GstElement *
ges_audio_uri_source_create_source (GESTrackElement * trksrc)
{
  GESAudioUriSource *self;
  GESTrack *track;
  GstElement *decodebin;

  self = (GESAudioUriSource *) trksrc;

  track = ges_track_element_get_track (trksrc);

  decodebin = gst_element_factory_make ("uridecodebin", NULL);

  g_object_set (decodebin, "caps", ges_track_get_caps (track),
      "expose-all-streams", FALSE, "uri", self->uri, NULL);

  return decodebin;
}
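For comparison, a similar uridecodebin setup outside of GES could look roughly like this (the caps string and file URI are purely illustrative; in the vmethod above the caps come from ges_track_get_caps ()):

GstCaps *caps = gst_caps_from_string ("audio/x-raw");
GstElement *decodebin = gst_element_factory_make ("uridecodebin", NULL);

/* Only decode and expose the streams whose caps intersect with audio/x-raw */
g_object_set (decodebin, "caps", caps,
    "expose-all-streams", FALSE, "uri", "file:///path/to/media.ogg", NULL);
gst_caps_unref (caps);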
/**
 * ges_text_overlay_clip_set_ypos:
 * @self: the #GESTextOverlayClip* to set
 * @position: The vertical position @self is being set to
 *
 * Sets the vertical position of the text.
 */
void
ges_text_overlay_clip_set_ypos (GESTextOverlayClip * self, gdouble position)
{
  GList *tmp;

  GST_DEBUG ("self:%p, ypos:%f", self, position);

  self->priv->ypos = position;

  for (tmp = GES_CONTAINER_CHILDREN (self); tmp; tmp = tmp->next) {
    GESTrackElement *trackelement = (GESTrackElement *) tmp->data;

    if (ges_track_element_get_track (trackelement)->type ==
        GES_TRACK_TYPE_VIDEO)
      ges_text_overlay_set_ypos (GES_TEXT_OVERLAY (trackelement),
          self->priv->ypos);
  }
}
/**
 * ges_text_overlay_clip_set_color:
 * @self: the #GESTextOverlayClip* to set
 * @color: The color @self is being set to
 *
 * Sets the color of the text.
 */
void
ges_text_overlay_clip_set_color (GESTextOverlayClip * self, guint32 color)
{
  GList *tmp;

  GST_DEBUG ("self:%p, color:%u", self, color);

  self->priv->color = color;

  for (tmp = GES_CONTAINER_CHILDREN (self); tmp; tmp = tmp->next) {
    GESTrackElement *trackelement = (GESTrackElement *) tmp->data;

    if (ges_track_element_get_track (trackelement)->type ==
        GES_TRACK_TYPE_VIDEO)
      ges_text_overlay_set_color (GES_TEXT_OVERLAY (trackelement),
          self->priv->color);
  }
}
/**
 * ges_text_overlay_clip_set_valign:
 * @self: the #GESTextOverlayClip* to set vertical alignment of text on
 * @valign: #GESTextVAlign
 *
 * Sets the vertical alignment of the text.
 *
 */
void
ges_text_overlay_clip_set_valign (GESTextOverlayClip * self,
    GESTextVAlign valign)
{
  GList *tmp;

  GST_DEBUG ("self:%p, valign:%d", self, valign);

  self->priv->valign = valign;

  for (tmp = GES_CONTAINER_CHILDREN (self); tmp; tmp = tmp->next) {
    GESTrackElement *trackelement = (GESTrackElement *) tmp->data;

    if (ges_track_element_get_track (trackelement)->type ==
        GES_TRACK_TYPE_VIDEO)
      ges_text_overlay_set_valignment (GES_TEXT_OVERLAY
          (trackelement), self->priv->valign);
  }
}
/**
 * ges_text_overlay_clip_set_text:
 * @self: the #GESTextOverlayClip* to set text on
 * @text: the text to render. An internal copy of this text will be
 * made.
 *
 * Sets the text this clip will render.
 *
 */
void
ges_text_overlay_clip_set_text (GESTextOverlayClip * self, const gchar * text)
{
  GList *tmp;

  GST_DEBUG ("self:%p, text:%s", self, text);

  if (self->priv->text)
    g_free (self->priv->text);

  self->priv->text = g_strdup (text);

  for (tmp = GES_CONTAINER_CHILDREN (self); tmp; tmp = tmp->next) {
    GESTrackElement *trackelement = (GESTrackElement *) tmp->data;

    if (ges_track_element_get_track (trackelement)->type ==
        GES_TRACK_TYPE_VIDEO)
      ges_text_overlay_set_text (GES_TEXT_OVERLAY (trackelement),
          self->priv->text);
  }
}
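A minimal usage sketch tying the setters above together (assuming layer belongs to a timeline that contains a video track; the text, color, position and duration values are arbitrary):

GESTextOverlayClip *overlay = ges_text_overlay_clip_new ();

ges_timeline_element_set_duration (GES_TIMELINE_ELEMENT (overlay),
    2 * GST_SECOND);
ges_layer_add_clip (layer, GES_CLIP (overlay));

/* Each setter walks the clip's children and forwards the value to the
 * GESTextOverlay elements found in video tracks, as shown above */
ges_text_overlay_clip_set_text (overlay, "Hello world");
ges_text_overlay_clip_set_color (overlay, 0xffffffff);
ges_text_overlay_clip_set_ypos (overlay, 0.1);
ges_text_overlay_clip_set_valign (overlay, GES_TEXT_VALIGN_TOP);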
Example #8
static gboolean
extractable_set_asset (GESExtractable * self, GESAsset * asset)
{
  gboolean res = TRUE;
  GESUriClip *uriclip = GES_URI_CLIP (self);
  GESUriClipAsset *uri_clip_asset;
  GESClip *clip = GES_CLIP (self);
  GESLayer *layer = ges_clip_get_layer (clip);
  GList *tmp;
  GESTimelineElement *audio_source = NULL, *video_source = NULL;

  g_return_val_if_fail (GES_IS_URI_CLIP_ASSET (asset), FALSE);

  uri_clip_asset = GES_URI_CLIP_ASSET (asset);
  if (GST_CLOCK_TIME_IS_VALID (GES_TIMELINE_ELEMENT_DURATION (clip)) == FALSE)
    _set_duration0 (GES_TIMELINE_ELEMENT (uriclip),
        ges_uri_clip_asset_get_duration (uri_clip_asset));

  ges_timeline_element_set_max_duration (GES_TIMELINE_ELEMENT (uriclip),
      ges_uri_clip_asset_get_duration (uri_clip_asset));
  ges_uri_clip_set_is_image (uriclip,
      ges_uri_clip_asset_is_image (uri_clip_asset));

  if (ges_clip_get_supported_formats (clip) == GES_TRACK_TYPE_UNKNOWN) {
    ges_clip_set_supported_formats (clip,
        ges_clip_asset_get_supported_formats (GES_CLIP_ASSET (uri_clip_asset)));
  }

  GES_TIMELINE_ELEMENT (uriclip)->asset = asset;

  if (layer) {
    GList *children = ges_container_get_children (GES_CONTAINER (self), TRUE);

    for (tmp = children; tmp; tmp = tmp->next) {
      if (GES_IS_SOURCE (tmp->data)) {
        GESTrack *track = ges_track_element_get_track (tmp->data);

        if (track->type == GES_TRACK_TYPE_AUDIO)
          audio_source = gst_object_ref (tmp->data);
        else if (track->type == GES_TRACK_TYPE_VIDEO)
          video_source = gst_object_ref (tmp->data);

        ges_track_remove_element (track, tmp->data);
        ges_container_remove (GES_CONTAINER (self), tmp->data);
      }
    }
    g_list_free_full (children, g_object_unref);

    gst_object_ref (clip);

    ges_layer_remove_clip (layer, clip);
    res = ges_layer_add_clip (layer, clip);

    for (tmp = GES_CONTAINER_CHILDREN (self); tmp; tmp = tmp->next) {
      if (GES_IS_SOURCE (tmp->data)) {
        GESTrack *track = ges_track_element_get_track (tmp->data);

        if (track->type == GES_TRACK_TYPE_AUDIO && audio_source)
          ges_track_element_copy_properties (audio_source, tmp->data);
        else if (track->type == GES_TRACK_TYPE_VIDEO && video_source)
          ges_track_element_copy_properties (video_source, tmp->data);
      }
    }

    g_clear_object (&audio_source);
    g_clear_object (&video_source);
    gst_object_unref (clip);
    gst_object_unref (layer);
  }

  if (res) {
    g_free (uriclip->priv->uri);
    uriclip->priv->uri = g_strdup (ges_asset_get_id (asset));
  }

  return res;
}
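A minimal sketch of how this vmethod is typically reached (assuming layer is a GESLayer of an existing timeline; the file URI is purely illustrative). Adding the asset to a layer extracts a GESUriClip, and setting its asset goes through extractable_set_asset () above:

GError *error = NULL;
GESUriClipAsset *asset =
    ges_uri_clip_asset_request_sync ("file:///path/to/media.ogv", &error);

if (error) {
  GST_ERROR ("Could not create the asset: %s", error->message);
  g_clear_error (&error);
} else {
  ges_layer_add_asset (layer, GES_ASSET (asset), 0, 0,
      ges_uri_clip_asset_get_duration (asset), GES_TRACK_TYPE_UNKNOWN);
  g_object_unref (asset);
}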