Example #1
// Disconnect the "message" handler that was attached to the pipeline's bus
// while the media was loading, if it is still connected.
static void disconnect_loading_messages(TPMediaPlayer * mp)
{
    USERDATA(mp);   // declares 'ud', the player's private user data
    CM(ud);         // declares 'cm', its ClutterMedia (a ClutterGstVideoTexture)

    if (!ud->load_signal)
        return;

#if (CLUTTER_GST_MAJOR_VERSION<1)
    GstElement * pipeline=clutter_gst_video_texture_get_playbin(CLUTTER_GST_VIDEO_TEXTURE(cm));
#else
    GstElement * pipeline=clutter_gst_video_texture_get_pipeline(CLUTTER_GST_VIDEO_TEXTURE(cm));
#endif

    if (!pipeline)
        return;

    GstBus * bus=gst_pipeline_get_bus(GST_PIPELINE(pipeline));

    if (!bus)
        return;

    g_signal_handler_disconnect(bus,ud->load_signal);
    ud->load_signal=0;

    gst_object_unref(GST_OBJECT(bus));
}
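
The `loading_messages` callback referenced by `ud->load_signal` is not part of this listing. A minimal sketch of such a bus handler, assuming the bus already has a signal watch attached and reusing the `get_stream_information()` and `tp_media_player_loaded()` helpers that appear in the following examples, might look like this:

// Hypothetical sketch of the bus callback behind ud->load_signal: once the
// asynchronous state change finishes, report the media as loaded and detach.
static void loading_messages(GstBus * bus,GstMessage * message,gpointer data)
{
    TPMediaPlayer * mp=(TPMediaPlayer*)data;

    switch (GST_MESSAGE_TYPE(message))
    {
        case GST_MESSAGE_ASYNC_DONE:
            get_stream_information(mp);
            tp_media_player_loaded(mp);
            disconnect_loading_messages(mp);
            break;

        case GST_MESSAGE_ERROR:
        {
            GError * error=NULL;
            gst_message_parse_error(message,&error,NULL);
            g_warning("Failed to load media: %s",error?error->message:"unknown");
            if (error)
                g_error_free(error);
            disconnect_loading_messages(mp);
            break;
        }

        default:
            break;
    }
}
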
Example #2
// Returns 0 on success, non-zero if the media could not be loaded.
static int mp_load(TPMediaPlayer * mp,const char * uri,const char * extra)
{
    USERDATA(mp);
    CM(ud);

    clutter_media_set_uri(cm,uri);

#if (CLUTTER_GST_MAJOR_VERSION<1)
    GstElement * pipeline=clutter_gst_video_texture_get_playbin(CLUTTER_GST_VIDEO_TEXTURE(cm));
#else
    GstElement * pipeline=clutter_gst_video_texture_get_pipeline(CLUTTER_GST_VIDEO_TEXTURE(cm));
#endif

    if (!pipeline)
        return 1;

    GstStateChangeReturn r=gst_element_set_state(pipeline,GST_STATE_PAUSED);

    g_debug("STATE CHANGE RETURN IS %d",r);

    switch(r)
    {
        case GST_STATE_CHANGE_FAILURE:
        {
            return 2;
        }

        case GST_STATE_CHANGE_SUCCESS:
        case GST_STATE_CHANGE_NO_PREROLL:
        {
            get_stream_information(mp);
            tp_media_player_loaded(mp);
            break;
        }

        case GST_STATE_CHANGE_ASYNC:
        {
            // The state change happens asynchronously, so we connect a signal
            // handler to see when it is done

            GstBus * bus=gst_pipeline_get_bus(GST_PIPELINE(pipeline));

            if (!bus)
                return 3;

            ud->load_signal=g_signal_connect(bus,"message",G_CALLBACK(loading_messages),mp);

            gst_object_unref(GST_OBJECT(bus));

            break;
        }
    }

    return 0;
}
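
One caveat worth noting: GStreamer only emits the "message" GObject signal on a bus while a signal watch is attached to it. clutter-gst appears to install such a watch on its own pipeline's bus, which is presumably why the example can connect directly. Driving a bare playbin by hand would need something like the following sketch (the watch, like the rest of this fragment, is an assumption about code not shown here):

    // Sketch (assumption): when not going through clutter-gst, attach a
    // signal watch so the "message" signal used above is actually emitted.
    GstBus * bus=gst_pipeline_get_bus(GST_PIPELINE(pipeline));

    if (bus)
    {
        gst_bus_add_signal_watch(bus);
        ud->load_signal=g_signal_connect(bus,"message",G_CALLBACK(loading_messages),mp);
        gst_object_unref(GST_OBJECT(bus));
    }
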
Example #3
static void
mex_get_stream_cb (MexProgram   *program,
                   const gchar  *url,
                   const GError *error,
                   gpointer      user_data)
{
  MexPlayer *player = user_data;
  MexPlayerPrivate *priv = player->priv;
  MexGenericContent  *generic_content;
#ifdef USE_PLAYER_CLUTTER_GST
  ClutterGstVideoTexture *video_texture;
#endif

  /* if idle mode has been set before the program stream was found */
  if (priv->idle_mode)
    return;

  if (G_UNLIKELY (error))
    {
      g_warning ("Could not play content: %s (%s)", error->message, url);
      return;
    }

#ifdef USE_PLAYER_CLUTTER_GST
  /* We seek to the precise time when the file is local, but only to the
   * nearest key frame when streaming */
  video_texture = CLUTTER_GST_VIDEO_TEXTURE (priv->media);
  if (g_str_has_prefix (url, "file://"))
    {
      clutter_gst_video_texture_set_seek_flags (video_texture,
                                                CLUTTER_GST_SEEK_FLAG_ACCURATE);
    }
  else
    {
      clutter_gst_video_texture_set_seek_flags (video_texture,
                                                CLUTTER_GST_SEEK_FLAG_NONE);
    }

  /* TODO when we have settings we can configure this feature */

  if (g_str_has_prefix (mex_content_get_metadata (priv->content,
                                                  MEX_CONTENT_METADATA_MIMETYPE),
                        "audio/"))
    {
      GstElement *gst_element, *visual;
      gint gst_flags;

      gst_element = clutter_gst_video_texture_get_pipeline (video_texture);
      g_object_get (G_OBJECT (gst_element), "flags", &gst_flags, NULL);

      gst_flags = (GST_PLAY_FLAG_VIS | gst_flags);

      g_object_set (G_OBJECT (gst_element), "flags", gst_flags, NULL);

      visual = gst_element_factory_make ("libvisual_infinite", NULL);

      if (visual)
        g_object_set (G_OBJECT (gst_element), "vis-plugin", visual, NULL);
    }
#endif

  clutter_media_set_uri (CLUTTER_MEDIA (priv->media), url);
  generic_content = MEX_GENERIC_CONTENT (priv->content);
  if (mex_generic_content_get_last_position_start (generic_content))
    clutter_media_set_progress (CLUTTER_MEDIA (priv->media), priv->position);
  clutter_media_set_playing (CLUTTER_MEDIA (priv->media), TRUE);
}
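
GST_PLAY_FLAG_VIS used above is not exported by a public GStreamer 0.10 header; playbin2's "flags" values live in a private GstPlayFlags enum, so applications typically mirror the few bits they need. A sketch of such a local definition, consistent with the hard-coded 1 used for GST_PLAY_FLAG_VIDEO in Example #5:

/* Sketch: locally mirrored playbin2 flag bits, since GStreamer 0.10 does not
 * install a header for GstPlayFlags. */
typedef enum {
  GST_PLAY_FLAG_VIDEO = (1 << 0), /* render video */
  GST_PLAY_FLAG_AUDIO = (1 << 1), /* render audio */
  GST_PLAY_FLAG_TEXT  = (1 << 2), /* render subtitles */
  GST_PLAY_FLAG_VIS   = (1 << 3)  /* show a visualisation for audio-only streams */
} GstPlayFlags;
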
Example #4
// Inspect the pipeline to find out which stream types are present and, for
// video, the frame size; the results are stored in the player's user data.
static void get_stream_information(TPMediaPlayer * mp)
{
    USERDATA(mp);
    CM(ud);

#if (CLUTTER_GST_MAJOR_VERSION < 1)
    GstElement * pipeline=clutter_gst_video_texture_get_playbin(CLUTTER_GST_VIDEO_TEXTURE(cm));
#else
    GstElement *pipeline=clutter_gst_video_texture_get_pipeline(CLUTTER_GST_VIDEO_TEXTURE(cm));
#endif

    if (!pipeline)
        return;

    //.........................................................................
    // Use stream info to get the type of each stream

#if (CLUTTER_GST_MAJOR_VERSION < 1)
    GValueArray * info_array=NULL;

    g_object_get(G_OBJECT(pipeline),"stream-info-value-array",&info_array,NULL);

    if (info_array)
    {
        // Each entry in the array is information for a single stream

        guint i;

        for (i=0;i<info_array->n_values;++i)
        {
            GValue * info_value=g_value_array_get_nth(info_array,i);

            if (G_VALUE_HOLDS(info_value,G_TYPE_OBJECT))
            {
                GObject * stream_info=g_value_get_object(info_value);

                if (stream_info)
                {
                    gint type = -1;

                    g_object_get(stream_info,"type",&type,NULL);

                    switch (type)
                    {
                        case 1:
                            ud->media_type|=TP_MEDIA_TYPE_AUDIO;
                            break;

                        case 2:
                            ud->media_type|=TP_MEDIA_TYPE_VIDEO;
                            break;
                    }
#if 0
                    // This lets you get the enum value associated with the stream type

                    GParamSpec *pspec;
                    GEnumValue *value;

                    pspec = g_object_class_find_property(G_OBJECT_GET_CLASS(stream_info),"type");

                    value = g_enum_get_value(G_PARAM_SPEC_ENUM(pspec)->enum_class,type);

                    g_debug("  STREAM TYPE IS %d %s",type,value->value_nick);
#endif
                }
            }
        }

        g_value_array_free(info_array);
    }
#else
    gint n_audio, n_video;
    g_object_get(G_OBJECT(pipeline), "n-video", &n_video, NULL);
    g_object_get(G_OBJECT(pipeline), "n-audio", &n_audio, NULL);

    if(n_video) ud->media_type|=TP_MEDIA_TYPE_VIDEO;
    if(n_audio) ud->media_type|=TP_MEDIA_TYPE_AUDIO;
#endif

    //.........................................................................
    // If there is a video stream, we get the video sink and try to find the
    // video size

    if (ud->media_type&TP_MEDIA_TYPE_VIDEO)
    {
        GstElement * video_sink=NULL;

        g_object_get(G_OBJECT(pipeline),"video-sink",&video_sink,NULL);

        if (video_sink)
        {
            GstPad * pad=gst_element_get_static_pad(video_sink,"sink");

            if (pad)
            {
                // Get its video width and height

                gint width;
                gint height;

                if (gst_video_get_size(pad,&width,&height))
                {
                    ud->video_width=width;
                    ud->video_height=height;
                }
                gst_object_unref(GST_OBJECT(pad));
            }
            gst_object_unref(GST_OBJECT(video_sink));
        }
    }

#if 1

    if (ud->media_type&TP_MEDIA_TYPE_AUDIO)
    {
        GstElement * audio_sink=gst_element_factory_make("autoaudiosink","TPAudioSink");

        if (!audio_sink)
        {
            g_debug("Failed to create autoaudiosink");
        }
        else
        {
            g_object_set(G_OBJECT(pipeline),"audio-sink",audio_sink,NULL);
        }
    }

#endif
}
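
gst_video_get_size() used above comes from GStreamer 0.10's gst/video/video.h and simply inspects the pad's negotiated caps. Where that helper is not available, the same information can be read by hand; a minimal 0.10-style sketch (the function name is hypothetical):

// Sketch: read the negotiated width/height straight from the sink pad's caps
// (GStreamer 0.10 API), returning FALSE if the pad has not negotiated yet.
static gboolean get_pad_video_size(GstPad * pad,gint * width,gint * height)
{
    GstCaps * caps=gst_pad_get_negotiated_caps(pad);
    gboolean result=FALSE;

    if (caps)
    {
        if (gst_caps_get_size(caps)>0)
        {
            GstStructure * s=gst_caps_get_structure(caps,0);

            result=gst_structure_get_int(s,"width",width) &&
                   gst_structure_get_int(s,"height",height);
        }

        gst_caps_unref(caps);
    }

    return result;
}
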
Example #5
static gboolean
_start_video_preview (MexContentTile *self)
{
  MexContentTilePrivate *priv = self->priv;
  GstElement *pipeline;
  gint gst_flags;

  const gchar *mimetype, *uri;

  /* Check we're still focused */
  if (!mex_actor_has_focus (CLUTTER_ACTOR (self)))
    return FALSE;

  /* Don't play if the main player is still playing..
   * too many videos spoil the broth.
   */
  if (clutter_media_get_playing (mex_player_get_clutter_media (mex_player_get_default ())))
    return FALSE;

  mimetype = mex_content_get_metadata (priv->content,
                                       MEX_CONTENT_METADATA_MIMETYPE);

  if ((mimetype) && strncmp (mimetype, "video/", 6) != 0)
    return FALSE;

  if (!(uri = mex_content_get_metadata (priv->content,
                                        MEX_CONTENT_METADATA_STREAM)))
    return FALSE;

  priv->video_preview = clutter_gst_video_texture_new ();

  pipeline = clutter_gst_video_texture_get_pipeline (CLUTTER_GST_VIDEO_TEXTURE (priv->video_preview));
  g_object_get (G_OBJECT (pipeline), "flags", &gst_flags, NULL);

  gst_flags = 1; /* GST_PLAY_FLAG_VIDEO: only render video for the preview */

  g_object_set (G_OBJECT (pipeline), "flags", gst_flags, NULL);


  clutter_gst_video_texture_set_idle_material (CLUTTER_GST_VIDEO_TEXTURE (priv->video_preview),
                                               NULL);
  g_signal_connect (priv->video_preview, "eos",
                    G_CALLBACK (_stop_video_eos),
                    self);

  clutter_actor_set_opacity (priv->video_preview, 0);

  g_object_ref (priv->image);
  clutter_actor_remove_child (CLUTTER_ACTOR (self), priv->image);
  clutter_actor_add_child (CLUTTER_ACTOR (self), priv->video_preview);


  clutter_actor_animate (priv->video_preview, CLUTTER_LINEAR, 500,
                         "opacity", 0xff, NULL);

  clutter_actor_set_size (priv->video_preview,
                          (gfloat)priv->thumb_width,
                          (gfloat)priv->thumb_height);

  clutter_media_set_uri (CLUTTER_MEDIA (priv->video_preview), uri);
  clutter_media_set_playing (CLUTTER_MEDIA (priv->video_preview), TRUE);

  if (priv->stop_video_preview <= 0)
    priv->stop_video_preview =
      g_timeout_add_seconds (180, (GSourceFunc)_stop_video_preview, self);

  return FALSE;
}
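
The `_stop_video_eos` and `_stop_video_preview` handlers wired up above are not part of this listing. A hypothetical sketch of the pair, assuming they simply undo the actor swap performed in `_start_video_preview` (the fields priv->image, priv->video_preview and priv->stop_video_preview are taken from the code above; everything else is an assumption):

/* Hypothetical sketch: stop the preview (one-shot timeout) and put the
 * thumbnail image back in place of the video texture. */
static gboolean
_stop_video_preview (MexContentTile *self)
{
  MexContentTilePrivate *priv = self->priv;

  if (priv->video_preview)
    {
      clutter_media_set_playing (CLUTTER_MEDIA (priv->video_preview), FALSE);
      clutter_actor_remove_child (CLUTTER_ACTOR (self), priv->video_preview);
      priv->video_preview = NULL;

      /* restore the image and drop the extra reference taken earlier */
      clutter_actor_add_child (CLUTTER_ACTOR (self), priv->image);
      g_object_unref (priv->image);
    }

  priv->stop_video_preview = 0;

  return FALSE; /* do not reschedule the timeout */
}

static void
_stop_video_eos (ClutterMedia *media, MexContentTile *self)
{
  MexContentTilePrivate *priv = self->priv;

  /* the clip ended on its own: cancel the pending timeout, then stop */
  if (priv->stop_video_preview > 0)
    g_source_remove (priv->stop_video_preview);

  _stop_video_preview (self);
}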