bool ofGstVideoPlayer::allocate(int bpp){
	if(bIsAllocated) return true;

	guint64 durationNanos = videoUtils.getDurationNanos();

	nFrames		  = 0;
	if(GstPad* pad = gst_element_get_static_pad(videoUtils.getSink(), "sink")){
#if GST_VERSION_MAJOR==0
		int width,height;
		if(gst_video_get_size(GST_PAD(pad), &width, &height)){
			if(!videoUtils.allocate(width,height,bpp)) return false;
		}else{
			ofLogError("ofGstVideoPlayer") << "allocate(): couldn't query width and height";
			return false;
		}

		const GValue *framerate = gst_video_frame_rate(pad);
		fps_n=0;
		fps_d=0;
		if(framerate && GST_VALUE_HOLDS_FRACTION (framerate)){
			fps_n = gst_value_get_fraction_numerator (framerate);
			fps_d = gst_value_get_fraction_denominator (framerate);
			nFrames = (float)(durationNanos / (float)GST_SECOND) * (float)fps_n/(float)fps_d;
			ofLogVerbose("ofGstVideoPlayer") << "allocate(): framerate: " << fps_n << "/" << fps_d;
		}else{
			ofLogWarning("ofGstVideoPlayer") << "allocate(): cannot get framerate, frame seek won't work";
		}
		bIsAllocated = true;
#else
		if(GstCaps *caps = gst_pad_get_current_caps (GST_PAD (pad))){
			GstVideoInfo info;
			gst_video_info_init (&info);
			if (gst_video_info_from_caps (&info, caps)){
				if(!videoUtils.allocate(info.width,info.height,bpp)) return false;
			}else{
				ofLogError("ofGstVideoPlayer") << "allocate(): couldn't query width and height";
				return false;
			}

			fps_n = info.fps_n;
			fps_d = info.fps_d;
			nFrames = (float)(durationNanos / (float)GST_SECOND) * (float)fps_n/(float)fps_d;
			gst_caps_unref(caps);
			bIsAllocated = true;
		}else{
			ofLogError("ofGstVideoPlayer") << "allocate(): cannot get pipeline caps";
			bIsAllocated = false;
		}
#endif
		gst_object_unref(GST_OBJECT(pad));
	}else{
		ofLogError("ofGstVideoPlayer") << "allocate(): cannot get sink pad";
		bIsAllocated = false;
	}

	return bIsAllocated;
}
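A note on the frame-count line above: casting before the division keeps `durationNanos / GST_SECOND` from truncating, but the float math can still drop frames on very long clips. A minimal integer-only sketch, assuming durationNanos, fps_n and fps_d hold the values queried above (gst_util_uint64_scale() exists in both GStreamer 0.10 and 1.0):

// Hedged sketch: frame count computed entirely in 64-bit integers,
// avoiding float rounding; inputs are assumed to come from the queries above.
guint64 totalFrames = gst_util_uint64_scale(durationNanos, fps_n,
                                            (guint64)fps_d * GST_SECOND);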
// Returns the natural size of the video, or an empty IntSize when there is no video stream
IntSize MediaPlayerPrivate::naturalSize() const
{
    if (!hasVideo())
        return IntSize();

    int x = 0, y = 0;
    if (GstPad* pad = gst_element_get_static_pad(m_videoSink, "sink")) {
        gst_video_get_size(GST_PAD(pad), &x, &y);
        gst_object_unref(GST_OBJECT(pad));
    }

    return IntSize(x, y);
}
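gst_video_get_size() is a 0.10-only helper that was removed in GStreamer 1.0. A hedged sketch of the same query against 1.x, reading the size from the pad's negotiated caps instead (naturalSizeFromCaps is a hypothetical name; <gst/video/video.h> assumed included):

// Sketch: 1.x equivalent of naturalSize(), size taken from the current caps.
static IntSize naturalSizeFromCaps(GstElement* videoSink)
{
    IntSize size;
    if (GstPad* pad = gst_element_get_static_pad(videoSink, "sink")) {
        if (GstCaps* caps = gst_pad_get_current_caps(pad)) {
            GstVideoInfo info;
            gst_video_info_init(&info);
            if (gst_video_info_from_caps(&info, caps))
                size = IntSize(info.width, info.height);
            gst_caps_unref(caps);
        }
        gst_object_unref(GST_OBJECT(pad));
    }
    return size;
}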
void GStreamerWrapper::retrieveVideoInfo()
{
	////////////////////////////////////////////////////////////////////////// Media Duration
	// Nanoseconds
	GstFormat gstFormat = GST_FORMAT_TIME;
	gst_element_query_duration( GST_ELEMENT( m_GstPipeline ), &gstFormat, &m_iDurationInNs );

	// Milliseconds
	m_dDurationInMs = GST_TIME_AS_MSECONDS( m_iDurationInNs );

	////////////////////////////////////////////////////////////////////////// Stream Info
	// Number of Video Streams
	g_object_get( m_GstPipeline, "n-video", &m_iNumVideoStreams, NULL );

	// Number of Audio Streams
	g_object_get( m_GstPipeline, "n-audio", &m_iNumAudioStreams, NULL );

	// Set Content Type according to the number of available Video and Audio streams
	if ( m_iNumVideoStreams > 0 && m_iNumAudioStreams > 0 )
		m_ContentType = VIDEO_AND_AUDIO;
	else if ( m_iNumVideoStreams > 0 )
		m_ContentType = VIDEO;
	else if ( m_iNumAudioStreams > 0 )
		m_ContentType = AUDIO;

	////////////////////////////////////////////////////////////////////////// Video Data
	if ( m_iNumVideoStreams > 0 )
	{
		GstPad* gstPad = gst_element_get_static_pad( m_GstVideoSink, "sink" );
		if ( gstPad )
		{
			// Video Size
			gst_video_get_size( GST_PAD( gstPad ), &m_iWidth, &m_iHeight );

			// Frame Rate: guard the query, since gst_video_frame_rate() can
			// return NULL or a non-fraction value before the caps are negotiated
			const GValue* framerate = gst_video_frame_rate( gstPad );
			if ( framerate && GST_VALUE_HOLDS_FRACTION( framerate ) )
			{
				int iFpsNumerator = gst_value_get_fraction_numerator( framerate );
				int iFpsDenominator = gst_value_get_fraction_denominator( framerate );

				// Number of frames
				m_iNumberOfFrames = (float)( m_iDurationInNs / (float)GST_SECOND ) * (float)iFpsNumerator / (float)iFpsDenominator;

				// FPS
				m_fFps = (float)iFpsNumerator / (float)iFpsDenominator;
			}

			gst_object_unref( gstPad );
		}
	}
}
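retrieveVideoInfo() is written against the 0.10 API throughout: gst_element_query_duration() takes the format by pointer, and gst_video_get_size() / gst_video_frame_rate() no longer exist in 1.0. A hedged sketch of the duration part ported to GStreamer 1.0, using the members above:

// Sketch: 1.0 duration query; the format is now passed by value.
gint64 durationNs = 0;
if (gst_element_query_duration(GST_ELEMENT(m_GstPipeline), GST_FORMAT_TIME, &durationNs))
{
	m_iDurationInNs = durationNs;
	m_dDurationInMs = GST_TIME_AS_MSECONDS(durationNs);
}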
bool ofGstVideoPlayer::allocate(int bpp){
	if(bIsAllocated) return true;

	guint64 durationNanos = videoUtils.getDurationNanos();

	nFrames		  = 0;
	if(GstPad* pad = gst_element_get_static_pad(videoUtils.getSink(), "sink")){
		int width,height;
		if(gst_video_get_size(GST_PAD(pad), &width, &height)){
			if(!videoUtils.allocate(width,height,bpp)) return false;
		}else{
			ofLog(OF_LOG_ERROR,"GStreamer: cannot query width and height");
			return false;
		}

		const GValue *framerate = gst_video_frame_rate(pad);
		fps_n=0;
		fps_d=0;
		if(framerate && GST_VALUE_HOLDS_FRACTION (framerate)){
			fps_n = gst_value_get_fraction_numerator (framerate);
			fps_d = gst_value_get_fraction_denominator (framerate);
			nFrames = (float)(durationNanos / (float)GST_SECOND) * (float)fps_n/(float)fps_d;
			ofLog(OF_LOG_VERBOSE,"ofGstUtils: framerate: %i/%i",fps_n,fps_d);
		}else{
			ofLog(OF_LOG_WARNING,"Gstreamer: cannot get framerate, frame seek won't work");
		}
		gst_object_unref(GST_OBJECT(pad));
		bIsAllocated = true;
	}else{
		ofLog(OF_LOG_ERROR,"GStreamer: cannot get sink pad");
		bIsAllocated = false;
	}

	return bIsAllocated;
}
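The "frame seek won't work" warning above refers to seeks like the following sketch: with fps_n/fps_d known, a frame index converts to a timestamp for an accurate, flushing seek. Here targetFrame is a hypothetical index and pipeline is assumed to be the player's GstElement:

// Hedged sketch: frame-accurate seek derived from the queried framerate.
guint64 targetFrame = 100; // hypothetical target
gint64 targetNanos = (gint64)gst_util_uint64_scale(targetFrame,
                                                   (guint64)fps_d * GST_SECOND,
                                                   fps_n);
gst_element_seek_simple(pipeline, GST_FORMAT_TIME,
                        (GstSeekFlags)(GST_SEEK_FLAG_FLUSH | GST_SEEK_FLAG_ACCURATE),
                        targetNanos);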
Example #5
bool ofGstUtils::allocate(){
	// wait for paused state to query the duration
	if(!bIsStream){
		GstState state = GST_STATE_PAUSED;
		gst_element_get_state(gstPipeline,&state,NULL,2*GST_SECOND);
	}
	if(!bIsCamera){
		GstFormat format=GST_FORMAT_TIME;
		if(!gst_element_query_duration(gstPipeline,&format,&durationNanos))
			ofLog(OF_LOG_WARNING,"GStreamer: cannot query time duration");

		gstData.durationNanos = durationNanos;
		gstData.nFrames		  = 0;
	}

	// query width, height, fps and do data allocation
	if (bIsCamera) {
		pixels=new unsigned char[width*height*bpp];
		gstData.pixels=new unsigned char[width*height*bpp];
		memset(pixels,0,width*height*bpp);
		memset(gstData.pixels,0,width*height*bpp);
		gstData.width = width;
		gstData.height = height;
		gstData.totalsize = 0;
		gstData.lastFrame = 0;
	}else if(gstSink!=NULL){
		if(GstPad* pad = gst_element_get_static_pad(gstSink, "sink")){
			if(gst_video_get_size(GST_PAD(pad), &width, &height)){
				pixels=new unsigned char[width*height*bpp];
				gstData.pixels=new unsigned char[width*height*bpp];
				memset(pixels,0,width*height*bpp);
				memset(gstData.pixels,0,width*height*bpp);
				gstData.width = width;
				gstData.height = height;
				gstData.totalsize = 0;
				gstData.lastFrame = 0;
			}else{
				ofLog(OF_LOG_ERROR,"GStreamer: cannot query width and height");
				return false;
			}

			const GValue *framerate = gst_video_frame_rate(pad);
			fps_n=0;
			fps_d=0;
			if(framerate && GST_VALUE_HOLDS_FRACTION (framerate)){
				fps_n = gst_value_get_fraction_numerator (framerate);
				fps_d = gst_value_get_fraction_denominator (framerate);
				gstData.nFrames = (float)(durationNanos / (float)GST_SECOND) * (float)fps_n/(float)fps_d;
				ofLog(OF_LOG_VERBOSE,"ofGstUtils: framerate: %i/%i",fps_n,fps_d);
			}else{
				ofLog(OF_LOG_WARNING,"Gstreamer: cannot get framerate, frame seek won't work");
			}
			gst_object_unref(GST_OBJECT(pad));
		}else{
			ofLog(OF_LOG_ERROR,"GStreamer: cannot get sink pad");
			return false;
		}
	}

	bLoaded = true;
	bHavePixelsChanged = true;
	bStarted = true;
	return bLoaded;
}
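allocate() discards the result of gst_element_get_state(), so a failed preroll goes unnoticed and the duration query silently fails. A hedged variant of the wait, same members assumed:

// Sketch: confirm the PAUSED transition (duration queries only succeed
// after preroll) and report failure instead of ignoring it.
GstState state = GST_STATE_PAUSED;
if (gst_element_get_state(gstPipeline, &state, NULL, 2 * GST_SECOND) == GST_STATE_CHANGE_FAILURE) {
	ofLog(OF_LOG_ERROR, "GStreamer: pipeline failed to reach PAUSED");
}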
Example #6
static void get_stream_information(TPMediaPlayer * mp)
{
    USERDATA(mp);
    CM(ud);

#if (CLUTTER_GST_MAJOR_VERSION < 1)
    GstElement * pipeline=clutter_gst_video_texture_get_playbin(CLUTTER_GST_VIDEO_TEXTURE(cm));
#else
    GstElement *pipeline=clutter_gst_video_texture_get_pipeline(CLUTTER_GST_VIDEO_TEXTURE(cm));
#endif

    if (!pipeline)
        return;

    //.........................................................................
    // Use stream info to get the type of each stream

#if (CLUTTER_GST_MAJOR_VERSION < 1)
    GValueArray * info_array=NULL;

    g_object_get(G_OBJECT(pipeline),"stream-info-value-array",&info_array,NULL);

    if (info_array)
    {
        // Each entry in the array is information for a single stream

        guint i;

        for (i=0;i<info_array->n_values;++i)
        {
            GValue * info_value=g_value_array_get_nth(info_array,i);

            if (G_VALUE_HOLDS(info_value,G_TYPE_OBJECT))
            {
                GObject * stream_info=g_value_get_object(info_value);

                if (stream_info)
                {
                    gint type = -1;

                    g_object_get(stream_info,"type",&type,NULL);

                    switch (type)
                    {
                        case 1:
                            ud->media_type|=TP_MEDIA_TYPE_AUDIO;
                            break;

                        case 2:
                            ud->media_type|=TP_MEDIA_TYPE_VIDEO;
                            break;
                    }
#if 0
                    // This lets you get the enum value associated with the stream type

                    GParamSpec *pspec;
                    GEnumValue *value;

                    pspec = g_object_class_find_property(G_OBJECT_GET_CLASS(stream_info),"type");

                    value = g_enum_get_value(G_PARAM_SPEC_ENUM(pspec)->enum_class,type);

                    g_debug("  STREAM TYPE IS %d %s",type,value->value_nick);
#endif
                }
            }
        }

        g_value_array_free(info_array);
    }
#else
    gint n_audio, n_video;
    g_object_get(G_OBJECT(pipeline), "n-video", &n_video, NULL);
    g_object_get(G_OBJECT(pipeline), "n-audio", &n_audio, NULL);

    if(n_video) ud->media_type|=TP_MEDIA_TYPE_VIDEO;
    if(n_audio) ud->media_type|=TP_MEDIA_TYPE_AUDIO;
#endif

    //.........................................................................
    // If there is a video stream, we get the video sink and try to find the
    // video size

    if (ud->media_type&TP_MEDIA_TYPE_VIDEO)
    {
        GstElement * video_sink=NULL;

        g_object_get(G_OBJECT(pipeline),"video-sink",&video_sink,NULL);

        if (video_sink)
        {
            GstPad * pad=gst_element_get_static_pad(video_sink,"sink");

            if (pad)
            {
                // Get its video width and height

                gint width;
                gint height;

                if (gst_video_get_size(pad,&width,&height))
                {
                    ud->video_width=width;
                    ud->video_height=height;
                }
                gst_object_unref(GST_OBJECT(pad));
            }
            gst_object_unref(GST_OBJECT(video_sink));
        }
    }

#if 1

    if ( ud->media_type & TP_MEDIA_TYPE_AUDIO )
    {
        GstElement * audio_sink = gst_element_factory_make( "autoaudiosink", "TPAudioSink" );

        if ( !audio_sink )
        {
            g_debug( "Failed to create autoaudiosink" );
        }
        else
        {
            g_object_set( G_OBJECT( pipeline ), "audio-sink", audio_sink, NULL );
        }
    }

#endif
}
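Since the pipeline here is a playbin (playbin2 in 0.10), the video sink pad can also be fetched directly through its "get-video-pad" action signal instead of walking the video-sink property; a minimal sketch for stream 0:

// Hedged sketch: ask playbin for the sink pad of video stream 0.
GstPad * pad = NULL;
g_signal_emit_by_name( G_OBJECT( pipeline ), "get-video-pad", 0, &pad );
if ( pad )
{
    // query caps / size here, as in get_stream_information() above
    gst_object_unref( GST_OBJECT( pad ) );
}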