Example #1
static void gst_vlc_video_sink_init( GstVlcVideoSink *p_vlc_video_sink )
{
    gst_base_sink_set_sync( GST_BASE_SINK( p_vlc_video_sink), FALSE );
}
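A note on the call being demonstrated: gst_base_sink_set_sync() simply sets the "sync" property that every GstBaseSink subclass inherits, so the property form is equivalent. A minimal sketch (fakesink is just an illustrative stand-in element):

/* Equivalent, property-based form of the call above. */
GstElement *sink = gst_element_factory_make ("fakesink", NULL);
g_object_set (G_OBJECT (sink), "sync", FALSE, NULL);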
Example #2
bool ofGstVideoPlayer::loadMovie(string name){
	close();
	if( name.find( "file://",0 ) != string::npos){
		bIsStream		= false;
	}else if( name.find( "://",0 ) == string::npos){
		GError * err = NULL;
		name = gst_filename_to_uri(ofToDataPath(name).c_str(),&err);
		bIsStream		= false;
	}else{
		bIsStream		= true;
	}
	ofLogVerbose("ofGstVideoPlayer") << "loadMovie(): loading \"" << name << "\"";

	ofGstUtils::startGstMainLoop();

#if GST_VERSION_MAJOR==0
	GstElement * gstPipeline = gst_element_factory_make("playbin2","player");
#else
	GstElement * gstPipeline = gst_element_factory_make("playbin","player");
#endif
	g_object_set(G_OBJECT(gstPipeline), "uri", name.c_str(), (void*)NULL);

	// create the oF appsink for video rgb, synced to the clock
	GstElement * gstSink = gst_element_factory_make("appsink", "app_sink");

	gst_base_sink_set_sync(GST_BASE_SINK(gstSink), true);
	gst_app_sink_set_max_buffers(GST_APP_SINK(gstSink), 8);
	gst_app_sink_set_drop (GST_APP_SINK(gstSink),true);
	gst_base_sink_set_max_lateness  (GST_BASE_SINK(gstSink), -1);

#if GST_VERSION_MAJOR==0
	GstCaps *caps;
	int bpp;
	switch(internalPixelFormat){
	case OF_PIXELS_MONO:
		bpp = 8;
		caps = gst_caps_new_simple("video/x-raw-gray",
			"bpp", G_TYPE_INT, bpp,
			"depth", G_TYPE_INT, 8,
			NULL);
		break;
	case OF_PIXELS_RGB:
		bpp = 24;
		caps = gst_caps_new_simple("video/x-raw-rgb",
			"bpp", G_TYPE_INT, bpp,
			"depth", G_TYPE_INT, 24,
			"endianness",G_TYPE_INT,4321,
			"red_mask",G_TYPE_INT,0xff0000,
			"green_mask",G_TYPE_INT,0x00ff00,
			"blue_mask",G_TYPE_INT,0x0000ff,
			NULL);
		break;
	case OF_PIXELS_RGBA:
		bpp = 32;
		caps = gst_caps_new_simple("video/x-raw-rgb",
			"bpp", G_TYPE_INT, bpp,
			"depth", G_TYPE_INT, 32,
			"endianness",G_TYPE_INT,4321,
			"red_mask",G_TYPE_INT,0xff000000,
			"green_mask",G_TYPE_INT,0x00ff0000,
			"blue_mask",G_TYPE_INT,0x0000ff00,
			"alpha_mask",G_TYPE_INT,0x000000ff,
			NULL);
		break;
	case OF_PIXELS_BGRA:
		bpp = 32;
		caps = gst_caps_new_simple("video/x-raw-rgb",
			"bpp", G_TYPE_INT, bpp,
			"depth", G_TYPE_INT, 32,
			"endianness",G_TYPE_INT,4321,
			"red_mask",G_TYPE_INT,0x0000ff00,
			"green_mask",G_TYPE_INT,0x00ff0000,
			"blue_mask",G_TYPE_INT,0xff000000,
			"alpha_mask",G_TYPE_INT,0x000000ff,
			NULL);
		break;
	default:
		bpp = 32;
		caps = gst_caps_new_simple("video/x-raw-rgb",
			"bpp", G_TYPE_INT, bpp,
			"depth", G_TYPE_INT, 24,
			"endianness",G_TYPE_INT,4321,
			"red_mask",G_TYPE_INT,0xff0000,
			"green_mask",G_TYPE_INT,0x00ff00,
			"blue_mask",G_TYPE_INT,0x0000ff,
			NULL);
		break;
	}
#else
	int bpp;
	string mime="video/x-raw";
	string format;
	switch(internalPixelFormat){
	case OF_PIXELS_MONO:
		format = "GRAY8";
		bpp = 8;
		break;
	case OF_PIXELS_RGB:
		format = "RGB";
		bpp = 24;
		break;
	case OF_PIXELS_RGBA:
		format = "RGBA";
		bpp = 32;
		break;
	case OF_PIXELS_BGRA:
		format = "BGRA";
		bpp = 32;
		break;
	default:
		format = "RGB";
		bpp=24;
		break;
	}

	GstCaps *caps = gst_caps_new_simple(mime.c_str(),
										"format", G_TYPE_STRING, format.c_str(),
										/*"bpp", G_TYPE_INT, bpp,
										"depth", G_TYPE_INT, 24,
										"endianness",G_TYPE_INT,4321,
										"red_mask",G_TYPE_INT,0xff0000,
										"green_mask",G_TYPE_INT,0x00ff00,
										"blue_mask",G_TYPE_INT,0x0000ff,
										"alpha_mask",G_TYPE_INT,0x000000ff,*/
										NULL);
#endif


	gst_app_sink_set_caps(GST_APP_SINK(gstSink), caps);
	gst_caps_unref(caps);

	if(threadAppSink){
		GstElement * appQueue = gst_element_factory_make("queue","appsink_queue");
		g_object_set(G_OBJECT(appQueue), "leaky", 0, "silent", 1, (void*)NULL);
		GstElement* appBin = gst_bin_new("app_bin");
		gst_bin_add(GST_BIN(appBin), appQueue);
		GstPad* appQueuePad = gst_element_get_static_pad(appQueue, "sink");
		GstPad* ghostPad = gst_ghost_pad_new("app_bin_sink", appQueuePad);
		gst_object_unref(appQueuePad);
		gst_element_add_pad(appBin, ghostPad);

		gst_bin_add_many(GST_BIN(appBin), gstSink, NULL);
		gst_element_link_many(appQueue, gstSink, NULL);

		g_object_set (G_OBJECT(gstPipeline),"video-sink",appBin,(void*)NULL);
	}else{
		g_object_set (G_OBJECT(gstPipeline),"video-sink",gstSink,(void*)NULL);
	}

#ifdef TARGET_WIN32
	GstElement *audioSink = gst_element_factory_make("directsoundsink", NULL);
	g_object_set (G_OBJECT(gstPipeline),"audio-sink",audioSink,(void*)NULL);

#endif


	videoUtils.setPipelineWithSink(gstPipeline,gstSink,bIsStream);
	videoUtils.startPipeline();
	if(!bIsStream) return allocate(bpp);
	else return true;
}
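The four sink-tuning calls in this example also have property equivalents, which is handy when the appsink comes out of a parsed pipeline description instead of gst_element_factory_make(). A sketch using the standard appsink/basesink property names:

/* Property-based sketch of the appsink tuning used above. */
GstElement * sink = gst_element_factory_make("appsink", "app_sink");
g_object_set(G_OBJECT(sink),
             "sync", TRUE,                 /* gst_base_sink_set_sync() */
             "max-buffers", 8,             /* gst_app_sink_set_max_buffers() */
             "drop", TRUE,                 /* gst_app_sink_set_drop() */
             "max-lateness", (gint64) -1,  /* gst_base_sink_set_max_lateness() */
             NULL);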
Example #3
// ----------------------------------------------------------------------------
// Handle the "pad-added" message
void
GStreamerImportFileHandle::OnPadAdded(GstPad *pad)
{
   // Retrieve the stream caps...skip stream if unavailable
   GstCaps *caps = gst_pad_get_current_caps(pad);
   if (!caps)
   {
      WARN(mPipeline, ("OnPadAdded: unable to retrieve stream caps"));
      return;
   }

   // Get the caps structure...no need to release
   GstStructure *str = gst_caps_get_structure(caps, 0);
   if (!str)
   {
      WARN(mPipeline, ("OnPadAdded: unable to retrieve caps structure"));
      gst_caps_unref(caps);
      return;
   }

   // Only accept audio streams...no need to release
   const gchar *name = gst_structure_get_name(str);
   if (!g_strrstr(name, "audio"))
   {
      WARN(mPipeline, ("OnPadAdded: bypassing '%s' stream", name));
      gst_caps_unref(caps);
      return;
   }

   // Allocate a new stream context
   GStreamContext *c = g_new0(GStreamContext, 1);
   if (!c)
   {
      WARN(mPipeline, ("OnPadAdded: unable to allocate stream context"));
      gst_caps_unref(caps);
      return;
   }

   // Set initial state
   c->mUse = true;

   // Always add it to the context list to keep the number of contexts
   // in sync with the number of streams
   g_mutex_lock(&mStreamsLock);
   g_ptr_array_add(mStreams, c);
   g_mutex_unlock(&mStreamsLock);

   // Need pointer to context during pad removal (pad-remove signal)
   SETCTX(pad, c);

   // Save the stream's start time and duration
   gst_pad_query_position(pad, GST_FORMAT_TIME, &c->mPosition);
   gst_pad_query_duration(pad, GST_FORMAT_TIME, &c->mDuration);

   // Retrieve the number of channels and validate
   gint channels = -1;
   gst_structure_get_int(str, "channels", &channels);
   if (channels <= 0)
   {
      WARN(mPipeline, ("OnPadAdded: channel count is invalid %d", channels));
      gst_caps_unref(caps);
      return;
   }
   c->mNumChannels = channels;

   // Retrieve the sample rate and validate
   gint rate = -1;
   gst_structure_get_int(str, "rate", &rate);
   if (rate <= 0)
   {
      WARN(mPipeline, ("OnPadAdded: sample rate is invalid %d", rate));
      gst_caps_unref(caps);
      return;
   }
   c->mSampleRate = (double) rate;

   c->mType = g_strdup(name);
   if (c->mType == NULL)
   {
      WARN(mPipeline, ("OnPadAdded: unable to allocate audio type"));
      gst_caps_unref(caps);
      return;
   }

   // Done with capabilities
   gst_caps_unref(caps);

   // Create audioconvert element
   c->mConv = gst_element_factory_make("audioconvert", NULL);
   if (!c->mConv)
   {
      WARN(mPipeline, ("OnPadAdded: failed to create audioconvert element"));
      return;
   }

   // Create appsink element
   c->mSink = gst_element_factory_make("appsink", NULL);
   if (!c->mSink)
   {
      WARN(mPipeline, ("OnPadAdded: failed to create appsink element"));
      return;
   }
   SETCTX(c->mSink, c);

   // Set the appsink callbacks and add the context pointer
   gst_app_sink_set_callbacks(GST_APP_SINK(c->mSink), &AppSinkCallbacks, this, NULL);

   // Set the capabilities that we desire
   caps = gst_static_caps_get(&supportedCaps);
   if (!caps)
   {
      WARN(mPipeline, ("OnPadAdded: failed to create static caps"));
      return;
   }
   gst_app_sink_set_caps(GST_APP_SINK(c->mSink), caps);
   gst_caps_unref(caps);

   // Do not sync to the clock...process as quickly as possible
   gst_base_sink_set_sync(GST_BASE_SINK(c->mSink), FALSE);

   // Don't drop buffers...allow queue to build unfettered
   gst_app_sink_set_drop(GST_APP_SINK(c->mSink), FALSE);

   // Add both elements to the pipeline
   gst_bin_add_many(GST_BIN(mPipeline), c->mConv, c->mSink, NULL);

   // Link them together
   if (!gst_element_link(c->mConv, c->mSink))
   {
      WARN(mPipeline, ("OnPadAdded: failed to link autioconvert and appsink"));
      return;
   }

   // Link the src pad to the audioconvert sink pad
   GstPadLinkReturn ret = GST_PAD_LINK_OK;
   GstPad *convsink = gst_element_get_static_pad(c->mConv, "sink");
   if (convsink)
   {
      ret = gst_pad_link(pad, convsink);
      gst_object_unref(convsink);
   }
   if (!convsink || ret != GST_PAD_LINK_OK)
   {
      WARN(mPipeline, ("OnPadAdded: failed to link uridecodebin to audioconvert - %d", ret));
      return;
   }

   // Synchronize audioconvert state with parent
   if (!gst_element_sync_state_with_parent(c->mConv))
   {
      WARN(mPipeline, ("OnPadAdded: unable to sync audioconvert state"));
      return;
   }

   // Synchronize appsink state with parent
   if (!gst_element_sync_state_with_parent(c->mSink))
   {
      WARN(mPipeline, ("OnPadAdded: unable to sync appaink state"));
      return;
   }

   return;
}
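For context, a handler like OnPadAdded() is normally driven by the "pad-added" signal of uridecodebin (or decodebin). A sketch of the wiring; the trampoline name and the mDec element are illustrative, not taken from the source:

/* Hypothetical glue: forward the C signal to the C++ member above. */
static void
on_pad_added_cb (GstElement *element, GstPad *pad, gpointer data)
{
   ((GStreamerImportFileHandle *) data)->OnPadAdded (pad);
}

/* ... after creating the uridecodebin element mDec ... */
g_signal_connect (mDec, "pad-added", G_CALLBACK (on_pad_added_cb), this);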
Example #4
static CoglBool
cogl_gst_source_dispatch (GSource *source,
                          GSourceFunc callback,
                          void *user_data)
{
  CoglGstSource *gst_source = (CoglGstSource *) source;
  CoglGstVideoSinkPrivate *priv = gst_source->sink->priv;
  GstBuffer *buffer;
  gboolean pipeline_ready = FALSE;

  g_mutex_lock (&gst_source->buffer_lock);

  if (G_UNLIKELY (gst_source->has_new_caps))
    {
      GstCaps *caps =
        gst_pad_get_current_caps (GST_BASE_SINK_PAD ((GST_BASE_SINK
                (gst_source->sink))));

      if (!cogl_gst_video_sink_parse_caps (caps, gst_source->sink, TRUE))
        goto negotiation_fail;

      gst_source->has_new_caps = FALSE;
      priv->free_layer = priv->custom_start + priv->renderer->n_layers;

      dirty_default_pipeline (gst_source->sink);

      /* We are now in a state where we could generate the pipeline if
       * the application requests it so we can emit the signal.
       * However we'll actually generate the pipeline lazily only if
       * the application actually asks for it. */
      pipeline_ready = TRUE;
    }

  buffer = gst_source->buffer;
  gst_source->buffer = NULL;

  g_mutex_unlock (&gst_source->buffer_lock);

  if (buffer)
    {
      if (!priv->renderer->upload (gst_source->sink, buffer))
        goto fail_upload;

      gst_buffer_unref (buffer);
    }
  else
    GST_WARNING_OBJECT (gst_source->sink, "No buffers available for display");

  if (G_UNLIKELY (pipeline_ready))
    g_signal_emit (gst_source->sink,
                   video_sink_signals[PIPELINE_READY_SIGNAL],
                   0 /* detail */);
  g_signal_emit (gst_source->sink,
                 video_sink_signals[NEW_FRAME_SIGNAL], 0,
                 NULL);

  return TRUE;


negotiation_fail:
  {
    GST_WARNING_OBJECT (gst_source->sink,
        "Failed to handle caps. Stopping GSource");
    priv->flow_return = GST_FLOW_NOT_NEGOTIATED;
    g_mutex_unlock (&gst_source->buffer_lock);

    return FALSE;
  }

fail_upload:
  {
    GST_WARNING_OBJECT (gst_source->sink, "Failed to upload buffer");
    priv->flow_return = GST_FLOW_ERROR;
    gst_buffer_unref (buffer);
    return FALSE;
  }
}
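This dispatch function is one slot of a custom GSource vtable: the sink's streaming thread queues buffers, and this source hands them to the GLib main loop. A minimal registration skeleton, assuming the companion prepare/check/finalize functions exist alongside the dispatch shown above (as they do in the Cogl sources):

static GSourceFuncs gst_source_funcs = {
  cogl_gst_source_prepare,   /* assumed companion functions */
  cogl_gst_source_check,
  cogl_gst_source_dispatch,  /* the function shown above */
  cogl_gst_source_finalize
};

/* Registration sketch, e.g. at sink setup time: */
GSource *source = g_source_new (&gst_source_funcs, sizeof (CoglGstSource));
g_source_attach (source, NULL);  /* NULL = default main context */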
Example #5
bool ofGstUtils::initGrabber(int w, int h, int framerate){
	bpp = 24;
	if(!camData.bInited) get_video_devices(camData);

	if(camData.webcam_devices.size()==0){
		ofLog(OF_LOG_ERROR,"ofGstUtils: no devices found, exiting without initializing");
		return false;
	}

	ofGstVideoFormat & format = selectFormat(w, h, framerate);

	ofLog(OF_LOG_NOTICE,"ofGstUtils: selected format: " + ofToString(format.width) + "x" + ofToString(format.height) + " " + format.mimetype + " framerate: " + ofToString(format.choosen_framerate.numerator) + "/" + ofToString(format.choosen_framerate.denominator));

	bIsCamera = true;
	bHavePixelsChanged 	= false;

	width = w;
	height = h;

	gstData.loop		= g_main_loop_new (NULL, FALSE);


	const char * decodebin = "";
	if(format.mimetype == "video/x-raw-bayer")
		decodebin = "bayer2rgb !";
	else if(format.mimetype != "video/x-raw-yuv" && format.mimetype != "video/x-raw-rgb")
		decodebin = "decodebin !";

	const char * scale = "ffmpegcolorspace !";
	if( w!=format.width || h!=format.height )	scale = "ffvideoscale method=2 !";


	string format_str_pipeline = string("%s name=video_source device=%s ! ") +
								 "%s,width=%d,height=%d,framerate=%d/%d ! " +
								 "%s %s " +
								 "appsink name=sink  caps=\"video/x-raw-rgb, width=%d, height=%d, bpp=24\"";

	gchar* pipeline_string =g_strdup_printf (
				      format_str_pipeline.c_str(),
				      camData.webcam_devices[deviceID].gstreamer_src.c_str(),
				      camData.webcam_devices[deviceID].video_device.c_str(),
				      format.mimetype.c_str(),
				      format.width,
				      format.height,
				      format.choosen_framerate.numerator,
				      format.choosen_framerate.denominator,
				      decodebin, scale,
				      w,h);

	ofLog(OF_LOG_NOTICE, "gstreamer pipeline: %s", pipeline_string);

	GError * error = NULL;
	gstPipeline = gst_parse_launch (pipeline_string, &error);

	gstSink = gst_bin_get_by_name(GST_BIN(gstPipeline),"sink");

	gst_base_sink_set_sync(GST_BASE_SINK(gstSink), true);


	if(startPipeline()){
		play();
		return true;
	}else{
		return false;
	}
}
Example #6
static gboolean
gst_multi_file_sink_event (GstBaseSink * sink, GstEvent * event)
{
  GstMultiFileSink *multifilesink;
  gchar *filename;

  multifilesink = GST_MULTI_FILE_SINK (sink);

  switch (GST_EVENT_TYPE (event)) {
    case GST_EVENT_CUSTOM_DOWNSTREAM:
    {
      GstClockTime timestamp, duration;
      GstClockTime running_time, stream_time;
      guint64 offset, offset_end;
      gboolean all_headers;
      guint count;

      if (multifilesink->next_file != GST_MULTI_FILE_SINK_NEXT_KEY_UNIT_EVENT ||
          !gst_video_event_is_force_key_unit (event))
        goto out;

      gst_video_event_parse_downstream_force_key_unit (event, &timestamp,
          &stream_time, &running_time, &all_headers, &count);

      if (multifilesink->force_key_unit_count != -1 &&
          multifilesink->force_key_unit_count == count)
        goto out;

      multifilesink->force_key_unit_count = count;

      if (multifilesink->file) {
        duration = GST_CLOCK_TIME_NONE;
        offset = offset_end = -1;
        filename = g_strdup_printf (multifilesink->filename,
            multifilesink->index);

        gst_multi_file_sink_close_file (multifilesink, NULL);

        gst_multi_file_sink_post_message_full (multifilesink, timestamp,
            duration, offset, offset_end, running_time, stream_time, filename);
        g_free (filename);
      }

      if (multifilesink->file == NULL) {
        if (!gst_multi_file_sink_open_next_file (multifilesink))
          goto stdio_write_error;
      }

      break;
    }
    case GST_EVENT_EOS:
      if (multifilesink->aggregate_gops) {
        GstBuffer *buf = gst_buffer_new ();

        /* push key unit buffer to force writing out the pending GOP data */
        GST_INFO_OBJECT (sink, "EOS, write pending GOP data");
        GST_BUFFER_FLAG_UNSET (buf, GST_BUFFER_FLAG_DELTA_UNIT);
        gst_multi_file_sink_render (sink, buf);
        gst_buffer_unref (buf);
      }
      if (multifilesink->file) {
        gchar *filename;

        filename = g_strdup_printf (multifilesink->filename,
            multifilesink->index);

        gst_multi_file_sink_close_file (multifilesink, NULL);

        gst_multi_file_sink_post_message_from_time (multifilesink,
            GST_BASE_SINK (multifilesink)->segment.position, -1, filename);
        g_free (filename);
      }
      break;
    default:
      break;
  }

out:
  return GST_BASE_SINK_CLASS (parent_class)->event (sink, event);

  /* ERRORS */
stdio_write_error:
  {
    GST_ELEMENT_ERROR (multifilesink, RESOURCE, WRITE,
        ("Error while writing to file."), (NULL));
    gst_event_unref (event);
    return FALSE;
  }
}
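The GST_EVENT_CUSTOM_DOWNSTREAM branch above reacts to force-key-unit events. For reference, such an event is produced with gst_video_event_new_downstream_force_key_unit() from gst-plugins-base; a sketch where the timestamp variables are placeholders:

/* Sketch: emit the event this sink splits files on. */
GstEvent *ev = gst_video_event_new_downstream_force_key_unit (
    timestamp, stream_time, running_time,
    TRUE /* all_headers */, count);
gst_pad_push_event (srcpad, ev);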
Example #7
static void
gst_vdp_sink_expose (GstXOverlay * overlay)
{
  gst_vdp_sink_show_frame (GST_BASE_SINK (overlay), NULL);
}
Example #8
bool GStreamerWrapper::open( std::string strFilename, bool bGenerateVideoBuffer, bool bGenerateAudioBuffer )
{
	if( m_bFileIsOpen )
	{
		stop();
		close();
	}

	// init property variables
	m_iNumVideoStreams = 0;
	m_iNumAudioStreams = 0;
	m_iCurrentVideoStream = 0;
	m_iCurrentAudioStream = 0;
	m_iWidth = m_iHeight = 0;
	m_iCurrentFrameNumber = 0;		// set to invalid, as it is not decoded yet
	m_dCurrentTimeInMs = 0;			// set to invalid, as it is not decoded yet
	m_bIsAudioSigned = false;
	m_bIsNewVideoFrame = false;
	m_iNumAudioChannels = 0;
	m_iAudioSampleRate = 0;
	m_iAudioBufferSize = 0;
	m_iAudioWidth = 0;
	m_AudioEndianness = LITTLE_ENDIAN;
	m_fFps = 0;
	m_dDurationInMs = 0;
	m_iNumberOfFrames = 0;

	m_fVolume = 1.0f;
	m_fSpeed = 1.0f;
	m_PlayDirection = FORWARD;
	m_CurrentPlayState = NOT_INITIALIZED;
	m_LoopMode = LOOP;
	m_strFilename = strFilename;

#ifdef THREADED_MESSAGE_HANDLER
		m_MsgHandlingThread = std::thread( std::bind( threadedMessageHandler, this ) );
#endif


	////////////////////////////////////////////////////////////////////////// PIPELINE
	// Init main pipeline --> playbin2
	m_GstPipeline = gst_element_factory_make( "playbin2", "pipeline" );

	// Check and re-arrange filename string
	if ( strFilename.find( "file:/", 0 ) == std::string::npos &&
		 strFilename.find( "file:///", 0 ) == std::string::npos &&
		 strFilename.find( "http://", 0 ) == std::string::npos )
	{
		strFilename = "file:/" + strFilename;
	}

	// Open Uri
	g_object_set( m_GstPipeline, "uri", strFilename.c_str(), NULL );


	////////////////////////////////////////////////////////////////////////// VIDEO SINK
	// Extract and Config Video Sink
	if ( bGenerateVideoBuffer )
	{
		// Create the video appsink and configure it
		m_GstVideoSink = gst_element_factory_make( "appsink", "videosink" );
		gst_base_sink_set_sync( GST_BASE_SINK( m_GstVideoSink ), true );
		gst_app_sink_set_max_buffers( GST_APP_SINK( m_GstVideoSink ), 8 );
		gst_app_sink_set_drop( GST_APP_SINK( m_GstVideoSink ),true );
		gst_base_sink_set_max_lateness( GST_BASE_SINK( m_GstVideoSink ), -1);

		// Set some fix caps for the video sink
		// It would seem that GStreamer then tries to transform any incoming video stream according to these caps
		GstCaps* caps = gst_caps_new_simple( "video/x-raw-rgb",
			"bpp", G_TYPE_INT, 24,
			"depth", G_TYPE_INT, 24,
			"endianness",G_TYPE_INT,4321,
			"red_mask",G_TYPE_INT,0xff0000,
			"green_mask",G_TYPE_INT,0x00ff00,
			"blue_mask",G_TYPE_INT,0x0000ff,
			"alpha_mask",G_TYPE_INT,0x000000ff,
			NULL );


		gst_app_sink_set_caps( GST_APP_SINK( m_GstVideoSink ), caps );
		gst_caps_unref( caps );

		// Set the configured video appsink to the main pipeline
		g_object_set( m_GstPipeline, "video-sink", m_GstVideoSink, (void*)NULL );
		// Tell the video appsink that it should not emit signals, as buffer retrieval is handled via callback methods
		g_object_set( m_GstVideoSink, "emit-signals", false, "sync", true, (void*)NULL );

		// Set Video Sink callback methods
		m_GstVideoSinkCallbacks.eos = &GStreamerWrapper::onEosFromVideoSource;
		m_GstVideoSinkCallbacks.new_preroll = &GStreamerWrapper::onNewPrerollFromVideoSource;
		m_GstVideoSinkCallbacks.new_buffer = &GStreamerWrapper::onNewBufferFromVideoSource;
		gst_app_sink_set_callbacks( GST_APP_SINK( m_GstVideoSink ), &m_GstVideoSinkCallbacks, this, NULL );
	}
	else
	{
#if defined _WIN32 // Use direct show as playback plugin if on Windows; Needed for features like play direction and playback speed to work correctly
		GstElement* videoSink = gst_element_factory_make( "directdrawsink", NULL );
		g_object_set( m_GstPipeline, "video-sink", videoSink, NULL );
#elif defined LINUX
		GstElement* videoSink = gst_element_factory_make( "xvimagesink", NULL );    //possible alternatives: ximagesink (no (gpu) fancy stuff) or better: cluttersink
		g_object_set( m_GstPipeline, "video-sink", videoSink, NULL );
#else // Use Mac OSX plugin otherwise
		GstElement* videoSink = gst_element_factory_make( "osxvideosink", NULL );
		g_object_set( m_GstPipeline, "video-sink", videoSink, NULL );
#endif
	}

	////////////////////////////////////////////////////////////////////////// AUDIO SINK
	// Extract and config Audio Sink
	if ( bGenerateAudioBuffer )
	{
		// Create and configure audio appsink
		m_GstAudioSink = gst_element_factory_make( "appsink", "audiosink" );
		gst_base_sink_set_sync( GST_BASE_SINK( m_GstAudioSink ), true );
		// Set the configured audio appsink to the main pipeline
		g_object_set( m_GstPipeline, "audio-sink", m_GstAudioSink, (void*)NULL );
		// Tell the audio appsink that it should not emit signals, as buffer retrieval is handled via callback methods
		g_object_set( m_GstAudioSink, "emit-signals", false, "sync", true, (void*)NULL );

		// Set Audio Sink callback methods
		m_GstAudioSinkCallbacks.eos = &GStreamerWrapper::onEosFromAudioSource;
		m_GstAudioSinkCallbacks.new_preroll = &GStreamerWrapper::onNewPrerollFromAudioSource;
		m_GstAudioSinkCallbacks.new_buffer = &GStreamerWrapper::onNewBufferFromAudioSource;
		gst_app_sink_set_callbacks( GST_APP_SINK( m_GstAudioSink ), &m_GstAudioSinkCallbacks, this, NULL );
	}
	else
	{
#if defined _WIN32 // Use direct sound plugin if on Windows; Needed for features like play direction and playback speed to work correctly
		GstElement* audioSink = gst_element_factory_make( "directsoundsink", NULL );
		g_object_set ( m_GstPipeline, "audio-sink", audioSink, NULL );
#elif defined LINUX
		GstElement* audioSink = gst_element_factory_make( "pulsesink", NULL );  //alternative: alsasink
		g_object_set ( m_GstPipeline, "audio-sink", audioSink, NULL );
#else // Use Mac OSX plugin otherwise
		GstElement* audioSink = gst_element_factory_make( "osxaudiosink", NULL );
		g_object_set ( m_GstPipeline,"audio-sink", audioSink, NULL );
#endif
	}

	////////////////////////////////////////////////////////////////////////// BUS
	// Set GstBus
	m_GstBus = gst_pipeline_get_bus( GST_PIPELINE( m_GstPipeline ) );

	if ( m_GstPipeline != NULL )
	{
//just add this callback for threaded message handling
#ifdef THREADED_MESSAGE_HANDLER
		gst_bus_add_watch (m_GstBus, onHandleGstMessages, this );
#endif
		// We need to stream the file a little bit in order to be able to retrieve information from it
		gst_element_set_state( m_GstPipeline, GST_STATE_READY );
		gst_element_set_state( m_GstPipeline, GST_STATE_PAUSED );

		// For some reason this is needed in order to gather video information such as size, framerate etc ...
		GstState state;
		gst_element_get_state( m_GstPipeline, &state, NULL, 2 * GST_SECOND );
		m_CurrentPlayState = OPENED;
	}

	// Retrieve and store all relevant Media Information
	retrieveVideoInfo();

	if( !hasVideo() && !hasAudio() )	// is a valid multimedia file?
	{
		close();
		return false;
	}

	// Print Media Info
	printMediaFileInfo();

	// TODO: Check if everything was initialized correctly
	// A file has been opened
	m_bFileIsOpen = true;

	return true;
}
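The GstAppSinkCallbacks wiring above relies on the GStreamer 0.10 callback layout (matching the playbin2 element this wrapper creates). A sketch of what a new_buffer callback looks like under that API; the wrapper's member functions would use the same signatures:

/* 0.10-style new_buffer callback sketch. */
static GstFlowReturn
on_new_buffer (GstAppSink *sink, gpointer user_data)
{
	GstBuffer *buf = gst_app_sink_pull_buffer (sink); /* 0.10 API */
	/* ... copy buf->data / buf->size into the wrapper's frame buffer ... */
	gst_buffer_unref (buf);
	return GST_FLOW_OK;
}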
Example #9
	bool GSPipe::init_stream() {
		if(!gst_is_initialized()) {
		  // Initialize gstreamer pipeline
		  ROS_DEBUG_STREAM( "Initializing gstreamer..." );
		  gst_init(0,0);
		}

		ROS_DEBUG_STREAM( "Gstreamer Version: " << gst_version_string() );

		GError *error = 0; // Assignment to zero is a gst requirement

		//pipeline_ = gst_parse_launch(gsconfig_.c_str(), &error);
		pipeline_ = gst_parse_launch(pipeline_str.c_str(), &error);
		if (pipeline_ == NULL) {
		  ROS_FATAL_STREAM( error->message );
		  return false;
		}

		sink_ = gst_element_factory_make("appsink",NULL);
		GstCaps * caps = image_encoding_ == sensor_msgs::image_encodings::RGB8 ?
			gst_caps_new_simple("video/x-raw-rgb", NULL) :
			gst_caps_new_simple("video/x-raw-gray", NULL);
		gst_app_sink_set_caps(GST_APP_SINK(sink_), caps);
		gst_caps_unref(caps);

		gst_base_sink_set_sync(
		        GST_BASE_SINK(sink_),
		        (sync_sink_) ? TRUE : FALSE);

		if(GST_IS_PIPELINE(pipeline_)) {
		  GstPad *outpad = gst_bin_find_unlinked_pad(GST_BIN(pipeline_), GST_PAD_SRC);
		  g_assert(outpad);

		  GstElement *outelement = gst_pad_get_parent_element(outpad);
		  g_assert(outelement);
		  gst_object_unref(outpad);

		  if(!gst_bin_add(GST_BIN(pipeline_), sink_)) {
			ROS_FATAL("gst_bin_add() failed");
			gst_object_unref(outelement);
			gst_object_unref(pipeline_);
			return false;
		  }

		  if(!gst_element_link(outelement, sink_)) {
			ROS_FATAL("GStreamer: cannot link outelement(\"%s\") -> sink\n", gst_element_get_name(outelement));
			gst_object_unref(outelement);
			gst_object_unref(pipeline_);
			return false;
		  }

		  gst_object_unref(outelement);
		} else {
		  GstElement* launchpipe = pipeline_;
		  pipeline_ = gst_pipeline_new(NULL);
		  g_assert(pipeline_);

		  gst_object_unparent(GST_OBJECT(launchpipe));

		  gst_bin_add_many(GST_BIN(pipeline_), launchpipe, sink_, NULL);

		  if(!gst_element_link(launchpipe, sink_)) {
			ROS_FATAL("GStreamer: cannot link launchpipe -> sink");
			gst_object_unref(pipeline_);
			return false;
		  }
		}

		gst_element_set_state(pipeline_, GST_STATE_PAUSED);

		if (gst_element_get_state(pipeline_, NULL, NULL, -1) == GST_STATE_CHANGE_FAILURE) {
		  ROS_FATAL("Failed to PAUSE stream, check your gstreamer configuration.");
		  return false;
		} else {
		  ROS_DEBUG_STREAM("Stream is PAUSED.");
		}
		// Create ROS camera interface
		camera_pub_ = image_transport_.advertiseCamera("camera/image_raw", 1);

		return true;
	}
Example #10
static CvCapture_GStreamer * icvCreateCapture_GStreamer(int type, const char *filename)
{
	CvCapture_GStreamer *capture = 0;
	CV_FUNCNAME("cvCaptureFromCAM_GStreamer");

	__BEGIN__;

//	teststreamer(filename);

//	return 0;

	if(!isInited) {
		printf("gst_init\n");
		gst_init (NULL, NULL);

// according to the documentation this is the way to register a plugin now
// unfortunately, it has not propagated into my distribution yet...
// 		gst_plugin_register_static(GST_VERSION_MAJOR, GST_VERSION_MINOR,
// 			"opencv-appsink", "Element application sink",
// 			"0.1", appsink_plugin_init, "LGPL", "highgui", "opencv",
// 			"http://opencvlibrary.sourceforge.net/");

		isInited = true;
	}

	const char *sourcetypes[] = {"dv1394src", "v4lsrc", "v4l2src", "filesrc"};
//	printf("entered capturecreator %s\n", sourcetypes[type]);

	GstElement *source = gst_element_factory_make(sourcetypes[type], NULL);
	if(!source)
		return 0;

	if(type == CV_CAP_GSTREAMER_FILE)
		g_object_set(G_OBJECT(source), "location", filename, NULL);

	GstElement *colour = gst_element_factory_make("ffmpegcolorspace", NULL);

	GstElement *sink = gst_element_factory_make("opencv-appsink", NULL);
	GstCaps *caps = gst_caps_new_simple("video/x-raw-rgb", NULL);
	gst_app_sink_set_caps(GST_APP_SINK(sink), caps);
	gst_caps_unref(caps); // drop our ref; the appsink keeps its own
	gst_base_sink_set_sync(GST_BASE_SINK(sink), false);
//	g_signal_connect(sink, "new-buffer", G_CALLBACK(newbuffer), NULL);

	GstElement *decodebin = gst_element_factory_make("decodebin", NULL);
	g_signal_connect(decodebin, "new-decoded-pad", G_CALLBACK(icvNewPad), colour);

	GstElement *pipeline = gst_pipeline_new (NULL);

	gst_bin_add_many(GST_BIN(pipeline), source, decodebin, colour, sink, NULL);

//	printf("added many\n");

	switch(type) {
	case CV_CAP_GSTREAMER_V4L2: // default to 640x480, 30 fps
		caps = gst_caps_new_simple("video/x-raw-rgb",
					   "width", G_TYPE_INT, 640,
					   "height", G_TYPE_INT, 480,
					   "framerate", GST_TYPE_FRACTION, 30, 1,
					   NULL);
		if(!gst_element_link_filtered(source, decodebin, caps)) {
			CV_ERROR(CV_StsError, "GStreamer: cannot link v4l2src -> decodebin\n");
			gst_object_unref(pipeline);
			return 0;
		}
		gst_caps_unref(caps);
		break;
	case CV_CAP_GSTREAMER_V4L:
	case CV_CAP_GSTREAMER_1394:
	case CV_CAP_GSTREAMER_FILE:
		if(!gst_element_link(source, decodebin)) {
			CV_ERROR(CV_StsError, "GStreamer: cannot link filesrc -> decodebin\n");
			gst_object_unref(pipeline);
			return 0;
		}
		break;
	}

	if(!gst_element_link(colour, sink)) {
		CV_ERROR(CV_StsError, "GStreamer: cannot link colour -> sink\n");
		gst_object_unref(pipeline);
		return 0;
	}

//	printf("linked, pausing\n");

	if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PAUSED) ==
	   GST_STATE_CHANGE_FAILURE) {
		CV_WARN("GStreamer: unable to set pipeline to paused\n");
//		icvHandleMessage(capture);
//		cvReleaseCapture((CvCapture **)(void *)&capture);
		gst_object_unref(pipeline);
		return 0;
	}

//	printf("state now paused\n");

	// construct capture struct
	capture = (CvCapture_GStreamer *)cvAlloc(sizeof(CvCapture_GStreamer));
	memset(capture, 0, sizeof(CvCapture_GStreamer));
	capture->type = type;
	capture->pipeline = pipeline;
	capture->source = source;
	capture->decodebin = decodebin;
	capture->colour = colour;
	capture->appsink = sink;

	icvHandleMessage(capture);

	OPENCV_ASSERT(capture,
                      "cvCaptureFromFile_GStreamer( const char * )", "couldn't create capture");

//	GstClock *clock = gst_pipeline_get_clock(GST_PIPELINE(pipeline));
//	printf("clock %s\n", gst_object_get_name(GST_OBJECT(clock)));

	__END__;

	return capture;
}
Example #11
static void
gst_sphinx_sink_init (GstSphinxSink * sphinxsink, GstSphinxSinkClass * g_class)
{
  GST_BASE_SINK (sphinxsink)->sync = FALSE;
  sphinxsink->adapter = gst_adapter_new ();
}
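Poking the sync field directly, as this example does, predates the accessor; the equivalent and more conventional form is:

gst_base_sink_set_sync (GST_BASE_SINK (sphinxsink), FALSE);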
Example #12
bool ofGstUtils::initGrabber(int w, int h) {
    bpp = 3;
    if(!camData.bInited) get_video_devices(camData);

    ofGstVideoFormat * format = selectFormat(w,h);
    if(! format) {
        return false;
    }

    /*	ofLog(OF_LOG_VERBOSE,"ofGstUtils: selected format: " + ofToString(format->width) + "x" + ofToString(format->height) + " " + format->mimetype + " framerate: " + ofToString(format->highest_framerate.numerator) + "/" + ofToString(format->highest_framerate.denominator));*/


    camData.webcam_devices[deviceID].current_format = format;
    bIsCamera = true;
    bHavePixelsChanged 	= false;

    width = w;
    height = h;

    gstData.loop		= g_main_loop_new (NULL, FALSE);


    const char * decodebin = "";
    if(strcmp(format->mimetype,"video/x-raw-yuv")!=0 && strcmp(format->mimetype,"video/x-raw-rgb")!=0)
        decodebin = "decodebin !";

    const char * scale = "";
    if( strcmp(format->mimetype,"video/x-raw-rgb")!=0 ) scale = "ffmpegcolorspace !";
    if( w!=format->width || h!=format->height )	scale = "ffvideoscale method=2 !";

    string format_str_pipeline = string("%s name=video_source device=%s ! ") +
                                 "%s,width=%d,height=%d,framerate=%d/%d ! " +
                                 "%s %s " +
                                 "video/x-raw-rgb, width=%d, height=%d, depth=24 ! appsink name=sink  caps=video/x-raw-rgb";
    gchar* pipeline_string =g_strdup_printf (
                                format_str_pipeline.c_str(),
                                camData.webcam_devices[deviceID].gstreamer_src,
                                camData.webcam_devices[deviceID].video_device,
                                format->mimetype,
                                format->width,
                                format->height,
                                format->highest_framerate.numerator,
                                format->highest_framerate.denominator,
                                decodebin, scale,
                                w,h);

//	ofLog(OF_LOG_NOTICE, "gstreamer pipeline: %s", pipeline_string);

    GError * error = NULL;
    gstPipeline = gst_parse_launch (pipeline_string, &error);

    gstSink = gst_bin_get_by_name(GST_BIN(gstPipeline),"sink");

    gst_base_sink_set_sync(GST_BASE_SINK(gstSink), true);


    if(startPipeline()) {
        play();
        return true;
    } else {
        return false;
    }
}
Example #13
static void
gst_nle_source_pad_added_cb (GstElement * element, GstPad * pad,
    GstNleSource * nlesrc)
{
  GstCaps *caps;
  const GstStructure *s;
  const gchar *mime;
  GstElement *appsink = NULL;
  GstPad *sink_pad;
  GstAppSinkCallbacks appsink_cbs;
  GstNleSrcItem *item;

  item = (GstNleSrcItem *) g_list_nth_data (nlesrc->queue, nlesrc->index);

  caps = gst_pad_get_caps_reffed (pad);
  s = gst_caps_get_structure (caps, 0);
  mime = gst_structure_get_name (s);
  GST_DEBUG_OBJECT (nlesrc, "Found mime type: %s", mime);

  if (g_strrstr (mime, "video") && !nlesrc->video_linked) {
    appsink = gst_element_factory_make ("appsink", NULL);
    memset (&appsink_cbs, 0, sizeof (appsink_cbs));
    appsink_cbs.eos = gst_nle_source_on_video_eos;
    appsink_cbs.new_preroll = gst_nle_source_on_preroll_buffer;
    appsink_cbs.new_buffer = gst_nle_source_on_video_buffer;
    nlesrc->video_linked = TRUE;
    if (!nlesrc->video_srcpad_added) {
      gst_pad_set_active (nlesrc->video_srcpad, TRUE);
      gst_element_add_pad (GST_ELEMENT (nlesrc),
          gst_object_ref (nlesrc->video_srcpad));
      nlesrc->video_srcpad_added = TRUE;
    }
    gst_pad_add_event_probe (GST_BASE_SINK_PAD (GST_BASE_SINK (appsink)),
        (GCallback) gst_nle_source_video_pad_probe_cb, nlesrc);
    nlesrc->video_eos = FALSE;
  } else if (g_strrstr (mime, "audio") && nlesrc->with_audio
      && !nlesrc->audio_linked && (item ? item->rate == 1.0 : TRUE)) {
    appsink = gst_element_factory_make ("appsink", NULL);
    memset (&appsink_cbs, 0, sizeof (appsink_cbs));
    appsink_cbs.eos = gst_nle_source_on_audio_eos;
    appsink_cbs.new_preroll = gst_nle_source_on_preroll_buffer;
    appsink_cbs.new_buffer = gst_nle_source_on_audio_buffer;
    nlesrc->audio_linked = TRUE;
    if (!nlesrc->audio_srcpad_added) {
      gst_pad_set_active (nlesrc->audio_srcpad, TRUE);
      gst_element_add_pad (GST_ELEMENT (nlesrc),
          gst_object_ref (nlesrc->audio_srcpad));
      nlesrc->audio_srcpad_added = TRUE;
    }
    gst_pad_add_event_probe (GST_BASE_SINK_PAD (GST_BASE_SINK (appsink)),
        (GCallback) gst_nle_source_audio_pad_probe_cb, nlesrc);
    nlesrc->audio_eos = FALSE;
  }

  /* Done with the pad caps (gst_pad_get_caps_reffed() returned a reference) */
  gst_caps_unref (caps);

  if (appsink != NULL) {
    g_object_set (appsink, "sync", FALSE, NULL);
    gst_app_sink_set_callbacks (GST_APP_SINK (appsink), &appsink_cbs, nlesrc,
        NULL);
    gst_bin_add (GST_BIN (nlesrc->decoder), appsink);
    sink_pad = gst_element_get_static_pad (appsink, "sink");
    gst_pad_link (pad, sink_pad);
    gst_element_sync_state_with_parent (appsink);
    gst_object_unref (sink_pad);
  }
}
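This example uses the GStreamer 0.10 probe API (gst_pad_add_event_probe, gst_pad_get_caps_reffed). Under GStreamer 1.0 the equivalent attachment would be gst_pad_add_probe(); a sketch:

/* 1.0 replacement for the gst_pad_add_event_probe() calls above. */
static GstPadProbeReturn
pad_probe_cb (GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
{
  /* inspect GST_PAD_PROBE_INFO_EVENT (info) here */
  return GST_PAD_PROBE_OK;
}

gst_pad_add_probe (GST_BASE_SINK_PAD (GST_BASE_SINK (appsink)),
    GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, pad_probe_cb, nlesrc, NULL);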
Example #14
int main(int argc, char** argv) {
	char *config = getenv("GSCAM_CONFIG");
	if (config == NULL) {
		std::cout << "Problem getting GSCAM_CONFIG variable." << std::endl;
		exit(-1);
	}

	gst_init(0,0);
	std::cout << "Gstreamer Version: " << gst_version_string() << std::endl;

	GError *error = 0; //assignment to zero is a gst requirement
	GstElement *pipeline = gst_parse_launch(config,&error);
	if (pipeline == NULL) {
		std::cout << error->message << std::endl;
		exit(-1);
	}
	GstElement * sink = gst_element_factory_make("appsink",NULL);
	GstCaps * caps = gst_caps_new_simple("video/x-raw-rgb", NULL);
	gst_app_sink_set_caps(GST_APP_SINK(sink), caps);
	gst_caps_unref(caps);

	gst_base_sink_set_sync(GST_BASE_SINK(sink), TRUE);

	if(GST_IS_PIPELINE(pipeline)) {
	    GstPad *outpad = gst_bin_find_unlinked_pad(GST_BIN(pipeline), GST_PAD_SRC);
	    g_assert(outpad);
	    GstElement *outelement = gst_pad_get_parent_element(outpad);
	    g_assert(outelement);
	    gst_object_unref(outpad);


	    if(!gst_bin_add(GST_BIN(pipeline), sink)) {
		fprintf(stderr, "gst_bin_add() failed\n"); // TODO: do some unref
		gst_object_unref(outelement);
		gst_object_unref(pipeline);
		return -1;
	    }

	    if(!gst_element_link(outelement, sink)) {
		fprintf(stderr, "GStreamer: cannot link outelement(\"%s\") -> sink\n", gst_element_get_name(outelement));
		gst_object_unref(outelement);
		gst_object_unref(pipeline);
		return -1;
	    }

	    gst_object_unref(outelement);
	} else {
	    GstElement* launchpipe = pipeline;
	    pipeline = gst_pipeline_new(NULL);
	    g_assert(pipeline);

	    gst_object_unparent(GST_OBJECT(launchpipe));

	    gst_bin_add_many(GST_BIN(pipeline), launchpipe, sink, NULL);

	    if(!gst_element_link(launchpipe, sink)) {
		fprintf(stderr, "GStreamer: cannot link launchpipe -> sink\n");
		gst_object_unref(pipeline);
		return -1;
	    }
	}

	gst_element_set_state(pipeline, GST_STATE_PAUSED);

	if (gst_element_get_state(pipeline, NULL, NULL, -1) == GST_STATE_CHANGE_FAILURE) {
		std::cout << "Failed to PAUSE." << std::endl;
		exit(-1);
	} else {
		std::cout << "stream is PAUSED." << std::endl;
	}

	// We could probably do something with the camera name, check
	// errors or something, but at the moment, we don't care.
	std::string camera_name;
	if (camera_calibration_parsers::readCalibrationIni("../camera_parameters.txt", camera_name, camera_info)) {
	  ROS_INFO("Successfully read camera calibration.  Rerun camera calibrator if it is incorrect.");
	}
	else {
	  ROS_ERROR("No camera_parameters.txt file found.  Use default file if no other is available.");
	}

	ros::init(argc, argv, "gscam_publisher");
	ros::NodeHandle nh;

	int preroll;
	nh.param("brown/gscam/preroll", preroll, 0);
	if (preroll) {
		//The PAUSE, PLAY, PAUSE, PLAY cycle is to ensure proper pre-roll
		//I am told this is needed and am erring on the side of caution.
		gst_element_set_state(pipeline, GST_STATE_PLAYING);

		if (gst_element_get_state(pipeline, NULL, NULL, -1) == GST_STATE_CHANGE_FAILURE) {
			std::cout << "Failed to PLAY." << std::endl;
			exit(-1);
		} else {
			std::cout << "stream is PLAYING." << std::endl;
		}

		gst_element_set_state(pipeline, GST_STATE_PAUSED);

		if (gst_element_get_state(pipeline, NULL, NULL, -1) == GST_STATE_CHANGE_FAILURE) {
			std::cout << "Failed to PAUSE." << std::endl;
			exit(-1);
		} else {
			std::cout << "stream is PAUSED." << std::endl;
		}
	}

	image_transport::ImageTransport it(nh);
//	image_transport::CameraPublisher pub = it.advertiseCamera("gscam/image_raw", 1);

//------------------------------------
// Added by jschoi, 2012-08-13
	char topic_name[32];
	if(argc==1)
		sprintf(topic_name, "gscam/image_raw");
	else
		sprintf(topic_name,"%s",argv[1]);	// To get the name of topic from the first argument
//------------------------------------
	image_transport::CameraPublisher pub = it.advertiseCamera(topic_name, 1);

	ros::ServiceServer set_camera_info = nh.advertiseService("gscam/set_camera_info", setCameraInfo);

	std::cout << "Processing..." << std::endl;

	//processVideo
	rosPad = false;
	gstreamerPad = true;
	gst_element_set_state(pipeline, GST_STATE_PLAYING);
	while(nh.ok()) {
		// This should block until a new frame is available; this way we'll
		// run at the actual capture framerate of the device.
		GstBuffer* buf = gst_app_sink_pull_buffer(GST_APP_SINK(sink));
		if (!buf) break;

		GstPad* pad = gst_element_get_static_pad(sink, "sink");
		const GstCaps *caps = gst_pad_get_negotiated_caps(pad);
		GstStructure *structure = gst_caps_get_structure(caps,0);
		gst_structure_get_int(structure,"width",&width);
		gst_structure_get_int(structure,"height",&height);

		sensor_msgs::Image msg;
		msg.width = width; 
		msg.height = height;
		msg.encoding = "rgb8";
		msg.is_bigendian = false;
		msg.step = width*3;
		msg.data.resize(width*height*3);
		std::copy(buf->data, buf->data+(width*height*3), msg.data.begin());

		pub.publish(msg, camera_info);

                gst_buffer_unref(buf);

		ros::spinOnce();

	}

	//close out
	std::cout << "\nquitting..." << std::endl;
	gst_element_set_state(pipeline, GST_STATE_NULL);
	gst_object_unref(pipeline);

	return 0;
}
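This node is written against GStreamer 0.10 (gst_app_sink_pull_buffer, buf->data). Under GStreamer 1.0 the pull-loop body would use samples and buffer maps instead; a sketch, with the surrounding ROS code unchanged:

/* 1.0 equivalent of the 0.10 pull above. */
GstSample *sample = gst_app_sink_pull_sample (GST_APP_SINK (sink));
if (sample) {
	GstBuffer *buf = gst_sample_get_buffer (sample);
	GstMapInfo map;
	if (gst_buffer_map (buf, &map, GST_MAP_READ)) {
		/* map.data / map.size hold the raw RGB frame */
		gst_buffer_unmap (buf, &map);
	}
	gst_sample_unref (sample);
}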
Example #15
static void
gst_checksum_sink_init (GstChecksumSink * checksumsink,
    GstChecksumSinkClass * checksumsink_class)
{
  gst_base_sink_set_sync (GST_BASE_SINK (checksumsink), FALSE);
}