Code example #1
File: cogl-basic-video-player.c Project: 3v1n0/cogl
static CoglGstVideoSink *
find_cogl_gst_video_sink (GstElement *element)
{
  GstElement *sink_element = NULL;
  GstIterator *iterator;
  GstElement *iterator_value;
  GValue value = G_VALUE_INIT;

  if (!GST_IS_BIN (element))
    return NULL;

  iterator = gst_bin_iterate_recurse (GST_BIN (element));

  g_value_init (&value, GST_TYPE_ELEMENT);

  while (gst_iterator_next (iterator, &value) == GST_ITERATOR_OK)
    {
      iterator_value = g_value_get_object (&value);

      g_value_reset (&value);

      if (COGL_GST_IS_VIDEO_SINK (iterator_value))
        {
          sink_element = iterator_value;
          break;
        }
    }

  g_value_unset (&value);

  gst_iterator_free (iterator);

  return COGL_GST_VIDEO_SINK (sink_element);
}
Code example #2
File: rbgst-bin.c Project: benolee/ruby-gnome2
/*
 * Method: children_recurse()
 *
 * Returns: an array of all Gst::Element objects in the container and child
 * bins.
 */
static VALUE
rb_gst_bin_get_children_recurse(VALUE self)
{
    GstIterator *iter;
    VALUE children;

    iter = gst_bin_iterate_recurse(SELF(self));
    children = _rbgst_collect_elements(iter);

    return children;
}
Code example #3
File: nleobject.c Project: MathieuDuponchelle/ges2
void
nle_object_seek_all_children (NleObject * object, GstEvent * seek_event)
{
  GstIterator *it = gst_bin_iterate_recurse (GST_BIN (object));

  while (gst_iterator_foreach (it, _send_seek_event,
          seek_event) == GST_ITERATOR_RESYNC)
    gst_iterator_resync (it);

  gst_iterator_free (it);
  gst_event_unref (seek_event);
}
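
The _send_seek_event() callback is not shown above. In GStreamer 1.0, gst_iterator_foreach() hands each child to the callback boxed in a GValue, so a plausible shape for it (an assumption, not necessarily the exact NLE code) is:

static void
_send_seek_event (const GValue * item, gpointer seek_event)
{
  GstElement *child = g_value_get_object (item);

  /* gst_element_send_event() takes ownership of the event, so every child
   * needs its own reference; the caller drops its own ref once the foreach
   * loop is done. */
  gst_element_send_event (child, gst_event_ref (GST_EVENT (seek_event)));
}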
Code example #4
File: gstreamer.c Project: BorodaZizitopa/gstplay
static GstElement *find_xvimagesink() {
	GstIterator *iterator = gst_bin_iterate_recurse(GST_BIN(pipeline));
	GstElement *xvimagesink = NULL;
	gboolean done = FALSE;
#if GST_CHECK_VERSION(1, 0, 0)
	GValue item = G_VALUE_INIT;
#else
	gpointer item;
#endif
	while (!done) {
		switch (gst_iterator_next(iterator, &item)) {
		case GST_ITERATOR_OK : {
#if GST_CHECK_VERSION(1, 0, 0)
			GstElement *element = g_value_get_object(&item);
#else
			GstElement *element = GST_ELEMENT(item);
#endif
			GstElementClass *klass = GST_ELEMENT_GET_CLASS(element);
#if GST_CHECK_VERSION(1, 0, 0)
			const char *s = gst_element_class_get_metadata(klass, "description");
#else
			const char *s = klass->details.description;
#endif
			/* This is not the best way to check. */
			if (strstr(s, "A Xv based videosink") != NULL ||
			strstr(GST_OBJECT_NAME(element), "xvimage") != NULL) {
				xvimagesink = element;
				done = TRUE;
			}
#if GST_CHECK_VERSION(1, 0, 0)
			g_value_reset(&item);
#endif
			break;
		}
		case GST_ITERATOR_RESYNC :
			gst_iterator_resync(iterator);
			xvimagesink = NULL;
			done = FALSE;
			break;
		case GST_ITERATOR_DONE:
		case GST_ITERATOR_ERROR:
		default:
			done = TRUE;
		}
	}
#if GST_CHECK_VERSION(1, 0, 0)
	g_value_unset(&item);
#endif
	gst_iterator_free(iterator);
	return xvimagesink;
}
Code example #5
nsresult GStreamerReader::CheckSupportedFormats()
{
  bool done = false;
  bool unsupported = false;

  GstIterator *it = gst_bin_iterate_recurse(GST_BIN(mPlayBin));
  while (!done) {
    GstElement* element;
    GstIteratorResult res = gst_iterator_next(it, (void **)&element);
    switch(res) {
      case GST_ITERATOR_OK:
      {
        GstElementFactory* factory = gst_element_get_factory(element);
        if (factory) {
          const char* klass = gst_element_factory_get_klass(factory);
          GstPad* pad = gst_element_get_pad(element, "sink");
          if (pad) {
            GstCaps* caps = gst_pad_get_negotiated_caps(pad);

            if (caps) {
              /* check for demuxers but ignore elements like id3demux */
              if (strstr (klass, "Demuxer") && !strstr(klass, "Metadata"))
                unsupported = !GStreamerFormatHelper::Instance()->CanHandleContainerCaps(caps);
              else if (strstr (klass, "Decoder"))
                unsupported = !GStreamerFormatHelper::Instance()->CanHandleCodecCaps(caps);

              gst_caps_unref(caps);
            }
            gst_object_unref(pad);
          }
        }

        gst_object_unref(element);
        done = unsupported;
        break;
      }
      case GST_ITERATOR_RESYNC:
        unsupported = false;
        done = false;
        break;
      case GST_ITERATOR_ERROR:
        done = true;
        break;
      case GST_ITERATOR_DONE:
        done = true;
        break;
    }
  }

  return unsupported ? NS_ERROR_FAILURE : NS_OK;
}
Code example #6
File: backgroundsource.c Project: matasbbb/GES
static GstElement *
find_composition (GESTrack * track)
{
  GstIterator *it = gst_bin_iterate_recurse (GST_BIN (track));
  GValue val = { 0, };
  GstElement *ret = NULL;

  if (gst_iterator_find_custom (it, (GCompareFunc) find_composition_func, &val,
          NULL))
    ret = g_value_get_object (&val);

  g_value_unset (&val);
  gst_iterator_free (it);

  return ret;
}
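
find_composition_func() is not included in the excerpt. With GStreamer 1.0, gst_iterator_find_custom() passes each item to the callback as a const GValue * and treats a return value of 0 as a match, so a minimal sketch looks like this (matching on the "gnlcomposition" factory name is an assumption, not necessarily the verbatim GES helper):

static gint
find_composition_func (const GValue * velement, gpointer user_data)
{
  GstElement *element = g_value_get_object (velement);
  GstElementFactory *factory = gst_element_get_factory (element);

  /* return 0 when this child was produced by the assumed factory */
  if (factory == NULL)
    return 1;
  return g_strcmp0 (GST_OBJECT_NAME (factory), "gnlcomposition");
}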
Code example #7
File: utility.cpp Project: dv1/nxplay
GstElement *find_element_by_factory_name(GstBin *p_bin, std::string const &p_factory_name)
{
	GstIterator *iter = gst_bin_iterate_recurse(p_bin);
	if (iter == nullptr)
		return nullptr;

	GValue result = G_VALUE_INIT;
	gboolean found = gst_iterator_find_custom(iter, find_by_factory_func, &result, (gpointer)(&p_factory_name));
	gst_iterator_free(iter);

	if (found)
	{
		GstElement *elem = GST_ELEMENT(g_value_dup_object(&result));
		g_value_unset(&result);
		return elem;
	}
	else
		return nullptr;
}
Code example #8
GstElement *
rb_player_gst_find_element_with_property (GstElement *element, const char *property)
{
	GstIterator *iter;
	GstElement *result;

	if (GST_IS_BIN (element) == FALSE) {
		if (g_object_class_find_property (G_OBJECT_GET_CLASS (element),
						  property) != NULL) {
			return g_object_ref (element);
		}
		return NULL;
	}

	rb_debug ("iterating bin looking for property %s", property);
	iter = gst_bin_iterate_recurse (GST_BIN (element));
	result = gst_iterator_find_custom (iter,
					   (GCompareFunc) find_property_element,
					   (gpointer) property);
	gst_iterator_free (iter);
	return result;
}
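
find_property_element() is not shown. This is GStreamer 0.10-style code, where gst_iterator_find_custom() hands the raw element pointer to the compare function and returns the matching element directly. A hedged sketch (Rhythmbox's exact helper may differ):

static gint
find_property_element (GstElement *element, const char *property)
{
	/* 0 means "found": keep the iterator's reference and hand it to the
	 * caller of gst_iterator_find_custom() */
	if (g_object_class_find_property (G_OBJECT_GET_CLASS (element),
					  property) != NULL)
		return 0;

	/* not a match: release the reference the 0.10 iterator gave us */
	gst_object_unref (element);
	return 1;
}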
Code example #9
File: GStreamerReader.cpp Project: msliu/gecko-dev
nsresult GStreamerReader::ReadMetadata(MediaInfo* aInfo,
                                       MetadataTags** aTags)
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
  nsresult ret = NS_OK;

  /*
   * Parse MP3 headers before we kick off the GStreamer pipeline; otherwise
   * there might be concurrent stream operations happening on both the decoding
   * and gstreamer threads, which would confuse the GStreamer state machine.
   */
  bool isMP3 = mDecoder->GetResource()->GetContentType().EqualsASCII(AUDIO_MP3);
  if (isMP3) {
    ParseMP3Headers();
  }


  /* We do 3 attempts here: decoding audio and video, decoding video only,
   * decoding audio only. This allows us to play streams that have one broken
   * stream but that are otherwise decodable.
   */
  guint flags[3] = {GST_PLAY_FLAG_VIDEO|GST_PLAY_FLAG_AUDIO,
    static_cast<guint>(~GST_PLAY_FLAG_AUDIO), static_cast<guint>(~GST_PLAY_FLAG_VIDEO)};
  guint default_flags, current_flags;
  g_object_get(mPlayBin, "flags", &default_flags, nullptr);

  GstMessage* message = nullptr;
  for (unsigned int i = 0; i < G_N_ELEMENTS(flags); i++) {
    current_flags = default_flags & flags[i];
    g_object_set(G_OBJECT(mPlayBin), "flags", current_flags, nullptr);

    /* reset filter caps to ANY */
    GstCaps* caps = gst_caps_new_any();
    GstElement* filter = gst_bin_get_by_name(GST_BIN(mAudioSink), "filter");
    g_object_set(filter, "caps", caps, nullptr);
    gst_object_unref(filter);

    filter = gst_bin_get_by_name(GST_BIN(mVideoSink), "filter");
    g_object_set(filter, "caps", caps, nullptr);
    gst_object_unref(filter);
    gst_caps_unref(caps);
    filter = nullptr;

    if (!(current_flags & GST_PLAY_FLAG_AUDIO))
      filter = gst_bin_get_by_name(GST_BIN(mAudioSink), "filter");
    else if (!(current_flags & GST_PLAY_FLAG_VIDEO))
      filter = gst_bin_get_by_name(GST_BIN(mVideoSink), "filter");

    if (filter) {
      /* Little trick: set the target caps to "skip" so that playbin2 fails to
       * find a decoder for the stream we want to skip.
       */
      GstCaps* filterCaps = gst_caps_new_simple ("skip", nullptr, nullptr);
      g_object_set(filter, "caps", filterCaps, nullptr);
      gst_caps_unref(filterCaps);
      gst_object_unref(filter);
    }

    LOG(PR_LOG_DEBUG, "starting metadata pipeline");
    if (gst_element_set_state(mPlayBin, GST_STATE_PAUSED) == GST_STATE_CHANGE_FAILURE) {
      LOG(PR_LOG_DEBUG, "metadata pipeline state change failed");
      ret = NS_ERROR_FAILURE;
      continue;
    }

    /* Wait for ASYNC_DONE, which is emitted when the pipeline is built,
     * prerolled and ready to play. Also watch for errors.
     */
    message = gst_bus_timed_pop_filtered(mBus, GST_CLOCK_TIME_NONE,
                 (GstMessageType)(GST_MESSAGE_ASYNC_DONE | GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
    if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_ASYNC_DONE) {
      LOG(PR_LOG_DEBUG, "read metadata pipeline prerolled");
      gst_message_unref(message);
      ret = NS_OK;
      break;
    } else {
      LOG(PR_LOG_DEBUG, "read metadata pipeline failed to preroll: %s",
            gst_message_type_get_name (GST_MESSAGE_TYPE (message)));

      if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_ERROR) {
        GError* error;
        gchar* debug;
        gst_message_parse_error(message, &error, &debug);
        LOG(PR_LOG_ERROR, "read metadata error: %s: %s", error->message, debug);
        g_error_free(error);
        g_free(debug);
      }
      /* Unexpected stream close/EOS or other error. We'll give up if all
       * streams are in error/eos. */
      gst_element_set_state(mPlayBin, GST_STATE_NULL);
      gst_message_unref(message);
      ret = NS_ERROR_FAILURE;
    }
  }

  if (NS_SUCCEEDED(ret))
    ret = CheckSupportedFormats();

  if (NS_FAILED(ret))
    /* we couldn't get this to play */
    return ret;

  /* report the duration */
  gint64 duration;

  if (isMP3 && mMP3FrameParser.IsMP3()) {
    // The MP3FrameParser has reported a duration; use that over the gstreamer
    // reported duration for inter-platform consistency.
    ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
    mUseParserDuration = true;
    mLastParserDuration = mMP3FrameParser.GetDuration();
    mDecoder->SetMediaDuration(mLastParserDuration);
  } else {
    LOG(PR_LOG_DEBUG, "querying duration");
    // Otherwise use the gstreamer duration.
#if GST_VERSION_MAJOR >= 1
    if (gst_element_query_duration(GST_ELEMENT(mPlayBin),
          GST_FORMAT_TIME, &duration)) {
#else
    GstFormat format = GST_FORMAT_TIME;
    if (gst_element_query_duration(GST_ELEMENT(mPlayBin),
      &format, &duration) && format == GST_FORMAT_TIME) {
#endif
      ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
      LOG(PR_LOG_DEBUG, "have duration %" GST_TIME_FORMAT, GST_TIME_ARGS(duration));
      duration = GST_TIME_AS_USECONDS (duration);
      mDecoder->SetMediaDuration(duration);
    }
  }

  int n_video = 0, n_audio = 0;
  g_object_get(mPlayBin, "n-video", &n_video, "n-audio", &n_audio, nullptr);
  mInfo.mVideo.mHasVideo = n_video != 0;
  mInfo.mAudio.mHasAudio = n_audio != 0;

  *aInfo = mInfo;

  *aTags = nullptr;

  // Watch the pipeline for fatal errors
#if GST_VERSION_MAJOR >= 1
  gst_bus_set_sync_handler(mBus, GStreamerReader::ErrorCb, this, nullptr);
#else
  gst_bus_set_sync_handler(mBus, GStreamerReader::ErrorCb, this);
#endif

  /* set the pipeline to PLAYING so that it starts decoding and queueing data in
   * the appsinks */
  gst_element_set_state(mPlayBin, GST_STATE_PLAYING);

  return NS_OK;
}

bool
GStreamerReader::IsMediaSeekable()
{
  if (mUseParserDuration) {
    return true;
  }

  gint64 duration;
#if GST_VERSION_MAJOR >= 1
  if (gst_element_query_duration(GST_ELEMENT(mPlayBin), GST_FORMAT_TIME,
                                 &duration)) {
#else
  GstFormat format = GST_FORMAT_TIME;
  if (gst_element_query_duration(GST_ELEMENT(mPlayBin), &format, &duration) &&
      format == GST_FORMAT_TIME) {
#endif
    return true;
  }

  return false;
}

nsresult GStreamerReader::CheckSupportedFormats()
{
  bool done = false;
  bool unsupported = false;

  GstIterator* it = gst_bin_iterate_recurse(GST_BIN(mPlayBin));
  while (!done) {
    GstIteratorResult res;
    GstElement* element;

#if GST_VERSION_MAJOR >= 1
    GValue value = {0,};
    res = gst_iterator_next(it, &value);
#else
    res = gst_iterator_next(it, (void **) &element);
#endif
    switch(res) {
      case GST_ITERATOR_OK:
      {
#if GST_VERSION_MAJOR >= 1
        element = GST_ELEMENT (g_value_get_object (&value));
#endif
        GstElementFactory* factory = gst_element_get_factory(element);
        if (factory) {
          const char* klass = gst_element_factory_get_klass(factory);
          GstPad* pad = gst_element_get_static_pad(element, "sink");
          if (pad) {
            GstCaps* caps;

#if GST_VERSION_MAJOR >= 1
            caps = gst_pad_get_current_caps(pad);
#else
            caps = gst_pad_get_negotiated_caps(pad);
#endif

            if (caps) {
              /* check for demuxers but ignore elements like id3demux */
              if (strstr (klass, "Demuxer") && !strstr(klass, "Metadata"))
                unsupported = !GStreamerFormatHelper::Instance()->CanHandleContainerCaps(caps);
              else if (strstr (klass, "Decoder") && !strstr(klass, "Generic"))
                unsupported = !GStreamerFormatHelper::Instance()->CanHandleCodecCaps(caps);

              gst_caps_unref(caps);
            }
            gst_object_unref(pad);
          }
        }

#if GST_VERSION_MAJOR >= 1
        g_value_unset (&value);
#else
        gst_object_unref(element);
#endif
        done = unsupported;
        break;
      }
      case GST_ITERATOR_RESYNC:
        unsupported = false;
        done = false;
        break;
      case GST_ITERATOR_ERROR:
        done = true;
        break;
      case GST_ITERATOR_DONE:
        done = true;
        break;
    }
  }

  return unsupported ? NS_ERROR_FAILURE : NS_OK;
}

nsresult GStreamerReader::ResetDecode()
{
  nsresult res = NS_OK;

  LOG(PR_LOG_DEBUG, "reset decode");

  if (NS_FAILED(MediaDecoderReader::ResetDecode())) {
    res = NS_ERROR_FAILURE;
  }

  mVideoQueue.Reset();
  mAudioQueue.Reset();

  mVideoSinkBufferCount = 0;
  mAudioSinkBufferCount = 0;
  mReachedAudioEos = false;
  mReachedVideoEos = false;
#if GST_VERSION_MAJOR >= 1
  mConfigureAlignment = true;
#endif

  LOG(PR_LOG_DEBUG, "reset decode done");

  return res;
}

bool GStreamerReader::DecodeAudioData()
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");

  GstBuffer *buffer = nullptr;

  {
    ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);

    if (mReachedAudioEos && !mAudioSinkBufferCount) {
      return false;
    }

    /* Wait for something to be decoded before returning or continuing */
    if (!mAudioSinkBufferCount) {
      if(!mVideoSinkBufferCount) {
        /* We have nothing decoded so it makes no sense to return to the state machine
         * as it will call us back immediately, we'll return again and so on, wasting
         * CPU cycles for no job done. So, block here until there is either video or
         * audio data available
        */
        mon.Wait();
        if (!mAudioSinkBufferCount) {
          /* There is still no audio data available, so either there is video data or
           * something else has happened (Eos, etc...). Return to the state machine
           * to process it.
           */
          return true;
        }
      }
      else {
        return true;
      }
    }

#if GST_VERSION_MAJOR >= 1
    GstSample *sample = gst_app_sink_pull_sample(mAudioAppSink);
    buffer = gst_buffer_ref(gst_sample_get_buffer(sample));
    gst_sample_unref(sample);
#else
    buffer = gst_app_sink_pull_buffer(mAudioAppSink);
#endif

    mAudioSinkBufferCount--;
  }

  int64_t timestamp = GST_BUFFER_TIMESTAMP(buffer);
  {
    ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
    timestamp = gst_segment_to_stream_time(&mAudioSegment,
                                           GST_FORMAT_TIME, timestamp);
  }
  timestamp = GST_TIME_AS_USECONDS(timestamp);

  int64_t offset = GST_BUFFER_OFFSET(buffer);
  guint8* data;
#if GST_VERSION_MAJOR >= 1
  GstMapInfo info;
  gst_buffer_map(buffer, &info, GST_MAP_READ);
  unsigned int size = info.size;
  data = info.data;
#else
  unsigned int size = GST_BUFFER_SIZE(buffer);
  data = GST_BUFFER_DATA(buffer);
#endif
  int32_t frames = (size / sizeof(AudioDataValue)) / mInfo.mAudio.mChannels;

  typedef AudioCompactor::NativeCopy GstCopy;
  mAudioCompactor.Push(offset,
                       timestamp,
                       mInfo.mAudio.mRate,
                       frames,
                       mInfo.mAudio.mChannels,
                       GstCopy(data,
                               size,
                               mInfo.mAudio.mChannels));
#if GST_VERSION_MAJOR >= 1
  gst_buffer_unmap(buffer, &info);
#endif

  gst_buffer_unref(buffer);

  return true;
}

bool GStreamerReader::DecodeVideoFrame(bool &aKeyFrameSkip,
                                       int64_t aTimeThreshold)
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");

  GstBuffer *buffer = nullptr;

  {
    ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);

    if (mReachedVideoEos && !mVideoSinkBufferCount) {
      return false;
    }

    /* Wait for something to be decoded before returning or continuing */
    if (!mVideoSinkBufferCount) {
      if (!mAudioSinkBufferCount) {
        /* We have nothing decoded so it makes no sense to return to the state machine
         * as it will call us back immediately, we'll return again and so on, wasting
         * CPU cycles for no job done. So, block here until there is either video or
         * audio data available
        */
        mon.Wait();
        if (!mVideoSinkBufferCount) {
          /* There is still no video data available, so either there is audio data or
           * something else has happened (Eos, etc...). Return to the state machine
           * to process it
           */
          return true;
        }
      }
      else {
        return true;
      }
    }

    mDecoder->NotifyDecodedFrames(0, 1);

#if GST_VERSION_MAJOR >= 1
    GstSample *sample = gst_app_sink_pull_sample(mVideoAppSink);
    buffer = gst_buffer_ref(gst_sample_get_buffer(sample));
    gst_sample_unref(sample);
#else
    buffer = gst_app_sink_pull_buffer(mVideoAppSink);
#endif
    mVideoSinkBufferCount--;
  }

  bool isKeyframe = !GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  if ((aKeyFrameSkip && !isKeyframe)) {
    gst_buffer_unref(buffer);
    return true;
  }

  int64_t timestamp = GST_BUFFER_TIMESTAMP(buffer);
  {
    ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
    timestamp = gst_segment_to_stream_time(&mVideoSegment,
                                           GST_FORMAT_TIME, timestamp);
  }
  NS_ASSERTION(GST_CLOCK_TIME_IS_VALID(timestamp),
               "frame has invalid timestamp");

  timestamp = GST_TIME_AS_USECONDS(timestamp);
  int64_t duration = 0;
  if (GST_CLOCK_TIME_IS_VALID(GST_BUFFER_DURATION(buffer)))
    duration = GST_TIME_AS_USECONDS(GST_BUFFER_DURATION(buffer));
  else if (fpsNum && fpsDen)
    /* add 1-frame duration */
    duration = gst_util_uint64_scale(GST_USECOND, fpsDen, fpsNum);

  if (timestamp < aTimeThreshold) {
    LOG(PR_LOG_DEBUG, "skipping frame %" GST_TIME_FORMAT
                      " threshold %" GST_TIME_FORMAT,
                      GST_TIME_ARGS(timestamp * 1000),
                      GST_TIME_ARGS(aTimeThreshold * 1000));
    gst_buffer_unref(buffer);
    return true;
  }

  if (!buffer)
    /* no more frames */
    return true;

#if GST_VERSION_MAJOR >= 1
  if (mConfigureAlignment && buffer->pool) {
    GstStructure *config = gst_buffer_pool_get_config(buffer->pool);
    GstVideoAlignment align;
    if (gst_buffer_pool_config_get_video_alignment(config, &align))
      gst_video_info_align(&mVideoInfo, &align);
    gst_structure_free(config);
    mConfigureAlignment = false;
  }
#endif

  nsRefPtr<PlanarYCbCrImage> image = GetImageFromBuffer(buffer);
  if (!image) {
    /* Ugh, upstream is not calling gst_pad_alloc_buffer(). Fall back to
     * allocating a PlanarYCbCrImage backed GstBuffer here and memcpy.
     */
    GstBuffer* tmp = nullptr;
    CopyIntoImageBuffer(buffer, &tmp, image);
    gst_buffer_unref(buffer);
    buffer = tmp;
  }

  int64_t offset = mDecoder->GetResource()->Tell(); // Estimate location in media.
  VideoData* video = VideoData::CreateFromImage(mInfo.mVideo,
                                                mDecoder->GetImageContainer(),
                                                offset, timestamp, duration,
                                                static_cast<Image*>(image.get()),
                                                isKeyframe, -1, mPicture);
  mVideoQueue.Push(video);

  gst_buffer_unref(buffer);

  return true;
}
Code example #10
static gboolean
hls_test_bus_message (InsanityGstPipelineTest * ptest, GstMessage * msg)
{
  switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_BUFFERING:
    {
      gint per;

      gst_message_parse_buffering (msg, &per);

      /* The first buffering happened properly; this is required to be able
       * to start seeking */
      if (G_UNLIKELY (glob_buffered == FALSE)) {
        if (per == 100) {
          insanity_test_validate_checklist_item (INSANITY_TEST (ptest),
              "done-buffering", TRUE, NULL);
          glob_buffered = TRUE;

          if (glob_buffering_timeout != 0) {
            g_source_remove (glob_buffering_timeout);
            glob_buffering_timeout = 0;
          }
        } else {
          glob_buffering_timeout = g_timeout_add (250,
              (GSourceFunc) buffering_timeout, INSANITY_TEST (ptest));
        }
      }

      break;
    }
    case GST_MESSAGE_STATE_CHANGED:
      if (GST_MESSAGE_SRC (msg) == GST_OBJECT (glob_pipeline)) {
        const char *validate_checklist_item = glob_validate_on_playing;
        GstState oldstate, newstate, pending;

        gst_message_parse_state_changed (msg, &oldstate, &newstate, &pending);
        if (newstate == GST_STATE_PAUSED && oldstate == GST_STATE_READY) {
          GstIterator *it;
          GValue v = { 0, };
          gboolean queried;
          InsanityTest *test = INSANITY_TEST (ptest);
          GstQuery *query = gst_query_new_latency ();
          const gchar *step_message = "Could not query latency\n";

          if ((queried = gst_element_query (glob_pipeline, query))) {
            gst_query_parse_latency (query, &glob_is_live, NULL, NULL);
            step_message = NULL;
          } else
            insanity_test_printf (test, "Could not query\n");

          insanity_gst_pipeline_test_set_live (ptest, glob_is_live);
          insanity_test_validate_checklist_item (test, "queried-live", queried,
              step_message);
          gst_query_unref (query);

          step_message = "Could not query seekable\n";
          query = gst_query_new_seeking (GST_FORMAT_TIME);
          if ((queried = gst_element_query (glob_pipeline, query))) {
            gst_query_parse_seeking (query, NULL, &glob_is_seekable, NULL,
                NULL);
            step_message = NULL;
          } else
            insanity_test_printf (test, "Could not query\n");

          insanity_test_validate_checklist_item (test, "queried-seekable",
              queried, step_message);
          gst_query_unref (query);

          /* Iterate over the bins to find a hlsdemux */
          it = gst_bin_iterate_recurse (GST_BIN (glob_pipeline));
          if (gst_iterator_find_custom (it, (GCompareFunc) find_hlsdemux, &v,
                  NULL)) {
            glob_hlsdemux = g_value_dup_object (&v);
          }
          g_value_unset (&v);
          gst_iterator_free (it);

          if (glob_hlsdemux != NULL) {
            insanity_test_validate_checklist_item (test, "protocol-is-hls",
                TRUE, "HLS protocol in use");

            gst_object_unref (glob_hlsdemux);
          } else {
            insanity_test_validate_checklist_item (test, "protocol-is-hls",
                FALSE, "HLS protocol in use");
            insanity_test_done (test);
          }

          /* Watch pipeline only if seekable */
          if (glob_is_seekable)
            watch_pipeline (ptest);

        } else if (newstate == GST_STATE_PLAYING
            && pending == GST_STATE_VOID_PENDING && validate_checklist_item) {
          glob_validate_on_playing = NULL;
          insanity_test_validate_checklist_item (INSANITY_TEST (ptest),
              validate_checklist_item, TRUE, NULL);
          /* let it run a couple seconds */
          glob_wait_time = hls_test_get_wait_time (INSANITY_TEST (ptest));
          glob_timer_id =
              g_timeout_add (250, (GSourceFunc) & wait_and_end_step,
              INSANITY_TEST (ptest));
        }
      }
      break;
    case GST_MESSAGE_EOS:
      return FALSE;
    default:
      break;
  }

  return TRUE;

}
Code example #11
/* This function looks for an input-selector and, if one is found,
   cycles through its sink pads */
static gboolean
check_track_selection (GstValidateMediaInfo * mi, gchar ** error_message)
{
  GstElement *playbin;
  GstElement *videosink, *audiosink;
  GstElement *input_selector = NULL;
  GstBus *bus;
  GstMessage *msg;
  gboolean ret = TRUE;
  GstStateChangeReturn state_ret;
  GstIterator *iterator;
  GstPad *original_pad;
  static const GstClockTime switch_delay = GST_SECOND * 5;
  GValue value = { 0, };

  playbin = gst_element_factory_make ("playbin", "fc-playbin");
  videosink = gst_element_factory_make ("fakesink", "fc-videosink");
  audiosink = gst_element_factory_make ("fakesink", "fc-audiosink");

  if (!playbin || !videosink || !audiosink) {
    *error_message = g_strdup ("Playbin and/or fakesink not available");
    return FALSE;
  }

  g_object_set (playbin, "video-sink", videosink, "audio-sink", audiosink,
      "uri", mi->uri, NULL);
  g_object_set (videosink, "sync", TRUE, NULL);
  g_object_set (audiosink, "sync", TRUE, NULL);

  bus = gst_pipeline_get_bus (GST_PIPELINE (playbin));

  state_ret = gst_element_set_state (playbin, GST_STATE_PAUSED);
  if (state_ret == GST_STATE_CHANGE_FAILURE) {
    *error_message = g_strdup ("Failed to change pipeline to paused");
    ret = FALSE;
    goto end;
  } else if (state_ret == GST_STATE_CHANGE_ASYNC) {
    msg =
        gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
        GST_MESSAGE_ASYNC_DONE | GST_MESSAGE_EOS | GST_MESSAGE_ERROR);
    if (msg && GST_MESSAGE_TYPE (msg) == GST_MESSAGE_ASYNC_DONE) {
      gst_message_unref (msg);
    } else {
      ret = FALSE;
      *error_message = g_strdup ("Playback finished unexpectedly");
      goto end;
    }
  }

  if (gst_element_set_state (playbin,
          GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
    *error_message = g_strdup ("Failed to set pipeline to playing");
    ret = FALSE;
    goto end;
  }

  iterator = gst_bin_iterate_recurse (GST_BIN (playbin));
  if (!gst_iterator_find_custom (iterator,
          (GCompareFunc) find_input_selector, &value, NULL)) {
    /* It's fine: an input-selector is only there if there are several
     * tracks of the same type */
    gst_iterator_free (iterator);
    input_selector = NULL;
    goto end;
  }
  input_selector = g_value_dup_object (&value);
  g_value_reset (&value);
  gst_iterator_free (iterator);
  g_object_get (input_selector, "active-pad", &original_pad, NULL);
  if (!original_pad) {
    /* Unexpected, log an error somehow ? */
    ret = FALSE;
    gst_object_unref (input_selector);
    input_selector = NULL;
    goto end;
  }

  /* Attach a buffer counter to each pad */
  setup_input_selector_counters (input_selector);

  while (1) {
    msg =
        gst_bus_timed_pop_filtered (bus, switch_delay,
        GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
    if (msg) {
      if (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_EOS) {
        /* all good */
        ret = TRUE;
      } else if (GST_MESSAGE_TYPE (msg) == GST_MESSAGE_ERROR) {
        GError *error = NULL;
        gchar *debug = NULL;

        gst_message_parse_error (msg, &error, &debug);
        *error_message = g_strdup_printf ("Playback error: %s : %s",
            error->message, debug);
        g_error_free (error);
        g_free (debug);

        ret = FALSE;
      } else {
        g_assert_not_reached ();
      }
      gst_message_unref (msg);
    } else {
      /* Timeout, switch track if we have more, or stop */
      GstPad *active_pad, *next_pad;

      g_object_get (input_selector, "active-pad", &active_pad, NULL);
      if (!active_pad) {
        *error_message =
            g_strdup ("Failed to get active-pad from input-selector");
        ret = FALSE;
        goto end;
      }
      next_pad = find_next_pad (input_selector, active_pad);
      gst_object_unref (active_pad);
      if (!next_pad) {
        ret = FALSE;
        goto end;
      }
      if (next_pad == original_pad) {
        goto end;
      }
      g_object_set (input_selector, "active-pad", next_pad, NULL);
      gst_object_unref (next_pad);
    }
  }

end:
  if (input_selector) {
    if (!check_and_remove_input_selector_counters (input_selector,
            error_message))
      ret = FALSE;
    gst_object_unref (input_selector);
  }
  gst_object_unref (bus);
  gst_element_set_state (playbin, GST_STATE_NULL);
  gst_object_unref (playbin);

  return ret;
}
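
find_next_pad() and the input-selector counter helpers are not part of the excerpt. A sketch of what find_next_pad() presumably does, namely walking the selector's sink pads and returning a new reference to the pad that follows the given one, wrapping around (the real gst-validate helper may differ):

static GstPad *
find_next_pad (GstElement * input_selector, GstPad * pad)
{
  GstPad *first = NULL, *next = NULL;   /* borrowed / owned respectively */
  gboolean take_next = FALSE, done = FALSE;
  GValue item = G_VALUE_INIT;
  GstIterator *it = gst_element_iterate_sink_pads (input_selector);

  while (!done && !next) {
    switch (gst_iterator_next (it, &item)) {
      case GST_ITERATOR_OK:{
        GstPad *cur = g_value_get_object (&item);

        if (!first)
          first = cur;          /* borrowed; kept alive by the selector */
        if (take_next)
          next = gst_object_ref (cur);
        take_next = (cur == pad);
        g_value_reset (&item);
        break;
      }
      case GST_ITERATOR_RESYNC:
        gst_iterator_resync (it);
        first = NULL;
        take_next = FALSE;
        break;
      default:                  /* DONE or ERROR */
        done = TRUE;
        break;
    }
  }

  /* `pad` was the last sink pad: wrap around to the first one */
  if (!next && take_next && first && first != pad)
    next = gst_object_ref (first);

  g_value_unset (&item);
  gst_iterator_free (it);
  return next;
}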
Code example #12
/*!
 * \brief CvVideoWriter_GStreamer::open
 * \param filename filename to output to
 * \param fourcc desired codec fourcc
 * \param fps desired framerate
 * \param frameSize the size of the expected frames
 * \param is_color color or grayscale
 * \return success
 *
 * We support 2 modes of operation. Either the user enters a filename and a fourcc
 * code, or enters a manual pipeline description as in CvVideoCapture_Gstreamer.
 * In the latter case, we just push frames into the appsrc with appropriate caps.
 * In the former case, we try to deduce the correct container from the filename,
 * and the correct encoder from the fourcc profile.
 *
 * If the file extension is not recognized, an AVI container is used.
 *
 */
bool CvVideoWriter_GStreamer::open( const char * filename, int fourcc,
                                    double fps, CvSize frameSize, bool is_color )
{
    CV_FUNCNAME("CvVideoWriter_GStreamer::open");

    // check arguments
    assert (filename);
    assert (fps > 0);
    assert (frameSize.width > 0  &&  frameSize.height > 0);

    // init gstreamer
    gst_initializer::init();

    // init vars
    bool manualpipeline = true;
    int  bufsize = 0;
    GError *err = NULL;
    const char* mime = NULL;
    GstStateChangeReturn stateret;

    GstCaps* caps = NULL;
    GstCaps* videocaps = NULL;

#if FULL_GST_VERSION >= VERSION_NUM(0,10,32)
    GstCaps* containercaps = NULL;
    GstEncodingContainerProfile* containerprofile = NULL;
    GstEncodingVideoProfile* videoprofile = NULL;
#endif

    GstIterator* it = NULL;
    gboolean done = FALSE;
    GstElement *element = NULL;
    gchar* name = NULL;
    GstElement* splitter = NULL;
    GstElement* combiner = NULL;

    // we first try to construct a pipeline from the given string.
    // if that fails, we assume it is an ordinary filename

    __BEGIN__;

    encodebin = gst_parse_launch(filename, &err);
    manualpipeline = (encodebin != NULL);

    if(manualpipeline)
    {
#if GST_VERSION_MAJOR == 0
        it = gst_bin_iterate_sources(GST_BIN(encodebin));
        if(gst_iterator_next(it, (gpointer *)&source) != GST_ITERATOR_OK) {
            CV_ERROR(CV_StsError, "GStreamer: cannot find appsrc in manual pipeline\n");
            return false;
        }
#else
        it = gst_bin_iterate_sources (GST_BIN(encodebin));
        GValue value = G_VALUE_INIT;

        while (!done) {
          switch (gst_iterator_next (it, &value)) {
            case GST_ITERATOR_OK:
              element = GST_ELEMENT (g_value_get_object (&value));
              name = gst_element_get_name(element);
              if (name){
                if(strstr(name, "opencvsrc") != NULL || strstr(name, "appsrc") != NULL) {
                  source = GST_ELEMENT ( gst_object_ref (element) );
                  done = TRUE;
                }
                g_free(name);
              }
              g_value_unset (&value);

              break;
            case GST_ITERATOR_RESYNC:
              gst_iterator_resync (it);
              break;
            case GST_ITERATOR_ERROR:
            case GST_ITERATOR_DONE:
              done = TRUE;
              break;
          }
        }
        gst_iterator_free (it);

        if (!source){
            CV_ERROR(CV_StsError, "GStreamer: cannot find appsrc in manual pipeline\n");
            return false;
        }
#endif
        pipeline = encodebin;
    }
    else
    {
        pipeline = gst_pipeline_new (NULL);

        // we just got a filename and a fourcc code.
        // first, try to guess the container from the filename
        //encodebin = gst_element_factory_make("encodebin", NULL);

        //proxy old, non-existent fourcc ids. These were used in previous opencv
        //versions, but do not exist in gstreamer any more
        if (fourcc == CV_FOURCC('M','P','1','V')) fourcc = CV_FOURCC('M', 'P', 'G' ,'1');
        if (fourcc == CV_FOURCC('M','P','2','V')) fourcc = CV_FOURCC('M', 'P', 'G' ,'2');
        if (fourcc == CV_FOURCC('D','R','A','C')) fourcc = CV_FOURCC('d', 'r', 'a' ,'c');


        //create encoder caps from fourcc

        videocaps = gst_riff_create_video_caps(fourcc, NULL, NULL, NULL, NULL, NULL);
        if (!videocaps){
            CV_ERROR( CV_StsUnsupportedFormat, "Gstreamer Opencv backend does not support this codec.");
        }

        //create container caps from file extension
        mime = filenameToMimetype(filename);
        if (!mime) {
            CV_ERROR( CV_StsUnsupportedFormat, "Gstreamer Opencv backend does not support this file type.");
        }

#if FULL_GST_VERSION >= VERSION_NUM(0,10,32)
        containercaps = gst_caps_from_string(mime);

        //create encodebin profile
        containerprofile = gst_encoding_container_profile_new("container", "container", containercaps, NULL);
        videoprofile = gst_encoding_video_profile_new(videocaps, NULL, NULL, 1);
        gst_encoding_container_profile_add_profile(containerprofile, (GstEncodingProfile *) videoprofile);
#endif

        //create pipeline elements
        encodebin = gst_element_factory_make("encodebin", NULL);

#if FULL_GST_VERSION >= VERSION_NUM(0,10,32)
        g_object_set(G_OBJECT(encodebin), "profile", containerprofile, NULL);
#endif
        source = gst_element_factory_make("appsrc", NULL);
        file = gst_element_factory_make("filesink", NULL);
        g_object_set(G_OBJECT(file), "location", filename, NULL);
    }

    if (is_color)
    {
        input_pix_fmt = GST_VIDEO_FORMAT_BGR;
        bufsize = frameSize.width * frameSize.height * 3;

#if GST_VERSION_MAJOR == 0
        caps = gst_video_format_new_caps(GST_VIDEO_FORMAT_BGR,
                                         frameSize.width,
                                         frameSize.height,
                                         int(fps), 1,
                                         1, 1);
#else
        caps = gst_caps_new_simple("video/x-raw",
                                   "format", G_TYPE_STRING, "BGR",
                                   "width", G_TYPE_INT, frameSize.width,
                                   "height", G_TYPE_INT, frameSize.height,
                                   "framerate", GST_TYPE_FRACTION, int(fps), 1,
                                   NULL);
        caps = gst_caps_fixate(caps);

#endif

    }
    else
    {
#if FULL_GST_VERSION >= VERSION_NUM(0,10,29)
        input_pix_fmt = GST_VIDEO_FORMAT_GRAY8;
        bufsize = frameSize.width * frameSize.height;

#if GST_VERSION_MAJOR == 0
        caps = gst_video_format_new_caps(GST_VIDEO_FORMAT_GRAY8,
                                         frameSize.width,
                                         frameSize.height,
                                         int(fps), 1,
                                         1, 1);
#else
        caps = gst_caps_new_simple("video/x-raw",
                                   "format", G_TYPE_STRING, "GRAY8",
                                   "width", G_TYPE_INT, frameSize.width,
                                   "height", G_TYPE_INT, frameSize.height,
                                   "framerate", GST_TYPE_FRACTION, int(fps), 1,
                                   NULL);
        caps = gst_caps_fixate(caps);
#endif
#else
        CV_Assert(!"Gstreamer 0.10.29 or newer is required for grayscale input");
#endif
    }

    gst_app_src_set_caps(GST_APP_SRC(source), caps);
    gst_app_src_set_stream_type(GST_APP_SRC(source), GST_APP_STREAM_TYPE_STREAM);
    gst_app_src_set_size (GST_APP_SRC(source), -1);

    g_object_set(G_OBJECT(source), "format", GST_FORMAT_TIME, NULL);
    g_object_set(G_OBJECT(source), "block", 1, NULL);
    g_object_set(G_OBJECT(source), "is-live", 0, NULL);


    if(!manualpipeline)
    {
        g_object_set(G_OBJECT(file), "buffer-size", bufsize, NULL);
        gst_bin_add_many(GST_BIN(pipeline), source, encodebin, file, NULL);
        if(!gst_element_link_many(source, encodebin, file, NULL)) {
            CV_ERROR(CV_StsError, "GStreamer: cannot link elements\n");
        }
    }

#if GST_VERSION_MAJOR == 0
    // HACK: remove streamsplitter and streamcombiner from
    // encodebin pipeline to prevent early EOF event handling.
    // We always fetch BGR or gray-scale frames, so the combiner->splitter
    // edge in the graph is useless.
    it = gst_bin_iterate_recurse (GST_BIN(encodebin));
    while (!done) {
      switch (gst_iterator_next (it, (void**)&element)) {
        case GST_ITERATOR_OK:
          name = gst_element_get_name(element);
          if (strstr(name, "streamsplitter"))
            splitter = element;
          else if (strstr(name, "streamcombiner"))
            combiner = element;
          break;
        case GST_ITERATOR_RESYNC:
          gst_iterator_resync (it);
          break;
        case GST_ITERATOR_ERROR:
          done = true;
          break;
        case GST_ITERATOR_DONE:
          done = true;
          break;
      }
    }

    gst_iterator_free (it);

    if (splitter && combiner)
    {
        gst_element_unlink(splitter, combiner);

        GstPad* src  = gst_element_get_pad(combiner, "src");
        GstPad* sink = gst_element_get_pad(combiner, "encodingsink");

        GstPad* srcPeer = gst_pad_get_peer(src);
        GstPad* sinkPeer = gst_pad_get_peer(sink);

        gst_pad_unlink(sinkPeer, sink);
        gst_pad_unlink(src, srcPeer);

        gst_pad_link(sinkPeer, srcPeer);

        src = gst_element_get_pad(splitter, "encodingsrc");
        sink = gst_element_get_pad(splitter, "sink");

        srcPeer = gst_pad_get_peer(src);
        sinkPeer = gst_pad_get_peer(sink);

        gst_pad_unlink(sinkPeer, sink);
        gst_pad_unlink(src, srcPeer);

        gst_pad_link(sinkPeer, srcPeer);
    }
#endif

    stateret = gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING);
    if(stateret  == GST_STATE_CHANGE_FAILURE) {
        handleMessage(pipeline);
        CV_ERROR(CV_StsError, "GStreamer: cannot put pipeline to play\n");
    }

    framerate = fps;
    num_frames = 0;

    handleMessage(pipeline);

    __END__;

    return true;
}
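
filenameToMimetype() is not part of the excerpt. Its role is to map the output file's extension to a container MIME type for the encodebin profile. A hedged illustration of that kind of mapping (the MIME strings and the helper name below are assumptions, not OpenCV's exact table):

/* Illustrative sketch only; the caller above treats NULL as unsupported. */
static const char *
filename_to_mimetype_sketch (const char *filename)
{
    const char *ext = strrchr (filename, '.');
    if (!ext)
        return NULL;
    if (g_ascii_strcasecmp (ext, ".avi") == 0)
        return "video/x-msvideo";
    if (g_ascii_strcasecmp (ext, ".mkv") == 0)
        return "video/x-matroska";
    if (g_ascii_strcasecmp (ext, ".ogv") == 0)
        return "application/ogg";
    if (g_ascii_strcasecmp (ext, ".mp4") == 0)
        return "video/quicktime";
    return NULL;
}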
Code example #13
GstBusSyncReply Gst_bus_call(GstBus * bus, GstMessage *msg, gpointer user_data)
{
	gchar * sourceName;
	
	// source
	GstObject * source;
	source = GST_MESSAGE_SRC(msg);
	
	if (!GST_IS_OBJECT(source))
		return GST_BUS_DROP;
	
	sourceName = gst_object_get_name(source);

	switch (GST_MESSAGE_TYPE(msg)) 
	{
		case GST_MESSAGE_EOS: 
		{
			g_message("End-of-stream");
			end_eof = 1;
			break;
		}
		
		case GST_MESSAGE_ERROR: 
		{
			gchar * debug;
			GError *err;
			gst_message_parse_error(msg, &err, &debug);
			g_free (debug);
			lt_info_c( "%s:%s - GST_MESSAGE_ERROR: %s (%i) from %s\n", FILENAME, __FUNCTION__, err->message, err->code, sourceName );
			if ( err->domain == GST_STREAM_ERROR )
			{
				if ( err->code == GST_STREAM_ERROR_CODEC_NOT_FOUND )
				{
					if ( g_strrstr(sourceName, "videosink") )
						lt_info_c( "%s:%s - GST_MESSAGE_ERROR: videosink\n", FILENAME, __FUNCTION__ ); //FIXME: how shall playback handle this event???
					else if ( g_strrstr(sourceName, "audiosink") )
						lt_info_c( "%s:%s - GST_MESSAGE_ERROR: audioSink\n", FILENAME, __FUNCTION__ ); //FIXME: how shall playback handle this event???
				}
			}
			g_error_free(err);

			end_eof = 1; 		// NOTE: just to exit
			
			break;
		}
		
		case GST_MESSAGE_INFO:
		{
			gchar *debug;
			GError *inf;
	
			gst_message_parse_info (msg, &inf, &debug);
			g_free (debug);
			if ( inf->domain == GST_STREAM_ERROR && inf->code == GST_STREAM_ERROR_DECODE )
			{
				if ( g_strrstr(sourceName, "videosink") )
					lt_info_c( "%s:%s - GST_MESSAGE_INFO: videosink\n", FILENAME, __FUNCTION__ ); //FIXME: how shall playback handle this event???
			}
			g_error_free(inf);
			break;
		}
		
		case GST_MESSAGE_TAG:
		{
			GstTagList *tags, *result;
			gst_message_parse_tag(msg, &tags);
	
			result = gst_tag_list_merge(m_stream_tags, tags, GST_TAG_MERGE_REPLACE);
			if (result)
			{
				if (m_stream_tags)
					gst_tag_list_free(m_stream_tags);
				m_stream_tags = result;
			}
	
			const GValue *gv_image = gst_tag_list_get_value_index(tags, GST_TAG_IMAGE, 0);
			if ( gv_image )
			{
				GstBuffer *buf_image;
				buf_image = gst_value_get_buffer (gv_image);
				int fd = open("/tmp/.id3coverart", O_CREAT|O_WRONLY|O_TRUNC, 0644);
				if(fd >= 0)
				{
					int ret = write(fd, GST_BUFFER_DATA(buf_image), GST_BUFFER_SIZE(buf_image));
					close(fd);
					lt_info_c( "%s:%s - GST_MESSAGE_INFO: cPlayback::state /tmp/.id3coverart %d bytes written\n", FILENAME, __FUNCTION__ , ret);
				}
				//FIXME: how shall playback handle this event???
			}
			gst_tag_list_free(tags);
			lt_info_c( "%s:%s - GST_MESSAGE_INFO: update info tags\n", FILENAME, __FUNCTION__);  //FIXME: how shall playback handle this event???
			break;
		}
		
		case GST_MESSAGE_STATE_CHANGED:
		{
			if(GST_MESSAGE_SRC(msg) != GST_OBJECT(m_gst_playbin))
				break;

			GstState old_state, new_state;
			gst_message_parse_state_changed(msg, &old_state, &new_state, NULL);
			
			if(old_state == new_state)
				break;
			lt_info_c( "%s:%s - GST_MESSAGE_STATE_CHANGED: state transition %s -> %s\n", FILENAME, __FUNCTION__, gst_element_state_get_name(old_state), gst_element_state_get_name(new_state));
		
			GstStateChange transition = (GstStateChange)GST_STATE_TRANSITION(old_state, new_state);
		
			switch(transition)
			{
				case GST_STATE_CHANGE_NULL_TO_READY:
				{
				}	break;
				case GST_STATE_CHANGE_READY_TO_PAUSED:
				{
					GstIterator *children;
					if (audioSink)
					{
						gst_object_unref(GST_OBJECT(audioSink));
						audioSink = NULL;
					}
					
					if (videoSink)
					{
						gst_object_unref(GST_OBJECT(videoSink));
						videoSink = NULL;
					}
					children = gst_bin_iterate_recurse(GST_BIN(m_gst_playbin));
					audioSink = GST_ELEMENT_CAST(gst_iterator_find_custom(children, (GCompareFunc)match_sinktype, (gpointer)"GstDVBAudioSink"));
					videoSink = GST_ELEMENT_CAST(gst_iterator_find_custom(children, (GCompareFunc)match_sinktype, (gpointer)"GstDVBVideoSink"));
					gst_iterator_free(children);
					
				}	break;
				case GST_STATE_CHANGE_PAUSED_TO_PLAYING:
				{
				}	break;
				case GST_STATE_CHANGE_PLAYING_TO_PAUSED:
				{
				}	break;
				case GST_STATE_CHANGE_PAUSED_TO_READY:
				{
					if (audioSink)
					{
						gst_object_unref(GST_OBJECT(audioSink));
						audioSink = NULL;
					}
					if (videoSink)
					{
						gst_object_unref(GST_OBJECT(videoSink));
						videoSink = NULL;
					}
				}	break;
				case GST_STATE_CHANGE_READY_TO_NULL:
				{
				}	break;
			}
			break;
		}
#if 0
		case GST_MESSAGE_ELEMENT:
		{
			if(gst_structure_has_name(gst_message_get_structure(msg), "prepare-xwindow-id")) 
			{
				// set window id
				gst_x_overlay_set_xwindow_id(GST_X_OVERLAY(GST_MESSAGE_SRC (msg)), glfb->getWindowID());
				
				// reshape window
				gst_x_overlay_set_render_rectangle(GST_X_OVERLAY(GST_MESSAGE_SRC (msg)), 0, 0, glfb->getOSDWidth(), glfb->getOSDHeight());
				
				// sync frames
				gst_x_overlay_expose(GST_X_OVERLAY(GST_MESSAGE_SRC (msg)));
			}
		}
#endif
		break;
		default:
			break;
	}

	return GST_BUS_DROP;
}
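
match_sinktype() is not shown above. Since this is GStreamer 0.10-style code, gst_iterator_find_custom() hands the raw element pointer to the compare function; a likely shape for it (an assumption, not the verbatim source) is:

static gint
match_sinktype (GstElement *element, gpointer type_name)
{
	/* strcmp equality (0) marks this element as the one we want */
	return strcmp (g_type_name (G_OBJECT_TYPE (element)),
	               (const char *) type_name);
}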