int AudioTestSource_i::serviceFunction()
{
    if (bus) {
        GstMessage* message = gst_bus_timed_pop_filtered(bus, GST_MSECOND, static_cast<GstMessageType>(GST_MESSAGE_EOS | GST_MESSAGE_ERROR));
        if (message == 0) {
            return NOOP;
        }

        switch (GST_MESSAGE_TYPE(message)) {

            case GST_MESSAGE_ERROR: {
                gchar *debug;
                GError *err;

                gst_message_parse_error(message, &err, &debug);
                LOG_ERROR(AudioTestSource_i, "GStreamer error: " << err->message);
                g_error_free(err);
                g_free(debug);
                break;
            }

            case GST_MESSAGE_EOS:
                LOG_DEBUG(AudioTestSource_i, "End of stream");
                break;

            default:
                LOG_INFO(AudioTestSource_i, "Received GStreamer message: " << gst_message_type_get_name(GST_MESSAGE_TYPE(message)));
                break;
        }

        /* messages popped from the bus are owned by the caller and must be released */
        gst_message_unref(message);
    }

    return NOOP;
}
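
The bus member polled above is assumed to be initialized elsewhere in the component. A minimal sketch of the setup this service loop relies on (pipeline description and function name are hypothetical):

/* Hypothetical setup: acquire the bus once so serviceFunction() can poll it
 * with gst_bus_timed_pop_filtered() and a short timeout. */
static GstElement *pipeline;
static GstBus *bus;

static void
setup_test_source (void)
{
  gst_init (NULL, NULL);
  pipeline = gst_parse_launch ("audiotestsrc ! fakesink", NULL);
  bus = gst_element_get_bus (pipeline);  /* owned; gst_object_unref() at teardown */
  gst_element_set_state (pipeline, GST_STATE_PLAYING);
}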
/* 
 * run_pipeline:
 * @pipe: the pipeline to run
 * @descr: the description for use in messages
 * @events: a mask of expected events
 * @tevent: the expected terminal event
 * @target_state: the state to drive the pipeline to
 *
 * The poll call will time out after half a second.
 */
static void
run_pipeline (GstElement * pipe, const gchar * descr,
    GstMessageType events, GstMessageType tevent, GstState target_state)
{
  GstBus *bus;
  GstMessage *message;
  GstMessageType revent;
  GstStateChangeReturn ret;

  g_assert (pipe);
  bus = gst_element_get_bus (pipe);
  g_assert (bus);

  fail_if (gst_element_set_state (pipe, target_state) ==
      GST_STATE_CHANGE_FAILURE, "Could not set pipeline %s to %s", descr,
      gst_element_state_get_name (target_state));
  ret = gst_element_get_state (pipe, NULL, NULL, 10 * GST_SECOND);
  if (ret == GST_STATE_CHANGE_ASYNC) {
    g_critical ("Pipeline '%s' failed to go to PAUSED fast enough", descr);
    goto done;
  } else if ((ret != GST_STATE_CHANGE_SUCCESS)
      && (ret != GST_STATE_CHANGE_NO_PREROLL)) {
    g_critical ("Pipeline '%s' failed to go into PAUSED state (%s)", descr,
        gst_element_state_change_return_get_name (ret));
    goto done;
  }

  while (1) {
    message = gst_bus_poll (bus, GST_MESSAGE_ANY, GST_SECOND / 2);

    /* always have to pop the message before getting back into poll */
    if (message) {
      revent = GST_MESSAGE_TYPE (message);
      gst_message_unref (message);
    } else {
      revent = GST_MESSAGE_UNKNOWN;
    }

    if (revent == tevent) {
      break;
    } else if (revent == GST_MESSAGE_UNKNOWN) {
      g_critical ("Unexpected timeout in gst_bus_poll, looking for %d: %s",
          tevent, descr);
      break;
    } else if (revent & events) {
      continue;
    }
    g_critical
        ("Unexpected message received of type %d, '%s', looking for %d: %s",
        revent, gst_message_type_get_name (revent), tevent, descr);
  }

done:
  fail_if (gst_element_set_state (pipe, GST_STATE_NULL) ==
      GST_STATE_CHANGE_FAILURE, "Could not set pipeline %s to NULL", descr);
  gst_element_get_state (pipe, NULL, NULL, GST_CLOCK_TIME_NONE);
  gst_object_unref (pipe);

  gst_bus_set_flushing (bus, TRUE);
  gst_object_unref (bus);
}
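
A plausible call site for the helper above, assuming a parse-launch test pipeline (the description string is illustrative):

/* Illustrative usage of run_pipeline(): tolerate routine clock/state/stream
 * traffic and treat EOS as the terminal message. */
static void
test_simple_pipeline (void)
{
  GstElement *pipe = gst_parse_launch ("fakesrc num-buffers=10 ! fakesink", NULL);

  run_pipeline (pipe, "fakesrc num-buffers=10 ! fakesink",
      GST_MESSAGE_NEW_CLOCK | GST_MESSAGE_STATE_CHANGED |
      GST_MESSAGE_STREAM_STATUS | GST_MESSAGE_ASYNC_DONE,
      GST_MESSAGE_EOS, GST_STATE_PLAYING);
}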
Example #3
static gboolean app_bus_callback(GstBus *bus, GstMessage *message, gpointer data)
{
    app_data_t     *app = data;
    GMainLoop      *loop = app->loop;

    switch (GST_MESSAGE_TYPE(message))
    {
    case GST_MESSAGE_ERROR:
    {
        GError     *err;
        gchar      *debug;

        /* ...dump error-message reported by the GStreamer */
        gst_message_parse_error (message, &err, &debug);
        TRACE(ERROR, _b("execution failed: %s"), err->message);
        g_error_free(err);
        g_free(debug);

        /* ...right now this is a fatal error */
        BUG(1, _x("breakpoint"));

        /* ...and terminate the loop */
        g_main_loop_quit(loop);
        break;
    }

    case GST_MESSAGE_EOS:
    {
        /* ...end-of-stream encountered; break the loop */
        TRACE(INFO, _b("execution completed"));
        g_main_loop_quit(loop);
        break;
    }

    case GST_MESSAGE_STATE_CHANGED:
    {
        /* ...state has changed; test if it is start or stop */
        if (GST_MESSAGE_SRC(message) == GST_OBJECT_CAST(app->pipe))
        {
            GstState        old, new, pending;
        
            /* ...parse state message */
            gst_message_parse_state_changed(message, &old, &new, &pending);

            TRACE(INFO, _b("transition from %d to %d"), old, new);
        }

        break;
    }
    
    default:
        /* ...ignore message */
        TRACE(0, _b("ignore message: %s"), gst_message_type_get_name(GST_MESSAGE_TYPE(message)));
        break;
    }

    /* ...keep the bus watch installed */
    return TRUE;
}
/*
 * run_pipeline:
 * @pipeline: the pipeline to run
 * @descr: the description for use in messages
 * @message_types: a mask of expected message types
 * @tmessage: the expected terminal message
 *
 * The poll call will time out after half a second.
 */
static void
run_pipeline (GstElement * pipeline, const gchar * descr,
    GstMessageType message_types, GstMessageType tmessage)
{
  GstBus *bus;
  GstMessageType rmessage;
  GstStateChangeReturn ret;

  fail_if (pipeline == NULL);
  bus = gst_element_get_bus (pipeline);
  fail_if (bus == NULL);

  GST_DEBUG ("running pipeline %s", descr);

  ret = gst_element_set_state (pipeline, GST_STATE_PLAYING);
  fail_if (ret == GST_STATE_CHANGE_FAILURE,
      "Could not set pipeline %s to PLAYING", descr);
  ret = gst_element_get_state (pipeline, NULL, NULL, GST_CLOCK_TIME_NONE);

  if (ret != GST_STATE_CHANGE_SUCCESS) {
    GST_WARNING ("have failed state change %d", ret);
    g_critical ("Couldn't set pipeline to PLAYING");
    goto done;
  }

  while (1) {
    GstMessage *message = gst_bus_poll (bus, GST_MESSAGE_ANY, GST_SECOND / 2);

    if (message) {
      rmessage = GST_MESSAGE_TYPE (message);
      gst_message_unref (message);
    } else {
      rmessage = GST_MESSAGE_UNKNOWN;
    }

    if (rmessage == tmessage) {
      break;
    } else if (rmessage == GST_MESSAGE_UNKNOWN) {
      g_critical ("Unexpected timeout in gst_bus_poll, looking for %d: %s",
          tmessage, descr);
      break;
    } else if (rmessage & message_types) {
      continue;
    }
    g_critical
        ("Unexpected message received of type %d, '%s', looking for %d: %s",
        rmessage, gst_message_type_get_name (rmessage), tmessage, descr);
  }

done:
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  gst_object_unref (bus);
}
/*
 * run_pipeline:
 * @pipe: the pipeline to run
 * @descr: the description for use in messages
 * @events: a mask of expected events
 * @tevent: the expected terminal event
 *
 * The poll call will time out after half a second.
 */
static void
run_pipeline (GstElement * pipe, const gchar * descr,
    GstMessageType events, GstMessageType tevent)
{
  GstBus *bus;
  GstMessage *message;
  GstMessageType revent;
  GstStateChangeReturn ret;

  g_assert (pipe);
  bus = gst_element_get_bus (pipe);
  g_assert (bus);

  ret = gst_element_set_state (pipe, GST_STATE_PLAYING);
  g_assert (ret != GST_STATE_CHANGE_FAILURE);
  ret = gst_element_get_state (pipe, NULL, NULL, GST_CLOCK_TIME_NONE);
  if (ret != GST_STATE_CHANGE_SUCCESS) {
    g_critical ("Couldn't set pipeline to PLAYING");
    goto done;
  }

  while (1) {
    message = gst_bus_poll (bus, GST_MESSAGE_ANY, GST_SECOND / 2);

    /* always have to pop the message before getting back into poll */
    if (message) {
      revent = GST_MESSAGE_TYPE (message);
      gst_message_unref (message);
    } else {
      revent = GST_MESSAGE_UNKNOWN;
    }

    if (revent == tevent) {
      break;
    } else if (revent == GST_MESSAGE_UNKNOWN) {
      g_critical ("Unexpected timeout in gst_bus_poll, looking for %d: %s",
          tevent, descr);
      break;
    } else if (revent & events) {
      continue;
    }
    g_critical
        ("Unexpected message received of type %d, '%s', looking for %d: %s",
        revent, gst_message_type_get_name (revent), tevent, descr);
  }

done:
  gst_element_set_state (pipe, GST_STATE_NULL);
  gst_object_unref (bus);
  gst_object_unref (pipe);
}
static gboolean
bus_message (GstBus * bus, GstMessage * message, App * app)
{
  GST_DEBUG ("got message %s \n",
      gst_message_type_get_name (GST_MESSAGE_TYPE (message)));

  switch (GST_MESSAGE_TYPE (message)) {
    case GST_MESSAGE_EOS:
      g_main_loop_quit (app->loop);
      break;
    default:
      break;
  }
  return TRUE;
}
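
A handler like bus_message() is typically installed as a bus watch and driven by a GLib main loop; a minimal sketch (the App fields follow the usage above, everything else is assumed):

/* Sketch: install bus_message() as a watch and spin the loop until the
 * handler quits it on EOS. */
static void
run_until_eos (GstElement * pipeline, App * app)
{
  GstBus *bus = gst_element_get_bus (pipeline);

  app->loop = g_main_loop_new (NULL, FALSE);
  gst_bus_add_watch (bus, (GstBusFunc) bus_message, app);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  g_main_loop_run (app->loop);   /* returns once bus_message() sees EOS */

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_bus_remove_watch (bus);
  gst_object_unref (bus);
  g_main_loop_unref (app->loop);
}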
Example #7
static void
test_launch_bt_dec (BT_TEST_ARGS)
{
  BT_TEST_START;
  GST_INFO ("-- arrange --");
  guint pix = _i >> 1;
  guint fix = _i & 1;
  gchar *str = g_strdup_printf (bt_dec_pipelines[pix],
      check_get_test_song_path (bt_dec_files[fix]));
  GstElement *pipeline = gst_parse_launch (str, NULL);
  GstMessageType message_types =
      GST_MESSAGE_NEW_CLOCK | GST_MESSAGE_STATE_CHANGED |
      GST_MESSAGE_STREAM_STATUS | GST_MESSAGE_ASYNC_DONE |
      GST_MESSAGE_STREAM_START | GST_MESSAGE_TAG;
  GstMessageType tmessage = GST_MESSAGE_EOS;

  GST_INFO ("-- act --");
  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  GstStateChangeReturn ret = gst_element_get_state (pipeline, NULL, NULL,
      GST_CLOCK_TIME_NONE);

  GST_INFO ("-- assert --");
  fail_unless (ret == GST_STATE_CHANGE_SUCCESS,
      "Couldn't set pipeline to PLAYING: %s",
      gst_element_state_change_return_get_name (ret));

  GstBus *bus = gst_element_get_bus (pipeline);
  while (1) {
    GstMessageType rmessage = get_message_type (bus);
    if (rmessage == tmessage) {
      break;
    } else if (rmessage == GST_MESSAGE_UNKNOWN) {
      fail ("Unexpected timeout in gst_bus_poll, looking for %d", tmessage);
      break;
    } else if (rmessage & message_types) {
      continue;
    }
    fail ("Unexpected message received of type %d, '%s', looking for %d",
        rmessage, gst_message_type_get_name (rmessage), tmessage);
  }

  GST_INFO ("-- cleanup --");
  gst_object_unref (bus);
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  g_free (str);
  BT_TEST_END;
}
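
The get_message_type() helper called above is not shown in this excerpt; given the half-second polling pattern used by the other tests here, it presumably looks like this sketch:

/* Presumed shape of get_message_type(): poll the bus briefly and map a
 * timeout to GST_MESSAGE_UNKNOWN, unreffing any popped message. */
static GstMessageType
get_message_type (GstBus * bus)
{
  GstMessageType type = GST_MESSAGE_UNKNOWN;
  GstMessage *message = gst_bus_poll (bus, GST_MESSAGE_ANY, GST_SECOND / 2);

  if (message) {
    type = GST_MESSAGE_TYPE (message);
    gst_message_unref (message);
  }
  return type;
}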
gboolean bus_callback(GstBus* sender, GstMessage* message, void* data)
{
  gPlay* gplay = reinterpret_cast<gPlay*> (data);
  
  switch (GST_MESSAGE_TYPE (message)) {
    case GST_MESSAGE_STATE_CHANGED:
    {
      GstState newState;
      gst_message_parse_state_changed(message, NULL, &newState, NULL);

      std::string message_name(GST_MESSAGE_SRC_NAME(message)); // TODO: avoid this copy using glib

      if (message_name.compare("playbin") == 0) {
        gplay->on_state_changed(newState);
      }
      break;
    }
      
    case GST_MESSAGE_TAG:
    {
      GstTagList* tag_list = 0;
      gst_message_parse_tag(message, &tag_list);
      Track t;
      track_from_tag(tag_list, &t);
      gplay->on_tag_found(t);
      gst_tag_list_free(tag_list);
    }
      break;
      
    case GST_MESSAGE_EOS:
      gplay->on_eos();
      break;
      
    case GST_MESSAGE_STREAM_STATUS:
      GstStreamStatusType message_type;
      gst_message_parse_stream_status(message, &message_type, NULL);
      g_print("Stream status: %d\n", message_type);
      break;
      
    default:
      g_print("Message from %s: %s\n", GST_MESSAGE_SRC_NAME(message), gst_message_type_get_name(GST_MESSAGE_TYPE(message)));
      break;
  }
  
  // No unref needed here: a bus watch callback does not own the message.
  return true;
}
Example #9
static gboolean
gst_bus_message (GstBus * bus, GstMessage * message, void *unused)
{
    (void)bus;
    (void)unused;

    DEBUGF("    [gst] got BUS message %s\n",
        gst_message_type_get_name (GST_MESSAGE_TYPE (message)));

    switch (GST_MESSAGE_TYPE (message)) {
        case GST_MESSAGE_ERROR:
        {
            GError *err;
            gchar *debug;
            gst_message_parse_error (message, &err, &debug);

            DEBUGF("[gst] Received error: Src: %s, msg: %s\n",
                GST_MESSAGE_SRC_NAME(message), err->message);

            g_error_free (err);
            g_free (debug);

            g_main_loop_quit (pcm_loop);
            break;
        }
        case GST_MESSAGE_EOS:
            gst_element_set_state (GST_ELEMENT(gst_pipeline), GST_STATE_NULL);
            break;
        case GST_MESSAGE_STATE_CHANGED:
        {
            GstState old_state, new_state;

            gst_message_parse_state_changed (message, &old_state, &new_state, NULL);
            DEBUGF("[gst] Element %s changed state from %s to %s.\n",
                GST_MESSAGE_SRC_NAME(message),
                gst_element_state_get_name (old_state),
                gst_element_state_get_name (new_state));
            break;
        }
        default:
            break;
    }

    return TRUE;
}
Example #10
static void
message_received (GstBus * bus, GstMessage * message, GstPipeline * pipeline)
{
  const GstStructure *s;

  s = gst_message_get_structure (message);
  g_print ("message from \"%s\" (%s): ",
      GST_STR_NULL (GST_ELEMENT_NAME (GST_MESSAGE_SRC (message))),
      gst_message_type_get_name (GST_MESSAGE_TYPE (message)));
  if (s) {
    gchar *sstr;

    sstr = gst_structure_to_string (s);
    g_print ("%s\n", sstr);
    g_free (sstr);
  } else {
    g_print ("no message details\n");
  }
}
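
message_received() matches the GstBus "message" signal signature; a sketch of wiring it up with a signal watch (the choice of detail strings to connect is an assumption):

/* Sketch: route EOS and error bus signals to message_received(). Requires a
 * running GLib main loop to dispatch the signal watch. */
static void
attach_message_logger (GstPipeline * pipeline)
{
  GstBus *bus = gst_element_get_bus (GST_ELEMENT (pipeline));

  gst_bus_add_signal_watch (bus);
  g_signal_connect (bus, "message::eos", G_CALLBACK (message_received), pipeline);
  g_signal_connect (bus, "message::error", G_CALLBACK (message_received), pipeline);
  gst_object_unref (bus);
}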
bool GstPipelineWrapper::GstMessageParser(GstBus* bus, GstMessage* msg, GstPipelineWrapper* pipeline)
{
	if (!pipeline->get_is_verbose()) return true;

	if (msg != NULL) 
	{
		GError*	err 		= 0;
		gchar*	debug_info 	= 0;
     
		switch (GST_MESSAGE_TYPE (msg)) 
		{
			case GST_MESSAGE_ERROR:
				gst_message_parse_error (msg, &err, &debug_info);
				g_printerr ("Error received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
				g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
				g_clear_error (&err); g_free (debug_info);
				break;
				
			case GST_MESSAGE_WARNING:
				gst_message_parse_warning(msg, &err, &debug_info);
				g_printerr ("Warning received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
				g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
				g_clear_error (&err); g_free (debug_info);
				break;
		
			case GST_MESSAGE_INFO:
				gst_message_parse_info(msg, &err, &debug_info);
				g_printerr ("Info received from element %s: %s\n", GST_OBJECT_NAME (msg->src), err->message);
				g_printerr ("Debugging information: %s\n", debug_info ? debug_info : "none");
				g_clear_error (&err); g_free (debug_info);
				break;
				
			case GST_MESSAGE_EOS:
				g_print ("End-Of-Stream reached.\n");
				break;
						
			case GST_MESSAGE_STATE_CHANGED:
				GstState old_state, new_state;
				gst_message_parse_state_changed(msg, &old_state, &new_state, 0);
				g_print ("Element %s changed state from %s to %s.\n", GST_OBJECT_NAME (msg->src), gst_element_state_get_name (old_state),gst_element_state_get_name (new_state));
				break;
				
			case GST_MESSAGE_QOS:
				break;
			
			case GST_MESSAGE_STREAM_STATUS:
			
				GstStreamStatusType stream_status_type;
				GstElement*			owner;
				const gchar*		stream_status_type_string;
				gst_message_parse_stream_status(msg, &stream_status_type, &owner);
				
				switch (stream_status_type)
				{
					case GST_STREAM_STATUS_TYPE_CREATE	: stream_status_type_string = "CREATE"; break;
					case GST_STREAM_STATUS_TYPE_ENTER	: stream_status_type_string = "ENTER"; break;
					case GST_STREAM_STATUS_TYPE_LEAVE	: stream_status_type_string = "LEAVE"; break;
					case GST_STREAM_STATUS_TYPE_DESTROY : stream_status_type_string = "DESTROY"; break;

					case GST_STREAM_STATUS_TYPE_START	: stream_status_type_string = "START"; break;
					case GST_STREAM_STATUS_TYPE_PAUSE	: stream_status_type_string = "PAUSE"; break;
					case GST_STREAM_STATUS_TYPE_STOP 	: stream_status_type_string = "STOP"; break;
					default								: stream_status_type_string = "UNKNOWN"; break;
				}
				
				g_printerr ("STREAM STATUS received from element %s: %s\n", GST_OBJECT_NAME (owner), stream_status_type_string);
				//g_free (stream_status_type_string);
				break;
			
			default:
				g_printerr ("Unparsed message received of type: %s\n", gst_message_type_get_name(GST_MESSAGE_TYPE(msg)));
				break;
		}
	}
	return true;
}
Example #12
nsresult GStreamerReader::ReadMetadata(MediaInfo* aInfo,
                                       MetadataTags** aTags)
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
  nsresult ret = NS_OK;

  /*
   * Parse MP3 headers before we kick off the GStreamer pipeline otherwise there
   * might be concurrent stream operations happening on both decoding and gstreamer
   * threads which will screw the GStreamer state machine.
   */
  bool isMP3 = mDecoder->GetResource()->GetContentType().EqualsASCII(AUDIO_MP3);
  if (isMP3) {
    ParseMP3Headers();
  }


  /* We do 3 attempts here: decoding audio and video, decoding video only,
   * decoding audio only. This allows us to play streams that have one broken
   * stream but that are otherwise decodeable.
   */
  guint flags[3] = {GST_PLAY_FLAG_VIDEO|GST_PLAY_FLAG_AUDIO,
    static_cast<guint>(~GST_PLAY_FLAG_AUDIO), static_cast<guint>(~GST_PLAY_FLAG_VIDEO)};
  guint default_flags, current_flags;
  g_object_get(mPlayBin, "flags", &default_flags, nullptr);

  GstMessage* message = nullptr;
  for (unsigned int i = 0; i < G_N_ELEMENTS(flags); i++) {
    current_flags = default_flags & flags[i];
    g_object_set(G_OBJECT(mPlayBin), "flags", current_flags, nullptr);

    /* reset filter caps to ANY */
    GstCaps* caps = gst_caps_new_any();
    GstElement* filter = gst_bin_get_by_name(GST_BIN(mAudioSink), "filter");
    g_object_set(filter, "caps", caps, nullptr);
    gst_object_unref(filter);

    filter = gst_bin_get_by_name(GST_BIN(mVideoSink), "filter");
    g_object_set(filter, "caps", caps, nullptr);
    gst_object_unref(filter);
    gst_caps_unref(caps);
    filter = nullptr;

    if (!(current_flags & GST_PLAY_FLAG_AUDIO))
      filter = gst_bin_get_by_name(GST_BIN(mAudioSink), "filter");
    else if (!(current_flags & GST_PLAY_FLAG_VIDEO))
      filter = gst_bin_get_by_name(GST_BIN(mVideoSink), "filter");

    if (filter) {
      /* Little trick: set the target caps to "skip" so that playbin2 fails to
       * find a decoder for the stream we want to skip.
       */
      GstCaps* filterCaps = gst_caps_new_simple ("skip", nullptr, nullptr);
      g_object_set(filter, "caps", filterCaps, nullptr);
      gst_caps_unref(filterCaps);
      gst_object_unref(filter);
    }

    LOG(PR_LOG_DEBUG, "starting metadata pipeline");
    if (gst_element_set_state(mPlayBin, GST_STATE_PAUSED) == GST_STATE_CHANGE_FAILURE) {
      LOG(PR_LOG_DEBUG, "metadata pipeline state change failed");
      ret = NS_ERROR_FAILURE;
      continue;
    }

    /* Wait for ASYNC_DONE, which is emitted when the pipeline is built,
     * prerolled and ready to play. Also watch for errors.
     */
    message = gst_bus_timed_pop_filtered(mBus, GST_CLOCK_TIME_NONE,
                 (GstMessageType)(GST_MESSAGE_ASYNC_DONE | GST_MESSAGE_ERROR | GST_MESSAGE_EOS));
    if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_ASYNC_DONE) {
      LOG(PR_LOG_DEBUG, "read metadata pipeline prerolled");
      gst_message_unref(message);
      ret = NS_OK;
      break;
    } else {
      LOG(PR_LOG_DEBUG, "read metadata pipeline failed to preroll: %s",
            gst_message_type_get_name (GST_MESSAGE_TYPE (message)));

      if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_ERROR) {
        GError* error;
        gchar* debug;
        gst_message_parse_error(message, &error, &debug);
        LOG(PR_LOG_ERROR, "read metadata error: %s: %s", error->message, debug);
        g_error_free(error);
        g_free(debug);
      }
      /* Unexpected stream close/EOS or other error. We'll give up if all
       * streams are in error/eos. */
      gst_element_set_state(mPlayBin, GST_STATE_NULL);
      gst_message_unref(message);
      ret = NS_ERROR_FAILURE;
    }
  }

  if (NS_SUCCEEDED(ret))
    ret = CheckSupportedFormats();

  if (NS_FAILED(ret))
    /* we couldn't get this to play */
    return ret;

  /* report the duration */
  gint64 duration;

  if (isMP3 && mMP3FrameParser.IsMP3()) {
    // The MP3FrameParser has reported a duration; use that over the gstreamer
    // reported duration for inter-platform consistency.
    ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
    mUseParserDuration = true;
    mLastParserDuration = mMP3FrameParser.GetDuration();
    mDecoder->SetMediaDuration(mLastParserDuration);
  } else {
    LOG(PR_LOG_DEBUG, "querying duration");
    // Otherwise use the gstreamer duration.
#if GST_VERSION_MAJOR >= 1
    if (gst_element_query_duration(GST_ELEMENT(mPlayBin),
          GST_FORMAT_TIME, &duration)) {
#else
    GstFormat format = GST_FORMAT_TIME;
    if (gst_element_query_duration(GST_ELEMENT(mPlayBin),
      &format, &duration) && format == GST_FORMAT_TIME) {
#endif
      ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
      LOG(PR_LOG_DEBUG, "have duration %" GST_TIME_FORMAT, GST_TIME_ARGS(duration));
      duration = GST_TIME_AS_USECONDS (duration);
      mDecoder->SetMediaDuration(duration);
    }
  }

  int n_video = 0, n_audio = 0;
  g_object_get(mPlayBin, "n-video", &n_video, "n-audio", &n_audio, nullptr);
  mInfo.mVideo.mHasVideo = n_video != 0;
  mInfo.mAudio.mHasAudio = n_audio != 0;

  *aInfo = mInfo;

  *aTags = nullptr;

  // Watch the pipeline for fatal errors
#if GST_VERSION_MAJOR >= 1
  gst_bus_set_sync_handler(mBus, GStreamerReader::ErrorCb, this, nullptr);
#else
  gst_bus_set_sync_handler(mBus, GStreamerReader::ErrorCb, this);
#endif

  /* set the pipeline to PLAYING so that it starts decoding and queueing data in
   * the appsinks */
  gst_element_set_state(mPlayBin, GST_STATE_PLAYING);

  return NS_OK;
}

bool
GStreamerReader::IsMediaSeekable()
{
  if (mUseParserDuration) {
    return true;
  }

  gint64 duration;
#if GST_VERSION_MAJOR >= 1
  if (gst_element_query_duration(GST_ELEMENT(mPlayBin), GST_FORMAT_TIME,
                                 &duration)) {
#else
  GstFormat format = GST_FORMAT_TIME;
  if (gst_element_query_duration(GST_ELEMENT(mPlayBin), &format, &duration) &&
      format == GST_FORMAT_TIME) {
#endif
    return true;
  }

  return false;
}

nsresult GStreamerReader::CheckSupportedFormats()
{
  bool done = false;
  bool unsupported = false;

  GstIterator* it = gst_bin_iterate_recurse(GST_BIN(mPlayBin));
  while (!done) {
    GstIteratorResult res;
    GstElement* element;

#if GST_VERSION_MAJOR >= 1
    GValue value = {0,};
    res = gst_iterator_next(it, &value);
#else
    res = gst_iterator_next(it, (void **) &element);
#endif
    switch(res) {
      case GST_ITERATOR_OK:
      {
#if GST_VERSION_MAJOR >= 1
        element = GST_ELEMENT (g_value_get_object (&value));
#endif
        GstElementFactory* factory = gst_element_get_factory(element);
        if (factory) {
          const char* klass = gst_element_factory_get_klass(factory);
          GstPad* pad = gst_element_get_static_pad(element, "sink");
          if (pad) {
            GstCaps* caps;

#if GST_VERSION_MAJOR >= 1
            caps = gst_pad_get_current_caps(pad);
#else
            caps = gst_pad_get_negotiated_caps(pad);
#endif

            if (caps) {
              /* check for demuxers but ignore elements like id3demux */
              if (strstr (klass, "Demuxer") && !strstr(klass, "Metadata"))
                unsupported = !GStreamerFormatHelper::Instance()->CanHandleContainerCaps(caps);
              else if (strstr (klass, "Decoder") && !strstr(klass, "Generic"))
                unsupported = !GStreamerFormatHelper::Instance()->CanHandleCodecCaps(caps);

              gst_caps_unref(caps);
            }
            gst_object_unref(pad);
          }
        }

#if GST_VERSION_MAJOR >= 1
        g_value_unset (&value);
#else
        gst_object_unref(element);
#endif
        done = unsupported;
        break;
      }
      case GST_ITERATOR_RESYNC:
        unsupported = false;
        done = false;
        break;
      case GST_ITERATOR_ERROR:
        done = true;
        break;
      case GST_ITERATOR_DONE:
        done = true;
        break;
    }
  }

  return unsupported ? NS_ERROR_FAILURE : NS_OK;
}

nsresult GStreamerReader::ResetDecode()
{
  nsresult res = NS_OK;

  LOG(PR_LOG_DEBUG, "reset decode");

  if (NS_FAILED(MediaDecoderReader::ResetDecode())) {
    res = NS_ERROR_FAILURE;
  }

  mVideoQueue.Reset();
  mAudioQueue.Reset();

  mVideoSinkBufferCount = 0;
  mAudioSinkBufferCount = 0;
  mReachedAudioEos = false;
  mReachedVideoEos = false;
#if GST_VERSION_MAJOR >= 1
  mConfigureAlignment = true;
#endif

  LOG(PR_LOG_DEBUG, "reset decode done");

  return res;
}

bool GStreamerReader::DecodeAudioData()
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");

  GstBuffer *buffer = nullptr;

  {
    ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);

    if (mReachedAudioEos && !mAudioSinkBufferCount) {
      return false;
    }

    /* Wait something to be decoded before return or continue */
    if (!mAudioSinkBufferCount) {
      if(!mVideoSinkBufferCount) {
        /* We have nothing decoded so it makes no sense to return to the state machine
         * as it will call us back immediately, we'll return again and so on, wasting
         * CPU cycles for no job done. So, block here until there is either video or
         * audio data available
        */
        mon.Wait();
        if (!mAudioSinkBufferCount) {
          /* There is still no audio data available, so either there is video data or
           * something else has happened (Eos, etc...). Return to the state machine
           * to process it.
           */
          return true;
        }
      }
      else {
        return true;
      }
    }

#if GST_VERSION_MAJOR >= 1
    GstSample *sample = gst_app_sink_pull_sample(mAudioAppSink);
    buffer = gst_buffer_ref(gst_sample_get_buffer(sample));
    gst_sample_unref(sample);
#else
    buffer = gst_app_sink_pull_buffer(mAudioAppSink);
#endif

    mAudioSinkBufferCount--;
  }

  int64_t timestamp = GST_BUFFER_TIMESTAMP(buffer);
  {
    ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
    timestamp = gst_segment_to_stream_time(&mAudioSegment,
                                           GST_FORMAT_TIME, timestamp);
  }
  timestamp = GST_TIME_AS_USECONDS(timestamp);

  int64_t offset = GST_BUFFER_OFFSET(buffer);
  guint8* data;
#if GST_VERSION_MAJOR >= 1
  GstMapInfo info;
  gst_buffer_map(buffer, &info, GST_MAP_READ);
  unsigned int size = info.size;
  data = info.data;
#else
  unsigned int size = GST_BUFFER_SIZE(buffer);
  data = GST_BUFFER_DATA(buffer);
#endif
  int32_t frames = (size / sizeof(AudioDataValue)) / mInfo.mAudio.mChannels;

  typedef AudioCompactor::NativeCopy GstCopy;
  mAudioCompactor.Push(offset,
                       timestamp,
                       mInfo.mAudio.mRate,
                       frames,
                       mInfo.mAudio.mChannels,
                       GstCopy(data,
                               size,
                               mInfo.mAudio.mChannels));
#if GST_VERSION_MAJOR >= 1
  gst_buffer_unmap(buffer, &info);
#endif

  gst_buffer_unref(buffer);

  return true;
}

bool GStreamerReader::DecodeVideoFrame(bool &aKeyFrameSkip,
                                       int64_t aTimeThreshold)
{
  NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");

  GstBuffer *buffer = nullptr;

  {
    ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);

    if (mReachedVideoEos && !mVideoSinkBufferCount) {
      return false;
    }

    /* Wait something to be decoded before return or continue */
    if (!mVideoSinkBufferCount) {
      if (!mAudioSinkBufferCount) {
        /* We have nothing decoded so it makes no sense to return to the state machine
         * as it will call us back immediately, we'll return again and so on, wasting
         * CPU cycles for no job done. So, block here until there is either video or
         * audio data available
        */
        mon.Wait();
        if (!mVideoSinkBufferCount) {
          /* There is still no video data available, so either there is audio data or
           * something else has happened (Eos, etc...). Return to the state machine
           * to process it
           */
          return true;
        }
      }
      else {
        return true;
      }
    }

    mDecoder->NotifyDecodedFrames(0, 1);

#if GST_VERSION_MAJOR >= 1
    GstSample *sample = gst_app_sink_pull_sample(mVideoAppSink);
    buffer = gst_buffer_ref(gst_sample_get_buffer(sample));
    gst_sample_unref(sample);
#else
    buffer = gst_app_sink_pull_buffer(mVideoAppSink);
#endif
    mVideoSinkBufferCount--;
  }

  bool isKeyframe = !GST_BUFFER_FLAG_IS_SET(buffer, GST_BUFFER_FLAG_DELTA_UNIT);
  if ((aKeyFrameSkip && !isKeyframe)) {
    gst_buffer_unref(buffer);
    return true;
  }

  int64_t timestamp = GST_BUFFER_TIMESTAMP(buffer);
  {
    ReentrantMonitorAutoEnter mon(mGstThreadsMonitor);
    timestamp = gst_segment_to_stream_time(&mVideoSegment,
                                           GST_FORMAT_TIME, timestamp);
  }
  NS_ASSERTION(GST_CLOCK_TIME_IS_VALID(timestamp),
               "frame has invalid timestamp");

  timestamp = GST_TIME_AS_USECONDS(timestamp);
  int64_t duration = 0;
  if (GST_CLOCK_TIME_IS_VALID(GST_BUFFER_DURATION(buffer)))
    duration = GST_TIME_AS_USECONDS(GST_BUFFER_DURATION(buffer));
  else if (fpsNum && fpsDen)
    /* add 1-frame duration */
    duration = gst_util_uint64_scale(GST_USECOND, fpsDen, fpsNum);

  if (timestamp < aTimeThreshold) {
    LOG(PR_LOG_DEBUG, "skipping frame %" GST_TIME_FORMAT
                      " threshold %" GST_TIME_FORMAT,
                      GST_TIME_ARGS(timestamp * 1000),
                      GST_TIME_ARGS(aTimeThreshold * 1000));
    gst_buffer_unref(buffer);
    return true;
  }

  if (!buffer)
    /* no more frames */
    return true;

#if GST_VERSION_MAJOR >= 1
  if (mConfigureAlignment && buffer->pool) {
    GstStructure *config = gst_buffer_pool_get_config(buffer->pool);
    GstVideoAlignment align;
    if (gst_buffer_pool_config_get_video_alignment(config, &align))
      gst_video_info_align(&mVideoInfo, &align);
    gst_structure_free(config);
    mConfigureAlignment = false;
  }
#endif

  nsRefPtr<PlanarYCbCrImage> image = GetImageFromBuffer(buffer);
  if (!image) {
    /* Ugh, upstream is not calling gst_pad_alloc_buffer(). Fallback to
     * allocating a PlanarYCbCrImage backed GstBuffer here and memcpy.
     */
    GstBuffer* tmp = nullptr;
    CopyIntoImageBuffer(buffer, &tmp, image);
    gst_buffer_unref(buffer);
    buffer = tmp;
  }

  int64_t offset = mDecoder->GetResource()->Tell(); // Estimate location in media.
  VideoData* video = VideoData::CreateFromImage(mInfo.mVideo,
                                                mDecoder->GetImageContainer(),
                                                offset, timestamp, duration,
                                                static_cast<Image*>(image.get()),
                                                isKeyframe, -1, mPicture);
  mVideoQueue.Push(video);

  gst_buffer_unref(buffer);

  return true;
}
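
ReadMetadata() and IsMediaSeekable() above repeat the same version-guarded duration query; a small helper capturing that pattern could look like this (a sketch, not part of the original reader):

/* Sketch: one home for the GStreamer 0.10 / 1.x duration-query split. */
static gboolean
QueryDurationCompat (GstElement * element, gint64 * duration)
{
#if GST_VERSION_MAJOR >= 1
  return gst_element_query_duration (element, GST_FORMAT_TIME, duration);
#else
  GstFormat format = GST_FORMAT_TIME;
  return gst_element_query_duration (element, &format, duration) &&
      format == GST_FORMAT_TIME;
#endif
}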
Example #13
static gboolean player_gstreamer_onGstreamerMessage (PlayerGstreamer* self, GstBus* bus, GstMessage* message, FsoDevicePlayingSound* sound) {
	gboolean result = FALSE;
	Block1Data* _data1_;
	FsoDevicePlayingSound* _tmp0_;
	FsoDevicePlayingSound* _tmp1_;
	FsoFrameworkLogger* _tmp2_;
	GstMessage* _tmp3_;
	GstMessageType _tmp4_;
	const gchar* _tmp5_ = NULL;
	FsoDevicePlayingSound* _tmp6_;
	const gchar* _tmp7_;
	const gchar* _tmp8_ = NULL;
	gchar* _tmp9_ = NULL;
	gchar* _tmp10_;
	gboolean _tmp11_ = FALSE;
	FsoDevicePlayingSound* _tmp12_;
	guint32 _tmp13_;
	GstPipeline* _tmp14_;
	GstPipeline* pipeline;
	GstMessage* _tmp15_;
	GstMessageType _tmp16_;
	g_return_val_if_fail (self != NULL, FALSE);
	g_return_val_if_fail (bus != NULL, FALSE);
	g_return_val_if_fail (message != NULL, FALSE);
	g_return_val_if_fail (sound != NULL, FALSE);
	_data1_ = g_slice_new0 (Block1Data);
	_data1_->_ref_count_ = 1;
	_data1_->self = g_object_ref (self);
	_tmp0_ = sound;
	_tmp1_ = _fso_device_playing_sound_ref0 (_tmp0_);
	_data1_->sound = _tmp1_;
	_tmp2_ = fso_framework_theLogger;
	_tmp3_ = message;
	_tmp4_ = _tmp3_->type;
	_tmp5_ = gst_message_type_get_name (_tmp4_);
	_tmp6_ = _data1_->sound;
	_tmp7_ = _tmp6_->name;
	_tmp8_ = string_to_string (_tmp7_);
	_tmp9_ = g_strconcat ("Gstreamer: ", _tmp5_, " for sound ", _tmp8_, NULL);
	_tmp10_ = _tmp9_;
	_tmp11_ = fso_framework_logger_debug (_tmp2_, _tmp10_);
	g_assert (_tmp11_);
	_g_free0 (_tmp10_);
	_tmp12_ = _data1_->sound;
	_tmp13_ = _tmp12_->data;
	_tmp14_ = _gst_object_ref0 (GST_IS_PIPELINE (_tmp13_) ? ((GstPipeline*) _tmp13_) : NULL);
	pipeline = _tmp14_;
	_tmp15_ = message;
	_tmp16_ = _tmp15_->type;
	switch (_tmp16_) {
		case GST_MESSAGE_EOS:
		{
			{
				FsoDevicePlayingSound* _tmp17_;
				gint _tmp18_;
				_tmp17_ = _data1_->sound;
				_tmp18_ = _tmp17_->loop;
				_tmp17_->loop = _tmp18_ - 1;
				if (_tmp18_ > 0) {
					GstPipeline* _tmp19_;
					_tmp19_ = pipeline;
					gst_element_seek_simple ((GstElement*) _tmp19_, GST_FORMAT_TIME, GST_SEEK_FLAG_FLUSH, (gint64) 0);
				} else {
					FsoDevicePlayingSound* _tmp20_;
					_tmp20_ = _data1_->sound;
					player_gstreamer_stop (self, _tmp20_);
				}
				break;
			}
		}
		case GST_MESSAGE_ERROR:
		{
			{
				GError* e = NULL;
				gchar* debug = NULL;
				GstMessage* _tmp21_;
				GError* _tmp22_ = NULL;
				gchar* _tmp23_ = NULL;
				FsoFrameworkLogger* _tmp24_;
				GError* _tmp25_;
				const gchar* _tmp26_;
				const gchar* _tmp27_ = NULL;
				const gchar* _tmp28_;
				const gchar* _tmp29_ = NULL;
				gchar* _tmp30_ = NULL;
				gchar* _tmp31_;
				_tmp21_ = message;
				gst_message_parse_error (_tmp21_, &_tmp22_, &_tmp23_);
				_g_error_free0 (e);
				e = _tmp22_;
				_g_free0 (debug);
				debug = _tmp23_;
				_tmp24_ = fso_framework_theLogger;
				_tmp25_ = e;
				_tmp26_ = _tmp25_->message;
				_tmp27_ = string_to_string (_tmp26_);
				_tmp28_ = debug;
				_tmp29_ = string_to_string (_tmp28_);
				_tmp30_ = g_strconcat ("Gstreamer: Error ", _tmp27_, ": ", _tmp29_, NULL);
				_tmp31_ = _tmp30_;
				fso_framework_logger_warning (_tmp24_, _tmp31_);
				_g_free0 (_tmp31_);
				_g_free0 (debug);
				_g_error_free0 (e);
				break;
			}
		}
		case GST_MESSAGE_STATE_CHANGED:
		{
			{
				GstState previous = 0;
				GstState current = 0;
				GstState pending = 0;
				GstMessage* _tmp32_;
				GstState _tmp33_ = 0;
				GstState _tmp34_ = 0;
				GstState _tmp35_ = 0;
				gboolean _tmp36_ = FALSE;
				gboolean _tmp37_ = FALSE;
				GstState _tmp38_;
				gboolean _tmp40_;
				gboolean _tmp42_;
				_tmp32_ = message;
				gst_message_parse_state_changed (_tmp32_, &_tmp33_, &_tmp34_, &_tmp35_);
				previous = _tmp33_;
				current = _tmp34_;
				pending = _tmp35_;
				_tmp38_ = previous;
				if (_tmp38_ == GST_STATE_READY) {
					GstState _tmp39_;
					_tmp39_ = current;
					_tmp37_ = _tmp39_ == GST_STATE_PAUSED;
				} else {
					_tmp37_ = FALSE;
				}
				_tmp40_ = _tmp37_;
				if (_tmp40_) {
					GstState _tmp41_;
					_tmp41_ = pending;
					_tmp36_ = _tmp41_ == GST_STATE_PLAYING;
				} else {
					_tmp36_ = FALSE;
				}
				_tmp42_ = _tmp36_;
				if (_tmp42_) {
					FsoDevicePlayingSound* _tmp43_;
					gint _tmp44_;
					_tmp43_ = _data1_->sound;
					_tmp44_ = _tmp43_->length;
					if (_tmp44_ > 0) {
						FsoDevicePlayingSound* _tmp45_;
						gint _tmp46_;
						_tmp45_ = _data1_->sound;
						_tmp46_ = _tmp45_->length;
						g_timeout_add_seconds_full (G_PRIORITY_DEFAULT, (guint) _tmp46_, _______lambda2__gsource_func, block1_data_ref (_data1_), block1_data_unref);
					}
				} else {
					gboolean _tmp47_ = FALSE;
					gboolean _tmp48_ = FALSE;
					GstState _tmp49_;
					gboolean _tmp51_;
					gboolean _tmp53_;
					_tmp49_ = previous;
					if (_tmp49_ == GST_STATE_PLAYING) {
						GstState _tmp50_;
						_tmp50_ = current;
						_tmp48_ = _tmp50_ == GST_STATE_PAUSED;
					} else {
						_tmp48_ = FALSE;
					}
					_tmp51_ = _tmp48_;
					if (_tmp51_) {
						GstState _tmp52_;
						_tmp52_ = pending;
						_tmp47_ = _tmp52_ == GST_STATE_READY;
					} else {
						_tmp47_ = FALSE;
					}
					_tmp53_ = _tmp47_;
					if (_tmp53_) {
						FsoDevicePlayingSound* _tmp54_;
						_tmp54_ = _data1_->sound;
						player_gstreamer_stop (self, _tmp54_);
					} else {
					}
				}
				break;
			}
		}
		default:
		{
			{
				FsoFrameworkLogger* _tmp55_;
				GstMessage* _tmp56_;
				GstMessageType _tmp57_;
				const gchar* _tmp58_ = NULL;
				gchar* _tmp59_ = NULL;
				gchar* _tmp60_;
				_tmp55_ = fso_framework_theLogger;
				_tmp56_ = message;
				_tmp57_ = _tmp56_->type;
				_tmp58_ = gst_message_type_get_name (_tmp57_);
				_tmp59_ = g_strconcat ("Gstreamer: Unhandled message w/ type ", _tmp58_, NULL);
				_tmp60_ = _tmp59_;
				fso_framework_logger_warning (_tmp55_, _tmp60_);
				_g_free0 (_tmp60_);
				break;
			}
		}
	}
	result = TRUE;
	_gst_object_unref0 (pipeline);
	block1_data_unref (_data1_);
	_data1_ = NULL;
	return result;
}
static GstBusSyncReply
bus_sync_handler (GstBus * bus, GstMessage * message, GstPipeline * pipeline)
{
  const GstStructure *structure;
  const GValue *value;
  gchar *contents;
  gint i;
  guint size = 0;

  /* select msg */
  if (GST_MESSAGE_TYPE (message) != GST_MESSAGE_ELEMENT ||
      !gst_structure_has_name (gst_message_get_structure (message),
          "facedetect"))
    return GST_BUS_PASS;

  /* parse msg structure */
  structure = gst_message_get_structure (message);

  /* if facedetect is into buffer */
  if (structure &&
      strcmp (gst_structure_get_name (structure), "facedetect") == 0) {
    if (!silent) {
      /* print message type and structure name */
      g_print ("Type message, name message: %s{{%s}}\n",
          gst_message_type_get_name (message->type),
          gst_structure_get_name (structure));

      /* print msg structure names and type */
      for (i = 0; i < gst_structure_n_fields (structure); i++) {
        const gchar *name = gst_structure_nth_field_name (structure, i);
        GType type = gst_structure_get_field_type (structure, name);
        g_print ("-Name field, type: %s[%s]\n", name, g_type_name (type));
      }
    }

    /* get structure of faces */
    value = gst_structure_get_value (structure, "faces");
    /* obtain the contents of the structure */
    contents = g_strdup_value_contents (value);
    if (!silent)
      g_print ("Detected objects: %s\n\n", contents);
    g_free (contents);

    /* list size */
    size = gst_value_list_get_size (value);

    /* if face is detected, obtain the values X and Y of mouth and of nose. */
    if (size != 0) {
      GstState state;

      /* if paused, set to playing */
      gst_element_get_state (GST_ELEMENT (playbin), &state, NULL,
          GST_CLOCK_TIME_NONE);
      if (state != GST_STATE_PLAYING) {
        gst_element_set_state (GST_ELEMENT (playbin), GST_STATE_PLAYING);
      }

      if (ctrlvol) {
        gdouble volume;

        const GValue *faces_value = gst_value_list_get_value (value, 0);
        const GstStructure *faces_structure =
            gst_value_get_structure (faces_value);
        gboolean have_mouth_y =
            gst_structure_has_field (faces_structure, "mouth->y");
        gboolean have_mouth_x =
            gst_structure_has_field (faces_structure, "mouth->x");
        gboolean have_nose_y =
            gst_structure_has_field (faces_structure, "nose->y");
        gboolean have_nose_x =
            gst_structure_has_field (faces_structure, "nose->x");

        /* get the volume value */
        g_object_get (G_OBJECT (playbin), "volume", &volume, NULL);

        /* media operation - hide your mouth for down the volume of the video */
        if (have_mouth_y == 0 && have_mouth_x == 0) {
          volume = volume - 0.5;
          if (volume <= 0.5)
            volume = 0.0;
          g_object_set (G_OBJECT (playbin), "volume", volume, NULL);
        }
        /* media operation - hide your nose for up the volume of the video */
        if (have_nose_y == 0 && have_nose_x == 0) {
          volume = volume + 0.5;
          if (volume >= 9.5)
            volume = 10.0;
          g_object_set (G_OBJECT (playbin), "volume", volume, NULL);
        }
      }
      /* if face is not detected */
    } else {
      /* media operation - hide your face to stop media play */
      gst_element_set_state (playbin, GST_STATE_PAUSED);
    }
  }
  gst_message_unref (message);
  return GST_BUS_DROP;
}
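
A sync handler such as the one above must be installed with gst_bus_set_sync_handler() rather than as a watch, since it runs on the streaming thread and drops the message itself; a minimal sketch, following the version guards used elsewhere on this page:

/* Sketch: intercept facedetect element messages before they reach any bus
 * watch. The 1.x API takes an extra GDestroyNotify argument. */
static void
install_facedetect_handler (GstPipeline * pipeline)
{
  GstBus *bus = gst_element_get_bus (GST_ELEMENT (pipeline));

#if GST_VERSION_MAJOR >= 1
  gst_bus_set_sync_handler (bus, (GstBusSyncHandler) bus_sync_handler,
      pipeline, NULL);
#else
  gst_bus_set_sync_handler (bus, (GstBusSyncHandler) bus_sync_handler,
      pipeline);
#endif
  gst_object_unref (bus);
}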
Example #15
/* http://<xxx>/manual/html/section-bus-message-types.html */
static gboolean my_bus_callback(GstBus *bus, GstMessage *msg,
	gpointer user_data)
{
	GstMessageType msgType;
	GstObject *msgSrc;
	gchar *msgSrcName;

	/* used in switch */
	/* error message */
	gchar *debug;
	GError *err;
	GstState oldstate, newstate, pending;

	/* stream status */
	GstElement *owner;

	msgType = GST_MESSAGE_TYPE(msg);
	msgSrc = GST_MESSAGE_SRC(msg);
	msgSrcName = GST_OBJECT_NAME(msgSrc);

	switch (GST_MESSAGE_TYPE(msg)) {
	case GST_MESSAGE_EOS:
		g_print("GStreamer: end-of-stream\n");
		pthread_mutex_lock(&g_mutex);

		gst_element_set_state(GST_ELEMENT(g_pipeline), GST_STATE_NULL);
		trigger_callback(GST_STATE_NULL);

		pthread_mutex_unlock(&g_mutex);
		break;

	case GST_MESSAGE_ERROR:
		gst_message_parse_error(msg, &err, &debug);
		g_free (debug);

		/* g_error() would abort before the cleanup below runs */
		g_warning("GStreamer: error: [%d] %s\n", err->code, err->message);
		g_error_free(err);

		/* TODO no sleep in callback */
		pthread_mutex_lock(&g_mutex);

		/* setting state to null flushes pipeline */
		gst_element_set_state(GST_ELEMENT(g_pipeline), GST_STATE_NULL);
		trigger_callback(GST_STATE_NULL);

		pthread_mutex_unlock(&g_mutex);
		break;

	case GST_MESSAGE_STATE_CHANGED:
		gst_message_parse_state_changed(msg, &oldstate, &newstate, &pending);
#if 0   /* noisy */
		g_print("GStreamer: %s: State change: OLD: '%s', NEW: '%s', PENDING: '%s'\n",
				msgSrcName,
				gststate_get_name(oldstate),
				gststate_get_name(newstate),
				gststate_get_name(pending));
#endif
		if (!strcmp(msgSrcName, g_pipeline_name))
			trigger_callback(newstate); /* TODO GstState != GStreamer_state */

		break;

	case GST_MESSAGE_WARNING:
	case GST_MESSAGE_INFO:
		/* TODO */
		break;
	case GST_MESSAGE_APPLICATION:  /* marshal information into the main thread */
	case GST_MESSAGE_ASYNC_START:
	case GST_MESSAGE_ASYNC_DONE:
	case GST_MESSAGE_BUFFERING: /* caching of network streams */
	case GST_MESSAGE_CLOCK_LOST:
	case GST_MESSAGE_CLOCK_PROVIDE:
	case GST_MESSAGE_ELEMENT:  /* custom message, e.g. qtdemux redirect */
	case GST_MESSAGE_LATENCY:
	case GST_MESSAGE_NEW_CLOCK:
	case GST_MESSAGE_REQUEST_STATE:
	case GST_MESSAGE_SEGMENT_DONE:
	case GST_MESSAGE_SEGMENT_START:
	case GST_MESSAGE_STATE_DIRTY:
	case GST_MESSAGE_STEP_DONE:
	case GST_MESSAGE_STRUCTURE_CHANGE:
	case GST_MESSAGE_TAG: /* meta data: artist, title */
		/* ignore */
		break;
	case GST_MESSAGE_DURATION:
	default:
		g_print("GStreamer: BUS_CALL %s %d\n",
				gst_message_type_get_name(GST_MESSAGE_TYPE(msg)),
				GST_MESSAGE_TYPE(msg));
		break;
	}

	return 1;
}
static gboolean
bus_message(GstBus *bus, GstMessage *message, void *pv)
{
	switch (GST_MESSAGE_TYPE(message))
	{
		case GST_MESSAGE_ERROR:
		{
			GError *err = NULL;
			gchar *dbg_info = NULL;

			gst_message_parse_error(message, &err, &dbg_info);
			AVB_LOGF_ERROR("GStreamer ERROR message from element %s: %s",
			               GST_OBJECT_NAME(message->src),
			               err->message);
			AVB_LOGF_ERROR("Additional info: %s\n", (dbg_info) ? dbg_info : "none");
			g_error_free(err);
			g_free(dbg_info);
			break;
		}
		case GST_MESSAGE_WARNING:
		{
			GError *err = NULL;
			gchar *dbg_info = NULL;

			gst_message_parse_warning(message, &err, &dbg_info);
			AVB_LOGF_WARNING("GStreamer WARNING message from element %s: %s",
			                 GST_OBJECT_NAME(message->src),
			                 err->message);
			AVB_LOGF_WARNING("Additional info: %s\n", (dbg_info) ? dbg_info : "none");
			g_error_free(err);
			g_free(dbg_info);
			break;
		}
		case GST_MESSAGE_INFO:
		{
			GError *err = NULL;
			gchar *dbg_info = NULL;

			gst_message_parse_info(message, &err, &dbg_info);
			AVB_LOGF_ERROR("GStreamer INFO message from element %s: %s",
			               GST_OBJECT_NAME(message->src),
			               err->message);
			AVB_LOGF_ERROR("Additional info: %s\n", (dbg_info) ? dbg_info : "none");
			g_error_free(err);
			g_free(dbg_info);
			break;
		}
		case GST_MESSAGE_STATE_CHANGED:
		{
			GstState old_state, new_state;
			gst_message_parse_state_changed(message, &old_state, &new_state, NULL);
			AVB_LOGF_DEBUG("Element %s changed state from %s to %s",
			               GST_OBJECT_NAME(message->src),
			               gst_element_state_get_name(old_state),
			               gst_element_state_get_name(new_state));
			break;
		}
		case GST_MESSAGE_STREAM_STATUS:
		{
			// not so valuable
			break;
		}
		case GST_MESSAGE_EOS:
			AVB_LOG_INFO("EOS received");
			break;
		default:
			AVB_LOGF_INFO("GStreamer '%s' message from element %s",
			              gst_message_type_get_name(GST_MESSAGE_TYPE(message)),
			              GST_OBJECT_NAME(message->src));
			break;
	}

	return TRUE;
}
int BasicMediaStream::ProcessBusMessage(GstBus* bus, GstMessage* message,
		gpointer user_data)
{
	switch (GST_MESSAGE_TYPE (message))
	{
	    case GST_MESSAGE_ERROR:
	    {
	      GError *err;
	      gchar *debug;

	      gst_message_parse_error (message, &err, &debug);

	      std::string errorMsg = "Stream " + m_name + " encountered an error: " + err->message;
	      m_context->Logger->WriteError(errorMsg);

	      g_error_free (err);
	      g_free (debug);

	      gst_element_set_state (m_pipeline, GST_STATE_READY);
	      gst_element_set_state (m_pipeline, GST_STATE_PLAYING);
	      break;
	    }

	    case GST_MESSAGE_EOS:
	    {
	      gst_element_set_state (m_pipeline, GST_STATE_READY);

	      std::string info = "Stream " + m_name + " was stopped.";
	      m_context->Logger->WriteInfo(info);
	      break;
	    }

	      /*// REMARK: code based on GStreamer tutorial
	    case GST_MESSAGE_BUFFERING:
	      gint percent = 0;

	      // disable buffering when in playing mode
	      if (m_pipeline->pending_state == GST_STATE_PLAYING)
	      	break;

	      if (percent < 100)
	        gst_element_set_state (m_pipeline, GST_STATE_PAUSED);
	      else
	        gst_element_set_state (m_pipeline, GST_STATE_PLAYING);
	      break;
	       */

	    case GST_MESSAGE_CLOCK_LOST:
	    {
	      std::string warn = "Stream " + m_name + " lost clock. Resetting.";
	      m_context->Logger->WriteWarning(warn);

	      gst_element_set_state (m_pipeline, GST_STATE_PAUSED);
	      gst_element_set_state (m_pipeline, GST_STATE_PLAYING);
	      break;
	    }

	    case GST_MESSAGE_STATE_CHANGED:
	    {
	      GstState oldState, newState, pendingState;
	      gst_message_parse_state_changed (message, &oldState, &newState, &pendingState);
	      std::stringstream msg;
	      msg << "Stream " << m_name << " changed state from "
	          << gst_element_state_get_name (oldState) << " to "
	          << gst_element_state_get_name (newState) << ", pending: "
	          << gst_element_state_get_name (pendingState);
	      m_context->Logger->WriteWarning(msg.str());
	      break;
	    }

	    default:
	    {
	      std::stringstream msg;
	      msg << "Unhandled message: "
	          << gst_message_type_get_name(GST_MESSAGE_TYPE (message));
	      m_context->Logger->WriteWarning(msg.str());
	      break;
	    }
	}

	return 1;
}
static GstBusSyncReply
bus_sync_handler (GstBus * bus, GstMessage * message, GstPipeline * pipeline)
{
  const GstStructure *structure;
  gint64 position, length;
  GstFormat format = GST_FORMAT_TIME;
  const GValue *x_value, *y_value;
  gint x, i, y;
  /* select msg */
  if (GST_MESSAGE_TYPE (message) != GST_MESSAGE_ELEMENT ||
      !gst_structure_has_name (gst_message_get_structure (message),
          "hand-gesture"))
    return GST_BUS_PASS;

  /* parse msg structure */
  structure = gst_message_get_structure (message);

  /* if PALM gesture detected */
  if (structure &&
      strcmp (gst_structure_get_name (structure), "hand-gesture") == 0 &&
      strcmp (gst_structure_get_string (structure, "gesture"), "palm") == 0) {
    /* media operation - closed palm to stop media play */
    gst_element_set_state (playbin, GST_STATE_PAUSED);
  }

  /* if FIST gesture detected */
  if (structure &&
      strcmp (gst_structure_get_name (structure), "hand-gesture") == 0 &&
      strcmp (gst_structure_get_string (structure, "gesture"), "fist") == 0) {
    /* print message type and structure name */
    g_print ("%s{{%s}}\n", gst_message_type_get_name (message->type),
        gst_structure_get_name (structure));
    /* print msg structure names&values */
    for (i = 0; i < gst_structure_n_fields (structure); i++) {
      const gchar *name = gst_structure_nth_field_name (structure, i);
      GType type = gst_structure_get_field_type (structure, name);
      const GValue *value = gst_structure_get_value (structure, name);
      type == G_TYPE_STRING ?
          g_print ("-%s[%s]{%s}\n", name, g_type_name (type),
          g_value_get_string (value)) : g_print ("-%s[%s]{%d}\n", name,
          g_type_name (type), g_value_get_uint (value));
    }
    g_print ("\n");

    /* get X,Y positions in frame */
    x_value = gst_structure_get_value (structure, "x");
    x = g_value_get_uint (x_value);
    y_value = gst_structure_get_value (structure, "y");
    y = g_value_get_uint (y_value);

    /* set object volumes [0-10] based on Y */
    g_object_set (G_OBJECT (playbin), "volume", (gdouble) (10 - y / 24), NULL);

    /* seek playback positions */
    gst_element_query_duration (playbin, format, &length);
    /* Width = 320 is specified in caps */
    position = (gint64) length *x / 320;
    gst_element_set_state (playbin, GST_STATE_PAUSED);
    gst_element_seek (GST_ELEMENT (playbin),
        1.0,
        format,
        GST_SEEK_FLAG_FLUSH,
        GST_SEEK_TYPE_SET, position, GST_SEEK_TYPE_NONE, GST_CLOCK_TIME_NONE);
    gst_element_set_state (GST_ELEMENT (playbin), GST_STATE_PLAYING);
  }

  gst_message_unref (message);
  return GST_BUS_DROP;
}
Example #19
static VALUE
type_name(VALUE self)
{
    return CSTR2RVAL(gst_message_type_get_name(RVAL2GST_MSG_TYPE(self)));
}
static gboolean
gst_ss_demux_download_bus_cb(GstBus *bus, GstMessage *msg, gpointer data)
{
  GstSSDemuxStream *stream = (GstSSDemuxStream *)data;
  GstSSDemux *demux = stream->parent;

  switch (GST_MESSAGE_TYPE(msg)) {
    case GST_MESSAGE_EOS: {
      guint64 download_rate = -1;

      GST_INFO_OBJECT (stream->pad, "received EOS on download pipe..");
      // increase the fragment count on EOS
      stream->frag_cnt++;

      if (g_strrstr (GST_ELEMENT_NAME (stream->urisrc), "http")) {
        g_object_get (stream->urisrc, "download-rate", &download_rate, NULL);
        g_print ("*********** '%s' download rate = %" G_GUINT64_FORMAT " bps **************\n",
            stream->name, download_rate);
      }

      // TODO: need to remove download_rate> 0 check.. make it generic
      if ((stream->type == SS_STREAM_VIDEO) && (demux->ss_mode != SS_MODE_AONLY) && (download_rate >= 0)) {
        if (stream->frag_cnt >= demux->fragments_cache) {
          /* for switching, we are considering video download rate only */
          demux->ss_mode = gst_ssm_parse_switch_qualitylevel (demux->parser, download_rate);
        }
      } else if (stream->type == SS_STREAM_AUDIO && (demux->ss_mode == SS_MODE_AONLY)) {
        /* when video is not present using audio download rate to calculate switching */
         demux->ss_mode = gst_ssm_parse_switch_qualitylevel (demux->parser, download_rate);
         if (demux->ss_mode != SS_MODE_AONLY) {
           g_print ("\n\nMoving to AV mode by audio considering audio download rate\n\n\n\n");
         }
      }

      g_cond_signal (stream->cond);

#ifdef SIMULATE_AUDIO_ONLY
      /* when fragment count is multiple of 4, switch to audio only case */
      if ((stream->frag_cnt % 4 == 0) && (stream->type == SS_STREAM_VIDEO) &&
	  	GST_SSM_PARSE_IS_LIVE_PRESENTATION(demux->parser)) {
        g_print ("\n\t ######## Forcibly switching to audio only for testing ##########\n");
        demux->ss_mode = SS_MODE_AONLY;
      }
#endif
      GST_DEBUG_OBJECT (stream->pad, "Signalling eos condition...");

      GST_DEBUG_OBJECT (demux, "number of fragments downloaded = %d", stream->frag_cnt);
      break;
    }
    case GST_MESSAGE_ERROR: {
      GError *error = NULL;
      gchar* debug = NULL;

      g_print ("Error from %s\n", gst_element_get_name (GST_MESSAGE_SRC(msg)));

      gst_message_parse_error( msg, &error, &debug );
      if (error)
        GST_ERROR_OBJECT (demux, "GST_MESSAGE_ERROR: error= %s\n", error->message);

      GST_ERROR_OBJECT (demux, "GST_MESSAGE_ERROR: debug = %s\n", debug);

      /* handling error, when client requests url, which is yet to be prepared by server */
      if ((!strncmp(error->message, "Precondition Failed", strlen("Precondition Failed"))) && (5 == error->code)) {
        GstStateChangeReturn ret;

        /* wait for 1sec & request the url again */
        // TODO: need to make wait time as generic or Adding loop count to request again & again
        GST_INFO_OBJECT (demux, "ERROR : code = %d, msg = %s, NEED to request again", error->code, error->message);
        usleep (1000000); // 1 sec

        /* put the current pipeline to NULL state */
        gst_element_set_state (stream->pipe, GST_STATE_NULL);
        gst_element_get_state (stream->pipe, NULL, NULL, GST_CLOCK_TIME_NONE);
        stream->pipe = stream->urisrc = stream->parser = stream->sink = NULL;

        g_print ("Going to download fragment AGAIN : %s\n", stream->uri);
        if (!gst_ss_demux_create_download_pipe (demux, stream, stream->uri, stream->start_ts)) {
          GST_ERROR_OBJECT (demux, "failed to create download pipeline");
          if (!gst_element_post_message (GST_ELEMENT(demux), msg)) {
            GST_ERROR_OBJECT (demux, "failed to post error");
            return FALSE;
          }
        }

        ret = gst_element_set_state (stream->pipe, GST_STATE_PLAYING);
        if (ret == GST_STATE_CHANGE_FAILURE) {
          if (!gst_element_post_message (GST_ELEMENT(demux), msg)) {
            GST_ERROR_OBJECT (demux, "failed to post error");
            return FALSE;
          }
        }

      } else {
        if (error)
          g_print ("GST_MESSAGE_ERROR: error= %s\n", error->message);

        g_print ("GST_MESSAGE_ERROR: debug = %s\n", debug);
        if (!gst_element_post_message (GST_ELEMENT(demux), msg)) {
          GST_ERROR_OBJECT (demux, "failed to post error");
          gst_ss_demux_stop (demux, stream);
          g_free (debug);
          debug = NULL;
          g_error_free (error);
          return FALSE;
        }
        gst_ss_demux_stop (demux, stream);
      }

      g_free( debug);
      debug = NULL;
      g_error_free( error);
      break;
    }
    case GST_MESSAGE_WARNING: {
      char* debug = NULL;
      GError* error = NULL;
      gst_message_parse_warning(msg, &error, &debug);
      GST_WARNING_OBJECT(demux, "warning : %s\n", error->message);
      GST_WARNING_OBJECT(demux, "debug : %s\n", debug);
      g_error_free( error );
      g_free( debug);
      break;
    }
    default : {
      GST_LOG_OBJECT(demux, "unhandled message : %s\n", gst_message_type_get_name (GST_MESSAGE_TYPE (msg)));
      break;
    }
  }

  return TRUE;
}