Example #1
0
void GstEnginePipeline::BufferingMessageReceived(GstMessage* msg) {
  // Only handle buffering messages from the queue2 element in audiobin - not
  // the one that's created automatically by uridecodebin.
  if (GST_ELEMENT(GST_MESSAGE_SRC(msg)) != queue_) {
    return;
  }

  // If we are loading a new next track, we don't have to pause the playback:
  // the buffering is for the upcoming track, not the one currently playing.
  if (emit_track_ended_on_stream_start_) {
    qLog(Debug) << "Buffering next track";
    return;
  }

  int percent = 0;
  gst_message_parse_buffering(msg, &percent);

  const GstState current_state = state();

  if (percent == 0 && current_state == GST_STATE_PLAYING && !buffering_) {
    // Buffer ran dry while playing: pause until it refills.
    buffering_ = true;
    emit BufferingStarted();

    SetState(GST_STATE_PAUSED);
  } else if (percent == 100 && buffering_) {
    // Buffer is full again: resume playback.
    buffering_ = false;
    emit BufferingFinished();

    SetState(GST_STATE_PLAYING);
  } else if (buffering_) {
    // Intermediate fill level while we are waiting for the buffer.
    emit BufferingProgress(percent);
  }
}
void GstEnginePipeline::BufferingMessageReceived(GstMessage* msg) {
    // Buffering messages can originate from several elements; react only to
    // the queue2 element we created inside audiobin, and ignore the queue
    // that uridecodebin sets up on its own.
    if (GST_ELEMENT(GST_MESSAGE_SRC(msg)) != queue_) return;

    // A next track being pre-loaded buffers for the *upcoming* stream, so
    // there is no reason to interrupt the track that is playing right now.
    if (emit_track_ended_on_stream_start_) {
        qLog(Debug) << "Buffering next track";
        return;
    }

    int fill_percent = 0;
    gst_message_parse_buffering(msg, &fill_percent);

    const GstState current_state = state();
    const bool ran_dry = (fill_percent == 0);
    const bool refilled = (fill_percent == 100);

    if (ran_dry && current_state == GST_STATE_PLAYING && !buffering_) {
        // The buffer emptied mid-playback: pause until it refills.
        buffering_ = true;
        emit BufferingStarted();
        SetState(GST_STATE_PAUSED);
    } else if (refilled && buffering_) {
        // Fully buffered again: resume playback.
        buffering_ = false;
        emit BufferingFinished();
        SetState(GST_STATE_PLAYING);
    } else if (buffering_) {
        // Still filling: report intermediate progress.
        emit BufferingProgress(fill_percent);
    }
}