/**
 * gst_wrapper_camera_bin_src_construct_pipeline:
 * @bcamsrc: camerasrc object
 *
 * This function creates and links the elements of the camerasrc bin
 * videosrc ! cspconv ! srcfilter ! cspconv ! capsfilter ! crop ! scale ! \
 * capsfilter ! tee name=t
 *    t. ! ... (viewfinder pad)
 *    t. ! output-selector name=outsel
 *        outsel. ! (image pad)
 *        outsel. ! (video pad)
 *
 * Returns: TRUE if the elements were successfully created, FALSE otherwise
 */
static gboolean
gst_wrapper_camera_bin_src_construct_pipeline (GstBaseCameraSrc * bcamsrc)
{
  GstWrapperCameraBinSrc *self = GST_WRAPPER_CAMERA_BIN_SRC (bcamsrc);
  GstBin *cbin = GST_BIN (bcamsrc);
  GstElement *tee;
  GstElement *filter_csp;
  GstElement *src_csp;
  GstElement *capsfilter;
  gboolean ret = FALSE;
  GstPad *vf_pad;
  GstPad *tee_capture_pad;
  GstPad *src_caps_src_pad;

  if (!self->elements_created) {

    GST_DEBUG_OBJECT (self, "constructing pipeline");

    /* Add application set or default video src element */
    if (!(self->src_vid_src = gst_camerabin_setup_default_element (cbin,
                self->app_vid_src, "autovideosrc", DEFAULT_VIDEOSRC,
                "camerasrc-real-src"))) {
      self->src_vid_src = NULL;
      goto done;
    } else {
      if (!gst_camerabin_add_element (cbin, self->src_vid_src)) {
        goto done;
      }
    }
    /* we lost the reference */
    self->app_vid_src = NULL;

    /* we listen for changes to max-zoom in the video src so that
     * we can proxy them to the basecamerasrc property */
    if (g_object_class_find_property (G_OBJECT_GET_CLASS (bcamsrc), "max-zoom")) {
      g_signal_connect (G_OBJECT (self->src_vid_src), "notify::max-zoom",
          (GCallback) gst_wrapper_camera_bin_src_max_zoom_cb, bcamsrc);
    }

    /* add an event probe to the src element to drop EOS from READY->NULL */
    {
      GstPad *pad;
      pad = gst_element_get_static_pad (self->src_vid_src, "src");

      self->src_event_probe_id = gst_pad_add_event_probe (pad,
          (GCallback) gst_wrapper_camera_src_src_event_probe, self);
      gst_object_unref (pad);
    }

    if (!gst_camerabin_create_and_add_element (cbin, "ffmpegcolorspace",
            "src-colorspace"))
      goto done;

    if (!(self->src_filter =
            gst_camerabin_create_and_add_element (cbin, "capsfilter",
                "src-capsfilter")))
      goto done;

    /* attach to notify::caps on the first capsfilter and use a callback
     * to recalculate the zoom properties when these caps change and to
     * propagate the caps to the second capsfilter */
    src_caps_src_pad = gst_element_get_static_pad (self->src_filter, "src");
    g_signal_connect (src_caps_src_pad, "notify::caps",
        G_CALLBACK (gst_wrapper_camera_bin_src_caps_cb), self);
    gst_object_unref (src_caps_src_pad);

    if (!(self->src_zoom_crop =
            gst_camerabin_create_and_add_element (cbin, "videocrop",
                "zoom-crop")))
      goto done;
    if (!(self->src_zoom_scale =
            gst_camerabin_create_and_add_element (cbin, "videoscale",
                "zoom-scale")))
      goto done;
    if (!(self->src_zoom_filter =
            gst_camerabin_create_and_add_element (cbin, "capsfilter",
                "zoom-capsfilter")))
      goto done;

    if (!(tee =
            gst_camerabin_create_and_add_element (cbin, "tee",
                "camerasrc-tee")))
      goto done;

    /* viewfinder pad */
    vf_pad = gst_element_get_request_pad (tee, "src%d");
    g_object_set (tee, "alloc-pad", vf_pad, NULL);
    gst_ghost_pad_set_target (GST_GHOST_PAD (self->vfsrc), vf_pad);
    gst_object_unref (vf_pad);

    /* image/video pad from tee */
    tee_capture_pad = gst_element_get_request_pad (tee, "src%d");

    self->output_selector =
        gst_element_factory_make ("output-selector", "outsel");
    g_object_set (self->output_selector, "pad-negotiation-mode", 0, NULL);
    gst_bin_add (GST_BIN (self), self->output_selector);
    {
      GstPad *pad = gst_element_get_static_pad (self->output_selector, "sink");

      /* check return TODO */
      gst_pad_link (tee_capture_pad, pad);
      gst_object_unref (pad);
    }
    gst_object_unref (tee_capture_pad);

    /* Create the 2 output pads for video and image */
    self->outsel_vidpad =
        gst_element_get_request_pad (self->output_selector, "src%d");
    self->outsel_imgpad =
        gst_element_get_request_pad (self->output_selector, "src%d");

    g_assert (self->outsel_vidpad != NULL);
    g_assert (self->outsel_imgpad != NULL);

    gst_pad_add_buffer_probe (self->outsel_imgpad,
        G_CALLBACK (gst_wrapper_camera_bin_src_imgsrc_probe), self);
    gst_pad_add_buffer_probe (self->outsel_vidpad,
        G_CALLBACK (gst_wrapper_camera_bin_src_vidsrc_probe), self);
    gst_ghost_pad_set_target (GST_GHOST_PAD (self->imgsrc),
        self->outsel_imgpad);
    gst_ghost_pad_set_target (GST_GHOST_PAD (self->vidsrc),
        self->outsel_vidpad);

    if (bcamsrc->mode == MODE_IMAGE) {
      g_object_set (self->output_selector, "active-pad", self->outsel_imgpad,
          NULL);
    } else {
      g_object_set (self->output_selector, "active-pad", self->outsel_vidpad,
          NULL);
    }



    gst_pad_set_active (self->vfsrc, TRUE);
    gst_pad_set_active (self->imgsrc, TRUE);    /* XXX ??? */
    gst_pad_set_active (self->vidsrc, TRUE);    /* XXX ??? */
  }

  /* Do this even if pipeline is constructed */

  if (self->video_filter) {
    /* check if we need to replace the current one */
    if (self->video_filter != self->app_vid_filter) {
      gst_bin_remove (cbin, self->video_filter);
      gst_object_unref (self->video_filter);
      self->video_filter = NULL;
      filter_csp = gst_bin_get_by_name (cbin, "filter-colorspace");
      gst_bin_remove (cbin, filter_csp);
      gst_object_unref (filter_csp);
      filter_csp = NULL;
    }
  }

  if (!self->video_filter) {
    if (self->app_vid_filter) {
      self->video_filter = gst_object_ref (self->app_vid_filter);
      filter_csp = gst_element_factory_make ("ffmpegcolorspace",
          "filter-colorspace");
      gst_bin_add_many (cbin, self->video_filter, filter_csp, NULL);
      src_csp = gst_bin_get_by_name (cbin, "src-colorspace");
      capsfilter = gst_bin_get_by_name (cbin, "src-capsfilter");
      if (gst_pad_is_linked (gst_element_get_static_pad (src_csp, "src")))
        gst_element_unlink (src_csp, capsfilter);
      if (!gst_element_link_many (src_csp, self->video_filter, filter_csp,
              capsfilter, NULL))
        goto done;
    }
  }
  ret = TRUE;
  self->elements_created = TRUE;
done:
  return ret;
}
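
The wrapper source above assembles the viewfinder/capture topology element by element so it can attach probes and swap filters at runtime. For reference, here is a minimal sketch of roughly the same topology built with gst_parse_launch instead. It assumes the same GStreamer 0.10-era elements used above (ffmpegcolorspace, output-selector with src%d request pads); the queue, autovideosink and fakesink branches are illustrative placeholders, not taken from the code above.

#include <gst/gst.h>

int
main (int argc, char *argv[])
{
  GError *err = NULL;
  GstElement *pipeline;

  gst_init (&argc, &argv);

  /* videosrc ! colorspace ! capsfilter ! crop ! scale ! capsfilter ! tee,
   * one tee branch for the viewfinder, the other feeding an output-selector
   * that switches between the image and video capture branches. */
  pipeline = gst_parse_launch (
      "autovideosrc ! ffmpegcolorspace ! capsfilter name=src-capsfilter ! "
      "videocrop name=zoom-crop ! videoscale name=zoom-scale ! "
      "capsfilter name=zoom-capsfilter ! tee name=t "
      "t. ! queue ! autovideosink "
      "t. ! queue ! output-selector name=outsel "
      "outsel. ! fakesink name=image-branch "
      "outsel. ! fakesink name=video-branch", &err);
  if (pipeline == NULL) {
    g_printerr ("parse error: %s\n", err ? err->message : "unknown");
    g_clear_error (&err);
    return 1;
  }

  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  /* ... run a main loop here, then shut down ... */
  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);
  return 0;
}
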
static gboolean
brasero_transcode_create_pipeline (BraseroTranscode *transcode,
				   GError **error)
{
	gchar *uri;
	gboolean keep_dts;
	GstElement *decode;
	GstElement *source;
	GstBus *bus = NULL;
	GstCaps *filtercaps;
	GValue *value = NULL;
	GstElement *pipeline;
	GstElement *sink = NULL;
	BraseroJobAction action;
	GstElement *filter = NULL;
	GstElement *volume = NULL;
	GstElement *convert = NULL;
	BraseroTrack *track = NULL;
	GstElement *resample = NULL;
	BraseroTranscodePrivate *priv;

	priv = BRASERO_TRANSCODE_PRIVATE (transcode);

	BRASERO_JOB_LOG (transcode, "Creating new pipeline");

	priv->set_active_state = 0;

	/* free the possible current pipeline and create a new one */
	if (priv->pipeline) {
		gst_element_set_state (priv->pipeline, GST_STATE_NULL);
		gst_object_unref (G_OBJECT (priv->pipeline));
		priv->link = NULL;
		priv->sink = NULL;
		priv->source = NULL;
		priv->convert = NULL;
		priv->pipeline = NULL;
	}

	/* create three types of pipeline according to the needs: (possibly adding grvolume)
	 * - filesrc ! decodebin ! audioconvert ! fakesink (find size) and filesrc ! mp3parse ! fakesink for mp3s
	 * - filesrc ! decodebin ! audioresample ! audioconvert ! audio/x-raw-int,rate=44100,width=16,depth=16,endianness=4321,signed ! filesink
	 * - filesrc ! decodebin ! audioresample ! audioconvert ! audio/x-raw-int,rate=44100,width=16,depth=16,endianness=4321,signed ! fdsink
	 */
	pipeline = gst_pipeline_new (NULL);

	bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
	gst_bus_add_watch (bus,
			   (GstBusFunc) brasero_transcode_bus_messages,
			   transcode);
	gst_object_unref (bus);

	/* source */
	brasero_job_get_current_track (BRASERO_JOB (transcode), &track);
	uri = brasero_track_stream_get_source (BRASERO_TRACK_STREAM (track), TRUE);
	source = gst_element_make_from_uri (GST_URI_SRC, uri, NULL);
	g_free (uri);

	if (source == NULL) {
		g_set_error (error,
			     BRASERO_BURN_ERROR,
			     BRASERO_BURN_ERROR_GENERAL,
			     /* Translators: %s is the name of the object (as in
			      * GObject) from the Gstreamer library that could
			      * not be created */
			     _("%s element could not be created"),
			     "\"Source\"");
		goto error;
	}
	gst_bin_add (GST_BIN (pipeline), source);
	g_object_set (source,
		      "typefind", FALSE,
		      NULL);

	/* sink */
	brasero_job_get_action (BRASERO_JOB (transcode), &action);
	switch (action) {
	case BRASERO_JOB_ACTION_SIZE:
		if (priv->mp3_size_pipeline)
			return brasero_transcode_create_pipeline_size_mp3 (transcode, pipeline, source, error);

		sink = gst_element_factory_make ("fakesink", NULL);
		break;

	case BRASERO_JOB_ACTION_IMAGE:
		volume = brasero_transcode_create_volume (transcode, track);

		if (brasero_job_get_fd_out (BRASERO_JOB (transcode), NULL) != BRASERO_BURN_OK) {
			gchar *output;

			brasero_job_get_image_output (BRASERO_JOB (transcode),
						      &output,
						      NULL);
			sink = gst_element_factory_make ("filesink", NULL);
			g_object_set (sink,
				      "location", output,
				      NULL);
			g_free (output);
		}
		else {
			int fd;

			brasero_job_get_fd_out (BRASERO_JOB (transcode), &fd);
			sink = gst_element_factory_make ("fdsink", NULL);
			g_object_set (sink,
				      "fd", fd,
				      NULL);
		}
		break;

	default:
		goto error;
	}

	if (!sink) {
		g_set_error (error,
			     BRASERO_BURN_ERROR,
			     BRASERO_BURN_ERROR_GENERAL,
			     _("%s element could not be created"),
			     "\"Sink\"");
		goto error;
	}

	gst_bin_add (GST_BIN (pipeline), sink);
	g_object_set (sink,
		      "sync", FALSE,
		      NULL);

	brasero_job_tag_lookup (BRASERO_JOB (transcode),
				BRASERO_SESSION_STREAM_AUDIO_FORMAT,
				&value);
	if (value)
		keep_dts = (g_value_get_int (value) & BRASERO_AUDIO_FORMAT_DTS) != 0;
	else
		keep_dts = FALSE;

	if (keep_dts
	&&  action == BRASERO_JOB_ACTION_IMAGE
	&& (brasero_track_stream_get_format (BRASERO_TRACK_STREAM (track)) & BRASERO_AUDIO_FORMAT_DTS) != 0) {
		GstElement *wavparse;
		GstPad *sinkpad;

		BRASERO_JOB_LOG (transcode, "DTS wav pipeline");

		/* FIXME: volume normalization won't work here. We'd need to 
		 * reencode it afterwards otherwise. */
		/* This is a special case. This is DTS wav. So we only decode wav. */
		wavparse = gst_element_factory_make ("wavparse", NULL);
		if (wavparse == NULL) {
			g_set_error (error,
				     BRASERO_BURN_ERROR,
				     BRASERO_BURN_ERROR_GENERAL,
				     _("%s element could not be created"),
				     "\"Wavparse\"");
			goto error;
		}
		gst_bin_add (GST_BIN (pipeline), wavparse);

		if (!gst_element_link (source, wavparse)) {
			g_set_error (error,
				     BRASERO_BURN_ERROR,
				     BRASERO_BURN_ERROR_GENERAL,
			             _("Impossible to link plugin pads"));
			goto error;
		}

		g_signal_connect (wavparse,
		                  "pad-added",
		                  G_CALLBACK (brasero_transcode_wavparse_pad_added_cb),
		                  transcode);

		/* This is an ugly workaround for the lack of accuracy with
		 * gstreamer. Yet this is unfortunately a necessary evil. */
		priv->pos = 0;
		priv->size = 0;
		sinkpad = gst_element_get_pad (sink, "sink");
		priv->probe = gst_pad_add_buffer_probe (sinkpad,
							G_CALLBACK (brasero_transcode_buffer_handler),
							transcode);
		gst_object_unref (sinkpad);


		priv->link = NULL;
		priv->sink = sink;
		priv->decode = NULL;
		priv->source = source;
		priv->convert = NULL;
		priv->pipeline = pipeline;

		gst_element_set_state (pipeline, GST_STATE_PLAYING);
		return TRUE;
	}

	/* audioconvert */
	convert = gst_element_factory_make ("audioconvert", NULL);
	if (convert == NULL) {
		g_set_error (error,
			     BRASERO_BURN_ERROR,
			     BRASERO_BURN_ERROR_GENERAL,
			     _("%s element could not be created"),
			     "\"Audioconvert\"");
		goto error;
	}
	gst_bin_add (GST_BIN (pipeline), convert);

	if (action == BRASERO_JOB_ACTION_IMAGE) {
		BraseroStreamFormat session_format;
		BraseroTrackType *output_type;

		output_type = brasero_track_type_new ();
		brasero_job_get_output_type (BRASERO_JOB (transcode), output_type);
		session_format = brasero_track_type_get_stream_format (output_type);
		brasero_track_type_free (output_type);

		/* audioresample */
		resample = gst_element_factory_make ("audioresample", NULL);
		if (resample == NULL) {
			g_set_error (error,
				     BRASERO_BURN_ERROR,
				     BRASERO_BURN_ERROR_GENERAL,
				     _("%s element could not be created"),
				     "\"Audioresample\"");
			goto error;
		}
		gst_bin_add (GST_BIN (pipeline), resample);

		/* filter */
		filter = gst_element_factory_make ("capsfilter", NULL);
		if (!filter) {
			g_set_error (error,
				     BRASERO_BURN_ERROR,
				     BRASERO_BURN_ERROR_GENERAL,
				     _("%s element could not be created"),
				     "\"Filter\"");
			goto error;
		}
		gst_bin_add (GST_BIN (pipeline), filter);
		filtercaps = gst_caps_new_full (gst_structure_new ("audio/x-raw-int",
								   "channels", G_TYPE_INT, 2,
								   "width", G_TYPE_INT, 16,
								   "depth", G_TYPE_INT, 16,
								   /* NOTE: we use little endianness only for libburn which requires little */
								   "endianness", G_TYPE_INT, (session_format & BRASERO_AUDIO_FORMAT_RAW_LITTLE_ENDIAN) != 0 ? 1234:4321,
								   "rate", G_TYPE_INT, 44100,
								   "signed", G_TYPE_BOOLEAN, TRUE,
								   NULL),
						NULL);
		g_object_set (GST_OBJECT (filter), "caps", filtercaps, NULL);
		gst_caps_unref (filtercaps);
	}

	/* decode */
	decode = gst_element_factory_make ("decodebin", NULL);
	if (decode == NULL) {
		g_set_error (error,
			     BRASERO_BURN_ERROR,
			     BRASERO_BURN_ERROR_GENERAL,
			     _("%s element could not be created"),
			     "\"Decodebin\"");
		goto error;
	}
	gst_bin_add (GST_BIN (pipeline), decode);

	if (action == BRASERO_JOB_ACTION_IMAGE) {
		GstPad *sinkpad;
		gboolean res;

		if (!gst_element_link (source, decode)) {
			BRASERO_JOB_LOG (transcode, "Impossible to link plugin pads");
			g_set_error (error,
				     BRASERO_BURN_ERROR,
				     BRASERO_BURN_ERROR_GENERAL,
			             _("Impossible to link plugin pads"));
			goto error;
		}

		priv->link = resample;
		g_signal_connect (G_OBJECT (decode),
				  "new-decoded-pad",
				  G_CALLBACK (brasero_transcode_new_decoded_pad_cb),
				  transcode);

		if (volume) {
			gst_bin_add (GST_BIN (pipeline), volume);
			res = gst_element_link_many (resample,
			                             volume,
						     convert,
			                             filter,
			                             sink,
			                             NULL);
		}
		else
			res = gst_element_link_many (resample,
			                             convert,
			                             filter,
			                             sink,
			                             NULL);

		if (!res) {
			BRASERO_JOB_LOG (transcode, "Impossible to link plugin pads");
			g_set_error (error,
				     BRASERO_BURN_ERROR,
				     BRASERO_BURN_ERROR_GENERAL,
				     _("Impossible to link plugin pads"));
			goto error;
		}

		/* This is an ugly workaround for the lack of accuracy with
		 * gstreamer. Yet this is unfortunately a necessary evil. */
		priv->pos = 0;
		priv->size = 0;
		sinkpad = gst_element_get_pad (sink, "sink");
		priv->probe = gst_pad_add_buffer_probe (sinkpad,
							G_CALLBACK (brasero_transcode_buffer_handler),
							transcode);
		gst_object_unref (sinkpad);
	}
	else {
		if (!gst_element_link (source, decode)
		||  !gst_element_link (convert, sink)) {
			BRASERO_JOB_LOG (transcode, "Impossible to link plugin pads");
			g_set_error (error,
				     BRASERO_BURN_ERROR,
				     BRASERO_BURN_ERROR_GENERAL,
				     _("Impossible to link plugin pads"));
			goto error;
		}

		priv->link = convert;
		g_signal_connect (G_OBJECT (decode),
				  "new-decoded-pad",
				  G_CALLBACK (brasero_transcode_new_decoded_pad_cb),
				  transcode);
	}

	priv->sink = sink;
	priv->decode = decode;
	priv->source = source;
	priv->convert = convert;
	priv->pipeline = pipeline;

	gst_element_set_state (pipeline, GST_STATE_PLAYING);
	return TRUE;

error:

	if (error && (*error))
		BRASERO_JOB_LOG (transcode,
				 "can't create object : %s \n",
				 (*error)->message);

	gst_object_unref (GST_OBJECT (pipeline));
	return FALSE;
}
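
The transcode pipeline above leaves decodebin's source pads unlinked at construction time and relies on brasero_transcode_new_decoded_pad_cb (not included in this listing) to complete the link once a decoded pad appears. Below is a hedged sketch of what such a 0.10 "new-decoded-pad" handler typically looks like; the names are illustrative, and unlike Brasero's real callback (which receives the BraseroTranscode object and reads priv->link from it), this sketch passes the link target directly as user data.

#include <gst/gst.h>

/* Sketch of a decodebin "new-decoded-pad" handler: link newly exposed raw
 * audio pads to the start of the processing chain (what the code above keeps
 * in priv->link).  Hypothetical names, GStreamer 0.10 API. */
static void
on_new_decoded_pad (GstElement *decodebin, GstPad *pad, gboolean last,
    gpointer user_data)
{
  GstElement *link_target = GST_ELEMENT (user_data);
  GstCaps *caps;
  GstStructure *structure;
  GstPad *sinkpad;

  caps = gst_pad_get_caps (pad);
  structure = gst_caps_get_structure (caps, 0);

  /* Only link decoded raw audio; ignore video and subtitle streams. */
  if (g_str_has_prefix (gst_structure_get_name (structure), "audio/x-raw")) {
    sinkpad = gst_element_get_static_pad (link_target, "sink");
    if (!gst_pad_is_linked (sinkpad))
      gst_pad_link (pad, sinkpad);
    gst_object_unref (sinkpad);
  }

  gst_caps_unref (caps);
}
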
bool GstEnginePipeline::Init() {
    // Here we create all the parts of the gstreamer pipeline - from the source
    // to the sink.  The parts of the pipeline are split up into bins:
    //   uri decode bin -> audio bin
    // The uri decode bin is a gstreamer builtin that automatically picks the
    // right type of source and decoder for the URI.

    // The audio bin gets created here and contains:
    //   queue ! audioconvert ! <caps32>
    //         ! ( rgvolume ! rglimiter ! audioconvert2 ) ! tee
    // rgvolume and rglimiter are only created when replaygain is enabled.

    // After the tee the pipeline splits.  One split is converted to 16-bit int
    // samples for the scope, the other is kept as float32 and sent to the
    // speaker.
    //   tee1 ! probe_queue ! probe_converter ! <caps16> ! probe_sink
    //   tee2 ! audio_queue ! equalizer_preamp ! equalizer ! volume ! audioscale
    //        ! convert ! audiosink

    // Audio bin
    audiobin_ = gst_bin_new("audiobin");
    gst_bin_add(GST_BIN(pipeline_), audiobin_);

    // Create the sink
    if (!(audiosink_ = engine_->CreateElement(sink_, audiobin_)))
        return false;

    if (GstEngine::DoesThisSinkSupportChangingTheOutputDeviceToAUserEditableString(sink_) && !device_.isEmpty())
        g_object_set(G_OBJECT(audiosink_), "device", device_.toUtf8().constData(), NULL);

    // Create all the other elements
    GstElement *tee, *probe_queue, *probe_converter, *probe_sink, *audio_queue,
               *convert;

    queue_            = engine_->CreateElement("queue2",           audiobin_);
    audioconvert_     = engine_->CreateElement("audioconvert",     audiobin_);
    tee               = engine_->CreateElement("tee",              audiobin_);

    probe_queue       = engine_->CreateElement("queue",            audiobin_);
    probe_converter   = engine_->CreateElement("audioconvert",     audiobin_);
    probe_sink        = engine_->CreateElement("fakesink",         audiobin_);

    audio_queue       = engine_->CreateElement("queue",            audiobin_);
    equalizer_preamp_ = engine_->CreateElement("volume",           audiobin_);
    equalizer_        = engine_->CreateElement("equalizer-nbands", audiobin_);
    volume_           = engine_->CreateElement("volume",           audiobin_);
    audioscale_       = engine_->CreateElement("audioresample",    audiobin_);
    convert           = engine_->CreateElement("audioconvert",     audiobin_);

    if (!queue_ || !audioconvert_ || !tee || !probe_queue || !probe_converter ||
            !probe_sink || !audio_queue || !equalizer_preamp_ || !equalizer_ ||
            !volume_ || !audioscale_ || !convert) {
        return false;
    }

    // Create the replaygain elements if it's enabled.  event_probe is the
    // audioconvert element we attach the probe to, which will change depending
    // on whether replaygain is enabled.  convert_sink is the element after the
    // first audioconvert, which again will change.
    GstElement* event_probe = audioconvert_;
    GstElement* convert_sink = tee;

    if (rg_enabled_) {
        rgvolume_      = engine_->CreateElement("rgvolume",     audiobin_);
        rglimiter_     = engine_->CreateElement("rglimiter",    audiobin_);
        audioconvert2_ = engine_->CreateElement("audioconvert", audiobin_);
        event_probe = audioconvert2_;
        convert_sink = rgvolume_;

        if (!rgvolume_ || !rglimiter_ || !audioconvert2_) {
            return false;
        }

        // Set replaygain settings
        g_object_set(G_OBJECT(rgvolume_), "album-mode", rg_mode_, NULL);
        g_object_set(G_OBJECT(rgvolume_), "pre-amp", double(rg_preamp_), NULL);
        g_object_set(G_OBJECT(rglimiter_), "enabled", int(rg_compression_), NULL);
    }

    // Create a pad on the outside of the audiobin and connect it to the pad of
    // the first element.
    GstPad* pad = gst_element_get_pad(queue_, "sink");
    gst_element_add_pad(audiobin_, gst_ghost_pad_new("sink", pad));
    gst_object_unref(pad);

    // Add a data probe on the src pad of the audioconvert element for our scope.
    // We do it here because we want pre-equalized and pre-volume samples
    // so that our visualizations are not affected by them.
    pad = gst_element_get_pad(event_probe, "src");
    gst_pad_add_event_probe(pad, G_CALLBACK(EventHandoffCallback), this);
    gst_object_unref(pad);

    // Configure the fakesink properly
    g_object_set(G_OBJECT(probe_sink), "sync", TRUE, NULL);

    // Set the equalizer bands
    g_object_set(G_OBJECT(equalizer_), "num-bands", 10, NULL);

    int last_band_frequency = 0;
    for (int i=0 ; i<kEqBandCount ; ++i) {
        GstObject* band = gst_child_proxy_get_child_by_index(GST_CHILD_PROXY(equalizer_), i);

        const float frequency = kEqBandFrequencies[i];
        const float bandwidth = frequency - last_band_frequency;
        last_band_frequency = frequency;

        g_object_set(G_OBJECT(band), "freq", frequency,
                     "bandwidth", bandwidth,
                     "gain", 0.0f, NULL);
        g_object_unref(G_OBJECT(band));
    }

    // Set the buffer duration.  We set this on this queue instead of the
    // decode bin (in ReplaceDecodeBin()) because setting it on the decode bin
    // only affects network sources.
    // Disable the default buffer and byte limits, so we only buffer based on
    // time.
    g_object_set(G_OBJECT(queue_), "max-size-buffers", 0, NULL);
    g_object_set(G_OBJECT(queue_), "max-size-bytes", 0, NULL);
    g_object_set(G_OBJECT(queue_), "max-size-time", buffer_duration_nanosec_, NULL);
    g_object_set(G_OBJECT(queue_), "low-percent", 1, NULL);

    if (buffer_duration_nanosec_ > 0) {
        g_object_set(G_OBJECT(queue_), "use-buffering", true, NULL);
    }

    gst_element_link(queue_, audioconvert_);

    // Create the caps to put in each path in the tee.  The scope path gets 16-bit
    // ints and the audiosink path gets float32.
    GstCaps* caps16 = gst_caps_new_simple ("audio/x-raw-int",
                                           "width", G_TYPE_INT, 16,
                                           "signed", G_TYPE_BOOLEAN, true,
                                           NULL);
    GstCaps* caps32 = gst_caps_new_simple ("audio/x-raw-float",
                                           "width", G_TYPE_INT, 32,
                                           NULL);
    if (mono_playback_) {
        gst_caps_set_simple(caps32, "channels", G_TYPE_INT, 1, NULL);
    }

    // Link the elements with special caps
    gst_element_link_filtered(probe_converter, probe_sink, caps16);
    gst_element_link_filtered(audioconvert_, convert_sink, caps32);
    gst_caps_unref(caps16);
    gst_caps_unref(caps32);

    // Link the outputs of tee to the queues on each path.
    gst_pad_link(gst_element_get_request_pad(tee, "src%d"), gst_element_get_pad(probe_queue, "sink"));
    gst_pad_link(gst_element_get_request_pad(tee, "src%d"), gst_element_get_pad(audio_queue, "sink"));

    // Link replaygain elements if enabled.
    if (rg_enabled_) {
        gst_element_link_many(rgvolume_, rglimiter_, audioconvert2_, tee, NULL);
    }

    // Link everything else.
    gst_element_link(probe_queue, probe_converter);
    gst_element_link_many(audio_queue, equalizer_preamp_, equalizer_, volume_,
                          audioscale_, convert, audiosink_, NULL);

    // Add probes and handlers.
    gst_pad_add_buffer_probe(gst_element_get_pad(probe_converter, "src"), G_CALLBACK(HandoffCallback), this);
    gst_bus_set_sync_handler(gst_pipeline_get_bus(GST_PIPELINE(pipeline_)), BusCallbackSync, this);
    bus_cb_id_ = gst_bus_add_watch(gst_pipeline_get_bus(GST_PIPELINE(pipeline_)), BusCallback, this);

    MaybeLinkDecodeToAudio();

    return true;
}
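
Init() attaches HandoffCallback as a buffer probe on probe_converter's src pad to feed the scope with 16-bit samples; that callback is not part of this listing. Below is a hedged sketch of the GStreamer 0.10 buffer-probe callback shape it uses, with a hypothetical GByteArray consumer standing in for Clementine's actual scope buffer.

#include <gst/gst.h>

/* Sketch of a 0.10 buffer-probe callback, the shape expected by
 * gst_pad_add_buffer_probe() above.  The GByteArray is a hypothetical
 * stand-in for the real scope/visualisation buffer. */
static gboolean
scope_buffer_probe (GstPad *pad, GstBuffer *buffer, gpointer user_data)
{
  GByteArray *scope_data = user_data;

  /* Copy the raw 16-bit interleaved samples out for the visualisation
   * thread; the pipeline keeps ownership of the buffer itself. */
  g_byte_array_append (scope_data, GST_BUFFER_DATA (buffer),
      GST_BUFFER_SIZE (buffer));

  return TRUE;   /* TRUE lets the buffer continue downstream */
}

/* Installed the same way the code above installs HandoffCallback:
 *   gst_pad_add_buffer_probe (gst_element_get_pad (probe_converter, "src"),
 *                             G_CALLBACK (scope_buffer_probe), scope_data);
 */
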