Example #1
/**
 * gst_wrapper_camera_bin_src_construct_pipeline:
 * @bcamsrc: camerasrc object
 *
 * This function creates and links the elements of the camerasrc bin
 * videosrc ! cspconv ! srcfilter ! cspconv ! capsfilter ! crop ! scale ! \
 * capsfilter ! tee name=t
 *    t. ! ... (viewfinder pad)
 *    t. ! output-selector name=outsel
 *        outsel. ! (image pad)
 *        outsel. ! (video pad)
 *
 * Returns: TRUE, if elements were successfully created, FALSE otherwise
 */
static gboolean
gst_wrapper_camera_bin_src_construct_pipeline (GstBaseCameraSrc * bcamsrc)
{
  GstWrapperCameraBinSrc *self = GST_WRAPPER_CAMERA_BIN_SRC (bcamsrc);
  GstBin *cbin = GST_BIN (bcamsrc);
  GstElement *tee;
  GstElement *filter_csp;
  GstElement *src_csp;
  GstElement *capsfilter;
  gboolean ret = FALSE;
  GstPad *vf_pad;
  GstPad *tee_capture_pad;
  GstPad *src_caps_src_pad;

  if (!self->elements_created) {

    GST_DEBUG_OBJECT (self, "constructing pipeline");

    /* Add application set or default video src element */
    if (!(self->src_vid_src = gst_camerabin_setup_default_element (cbin,
                self->app_vid_src, "autovideosrc", DEFAULT_VIDEOSRC,
                "camerasrc-real-src"))) {
      self->src_vid_src = NULL;
      goto done;
    } else {
      if (!gst_camerabin_add_element (cbin, self->src_vid_src)) {
        goto done;
      }
    }
    /* we lost the reference */
    self->app_vid_src = NULL;

    /* we listen for changes to max-zoom in the video src so that
     * we can proxy them to the basecamerasrc property */
    if (g_object_class_find_property (G_OBJECT_GET_CLASS (bcamsrc), "max-zoom")) {
      g_signal_connect (G_OBJECT (self->src_vid_src), "notify::max-zoom",
          (GCallback) gst_wrapper_camera_bin_src_max_zoom_cb, bcamsrc);
    }

    /* add an event probe to the src element to drop EOS during READY->NULL */
    {
      GstPad *pad;
      pad = gst_element_get_static_pad (self->src_vid_src, "src");

      self->src_event_probe_id = gst_pad_add_event_probe (pad,
          (GCallback) gst_wrapper_camera_src_src_event_probe, self);
      gst_object_unref (pad);
    }

    if (!gst_camerabin_create_and_add_element (cbin, "ffmpegcolorspace",
            "src-colorspace"))
      goto done;

    if (!(self->src_filter =
            gst_camerabin_create_and_add_element (cbin, "capsfilter",
                "src-capsfilter")))
      goto done;

    /* attach to notify::caps on the first capsfilter and use a callback
     * to recalculate the zoom properties when these caps change and to
     * propagate the caps to the second capsfilter */
    src_caps_src_pad = gst_element_get_static_pad (self->src_filter, "src");
    g_signal_connect (src_caps_src_pad, "notify::caps",
        G_CALLBACK (gst_wrapper_camera_bin_src_caps_cb), self);
    gst_object_unref (src_caps_src_pad);

    if (!(self->src_zoom_crop =
            gst_camerabin_create_and_add_element (cbin, "videocrop",
                "zoom-crop")))
      goto done;
    if (!(self->src_zoom_scale =
            gst_camerabin_create_and_add_element (cbin, "videoscale",
                "zoom-scale")))
      goto done;
    if (!(self->src_zoom_filter =
            gst_camerabin_create_and_add_element (cbin, "capsfilter",
                "zoom-capsfilter")))
      goto done;

    if (!(tee =
            gst_camerabin_create_and_add_element (cbin, "tee",
                "camerasrc-tee")))
      goto done;

    /* viewfinder pad */
    vf_pad = gst_element_get_request_pad (tee, "src%d");
    g_object_set (tee, "alloc-pad", vf_pad, NULL);
    gst_ghost_pad_set_target (GST_GHOST_PAD (self->vfsrc), vf_pad);
    gst_object_unref (vf_pad);

    /* image/video pad from tee */
    tee_capture_pad = gst_element_get_request_pad (tee, "src%d");

    self->output_selector =
        gst_element_factory_make ("output-selector", "outsel");
    g_object_set (self->output_selector, "pad-negotiation-mode", 0, NULL);
    gst_bin_add (GST_BIN (self), self->output_selector);
    {
      GstPad *pad = gst_element_get_static_pad (self->output_selector, "sink");

      /* check return TODO */
      gst_pad_link (tee_capture_pad, pad);
      gst_object_unref (pad);
    }
    gst_object_unref (tee_capture_pad);

    /* Create the 2 output pads for video and image */
    self->outsel_vidpad =
        gst_element_get_request_pad (self->output_selector, "src%d");
    self->outsel_imgpad =
        gst_element_get_request_pad (self->output_selector, "src%d");

    g_assert (self->outsel_vidpad != NULL);
    g_assert (self->outsel_imgpad != NULL);

    gst_pad_add_buffer_probe (self->outsel_imgpad,
        G_CALLBACK (gst_wrapper_camera_bin_src_imgsrc_probe), self);
    gst_pad_add_buffer_probe (self->outsel_vidpad,
        G_CALLBACK (gst_wrapper_camera_bin_src_vidsrc_probe), self);
    gst_ghost_pad_set_target (GST_GHOST_PAD (self->imgsrc),
        self->outsel_imgpad);
    gst_ghost_pad_set_target (GST_GHOST_PAD (self->vidsrc),
        self->outsel_vidpad);

    if (bcamsrc->mode == MODE_IMAGE) {
      g_object_set (self->output_selector, "active-pad", self->outsel_imgpad,
          NULL);
    } else {
      g_object_set (self->output_selector, "active-pad", self->outsel_vidpad,
          NULL);
    }



    gst_pad_set_active (self->vfsrc, TRUE);
    gst_pad_set_active (self->imgsrc, TRUE);    /* XXX ??? */
    gst_pad_set_active (self->vidsrc, TRUE);    /* XXX ??? */
  }

  /* Do this even if pipeline is constructed */

  if (self->video_filter) {
    /* check if we need to replace the current one */
    if (self->video_filter != self->app_vid_filter) {
      gst_bin_remove (cbin, self->video_filter);
      gst_object_unref (self->video_filter);
      self->video_filter = NULL;
      filter_csp = gst_bin_get_by_name (cbin, "filter-colorspace");
      gst_bin_remove (cbin, filter_csp);
      gst_object_unref (filter_csp);
      filter_csp = NULL;
    }
  }

  if (!self->video_filter) {
    if (self->app_vid_filter) {
      self->video_filter = gst_object_ref (self->app_vid_filter);
      filter_csp = gst_element_factory_make ("ffmpegcolorspace",
          "filter-colorspace");
      gst_bin_add_many (cbin, self->video_filter, filter_csp, NULL);
      src_csp = gst_bin_get_by_name (cbin, "src-colorspace");
      capsfilter = gst_bin_get_by_name (cbin, "src-capsfilter");
      if (gst_pad_is_linked (gst_element_get_static_pad (src_csp, "src")))
        gst_element_unlink (src_csp, capsfilter);
      if (!gst_element_link_many (src_csp, self->video_filter, filter_csp,
              capsfilter, NULL))
        goto done;
    }
  }
  ret = TRUE;
  self->elements_created = TRUE;
done:
  return ret;
}
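The example above builds its source bin with helpers such as gst_camerabin_create_and_add_element(), requests tee pads and retargets ghost pads. A minimal sketch of the same add/link/ghost-pad pattern with plain GStreamer calls; this assumes the 1.x API ("src_%u" template) and uses illustrative element and pad names that are not taken from the example:

#include <gst/gst.h>

/* Sketch: build a small source bin and expose a tee branch as a ghost pad. */
static GstElement *
build_src_bin_sketch (void)
{
  GstElement *bin, *src, *capsfilter, *tee;
  GstPad *tee_src, *ghost;

  bin = gst_bin_new ("camerasrc-sketch");
  src = gst_element_factory_make ("videotestsrc", NULL);
  capsfilter = gst_element_factory_make ("capsfilter", NULL);
  tee = gst_element_factory_make ("tee", NULL);

  gst_bin_add_many (GST_BIN (bin), src, capsfilter, tee, NULL);
  if (!gst_element_link_many (src, capsfilter, tee, NULL)) {
    gst_object_unref (bin);
    return NULL;
  }

  /* request a src pad from the tee and make it visible on the bin */
  tee_src = gst_element_get_request_pad (tee, "src_%u");
  ghost = gst_ghost_pad_new ("vfsrc", tee_src);
  gst_pad_set_active (ghost, TRUE);
  gst_element_add_pad (bin, ghost);
  gst_object_unref (tee_src);

  return bin;
}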
Example #2
int
main (int argc, char *argv[])
{
  GstElement *filesrc, *audiosink, *queue;
  GstElement *pipeline;

  gst_init (&argc, &argv);

  if (argc != 2) {
    g_print ("usage: %s <filename>\n", argv[0]);
    exit (-1);
  }

  /* create a new bin to hold the elements */
  pipeline = gst_pipeline_new ("pipeline");
  g_assert (pipeline != NULL);

  /* create a disk reader */
  filesrc = gst_element_factory_make ("filesrc", "disk_source");
  g_assert (filesrc != NULL);
  g_object_set (G_OBJECT (filesrc), "location", argv[1], NULL);
  g_signal_connect (G_OBJECT (filesrc), "eos", G_CALLBACK (eos), pipeline);

  queue = gst_element_factory_make ("queue", "queue");

  /* and an audio sink */
  audiosink = gst_element_factory_make ("alsasink", "play_audio");
  g_assert (audiosink != NULL);

  /* add objects to the main pipeline */
  /*
     gst_pipeline_add_src(GST_PIPELINE(pipeline), filesrc);
     gst_pipeline_add_sink(GST_PIPELINE(pipeline), queue);

     gst_bin_add(GST_BIN (pipeline), audiosink);

     gst_pad_link(gst_element_get_pad(queue,"src"),
     gst_element_get_pad(audiosink,"sink"));

     if (!gst_pipeline_autoplug(GST_PIPELINE(pipeline))) {
     g_print("cannot autoplug pipeline\n");
     exit(-1);
     }
   */

  /* add the created elements to the pipeline and link them
   * (assumes the file holds raw audio the ALSA sink can play directly) */
  gst_bin_add_many (GST_BIN (pipeline), filesrc, queue, audiosink, NULL);
  gst_element_link_many (filesrc, queue, audiosink, NULL);

  /* make it ready */
  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_READY);
  /* start playing */
  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_PLAYING);

  playing = TRUE;

  while (playing) {
    gst_bin_iterate (GST_BIN (pipeline));
  }

  gst_element_set_state (GST_ELEMENT (pipeline), GST_STATE_NULL);

  exit (0);
}
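The snippet above dates from the GStreamer 0.x era (the commented-out block uses the long-removed autoplug API). A rough modern sketch of the same intent, autoplugged playback of a local file; it assumes the 1.x API and that the playbin element is available, none of which comes from the original example:

#include <gst/gst.h>

int
main (int argc, char *argv[])
{
  GstElement *playbin;
  GstBus *bus;
  GstMessage *msg;
  gchar *uri;

  gst_init (&argc, &argv);

  if (argc != 2) {
    g_print ("usage: %s <filename>\n", argv[0]);
    return -1;
  }

  uri = gst_filename_to_uri (argv[1], NULL);
  playbin = gst_element_factory_make ("playbin", "play");
  g_object_set (playbin, "uri", uri, NULL);
  g_free (uri);

  gst_element_set_state (playbin, GST_STATE_PLAYING);

  /* block until an error or end-of-stream message arrives */
  bus = gst_element_get_bus (playbin);
  msg = gst_bus_timed_pop_filtered (bus, GST_CLOCK_TIME_NONE,
      GST_MESSAGE_ERROR | GST_MESSAGE_EOS);
  if (msg != NULL)
    gst_message_unref (msg);
  gst_object_unref (bus);

  gst_element_set_state (playbin, GST_STATE_NULL);
  gst_object_unref (playbin);

  return 0;
}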
Example #3
gboolean
fs_rtp_sub_stream_set_codecbin_unlock (FsRtpSubStream *substream,
    FsCodec *codec,
    GstElement *codecbin,
    GError **error)
{
  GstCaps *caps = NULL;
  gchar *tmp;
  gboolean ret = FALSE;
  GstPad *pad;
  gboolean codec_changed = TRUE;

  FS_RTP_SUB_STREAM_LOCK (substream);

  if (substream->priv->stopped)
  {
    FS_RTP_SUB_STREAM_UNLOCK (substream);
    FS_RTP_SESSION_UNLOCK (substream->priv->session);
    gst_object_unref (codecbin);
    fs_codec_destroy (codec);
    fs_rtp_sub_stream_try_stop (substream);
    return TRUE;
  }
  substream->priv->modifying = TRUE;
  FS_RTP_SUB_STREAM_UNLOCK (substream);

  if (substream->codec)
  {
    if (!fs_codec_are_equal (codec, substream->codec))
      codec_changed = FALSE;
  }

  if (substream->priv->codecbin)
  {
    FsCodec *saved_codec = substream->codec;
    GstElement *old_codecbin;

    gst_element_set_locked_state (substream->priv->codecbin, TRUE);
    if (gst_element_set_state (substream->priv->codecbin, GST_STATE_NULL) !=
        GST_STATE_CHANGE_SUCCESS)
    {
      gst_element_set_locked_state (substream->priv->codecbin, FALSE);
      g_set_error (error, FS_ERROR, FS_ERROR_INTERNAL,
          "Could not set the codec bin for ssrc %u"
          " and payload type %d to the state NULL", substream->ssrc,
          substream->pt);
      FS_RTP_SUB_STREAM_LOCK (substream);
      substream->priv->modifying = FALSE;
      FS_RTP_SUB_STREAM_UNLOCK (substream);
      FS_RTP_SESSION_UNLOCK (substream->priv->session);
      gst_object_unref (codecbin);
      fs_codec_destroy (codec);
      fs_rtp_sub_stream_try_stop (substream);
      return FALSE;
    }

    old_codecbin = substream->priv->codecbin;
    substream->priv->codecbin = NULL;
    FS_RTP_SESSION_UNLOCK (substream->priv->session);

    gst_bin_remove (GST_BIN (substream->priv->conference), old_codecbin);

    FS_RTP_SESSION_LOCK (substream->priv->session);
    if (substream->codec == saved_codec)
    {
      fs_codec_destroy (substream->codec);
      substream->codec = NULL;
    }

    if (substream->priv->caps)
      gst_caps_unref (substream->priv->caps);
    substream->priv->caps = NULL;
  }

  FS_RTP_SESSION_UNLOCK (substream->priv->session);

  gst_object_ref (codecbin);

  if (!gst_bin_add (GST_BIN (substream->priv->conference), codecbin))
  {
    gst_object_unref (codecbin);
    g_set_error (error, FS_ERROR, FS_ERROR_CONSTRUCTION,
      "Could not add the codec bin to the conference");
    FS_RTP_SUB_STREAM_LOCK (substream);
    substream->priv->modifying = FALSE;
    FS_RTP_SUB_STREAM_UNLOCK (substream);
    fs_rtp_sub_stream_try_stop (substream);
    return FALSE;
  }

  if (gst_element_set_state (codecbin, GST_STATE_PLAYING) ==
      GST_STATE_CHANGE_FAILURE)
  {
    g_set_error (error, FS_ERROR, FS_ERROR_CONSTRUCTION,
      "Could not set the codec bin to the playing state");
    goto error;
  }

  if (!gst_element_link_pads (codecbin, "src",
      substream->priv->output_valve, "sink"))
  {
    g_set_error (error, FS_ERROR, FS_ERROR_CONSTRUCTION,
      "Could not link the codec bin to the output_valve");
    goto error;
  }

  if (!gst_element_link_pads (substream->priv->capsfilter, "src",
          codecbin, "sink"))
  {
     g_set_error (error, FS_ERROR, FS_ERROR_CONSTRUCTION,
         "Could not link the receive capsfilter and the codecbin for pt %d",
         substream->pt);
    goto error;
  }

  caps = fs_codec_to_gst_caps (codec);
  tmp = gst_caps_to_string (caps);
  GST_DEBUG ("Setting caps %s on recv substream", tmp);
  g_free (tmp);
  g_object_set (substream->priv->capsfilter, "caps", caps, NULL);

  pad = gst_element_get_static_pad (codecbin, "sink");
  if (!pad)
  {
    g_set_error (error, FS_ERROR, FS_ERROR_INTERNAL, "Could not get sink pad"
        " from codecbin");
    goto error;
  }

  /* This is a non-fatal error: some codecs require config data before they
   * can start, so we just wait for it instead of failing.
   */
  if (!gst_pad_set_caps (pad, caps))
  {
    ret = TRUE;
    gst_object_unref (pad);
    gst_caps_unref (caps);

    GST_DEBUG ("Could not set the caps on the codecbin, waiting on config-data"
        " for SSRC:%x pt:%d", substream->ssrc, substream->pt);

    /* We call this to drop all buffers until something comes up */
    fs_rtp_sub_stream_add_probe_locked (substream);
    goto error;
  }

  gst_object_unref (pad);

  FS_RTP_SESSION_LOCK (substream->priv->session);
  substream->priv->caps = caps;
  substream->priv->codecbin = codecbin;
  substream->codec = codec;
  FS_RTP_SUB_STREAM_LOCK (substream);
  substream->priv->modifying = FALSE;
  FS_RTP_SUB_STREAM_UNLOCK (substream);

  if (substream->priv->stream && !substream->priv->output_ghostpad)
  {
    if (!fs_rtp_sub_stream_add_output_ghostpad_unlock (substream, error))
      goto error;
  }
  else
  {
    FS_RTP_SESSION_UNLOCK (substream->priv->session);
    if (codec_changed)
      g_signal_emit (substream, signals[CODEC_CHANGED], 0);
  }

  gst_object_unref (codecbin);

  fs_rtp_sub_stream_try_stop (substream);

  return TRUE;

 error:
  FS_RTP_SUB_STREAM_LOCK (substream);
  substream->priv->modifying = FALSE;
  FS_RTP_SUB_STREAM_UNLOCK (substream);


  gst_element_set_locked_state (codecbin, TRUE);
  gst_element_set_state (codecbin, GST_STATE_NULL);
  gst_object_ref (codecbin);
  gst_bin_remove (GST_BIN (substream->priv->conference), codecbin);

  gst_object_unref (codecbin);
  fs_codec_destroy (codec);

  fs_rtp_sub_stream_try_stop (substream);

  return ret;
}
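The function above follows a common pattern for replacing an element inside a running bin: lock its state, bring it to NULL, remove it, add the replacement, relink and sync its state. A condensed sketch of just that pattern; GStreamer 1.x is assumed and the function and parameter names are illustrative, not part of the example:

#include <gst/gst.h>

/* Sketch: replace old_element with new_element between upstream and
 * downstream while the bin keeps running. */
static gboolean
swap_element_sketch (GstBin * bin, GstElement * old_element,
    GstElement * new_element, GstElement * upstream, GstElement * downstream)
{
  /* shut the old element down without taking the whole bin with it */
  gst_element_set_locked_state (old_element, TRUE);
  if (gst_element_set_state (old_element, GST_STATE_NULL) ==
      GST_STATE_CHANGE_FAILURE) {
    gst_element_set_locked_state (old_element, FALSE);
    return FALSE;
  }

  /* gst_bin_remove() unlinks the element's pads and drops the bin's ref */
  gst_bin_remove (bin, old_element);

  gst_bin_add (bin, new_element);
  if (!gst_element_link (upstream, new_element) ||
      !gst_element_link (new_element, downstream))
    return FALSE;

  /* bring the replacement up to the (possibly PLAYING) parent state */
  return gst_element_sync_state_with_parent (new_element);
}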
Example #4
/*
 * owr_local_media_source_get_pad
 *
 * The beginning of a media source chain in the pipeline looks like this:
 *
 * +--------+   +------------+   +-----+
 * | source +---+ capsfilter +---+ tee +---
 * +--------+   +------------+   +-----+
 *
 * Only one such chain is created per media source for the initial get_pad
 * call. Subsequent calls will just obtain another tee pad. After these initial
 * elements are created, they are linked together and synced up to the PLAYING
 * state.
 *
 * Once the initial chain is created, a block is placed on the new src pad of
 * the tee. The rest of the new chain (conversion elements, capsfilter, queues,
 * etc.) is created, linked and synced in the pad block callback.
 */
static GstPad *owr_local_media_source_get_pad(OwrMediaSource *media_source, GstCaps *caps)
{
    OwrLocalMediaSource *local_source;
    OwrLocalMediaSourcePrivate *priv;
    GstElement *source_bin, *post_tee_bin;
    GstElement *source = NULL, *capsfilter = NULL, *tee;
    GstPad *ghostpad = NULL;
    gchar *pad_name;
    OwrMediaType media_type = OWR_MEDIA_TYPE_UNKNOWN;
    OwrSourceType source_type = OWR_SOURCE_TYPE_UNKNOWN;
    OwrCodecType codec_type = OWR_CODEC_TYPE_NONE;
    guint source_id;

    g_assert(media_source);
    local_source = OWR_LOCAL_MEDIA_SOURCE(media_source);
    priv = local_source->priv;

    g_object_get(media_source, "media-type", &media_type, "type", &source_type, NULL);

    /* only create the source bin for this media source once */
    if (_owr_media_source_get_element(media_source)) {
        GST_DEBUG_OBJECT(media_source, "Re-using existing source element/bin");
        source_bin = _owr_media_source_get_element(media_source);
        tee = priv->source_tee;
    } else {
        GEnumClass *media_enum_class, *source_enum_class;
        GEnumValue *media_enum_value, *source_enum_value;
        gchar *bin_name;
        GstCaps *source_caps;
        GstStructure *source_structure;
        GstElement *fakesink;

        media_enum_class = G_ENUM_CLASS(g_type_class_ref(OWR_TYPE_MEDIA_TYPE));
        source_enum_class = G_ENUM_CLASS(g_type_class_ref(OWR_TYPE_SOURCE_TYPE));
        media_enum_value = g_enum_get_value(media_enum_class, media_type);
        source_enum_value = g_enum_get_value(source_enum_class, source_type);

        bin_name = g_strdup_printf("local-%s-%s-source-bin-%u",
            media_enum_value ? media_enum_value->value_nick : "unknown",
            source_enum_value ? source_enum_value->value_nick : "unknown",
            g_atomic_int_add(&unique_bin_id, 1));

        g_type_class_unref(media_enum_class);
        g_type_class_unref(source_enum_class);

        source_bin = gst_bin_new(bin_name);

        g_free(bin_name);
        bin_name = NULL;

        gst_bin_add(GST_BIN(_owr_get_pipeline()), source_bin);
        gst_element_sync_state_with_parent(GST_ELEMENT(source_bin));

        GST_DEBUG_OBJECT(local_source, "media_type: %d, type: %d", media_type, source_type);

        if (media_type == OWR_MEDIA_TYPE_UNKNOWN || source_type == OWR_SOURCE_TYPE_UNKNOWN) {
            GST_ERROR_OBJECT(local_source,
                "Cannot connect source with unknown type or media type to other component");
            goto done;
        }

        switch (media_type) {
        case OWR_MEDIA_TYPE_AUDIO:
            {
            switch (source_type) {
            case OWR_SOURCE_TYPE_CAPTURE:
                CREATE_ELEMENT(source, AUDIO_SRC, "audio-source");
#if !defined(__APPLE__) || !TARGET_IPHONE_SIMULATOR
                g_object_set(source, "buffer-time", G_GINT64_CONSTANT(40000),
                    "latency-time", G_GINT64_CONSTANT(10000), NULL);
#ifdef __APPLE__
                g_object_set(source, "device", priv->device_index, NULL);
#endif
#endif
                break;
            case OWR_SOURCE_TYPE_TEST:
                CREATE_ELEMENT(source, "audiotestsrc", "audio-source");
                g_object_set(source, "is-live", TRUE, NULL);
                break;
            case OWR_SOURCE_TYPE_UNKNOWN:
            default:
                g_assert_not_reached();
                goto done;
            }

#if defined(__APPLE__) && !TARGET_IPHONE_SIMULATOR
            /* workaround for osxaudiosrc bug
             * https://bugzilla.gnome.org/show_bug.cgi?id=711764 */
            CREATE_ELEMENT(capsfilter, "capsfilter", "audio-source-capsfilter");
            source_caps = gst_caps_copy(caps);
            source_structure = gst_caps_get_structure(source_caps, 0);
            gst_structure_set(source_structure,
                "format", G_TYPE_STRING, "S32LE",
                "rate", G_TYPE_INT, 44100, NULL);
            gst_structure_remove_field(source_structure, "channels");
            g_object_set(capsfilter, "caps", source_caps, NULL);
            gst_caps_unref(source_caps);
            gst_bin_add(GST_BIN(source_bin), capsfilter);
#endif

            break;
            }
        case OWR_MEDIA_TYPE_VIDEO:
        {
            switch (source_type) {
            case OWR_SOURCE_TYPE_CAPTURE:
                CREATE_ELEMENT(source, VIDEO_SRC, "video-source");
#if defined(__APPLE__) && !TARGET_IPHONE_SIMULATOR
                g_object_set(source, "device-index", priv->device_index, NULL);
#elif defined(__ANDROID__)
                g_object_set(source, "cam-index", priv->device_index, NULL);
#elif defined(__linux__)
                {
                    gchar *device = g_strdup_printf("/dev/video%u", priv->device_index);
                    g_object_set(source, "device", device, NULL);
                    g_free(device);
                }
#endif
                break;
            case OWR_SOURCE_TYPE_TEST:
                CREATE_ELEMENT(source, "videotestsrc", "video-source");
                g_object_set(source, "is-live", TRUE, NULL);
                break;
            case OWR_SOURCE_TYPE_UNKNOWN:
            default:
                g_assert_not_reached();
                goto done;
            }

            CREATE_ELEMENT(capsfilter, "capsfilter", "video-source-capsfilter");
            source_caps = gst_caps_copy(caps);
            source_structure = gst_caps_get_structure(source_caps, 0);
            gst_structure_remove_field(source_structure, "format");
            gst_structure_remove_field(source_structure, "framerate");
            g_object_set(capsfilter, "caps", source_caps, NULL);
            gst_caps_unref(source_caps);
            gst_bin_add(GST_BIN(source_bin), capsfilter);

            break;
        }
        case OWR_MEDIA_TYPE_UNKNOWN:
        default:
            g_assert_not_reached();
            goto done;
        }
        g_assert(source);

        CREATE_ELEMENT(tee, "tee", "source-tee");

        CREATE_ELEMENT(fakesink, "fakesink", "source-tee-fakesink");
        g_object_set(fakesink, "async", FALSE, NULL);

        gst_bin_add_many(GST_BIN(source_bin), source, tee, fakesink, NULL);

        gst_element_sync_state_with_parent(fakesink);
        LINK_ELEMENTS(tee, fakesink);

        if (!source)
            GST_ERROR_OBJECT(media_source, "Failed to create source element!");
    }

    codec_type = _owr_caps_to_codec_type(caps);
    source_id = g_atomic_int_add(&unique_pad_id, 1);

    pad_name = g_strdup_printf("src_%u_%u", codec_type, source_id);
    ghostpad = gst_ghost_pad_new_no_target(pad_name, GST_PAD_SRC);
    g_free(pad_name);

    post_tee_bin = create_post_tee_bin(media_source, source_bin, caps, ghostpad, source_id);
    if (!post_tee_bin) {
        gst_object_unref(ghostpad);
        ghostpad = NULL;
        goto done;
    }

    if (!gst_element_link(tee, post_tee_bin)) {
        GST_ERROR("Failed to link source tee to source-post-tee-bin-%u", source_id);
        g_object_unref(post_tee_bin);
        ghostpad = NULL;
        goto done;
    }

    if (!_owr_media_source_get_element(media_source)) {
        /* the next code block inside the if is a workaround for avfvideosrc
         * not handling on-the-fly reconfiguration
         * on upstream reconfigure events, we drop the event in the probe */
        if (media_type == OWR_MEDIA_TYPE_VIDEO) {
            GstPad *tee_sinkpad;

            tee_sinkpad = gst_element_get_static_pad(tee, "sink");
            gst_pad_add_probe(tee_sinkpad, GST_PAD_PROBE_TYPE_EVENT_UPSTREAM,
                drop_reconfigure_cb, NULL, NULL);
        }

        if (capsfilter) {
            LINK_ELEMENTS(capsfilter, tee);
            gst_element_sync_state_with_parent(tee);
            gst_element_sync_state_with_parent(capsfilter);
            LINK_ELEMENTS(source, capsfilter);
        } else {
            gst_element_sync_state_with_parent(tee);
            LINK_ELEMENTS(source, tee);
        }
        gst_element_sync_state_with_parent(source);
        _owr_media_source_set_element(media_source, source_bin);
        priv->source_tee = tee;
    }

done:
    return ghostpad;
}
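The comment at the top of the example above describes blocking each newly requested tee src pad and finishing the branch in the pad-block callback. A hedged sketch of that dynamic-branch pattern with plain GStreamer 1.x calls; the function names, the "src_%u" template and the single "branch head" element stand in for the real per-branch chain:

#include <gst/gst.h>

/* Called when data reaches the blocked tee pad: link the branch, then
 * remove the probe so data starts flowing into it. */
static GstPadProbeReturn
branch_blocked_cb (GstPad * tee_src, GstPadProbeInfo * info, gpointer user_data)
{
  GstElement *branch_head = GST_ELEMENT (user_data);
  GstPad *sinkpad = gst_element_get_static_pad (branch_head, "sink");

  gst_pad_link (tee_src, sinkpad);
  gst_object_unref (sinkpad);

  return GST_PAD_PROBE_REMOVE;
}

/* Request a new tee pad, block it, and let the callback finish the hookup. */
static void
add_tee_branch (GstElement * pipeline, GstElement * tee, GstElement * branch_head)
{
  GstPad *tee_src = gst_element_get_request_pad (tee, "src_%u");

  gst_bin_add (GST_BIN (pipeline), branch_head);
  gst_element_sync_state_with_parent (branch_head);

  gst_pad_add_probe (tee_src, GST_PAD_PROBE_TYPE_BLOCK_DOWNSTREAM,
      branch_blocked_cb, branch_head, NULL);
  gst_object_unref (tee_src);
}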
Example #5

static GstElement *
ges_track_video_transition_create_element (GESTrackObject * object)
{
  GstElement *topbin, *iconva, *iconvb, *oconv;
  GObject *target = NULL;
  const gchar *propname = NULL;
  GstElement *mixer = NULL;
  GstPad *sinka_target, *sinkb_target, *src_target, *sinka, *sinkb, *src;
  GstController *controller;
  GstInterpolationControlSource *control_source;
  GESTrackVideoTransition *self;
  GESTrackVideoTransitionPrivate *priv;

  self = GES_TRACK_VIDEO_TRANSITION (object);
  priv = self->priv;

  GST_LOG ("creating a video bin");

  topbin = gst_bin_new ("transition-bin");
  iconva = gst_element_factory_make ("ffmpegcolorspace", "tr-csp-a");
  iconvb = gst_element_factory_make ("ffmpegcolorspace", "tr-csp-b");
  oconv = gst_element_factory_make ("ffmpegcolorspace", "tr-csp-output");

  gst_bin_add_many (GST_BIN (topbin), iconva, iconvb, oconv, NULL);
  /* Prefer videomixer2 to videomixer */
  mixer = gst_element_factory_make ("videomixer2", NULL);
  if (mixer == NULL)
    mixer = gst_element_factory_make ("videomixer", NULL);
  g_object_set (G_OBJECT (mixer), "background", 1, NULL);
  gst_bin_add (GST_BIN (topbin), mixer);

  if (priv->type != GES_VIDEO_STANDARD_TRANSITION_TYPE_CROSSFADE) {
    priv->sinka =
        (GstPad *) link_element_to_mixer_with_smpte (GST_BIN (topbin), iconva,
        mixer, priv->type, NULL);
    priv->sinkb =
        (GstPad *) link_element_to_mixer_with_smpte (GST_BIN (topbin), iconvb,
        mixer, priv->type, &priv->smpte);
    target = (GObject *) priv->smpte;
    propname = "position";
    priv->start_value = 1.0;
    priv->end_value = 0.0;
  } else {
    priv->sinka = (GstPad *) link_element_to_mixer (iconva, mixer);
    priv->sinkb = (GstPad *) link_element_to_mixer (iconvb, mixer);
    target = (GObject *) priv->sinkb;
    propname = "alpha";
    priv->start_value = 0.0;
    priv->end_value = 1.0;
  }

  priv->mixer = gst_object_ref (mixer);

  fast_element_link (mixer, oconv);

  sinka_target = gst_element_get_static_pad (iconva, "sink");
  sinkb_target = gst_element_get_static_pad (iconvb, "sink");
  src_target = gst_element_get_static_pad (oconv, "src");

  sinka = gst_ghost_pad_new ("sinka", sinka_target);
  sinkb = gst_ghost_pad_new ("sinkb", sinkb_target);
  src = gst_ghost_pad_new ("src", src_target);

  gst_element_add_pad (topbin, src);
  gst_element_add_pad (topbin, sinka);
  gst_element_add_pad (topbin, sinkb);

  gst_object_unref (sinka_target);
  gst_object_unref (sinkb_target);
  gst_object_unref (src_target);

  /* set up interpolation */

  g_object_set (target, propname, (gfloat) 0.0, NULL);

  controller = gst_object_control_properties (target, propname, NULL);

  control_source = gst_interpolation_control_source_new ();
  gst_controller_set_control_source (controller,
      propname, GST_CONTROL_SOURCE (control_source));
  gst_interpolation_control_source_set_interpolation_mode (control_source,
      GST_INTERPOLATE_LINEAR);

  priv->controller = controller;
  priv->control_source = control_source;

  return topbin;
}
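The transition above prefers videomixer2 and falls back to videomixer when it is missing. A tiny helper sketch of that try-then-fall-back element creation; the helper name and factory names are only examples:

#include <gst/gst.h>

/* Sketch: try a preferred factory and fall back to another one. */
static GstElement *
make_element_with_fallback (const gchar * preferred, const gchar * fallback,
    const gchar * name)
{
  GstElement *element = gst_element_factory_make (preferred, name);

  if (element == NULL) {
    GST_INFO ("%s not available, falling back to %s", preferred, fallback);
    element = gst_element_factory_make (fallback, name);
  }

  return element;
}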
Example #6

/****************************************************
 *              GstElement vmethods                 *
 ****************************************************/
static GstPad *
_request_new_pad (GstElement * element, GstPadTemplate * templ,
    const gchar * name, const GstCaps * caps)
{
  GstPad *audioresample_srcpad, *audioconvert_sinkpad, *tmpghost;
  GstPad *ghost;
  GstElement *audioconvert, *audioresample;
  PadInfos *infos = g_slice_new0 (PadInfos);
  GESSmartAdder *self = GES_SMART_ADDER (element);

  infos->adder_pad = gst_element_request_pad (self->adder,
      gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (self->adder),
          "sink_%u"), NULL, caps);

  if (infos->adder_pad == NULL) {
    GST_WARNING_OBJECT (element, "Could not get any pad from GstAdder");
    g_slice_free (PadInfos, infos);

    return NULL;
  }

  infos->self = self;

  infos->bin = gst_bin_new (NULL);
  audioconvert = gst_element_factory_make ("audioconvert", NULL);
  audioresample = gst_element_factory_make ("audioresample", NULL);

  gst_bin_add_many (GST_BIN (infos->bin), audioconvert, audioresample, NULL);
  gst_element_link_many (audioconvert, audioresample, NULL);

  audioconvert_sinkpad = gst_element_get_static_pad (audioconvert, "sink");
  tmpghost = GST_PAD (gst_ghost_pad_new (NULL, audioconvert_sinkpad));
  gst_object_unref (audioconvert_sinkpad);
  gst_pad_set_active (tmpghost, TRUE);
  gst_element_add_pad (GST_ELEMENT (infos->bin), tmpghost);

  gst_bin_add (GST_BIN (self), infos->bin);
  ghost = gst_ghost_pad_new (NULL, tmpghost);
  gst_pad_set_active (ghost, TRUE);
  if (!gst_element_add_pad (GST_ELEMENT (self), ghost))
    goto could_not_add;

  audioresample_srcpad = gst_element_get_static_pad (audioresample, "src");
  tmpghost = GST_PAD (gst_ghost_pad_new (NULL, audioresample_srcpad));
  gst_object_unref (audioresample_srcpad);
  gst_pad_set_active (tmpghost, TRUE);
  gst_element_add_pad (GST_ELEMENT (infos->bin), tmpghost);
  gst_pad_link (tmpghost, infos->adder_pad);

  LOCK (self);
  g_hash_table_insert (self->pads_infos, ghost, infos);
  UNLOCK (self);

  GST_DEBUG_OBJECT (self, "Returning new pad %" GST_PTR_FORMAT, ghost);
  return ghost;

could_not_add:
  {
    GST_ERROR_OBJECT (self, "could not add pad");
    destroy_pad (infos);
    return NULL;
  }
}
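The request_new_pad implementation above hands out ghost pads backed by an internal audioconvert ! audioresample chain that feeds an adder. A hedged sketch of how a caller would obtain and later return such a pad; the "sink_%u" template name and the "mixer" variable are assumptions, not taken from the example:

#include <gst/gst.h>

/* Sketch: obtain a request pad, use it, then give it back. */
static void
use_request_pad_sketch (GstElement * mixer, GstPad * upstream_srcpad)
{
  GstPad *sinkpad = gst_element_get_request_pad (mixer, "sink_%u");

  if (sinkpad == NULL)
    return;

  gst_pad_link (upstream_srcpad, sinkpad);

  /* ... stream for a while, then tear the branch down ... */

  gst_pad_unlink (upstream_srcpad, sinkpad);
  gst_element_release_request_pad (mixer, sinkpad);
  gst_object_unref (sinkpad);
}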
Example #7

bool GstEnginePipeline::Init() {
  // Here we create all the parts of the gstreamer pipeline - from the source
  // to the sink.  The parts of the pipeline are split up into bins:
  //   uri decode bin -> audio bin
  // The uri decode bin is a gstreamer builtin that automatically picks the
  // right type of source and decoder for the URI.

  // The audio bin gets created here and contains:
  //   queue ! audioconvert ! <caps32>
  //         ! ( rgvolume ! rglimiter ! audioconvert2 ) ! tee
  // rgvolume and rglimiter are only created when replaygain is enabled.

  // After the tee the pipeline splits.  One split is converted to 16-bit int
  // samples for the scope, the other is kept as float32 and sent to the
  // speaker.
  //   tee1 ! probe_queue ! probe_converter ! <caps16> ! probe_sink
  //   tee2 ! audio_queue ! equalizer_preamp ! equalizer ! volume ! audioscale
  //        ! convert ! audiosink

  // Audio bin
  audiobin_ = gst_bin_new("audiobin");
  gst_bin_add(GST_BIN(pipeline_), audiobin_);

  // Create the sink
  if (!(audiosink_ = engine_->CreateElement(sink_, audiobin_))) return false;

  if (GstEngine::
          DoesThisSinkSupportChangingTheOutputDeviceToAUserEditableString(
              sink_) &&
      !device_.isEmpty())
    g_object_set(G_OBJECT(audiosink_), "device", device_.toUtf8().constData(),
                 nullptr);

  // Create all the other elements
  GstElement* tee, *probe_queue, *probe_converter, *probe_sink, *audio_queue,
      *convert;

  queue_ = engine_->CreateElement("queue2", audiobin_);
  audioconvert_ = engine_->CreateElement("audioconvert", audiobin_);
  tee = engine_->CreateElement("tee", audiobin_);

  probe_queue = engine_->CreateElement("queue", audiobin_);
  probe_converter = engine_->CreateElement("audioconvert", audiobin_);
  probe_sink = engine_->CreateElement("fakesink", audiobin_);

  audio_queue = engine_->CreateElement("queue", audiobin_);
  equalizer_preamp_ = engine_->CreateElement("volume", audiobin_);
  equalizer_ = engine_->CreateElement("equalizer-nbands", audiobin_);
  stereo_panorama_ = engine_->CreateElement("audiopanorama", audiobin_);
  volume_ = engine_->CreateElement("volume", audiobin_);
  audioscale_ = engine_->CreateElement("audioresample", audiobin_);
  convert = engine_->CreateElement("audioconvert", audiobin_);

  if (!queue_ || !audioconvert_ || !tee || !probe_queue || !probe_converter ||
      !probe_sink || !audio_queue || !equalizer_preamp_ || !equalizer_ ||
      !stereo_panorama_ || !volume_ || !audioscale_ || !convert) {
    return false;
  }

  // Create the replaygain elements if it's enabled.  event_probe is the
  // audioconvert element we attach the probe to, which will change depending
  // on whether replaygain is enabled.  convert_sink is the element after the
  // first audioconvert, which again will change.
  GstElement* event_probe = audioconvert_;
  GstElement* convert_sink = tee;

  if (rg_enabled_) {
    rgvolume_ = engine_->CreateElement("rgvolume", audiobin_);
    rglimiter_ = engine_->CreateElement("rglimiter", audiobin_);
    audioconvert2_ = engine_->CreateElement("audioconvert", audiobin_);
    event_probe = audioconvert2_;
    convert_sink = rgvolume_;

    if (!rgvolume_ || !rglimiter_ || !audioconvert2_) {
      return false;
    }

    // Set replaygain settings
    g_object_set(G_OBJECT(rgvolume_), "album-mode", rg_mode_, nullptr);
    g_object_set(G_OBJECT(rgvolume_), "pre-amp", double(rg_preamp_), nullptr);
    g_object_set(G_OBJECT(rglimiter_), "enabled", int(rg_compression_),
                 nullptr);
  }

  // Create a pad on the outside of the audiobin and connect it to the pad of
  // the first element.
  GstPad* pad = gst_element_get_static_pad(queue_, "sink");
  gst_element_add_pad(audiobin_, gst_ghost_pad_new("sink", pad));
  gst_object_unref(pad);

  // Add a data probe on the src pad of the audioconvert element for our scope.
  // We do it here because we want pre-equalized and pre-volume samples
  // so that our visualizations are not affected by them.
  pad = gst_element_get_static_pad(event_probe, "src");
  gst_pad_add_event_probe(pad, G_CALLBACK(EventHandoffCallback), this);
  gst_object_unref(pad);

  // Configure the fakesink properly
  g_object_set(G_OBJECT(probe_sink), "sync", TRUE, nullptr);

  // Set the equalizer bands
  g_object_set(G_OBJECT(equalizer_), "num-bands", 10, nullptr);

  int last_band_frequency = 0;
  for (int i = 0; i < kEqBandCount; ++i) {
    GstObject* band =
        gst_child_proxy_get_child_by_index(GST_CHILD_PROXY(equalizer_), i);

    const float frequency = kEqBandFrequencies[i];
    const float bandwidth = frequency - last_band_frequency;
    last_band_frequency = frequency;

    g_object_set(G_OBJECT(band), "freq", frequency, "bandwidth", bandwidth,
                 "gain", 0.0f, nullptr);
    g_object_unref(G_OBJECT(band));
  }

  // Set the stereo balance.
  g_object_set(G_OBJECT(stereo_panorama_), "panorama", stereo_balance_,
               nullptr);

  // Set the buffer duration.  We set this on this queue instead of the
  // decode bin (in ReplaceDecodeBin()) because setting it on the decode bin
  // only affects network sources.
  // Disable the default buffer and byte limits, so we only buffer based on
  // time.
  g_object_set(G_OBJECT(queue_), "max-size-buffers", 0, nullptr);
  g_object_set(G_OBJECT(queue_), "max-size-bytes", 0, nullptr);
  g_object_set(G_OBJECT(queue_), "max-size-time", buffer_duration_nanosec_,
               nullptr);
  g_object_set(G_OBJECT(queue_), "low-percent", 1, nullptr);

  if (buffer_duration_nanosec_ > 0) {
    g_object_set(G_OBJECT(queue_), "use-buffering", true, nullptr);
  }

  gst_element_link(queue_, audioconvert_);

  // Create the caps to put in each path in the tee.  The scope path gets 16-bit
  // ints and the audiosink path gets float32.
  GstCaps* caps16 =
      gst_caps_new_simple("audio/x-raw-int", "width", G_TYPE_INT, 16, "signed",
                          G_TYPE_BOOLEAN, true, nullptr);
  GstCaps* caps32 = gst_caps_new_simple("audio/x-raw-float", "width",
                                        G_TYPE_INT, 32, nullptr);
  if (mono_playback_) {
    gst_caps_set_simple(caps32, "channels", G_TYPE_INT, 1, nullptr);
  }

  // Link the elements with special caps
  gst_element_link_filtered(probe_converter, probe_sink, caps16);
  gst_element_link_filtered(audioconvert_, convert_sink, caps32);
  gst_caps_unref(caps16);
  gst_caps_unref(caps32);

  // Link the outputs of tee to the queues on each path.
  gst_pad_link(gst_element_get_request_pad(tee, "src%d"),
               gst_element_get_static_pad(probe_queue, "sink"));
  gst_pad_link(gst_element_get_request_pad(tee, "src%d"),
               gst_element_get_static_pad(audio_queue, "sink"));

  // Link replaygain elements if enabled.
  if (rg_enabled_) {
    gst_element_link_many(rgvolume_, rglimiter_, audioconvert2_, tee, nullptr);
  }

  // Link everything else.
  gst_element_link(probe_queue, probe_converter);
  gst_element_link_many(audio_queue, equalizer_preamp_, equalizer_,
                        stereo_panorama_, volume_, audioscale_, convert,
                        audiosink_, nullptr);

  // Add probes and handlers.
  gst_pad_add_buffer_probe(gst_element_get_static_pad(probe_converter, "src"),
                           G_CALLBACK(HandoffCallback), this);
  gst_bus_set_sync_handler(gst_pipeline_get_bus(GST_PIPELINE(pipeline_)),
                           BusCallbackSync, this);
  bus_cb_id_ = gst_bus_add_watch(gst_pipeline_get_bus(GST_PIPELINE(pipeline_)),
                                 BusCallback, this);

  MaybeLinkDecodeToAudio();

  return true;
}
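In the Init() above, the pads returned by gst_element_get_request_pad() and gst_element_get_static_pad() are linked but never unreffed. A small leak-free sketch of that tee-to-queue linking; it assumes the 1.x "src_%u" template (the 0.10 code above uses "src%d") and an illustrative function name:

#include <gst/gst.h>

/* Sketch: link one tee branch and drop the pad references we own. */
static gboolean
link_tee_branch_sketch (GstElement * tee, GstElement * queue)
{
  GstPad *srcpad = gst_element_get_request_pad (tee, "src_%u");
  GstPad *sinkpad = gst_element_get_static_pad (queue, "sink");
  GstPadLinkReturn lret = gst_pad_link (srcpad, sinkpad);

  /* both getters return a reference that we must release */
  gst_object_unref (srcpad);
  gst_object_unref (sinkpad);

  return GST_PAD_LINK_SUCCESSFUL (lret);
}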
Example #8
static gint create_encoder_pipeline (Encoder *encoder)
{
    GstElement *pipeline, *element;
    Bin *bin;
    Link *link;
    GSList *bins, *links, *elements;
    GstElementFactory *element_factory;
    GType type;
    EncoderStream *stream;
    GstAppSrcCallbacks callbacks = {
        need_data_callback,
        NULL,
        NULL
    };
    GstAppSinkCallbacks encoder_appsink_callbacks = {
        NULL,
        NULL,
        new_sample_callback
    };
    GstCaps *caps;
    GstBus *bus;

    pipeline = gst_pipeline_new (NULL);

    /* add the elements to the pipeline first. */
    bins = encoder->bins;
    while (bins != NULL) {
        bin = bins->data;
        elements = bin->elements;
        while (elements != NULL) {
            element = elements->data;
            if (!gst_bin_add (GST_BIN (pipeline), element)) {
                GST_ERROR ("add element %s to bin %s error.", gst_element_get_name (element), bin->name);
                return 1;
            }
            elements = g_slist_next (elements);
        }
        bins = g_slist_next (bins);
    }

    /* then link the elements. */
    bins = encoder->bins;
    while (bins != NULL) {
        bin = bins->data;
        element = bin->first;
        element_factory = gst_element_get_factory (element);
        type = gst_element_factory_get_element_type (element_factory);
        stream = NULL;
        if (g_strcmp0 ("GstAppSrc", g_type_name (type)) == 0) {
            GST_INFO ("Encoder appsrc found.");
            stream = encoder_get_stream (encoder, bin->name);
            gst_app_src_set_callbacks (GST_APP_SRC (element), &callbacks, stream, NULL);
        }
        element = bin->last;
        element_factory = gst_element_get_factory (element);
        type = gst_element_factory_get_element_type (element_factory);
        if ((g_strcmp0 ("GstAppSink", g_type_name (type)) == 0) ||
                (g_strcmp0 ("GstHlsSink", g_type_name (type)) == 0) ||
                (g_strcmp0 ("GstFileSink", g_type_name (type)) == 0)) {
            GstPad *pad;

            if (g_strcmp0 ("GstAppSink", g_type_name (type)) == 0) {
                GST_INFO ("Encoder appsink found.");
                gst_app_sink_set_callbacks (GST_APP_SINK (element), &encoder_appsink_callbacks, encoder, NULL);
            }
            pad = gst_element_get_static_pad (element, "sink");
            gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM, encoder_appsink_event_probe, encoder, NULL);
        }
        links = bin->links;
        while (links != NULL) {
            link = links->data;
            GST_INFO ("link element: %s -> %s", link->src_name, link->sink_name);
            if (link->caps != NULL) {
                caps = gst_caps_from_string (link->caps);
                gst_element_link_filtered (link->src, link->sink, caps);
                gst_caps_unref (caps);

            } else {
                gst_element_link (link->src, link->sink);
            }
            links = g_slist_next (links);
        }
        bins = g_slist_next (bins);
    }
    bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
    gst_bus_add_watch (bus, bus_callback, encoder);
    g_object_unref (bus);
    encoder->pipeline = pipeline;

    return 0;
}
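create_encoder_pipeline() above installs bus_callback with gst_bus_add_watch(), but the callback itself is not part of the snippet. A minimal, purely illustrative sketch of what such a bus watch typically handles (GStreamer 1.x):

#include <gst/gst.h>

/* Sketch of a bus watch: report errors and end-of-stream, keep watching. */
static gboolean
bus_callback_sketch (GstBus * bus, GstMessage * message, gpointer user_data)
{
  switch (GST_MESSAGE_TYPE (message)) {
    case GST_MESSAGE_ERROR:{
      GError *err = NULL;
      gchar *debug = NULL;

      gst_message_parse_error (message, &err, &debug);
      GST_ERROR ("error from %s: %s (%s)",
          GST_OBJECT_NAME (GST_MESSAGE_SRC (message)), err->message,
          debug ? debug : "no debug info");
      g_error_free (err);
      g_free (debug);
      break;
    }
    case GST_MESSAGE_EOS:
      GST_INFO ("end of stream");
      break;
    default:
      break;
  }

  return TRUE;                  /* keep the watch installed */
}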
Example #9

bool GstEnginePipeline::Init() {
  // Here we create all the parts of the gstreamer pipeline - from the source
  // to the sink.  The parts of the pipeline are split up into bins:
  //   uri decode bin -> audio bin
  // The uri decode bin is a gstreamer builtin that automatically picks the
  // right type of source and decoder for the URI.

  // The audio bin gets created here and contains:
  //   queue ! audioconvert ! <caps32>
  //         ! ( rgvolume ! rglimiter ! audioconvert2 ) ! tee
  // rgvolume and rglimiter are only created when replaygain is enabled.

  // After the tee the pipeline splits.  One split is converted to 16-bit int
  // samples for the scope, the other is kept as float32 and sent to the
  // speaker.
  //   tee1 ! probe_queue ! probe_converter ! <caps16> ! probe_sink
  //   tee2 ! audio_queue ! equalizer_preamp ! equalizer ! volume ! audioscale
  //        ! convert ! audiosink

  gst_segment_init(&last_decodebin_segment_, GST_FORMAT_TIME);

  // Audio bin
  audiobin_ = gst_bin_new("audiobin");
  gst_bin_add(GST_BIN(pipeline_), audiobin_);

  // Create the sink
  if (!(audiosink_ = engine_->CreateElement(sink_, audiobin_))) return false;

  if (g_object_class_find_property(G_OBJECT_GET_CLASS(audiosink_), "device") &&
      !device_.toString().isEmpty()) {
    switch (device_.type()) {
      case QVariant::Int:
        g_object_set(G_OBJECT(audiosink_), "device", device_.toInt(), nullptr);
        break;
      case QVariant::String:
        g_object_set(G_OBJECT(audiosink_), "device",
                     device_.toString().toUtf8().constData(), nullptr);
        break;

#ifdef Q_OS_WIN32
      case QVariant::ByteArray: {
        GUID guid = QUuid(device_.toByteArray());
        g_object_set(G_OBJECT(audiosink_), "device", &guid, nullptr);
        break;
      }
#endif  // Q_OS_WIN32

      default:
        qLog(Warning) << "Unknown device type" << device_;
        break;
    }
  }

  // Create all the other elements
  GstElement* tee, *probe_queue, *probe_converter, *probe_sink, *audio_queue,
      *convert;

  queue_ = engine_->CreateElement("queue2", audiobin_);
  audioconvert_ = engine_->CreateElement("audioconvert", audiobin_);
  tee = engine_->CreateElement("tee", audiobin_);

  probe_queue = engine_->CreateElement("queue", audiobin_);
  probe_converter = engine_->CreateElement("audioconvert", audiobin_);
  probe_sink = engine_->CreateElement("fakesink", audiobin_);

  audio_queue = engine_->CreateElement("queue", audiobin_);
  equalizer_preamp_ = engine_->CreateElement("volume", audiobin_);
  equalizer_ = engine_->CreateElement("equalizer-nbands", audiobin_);
  stereo_panorama_ = engine_->CreateElement("audiopanorama", audiobin_);
  volume_ = engine_->CreateElement("volume", audiobin_);
  audioscale_ = engine_->CreateElement("audioresample", audiobin_);
  convert = engine_->CreateElement("audioconvert", audiobin_);

  if (!queue_ || !audioconvert_ || !tee || !probe_queue || !probe_converter ||
      !probe_sink || !audio_queue || !equalizer_preamp_ || !equalizer_ ||
      !stereo_panorama_ || !volume_ || !audioscale_ || !convert) {
    return false;
  }

  // Create the replaygain elements if it's enabled.  event_probe is the
  // audioconvert element we attach the probe to, which will change depending
  // on whether replaygain is enabled.  convert_sink is the element after the
  // first audioconvert, which again will change.
  GstElement* event_probe = audioconvert_;
  GstElement* convert_sink = tee;

  if (rg_enabled_) {
    rgvolume_ = engine_->CreateElement("rgvolume", audiobin_);
    rglimiter_ = engine_->CreateElement("rglimiter", audiobin_);
    audioconvert2_ = engine_->CreateElement("audioconvert", audiobin_);
    event_probe = audioconvert2_;
    convert_sink = rgvolume_;

    if (!rgvolume_ || !rglimiter_ || !audioconvert2_) {
      return false;
    }

    // Set replaygain settings
    g_object_set(G_OBJECT(rgvolume_), "album-mode", rg_mode_, nullptr);
    g_object_set(G_OBJECT(rgvolume_), "pre-amp", double(rg_preamp_), nullptr);
    g_object_set(G_OBJECT(rglimiter_), "enabled", int(rg_compression_),
                 nullptr);
  }

  // Create a pad on the outside of the audiobin and connect it to the pad of
  // the first element.
  GstPad* pad = gst_element_get_static_pad(queue_, "sink");
  gst_element_add_pad(audiobin_, gst_ghost_pad_new("sink", pad));
  gst_object_unref(pad);

  // Add a data probe on the src pad of the audioconvert element for our scope.
  // We do it here because we want pre-equalized and pre-volume samples
  // so that our visualizations are not affected by them.
  pad = gst_element_get_static_pad(event_probe, "src");
  gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_EVENT_UPSTREAM,
                    &EventHandoffCallback, this, NULL);
  gst_object_unref(pad);

  // Configure the fakesink properly
  g_object_set(G_OBJECT(probe_sink), "sync", TRUE, nullptr);

  // Setting the equalizer bands:
  //
  // GStreamer's GstIirEqualizerNBands sets up shelve filters for the first and
  // last bands as corner cases. That was causing the "inverted slider" bug.
  // As a workaround, we create two dummy bands at both ends of the spectrum.
  // This causes the actual first and last adjustable bands to be
  // implemented using band-pass filters.

  g_object_set(G_OBJECT(equalizer_), "num-bands", 10 + 2, nullptr);

  // Dummy first band (bandwidth 0, cutting below 20Hz):
  GstObject* first_band = GST_OBJECT(
      gst_child_proxy_get_child_by_index(GST_CHILD_PROXY(equalizer_), 0));
  g_object_set(G_OBJECT(first_band), "freq", 20.0, "bandwidth", 0, "gain", 0.0f,
               nullptr);
  g_object_unref(G_OBJECT(first_band));

  // Dummy last band (bandwidth 0, cutting over 20KHz):
  GstObject* last_band = GST_OBJECT(gst_child_proxy_get_child_by_index(
      GST_CHILD_PROXY(equalizer_), kEqBandCount + 1));
  g_object_set(G_OBJECT(last_band), "freq", 20000.0, "bandwidth", 0, "gain",
               0.0f, nullptr);
  g_object_unref(G_OBJECT(last_band));

  int last_band_frequency = 0;
  for (int i = 0; i < kEqBandCount; ++i) {
    const int index_in_eq = i + 1;
    GstObject* band = GST_OBJECT(gst_child_proxy_get_child_by_index(
        GST_CHILD_PROXY(equalizer_), index_in_eq));

    const float frequency = kEqBandFrequencies[i];
    const float bandwidth = frequency - last_band_frequency;
    last_band_frequency = frequency;

    g_object_set(G_OBJECT(band), "freq", frequency, "bandwidth", bandwidth,
                 "gain", 0.0f, nullptr);
    g_object_unref(G_OBJECT(band));
  }

  // Set the stereo balance.
  g_object_set(G_OBJECT(stereo_panorama_), "panorama", stereo_balance_,
               nullptr);

  // Set the buffer duration.  We set this on this queue instead of the
  // decode bin (in ReplaceDecodeBin()) because setting it on the decode bin
  // only affects network sources.
  // Disable the default buffer and byte limits, so we only buffer based on
  // time.
  g_object_set(G_OBJECT(queue_), "max-size-buffers", 0, nullptr);
  g_object_set(G_OBJECT(queue_), "max-size-bytes", 0, nullptr);
  g_object_set(G_OBJECT(queue_), "max-size-time", buffer_duration_nanosec_,
               nullptr);
  g_object_set(G_OBJECT(queue_), "low-percent", buffer_min_fill_, nullptr);

  if (buffer_duration_nanosec_ > 0) {
    g_object_set(G_OBJECT(queue_), "use-buffering", true, nullptr);
  }

  gst_element_link_many(queue_, audioconvert_, convert_sink, nullptr);

  // Link the elements with special caps
  // The scope path through the tee gets 16-bit ints.
  GstCaps* caps16 = gst_caps_new_simple("audio/x-raw", "format", G_TYPE_STRING,
                                        "S16LE", NULL);
  gst_element_link_filtered(probe_converter, probe_sink, caps16);
  gst_caps_unref(caps16);

  // Link the outputs of tee to the queues on each path.
  gst_pad_link(gst_element_get_request_pad(tee, "src_%u"),
               gst_element_get_static_pad(probe_queue, "sink"));
  gst_pad_link(gst_element_get_request_pad(tee, "src_%u"),
               gst_element_get_static_pad(audio_queue, "sink"));

  // Link replaygain elements if enabled.
  if (rg_enabled_) {
    gst_element_link_many(rgvolume_, rglimiter_, audioconvert2_, tee, nullptr);
  }

  // Link everything else.
  gst_element_link(probe_queue, probe_converter);
  gst_element_link_many(audio_queue, equalizer_preamp_, equalizer_,
                        stereo_panorama_, volume_, audioscale_, convert,
                        nullptr);

  // add caps for fixed sample rate and mono, but only if requested
  if (sample_rate_ != GstEngine::kAutoSampleRate && sample_rate_ > 0) {
    GstCaps* caps = gst_caps_new_simple("audio/x-raw", "rate", G_TYPE_INT,
                                        sample_rate_, nullptr);
    if (mono_playback_) {
      gst_caps_set_simple(caps, "channels", G_TYPE_INT, 1, nullptr);
    }

    gst_element_link_filtered(convert, audiosink_, caps);
    gst_caps_unref(caps);
  } else if (mono_playback_) {
    GstCaps* capsmono =
        gst_caps_new_simple("audio/x-raw", "channels", G_TYPE_INT, 1, nullptr);
    gst_element_link_filtered(convert, audiosink_, capsmono);
    gst_caps_unref(capsmono);
  } else {
    gst_element_link(convert, audiosink_);
  }

  // Add probes and handlers.
  gst_pad_add_probe(gst_element_get_static_pad(probe_converter, "src"),
                    GST_PAD_PROBE_TYPE_BUFFER, HandoffCallback, this, nullptr);
  gst_bus_set_sync_handler(gst_pipeline_get_bus(GST_PIPELINE(pipeline_)),
                           BusCallbackSync, this, nullptr);
  bus_cb_id_ = gst_bus_add_watch(gst_pipeline_get_bus(GST_PIPELINE(pipeline_)),
                                 BusCallback, this);

  MaybeLinkDecodeToAudio();

  return true;
}
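Both Init() variants attach a buffer probe (HandoffCallback) on the scope converter's src pad, but the callback is not shown. A hypothetical 1.x-style shape for such a probe, which only inspects the buffer and lets it pass; the function name is illustrative:

#include <gst/gst.h>

/* Sketch: a pass-through buffer probe that only inspects what flows by. */
static GstPadProbeReturn
handoff_probe_sketch (GstPad * pad, GstPadProbeInfo * info, gpointer user_data)
{
  GstBuffer *buffer = GST_PAD_PROBE_INFO_BUFFER (info);

  GST_LOG ("probe on %s: %" G_GSIZE_FORMAT " bytes, pts %" GST_TIME_FORMAT,
      GST_PAD_NAME (pad), gst_buffer_get_size (buffer),
      GST_TIME_ARGS (GST_BUFFER_PTS (buffer)));

  return GST_PAD_PROBE_OK;      /* let the buffer continue downstream */
}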
Example #10
static void
gst_nle_source_next (GstNleSource * nlesrc)
{
  GstNleSrcItem *item;
  GstStateChangeReturn ret;
  GstElement *uridecodebin;
  GstBus *bus;
  GstState state;

  nlesrc->index++;

  if (nlesrc->index >= g_list_length (nlesrc->queue)) {
    gst_nle_source_push_eos (nlesrc);
    return;
  }

  if (nlesrc->source != NULL) {
    gst_object_unref (nlesrc->source);
    nlesrc->source = NULL;
  }

  if (nlesrc->decoder != NULL) {
    gst_element_set_state (GST_ELEMENT (nlesrc->decoder), GST_STATE_NULL);
    gst_element_get_state (GST_ELEMENT (nlesrc->decoder), NULL, NULL, 0);
    gst_object_unref (nlesrc->decoder);
  }

  nlesrc->decoder = gst_pipeline_new ("decoder");
  uridecodebin = gst_element_factory_make ("uridecodebin", NULL);
  /* Connect signal to recover source element for queries in bytes */
  g_signal_connect (uridecodebin, "source-setup",
      G_CALLBACK (gst_nle_source_on_source_setup), nlesrc); 

  gst_bin_add (GST_BIN (nlesrc->decoder), uridecodebin);

  g_signal_connect (uridecodebin, "autoplug-select",
      G_CALLBACK (lgm_filter_video_decoders), nlesrc);
  g_signal_connect (uridecodebin, "pad-added",
      G_CALLBACK (gst_nle_source_pad_added_cb), nlesrc);
  g_signal_connect (uridecodebin, "no-more-pads",
      G_CALLBACK (gst_nle_source_no_more_pads), nlesrc);

  bus = GST_ELEMENT_BUS (nlesrc->decoder);
  gst_bus_add_signal_watch (bus);
  g_signal_connect (bus, "message", G_CALLBACK (gst_nle_source_bus_message),
      nlesrc);
  item = (GstNleSrcItem *) g_list_nth_data (nlesrc->queue, nlesrc->index);

  GST_INFO_OBJECT (nlesrc, "Starting next item with uri:%s", item->file_path);
  GST_INFO_OBJECT (nlesrc, "start:%" GST_TIME_FORMAT " stop:%"
      GST_TIME_FORMAT " rate:%f", GST_TIME_ARGS (item->start),
      GST_TIME_ARGS (item->stop), item->rate);

  g_object_set (uridecodebin, "uri", item->file_path, NULL);

  nlesrc->seek_done = FALSE;
  if (GST_CLOCK_TIME_IS_VALID (item->stop)) {
    nlesrc->video_seek_done = FALSE;
    nlesrc->audio_seek_done = FALSE;
  } else {
    nlesrc->video_seek_done = TRUE;
    nlesrc->audio_seek_done = TRUE;
  }
  nlesrc->audio_eos = TRUE;
  nlesrc->video_eos = TRUE;
  nlesrc->audio_ts = 0;
  nlesrc->video_ts = 0;
  nlesrc->start_ts = nlesrc->accu_time;
  nlesrc->video_linked = FALSE;
  nlesrc->audio_linked = FALSE;
  nlesrc->item_setup = FALSE;
  nlesrc->cached_duration = 0;

  GST_DEBUG_OBJECT (nlesrc, "Start ts:%" GST_TIME_FORMAT,
      GST_TIME_ARGS (nlesrc->start_ts));
  gst_element_set_state (nlesrc->decoder, GST_STATE_PLAYING);
  ret = gst_element_get_state (nlesrc->decoder, &state, NULL, 5 * GST_SECOND);
  if (ret == GST_STATE_CHANGE_FAILURE) {
    GST_WARNING_OBJECT (nlesrc, "Error changing state, selecting next item.");
    gst_nle_source_check_eos (nlesrc);
    return;
  }

  nlesrc->seek_done = TRUE;
  if (!item->still_picture && GST_CLOCK_TIME_IS_VALID (item->stop)) {
    GST_DEBUG_OBJECT (nlesrc, "Sending seek event");
    gst_element_seek (nlesrc->decoder, 1, GST_FORMAT_TIME,
        GST_SEEK_FLAG_ACCURATE,
        GST_SEEK_TYPE_SET, item->start, GST_SEEK_TYPE_SET, item->stop);
  }
}
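gst_nle_source_next() above connects gst_nle_source_on_source_setup to uridecodebin's "source-setup" signal so the source element can be queried later; the handler itself is not shown. An illustrative sketch of such a handler; keeping a reference is an assumption based on the comment in the code, not the real implementation:

#include <gst/gst.h>

/* Sketch of a "source-setup" handler: keep a reference to the element that
 * uridecodebin just created so it can be queried later. */
static void
on_source_setup_sketch (GstElement * uridecodebin, GstElement * source,
    gpointer user_data)
{
  GstElement **kept_source = user_data;

  if (*kept_source != NULL)
    gst_object_unref (*kept_source);
  *kept_source = gst_object_ref (source);
}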
Example #11
int
main (int argc, char *argv[])
{
  GstElement *filesrc, *osssink, *queue, *parse, *decode;
  GstElement *bin;
  GstElement *thread;

  gst_init (&argc, &argv);

  if (argc != 2) {
    g_print ("usage: %s <filename>\n", argv[0]);
    exit (-1);
  }

  /* create a new thread to hold the elements */
  thread = gst_thread_new ("thread");
  g_assert (thread != NULL);

  /* create a new bin to hold the elements */
  bin = gst_bin_new ("bin");
  g_assert (bin != NULL);

  /* create a disk reader */
  filesrc = gst_element_factory_make ("filesrc", "disk_source");
  g_assert (filesrc != NULL);
  g_object_set (G_OBJECT (filesrc), "location", argv[1], NULL);
  g_signal_connect (G_OBJECT (filesrc), "eos", G_CALLBACK (eos), thread);

  queue = gst_element_factory_make ("queue", "queue");

  /* and an audio sink */
  osssink = gst_element_factory_make ("osssink", "play_audio");
  g_assert (osssink != NULL);

  parse = gst_element_factory_make ("mp3parse", "parse");
  decode = gst_element_factory_make ("mpg123", "decode");

  /* add objects to the main bin */
  gst_bin_add (GST_BIN (bin), filesrc);
  gst_bin_add (GST_BIN (bin), queue);

  gst_bin_add (GST_BIN (thread), parse);
  gst_bin_add (GST_BIN (thread), decode);
  gst_bin_add (GST_BIN (thread), osssink);

  gst_element_link_many (filesrc, queue, parse, decode, osssink, NULL);

  /* make it ready */
  gst_element_set_state (GST_ELEMENT (bin), GST_STATE_READY);
  /* start playing */
  gst_element_set_state (GST_ELEMENT (bin), GST_STATE_PLAYING);

  playing = TRUE;

  while (playing) {
    gst_bin_iterate (GST_BIN (bin));
  }

  gst_element_set_state (GST_ELEMENT (bin), GST_STATE_NULL);

  exit (0);
}
Example #12
static void
gst_nle_source_pad_added_cb (GstElement * element, GstPad * pad,
    GstNleSource * nlesrc)
{
  GstCaps *caps;
  const GstStructure *s;
  const gchar *mime;
  GstElement *appsink = NULL;
  GstPad *sink_pad;
  GstAppSinkCallbacks appsink_cbs;
  GstNleSrcItem *item;

  item = (GstNleSrcItem *) g_list_nth_data (nlesrc->queue, nlesrc->index);

  caps = gst_pad_get_caps_reffed (pad);
  s = gst_caps_get_structure (caps, 0);
  mime = gst_structure_get_name (s);
  GST_DEBUG_OBJECT (nlesrc, "Found mime type: %s", mime);

  if (g_strrstr (mime, "video") && !nlesrc->video_linked) {
    appsink = gst_element_factory_make ("appsink", NULL);
    memset (&appsink_cbs, 0, sizeof (appsink_cbs));
    appsink_cbs.eos = gst_nle_source_on_video_eos;
    appsink_cbs.new_preroll = gst_nle_source_on_preroll_buffer;
    appsink_cbs.new_buffer = gst_nle_source_on_video_buffer;
    nlesrc->video_linked = TRUE;
    if (!nlesrc->video_srcpad_added) {
      gst_pad_set_active (nlesrc->video_srcpad, TRUE);
      gst_element_add_pad (GST_ELEMENT (nlesrc),
          gst_object_ref (nlesrc->video_srcpad));
      nlesrc->video_srcpad_added = TRUE;
    }
    gst_pad_add_event_probe (GST_BASE_SINK_PAD (GST_BASE_SINK (appsink)),
        (GCallback) gst_nle_source_video_pad_probe_cb, nlesrc);
    nlesrc->video_eos = FALSE;
  } else if (g_strrstr (mime, "audio") && nlesrc->with_audio
      && !nlesrc->audio_linked && (item ? item->rate == 1.0 : TRUE)) {
    appsink = gst_element_factory_make ("appsink", NULL);
    memset (&appsink_cbs, 0, sizeof (appsink_cbs));
    appsink_cbs.eos = gst_nle_source_on_audio_eos;
    appsink_cbs.new_preroll = gst_nle_source_on_preroll_buffer;
    appsink_cbs.new_buffer = gst_nle_source_on_audio_buffer;
    nlesrc->audio_linked = TRUE;
    if (!nlesrc->audio_srcpad_added) {
      gst_pad_set_active (nlesrc->audio_srcpad, TRUE);
      gst_element_add_pad (GST_ELEMENT (nlesrc),
          gst_object_ref (nlesrc->audio_srcpad));
      nlesrc->audio_srcpad_added = TRUE;
    }
    gst_pad_add_event_probe (GST_BASE_SINK_PAD (GST_BASE_SINK (appsink)),
        (GCallback) gst_nle_source_audio_pad_probe_cb, nlesrc);
    nlesrc->audio_eos = FALSE;
  }
  if (appsink != NULL) {
    g_object_set (appsink, "sync", FALSE, NULL);
    gst_app_sink_set_callbacks (GST_APP_SINK (appsink), &appsink_cbs, nlesrc,
        NULL);
    gst_bin_add (GST_BIN (nlesrc->decoder), appsink);
    sink_pad = gst_element_get_static_pad (appsink, "sink");
    gst_pad_link (pad, sink_pad);
    gst_element_sync_state_with_parent (appsink);
    gst_object_unref (sink_pad);
  }
}
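The pad-added callback above registers 0.10-style appsink callbacks (new_preroll/new_buffer). For reference, a sketch of the equivalent GStreamer 1.x "new-sample" callback; this is not the API the example uses and the function name is illustrative:

#include <gst/gst.h>
#include <gst/app/gstappsink.h>

/* Sketch: GStreamer 1.x "new-sample" appsink callback. */
static GstFlowReturn
on_new_sample_sketch (GstAppSink * appsink, gpointer user_data)
{
  GstSample *sample = gst_app_sink_pull_sample (appsink);
  GstBuffer *buffer;

  if (sample == NULL)
    return GST_FLOW_EOS;

  buffer = gst_sample_get_buffer (sample);
  GST_LOG ("pulled buffer with pts %" GST_TIME_FORMAT,
      GST_TIME_ARGS (GST_BUFFER_PTS (buffer)));

  gst_sample_unref (sample);
  return GST_FLOW_OK;
}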
Example #13
SongLoader::Result SongLoader::LoadAudioCD() {
#ifdef HAVE_AUDIOCD
  // Create gstreamer cdda element
  GstElement* cdda = gst_element_make_from_uri (GST_URI_SRC, "cdda://", NULL);
  if (cdda == NULL) {
    qLog(Error) << "Error while creating CDDA GstElement";
    return Error;
  }

  // Change the element's state to ready and paused, to be able to query it
  if (gst_element_set_state(cdda, GST_STATE_READY) == GST_STATE_CHANGE_FAILURE ||
      gst_element_set_state(cdda, GST_STATE_PAUSED) == GST_STATE_CHANGE_FAILURE) {
    qLog(Error) << "Error while changing CDDA GstElement's state";
    gst_element_set_state(cdda, GST_STATE_NULL);
    gst_object_unref(GST_OBJECT(cdda));
    return Error;
  }

  // Get number of tracks
  GstFormat fmt = gst_format_get_by_nick ("track");
  GstFormat out_fmt = fmt;
  gint64 num_tracks = 0;
  if (!gst_element_query_duration (cdda, &out_fmt, &num_tracks) || out_fmt != fmt) {
    qLog(Error) << "Error while querying cdda GstElement";
    gst_object_unref(GST_OBJECT(cdda));
    return Error;
  }

  for (int track_number = 1; track_number <= num_tracks; track_number++) {
    // Init song
    Song song;
    guint64 duration = 0;
    // quint64 and guint64 can be typedef'd to different integer types, so cast explicitly
    if (gst_tag_list_get_uint64 (GST_CDDA_BASE_SRC(cdda)->tracks[track_number-1].tags,
                                 GST_TAG_DURATION, &duration)) {
      song.set_length_nanosec((quint64)duration);
    }
    song.set_valid(true);
    song.set_filetype(Song::Type_Cdda);
    song.set_url(QUrl(QString("cdda://%1").arg(track_number)));
    song.set_title(QString("Track %1").arg(track_number));
    song.set_track(track_number);
    songs_ << song;
  }

  // Generate MusicBrainz DiscId
  gst_tag_register_musicbrainz_tags();
  GstElement *pipe = gst_pipeline_new ("pipeline");
  gst_bin_add (GST_BIN (pipe), cdda);
  gst_element_set_state (pipe, GST_STATE_READY);
  gst_element_set_state (pipe, GST_STATE_PAUSED);
  GstMessage *msg = gst_bus_timed_pop_filtered (GST_ELEMENT_BUS (pipe),
                    GST_CLOCK_TIME_NONE,
                    GST_MESSAGE_TAG);
  GstTagList *tags = NULL;
  gst_message_parse_tag (msg, &tags);
  char *string_mb = NULL;
  if (gst_tag_list_get_string (tags, GST_TAG_CDDA_MUSICBRAINZ_DISCID, &string_mb)) {
    QString musicbrainz_discid(string_mb);
    qLog(Info) << "MusicBrainz discid: " << musicbrainz_discid;

    MusicBrainzClient *musicbrainz_client = new MusicBrainzClient(this);
    connect(musicbrainz_client,
            SIGNAL(Finished(const QString&, const QString&, MusicBrainzClient::ResultList)),
            SLOT(AudioCDTagsLoaded(const QString&, const QString&, MusicBrainzClient::ResultList)));
    musicbrainz_client->StartDiscIdRequest(musicbrainz_discid);
    g_free(string_mb);
  }
  
  // Clean up all the GStreamer objects we have used: we don't need them anymore.
  // GstMessage and GstTagList are not GstObjects, so use their own unref/free calls.
  gst_object_unref(GST_OBJECT(cdda));
  gst_element_set_state (pipe, GST_STATE_NULL);
  gst_object_unref(GST_OBJECT(pipe));
  gst_message_unref(msg);
  gst_tag_list_free(tags);

  return Success;
#else // HAVE_AUDIOCD
  return Error;
#endif
}
Пример #14
0
static gboolean
gst_viewfinder_bin_create_elements (GstViewfinderBin * vfbin)
{
  GstElement *csp = NULL;
  GstElement *videoscale = NULL;
  GstPad *firstpad = NULL;
  const gchar *missing_element_name;
  gboolean newsink = FALSE;
  gboolean updated_converters = FALSE;

  GST_DEBUG_OBJECT (vfbin, "Creating internal elements");

  /* First check if we need to add/replace the internal sink */
  if (vfbin->video_sink) {
    if (vfbin->user_video_sink && vfbin->video_sink != vfbin->user_video_sink) {
      gst_bin_remove (GST_BIN_CAST (vfbin), vfbin->video_sink);
      gst_object_unref (vfbin->video_sink);
      vfbin->video_sink = NULL;
    }
  }

  if (!vfbin->video_sink) {
    if (vfbin->user_video_sink)
      vfbin->video_sink = gst_object_ref (vfbin->user_video_sink);
    else {
      vfbin->video_sink = gst_element_factory_make ("autovideosink",
          "vfbin-sink");
      if (!vfbin->video_sink) {
        missing_element_name = "autovideosink";
        goto missing_element;
      }
    }

    gst_bin_add (GST_BIN_CAST (vfbin), gst_object_ref (vfbin->video_sink));
    newsink = TRUE;
  }

  /* check if we want add/remove the conversion elements */
  if (vfbin->elements_created && vfbin->disable_converters) {
    /* remove the elements, user doesn't want them */

    gst_ghost_pad_set_target (GST_GHOST_PAD (vfbin->ghostpad), NULL);
    csp = gst_bin_get_by_name (GST_BIN_CAST (vfbin), "vfbin-csp");
    videoscale = gst_bin_get_by_name (GST_BIN_CAST (vfbin), "vfbin-videoscale");

    gst_bin_remove (GST_BIN_CAST (vfbin), csp);
    gst_bin_remove (GST_BIN_CAST (vfbin), videoscale);

    gst_object_unref (csp);
    gst_object_unref (videoscale);

    updated_converters = TRUE;
  } else if (!vfbin->elements_created && !vfbin->disable_converters) {
    gst_ghost_pad_set_target (GST_GHOST_PAD (vfbin->ghostpad), NULL);

    /* add the elements, user wants them */
    csp = gst_element_factory_make ("videoconvert", "vfbin-csp");
    if (!csp) {
      missing_element_name = "videoconvert";
      goto missing_element;
    }
    gst_bin_add (GST_BIN_CAST (vfbin), csp);

    videoscale = gst_element_factory_make ("videoscale", "vfbin-videoscale");
    if (!videoscale) {
      missing_element_name = "videoscale";
      goto missing_element;
    }
    gst_bin_add (GST_BIN_CAST (vfbin), videoscale);

    gst_element_link_pads_full (csp, "src", videoscale, "sink",
        GST_PAD_LINK_CHECK_NOTHING);

    vfbin->elements_created = TRUE;
    GST_DEBUG_OBJECT (vfbin, "Elements succesfully created and linked");

    updated_converters = TRUE;
  }
  /* otherwise, just leave it as is */

  /* if sink was replaced -> link it to the internal converters */
  if (newsink && !vfbin->disable_converters) {
    gboolean unref = FALSE;
    if (!videoscale) {
      videoscale = gst_bin_get_by_name (GST_BIN_CAST (vfbin),
          "vfbin-videscale");
      unref = TRUE;
    }

    if (!gst_element_link_pads_full (videoscale, "src", vfbin->video_sink,
            "sink", GST_PAD_LINK_CHECK_CAPS)) {
      GST_ELEMENT_ERROR (vfbin, CORE, NEGOTIATION, (NULL),
          ("linking videoscale and viewfindersink failed"));
    }

    if (unref)
      gst_object_unref (videoscale);
    videoscale = NULL;
  }

  /* Check if we need a new ghostpad target */
  if (updated_converters || (newsink && vfbin->disable_converters)) {
    if (vfbin->disable_converters) {
      firstpad = gst_element_get_static_pad (vfbin->video_sink, "sink");
    } else {
      /* csp should always exist at this point */
      firstpad = gst_element_get_static_pad (csp, "sink");
    }
  }

  /* need to change the ghostpad target if firstpad is set */
  if (firstpad) {
    if (!gst_ghost_pad_set_target (GST_GHOST_PAD (vfbin->ghostpad), firstpad))
      goto error;
    gst_object_unref (firstpad);
    firstpad = NULL;
  }

  return TRUE;

missing_element:
  gst_element_post_message (GST_ELEMENT_CAST (vfbin),
      gst_missing_element_message_new (GST_ELEMENT_CAST (vfbin),
          missing_element_name));
  GST_ELEMENT_ERROR (vfbin, CORE, MISSING_PLUGIN,
      (_("Missing element '%s' - check your GStreamer installation."),
          missing_element_name), (NULL));
  goto error;

error:
  GST_WARNING_OBJECT (vfbin, "Creating internal elements failed");
  if (firstpad)
    gst_object_unref (firstpad);
  return FALSE;
}
Пример #15
0
int
main (int argc, gchar * argv[])
{
  GstElement *thread, *element;
  long usage1;
  gint i, iters;

  gst_init (&argc, &argv);

  if (argc == 2)
    iters = atoi (argv[1]);
  else
    iters = ITERS;

  g_print ("starting test\n");
  usage1 = vmsize ();

  thread = gst_thread_new ("somethread");
  gst_object_unref (thread);
  g_print ("create/unref new thread %ld\n", vmsize () - usage1);

  for (i = 0; i < iters; i++) {
    thread = gst_thread_new ("somethread");
    gst_object_unref (thread);
  }
  g_print ("create/unref %d threads %ld\n", iters, vmsize () - usage1);

  thread = gst_thread_new ("somethread");
  g_assert (GST_OBJECT_FLOATING (thread));
  gst_object_ref (thread);
  gst_object_sink (GST_OBJECT (thread));
  g_assert (!GST_OBJECT_FLOATING (thread));
  gst_object_unref (thread);
  g_print ("create/ref/sink/unref new thread %ld\n", vmsize () - usage1);


  for (i = 0; i < iters; i++) {
    thread = gst_thread_new ("somethread");
    gst_object_ref (thread);
    gst_object_sink (GST_OBJECT (thread));
    gst_object_unref (thread);
  }
  g_print ("create/ref/sink/unref %d threads %ld\n", iters, vmsize () - usage1);

  thread = gst_thread_new ("somethread");
  g_assert (!GST_OBJECT_DESTROYED (thread));
  gst_object_unref (thread);
  g_assert (GST_OBJECT_DESTROYED (thread));
  gst_object_unref (thread);
  g_print ("create/destroy/unref new thread %ld\n", vmsize () - usage1);

  for (i = 0; i < iters; i++) {
    thread = gst_thread_new ("somethread");
    gst_object_unref (thread);
    gst_object_unref (thread);
  }
  g_print ("create/destroy/unref %d thread %ld\n", iters, vmsize () - usage1);

  thread = gst_thread_new ("somethread");
  gst_object_ref (thread);
  gst_object_unref (thread);
  gst_object_unref (thread);
  g_print ("create/ref/unref/unref new thread %ld\n", vmsize () - usage1);

  for (i = 0; i < iters; i++) {
    thread = gst_thread_new ("somethread");
    gst_object_ref (thread);
    gst_object_unref (thread);
    gst_object_unref (thread);
  }
  g_print ("create/ref/unref/unref %d thread %ld\n", iters, vmsize () - usage1);

  thread = gst_thread_new ("somethread");
  gst_object_ref (thread);
  gst_object_unref (thread);
  gst_object_unref (thread);
  gst_object_unref (thread);
  g_print ("craete/ref/destroy/unref/unref new thread %ld\n",
      vmsize () - usage1);

  for (i = 0; i < iters; i++) {
    thread = gst_thread_new ("somethread");
    gst_object_ref (thread);
    gst_object_unref (thread);
    gst_object_unref (thread);
    gst_object_unref (thread);
  }
  g_print ("craete/ref/destroy/unref/unref %d threads %ld\n", iters,
      vmsize () - usage1);

  for (i = 0; i < iters; i++) {
    thread = gst_thread_new ("somethread");
    gst_object_ref (thread);
    gst_element_set_name (thread, "testing123");
    gst_object_unref (thread);
    gst_element_set_name (thread, "testing123");
    gst_object_unref (thread);
    gst_object_unref (thread);
  }
  g_print ("craete/ref/destroy/unref/unref %d threads with name %ld\n", iters,
      vmsize () - usage1);

  thread = gst_thread_new ("somethread");
  for (i = 0; i < iters; i++) {
    gst_element_set_name (thread, "testing");
  }
  gst_object_unref (thread);
  g_print ("set name %d times %ld\n", iters, vmsize () - usage1);

  for (i = 0; i < iters; i++) {
    thread = gst_thread_new ("somethread");
    element = gst_element_new ();
    gst_element_set_name (element, "test1");
    gst_bin_add (GST_BIN (thread), element);
    gst_object_unref (thread);
  }
  g_print ("create/unref %d thread with one element %ld\n", iters,
      vmsize () - usage1);

  for (i = 0; i < iters; i++) {
    thread = create_thread ();
    gst_object_unref (thread);
  }
  g_print ("create/unref %d thread with children %ld\n", iters,
      vmsize () - usage1);

  for (i = 0; i < iters / 2; i++) {
    thread = create_thread_ghostpads ();
    gst_object_unref (thread);
  }
  g_print ("create/unref %d thread with children and ghostpads %ld\n",
      iters / 2, vmsize () - usage1);

  for (i = 0; i < iters; i++) {
    add_remove_test1 ();
  }
  g_print ("add/remove test1 %d in thread %ld\n", iters, vmsize () - usage1);

  for (i = 0; i < iters; i++) {
    add_remove_test2 ();
  }
  g_print ("add/remove test2 %d in thread %ld\n", iters, vmsize () - usage1);

  for (i = 0; i < iters; i++) {
    add_remove_test3 ();
  }
  g_print ("add/destroy/remove test3 %d in thread %ld\n", iters,
      vmsize () - usage1);

  for (i = 0; i < iters; i++) {
    add_remove_test4 ();
  }
  g_print ("add/destroy/remove test4 %d in thread %ld\n", iters,
      vmsize () - usage1);

  g_print ("leaked: %ld\n", vmsize () - usage1);

  return (vmsize () - usage1 ? -1 : 0);
}
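The vmsize () helper used for all the measurements above is not part of this excerpt; a minimal sketch of one possible implementation, assuming a Linux-style /proc filesystem (the original may differ), could be:

#include <stdio.h>
#include <unistd.h>

/* Hypothetical helper: return the process' virtual memory size in bytes by
 * reading the first field (total program size in pages) of /proc/self/statm. */
static long
vmsize (void)
{
  long pages = 0;
  FILE *f = fopen ("/proc/self/statm", "r");

  if (f != NULL) {
    if (fscanf (f, "%ld", &pages) != 1)
      pages = 0;
    fclose (f);
  }
  return pages * getpagesize ();
}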
Пример #16
0
/* build a pipeline equivalent to:
 *
 * gst-launch -v rtpbin name=rtpbin                                                \
 *      udpsrc caps=$AUDIO_CAPS port=5002 ! rtpbin.recv_rtp_sink_0              \
 *        rtpbin. ! rtppcmadepay ! alawdec ! audioconvert ! audioresample ! alsasink \
 *      udpsrc port=5003 ! rtpbin.recv_rtcp_sink_0                              \
 *        rtpbin.send_rtcp_src_0 ! udpsink port=5007 host=$DEST sync=false async=false
 */
int
main (int argc, char *argv[])
{
  GstElement *rtpbin, *rtpsrc, *rtcpsrc, *rtcpsink;
  GstElement *audiodepay, *audiodec, *audiores, *audioconv, *audiosink;
  GstElement *pipeline;
  GMainLoop *loop;
  GstCaps *caps;
  gboolean res;
  GstPadLinkReturn lres;
  GstPad *srcpad, *sinkpad;

  /* always init first */
  gst_init (&argc, &argv);

  /* the pipeline to hold everything */
  pipeline = gst_pipeline_new (NULL);
  g_assert (pipeline);

  /* the udp src and source we will use for RTP and RTCP */
  rtpsrc = gst_element_factory_make ("udpsrc", "rtpsrc");
  g_assert (rtpsrc);
  g_object_set (rtpsrc, "port", 5002, NULL);
  /* we need to set caps on the udpsrc for the RTP data */
  caps = gst_caps_from_string (AUDIO_CAPS);
  g_object_set (rtpsrc, "caps", caps, NULL);
  gst_caps_unref (caps);

  rtcpsrc = gst_element_factory_make ("udpsrc", "rtcpsrc");
  g_assert (rtcpsrc);
  g_object_set (rtcpsrc, "port", 5003, NULL);

  rtcpsink = gst_element_factory_make ("udpsink", "rtcpsink");
  g_assert (rtcpsink);
  g_object_set (rtcpsink, "port", 5007, "host", DEST_HOST, NULL);
  /* no need for synchronisation or preroll on the RTCP sink */
  g_object_set (rtcpsink, "async", FALSE, "sync", FALSE, NULL);

  gst_bin_add_many (GST_BIN (pipeline), rtpsrc, rtcpsrc, rtcpsink, NULL);

  /* the depayloading and decoding */
  audiodepay = gst_element_factory_make (AUDIO_DEPAY, "audiodepay");
  g_assert (audiodepay);
  audiodec = gst_element_factory_make (AUDIO_DEC, "audiodec");
  g_assert (audiodec);
  /* the audio playback and format conversion */
  audioconv = gst_element_factory_make ("audioconvert", "audioconv");
  g_assert (audioconv);
  audiores = gst_element_factory_make ("audioresample", "audiores");
  g_assert (audiores);
  audiosink = gst_element_factory_make (AUDIO_SINK, "audiosink");
  g_assert (audiosink);

  /* add depayloading and playback to the pipeline and link */
  gst_bin_add_many (GST_BIN (pipeline), audiodepay, audiodec, audioconv,
      audiores, audiosink, NULL);

  res = gst_element_link_many (audiodepay, audiodec, audioconv, audiores,
      audiosink, NULL);
  g_assert (res == TRUE);

  /* the rtpbin element */
  rtpbin = gst_element_factory_make ("rtpbin", "rtpbin");
  g_assert (rtpbin);

  gst_bin_add (GST_BIN (pipeline), rtpbin);

  /* now link all to the rtpbin, start by getting an RTP sinkpad for session 0 */
  srcpad = gst_element_get_static_pad (rtpsrc, "src");
  sinkpad = gst_element_get_request_pad (rtpbin, "recv_rtp_sink_0");
  lres = gst_pad_link (srcpad, sinkpad);
  g_assert (lres == GST_PAD_LINK_OK);
  gst_object_unref (srcpad);

  /* get an RTCP sinkpad in session 0 */
  srcpad = gst_element_get_static_pad (rtcpsrc, "src");
  sinkpad = gst_element_get_request_pad (rtpbin, "recv_rtcp_sink_0");
  lres = gst_pad_link (srcpad, sinkpad);
  g_assert (lres == GST_PAD_LINK_OK);
  gst_object_unref (srcpad);
  gst_object_unref (sinkpad);

  /* get an RTCP srcpad for sending RTCP back to the sender */
  srcpad = gst_element_get_request_pad (rtpbin, "send_rtcp_src_0");
  sinkpad = gst_element_get_static_pad (rtcpsink, "sink");
  lres = gst_pad_link (srcpad, sinkpad);
  g_assert (lres == GST_PAD_LINK_OK);
  gst_object_unref (sinkpad);

  /* the RTP pad that we have to connect to the depayloader will be created
   * dynamically so we connect to the pad-added signal, pass the depayloader as
   * user_data so that we can link to it. */
  g_signal_connect (rtpbin, "pad-added", G_CALLBACK (pad_added_cb), audiodepay);

  /* give some stats when we receive RTCP */
  g_signal_connect (rtpbin, "on-ssrc-active", G_CALLBACK (on_ssrc_active_cb),
      audiodepay);

  /* set the pipeline to playing */
  g_print ("starting receiver pipeline\n");
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* we need to run a GLib main loop to get the messages */
  loop = g_main_loop_new (NULL, FALSE);
  g_main_loop_run (loop);

  g_print ("stopping receiver pipeline\n");
  gst_element_set_state (pipeline, GST_STATE_NULL);

  gst_object_unref (pipeline);

  return 0;
}
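The pad_added_cb referenced above is not included in this example; a minimal sketch, assuming its only job is to link the dynamically created rtpbin src pad to the depayloader passed as user_data, could be:

/* Hedged sketch of the pad-added handler (assumed, not the original):
 * link the new rtpbin pad to the depayloader's sink pad once. */
static void
pad_added_cb (GstElement * rtpbin, GstPad * new_pad, GstElement * depay)
{
  GstPad *sinkpad;

  sinkpad = gst_element_get_static_pad (depay, "sink");
  if (!gst_pad_is_linked (sinkpad)) {
    if (GST_PAD_LINK_FAILED (gst_pad_link (new_pad, sinkpad)))
      g_warning ("failed to link rtpbin pad to the depayloader");
  }
  gst_object_unref (sinkpad);
}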
Пример #17
0
gint
main (gint argc, gchar * argv[])
{
  GstElement *pipeline;
  GstElement *bin1, *bin2;
  GstElement *fakesrc, *identity, *fakesink;

  gst_init (&argc, &argv);

  /*
   * +-pipeline----------------------------------------+
   * | +-bin2----------------------------------------+ |
   * | | +-bin1-----------------------+              | |
   * | | | +---------+   +----------+ | +----------+ | |
   * | | | | fakesrc |---| identity |---| fakesink | | |
   * | | | +---------+   +----------+ | +----------+ | |
   * | | +----------------------------+              | |
   * | +---------------------------------------------+ |
   * +-------------------------------------------------+
   */

  pipeline = gst_pipeline_new ("pipeline");
  g_assert (pipeline);
  bin1 = gst_bin_new ("bin1");
  g_assert (bin1);
  bin2 = gst_bin_new ("bin2");
  g_assert (bin2);

  fakesrc = gst_element_factory_make ("fakesrc", "fakesrc");
  g_assert (fakesrc);
  g_object_set (G_OBJECT (fakesrc), "num_buffers", 5, NULL);
  identity = gst_element_factory_make ("identity", "identity");
  g_assert (identity);
  fakesink = gst_element_factory_make ("fakesink", "fakesink");
  g_assert (fakesink);

  gst_bin_add_many (GST_BIN (bin1), fakesrc, identity, NULL);
  g_assert (gst_element_link (fakesrc, identity));

  gst_bin_add_many (GST_BIN (bin2), bin1, fakesink, NULL);
  g_assert (gst_element_link (identity, fakesink));

  gst_bin_add (GST_BIN (pipeline), bin2);
  g_signal_connect (G_OBJECT (pipeline), "deep_notify",
      G_CALLBACK (gst_object_default_deep_notify), NULL);

  /* setting pipeline to READY should bring in all children to READY */
  gst_element_set_state (pipeline, GST_STATE_READY);
  g_assert (GST_STATE (bin1) == GST_STATE_READY);
  g_assert (GST_STATE (bin2) == GST_STATE_READY);
  g_assert (GST_STATE (fakesrc) == GST_STATE_READY);
  g_assert (GST_STATE (identity) == GST_STATE_READY);
  g_assert (GST_STATE (fakesink) == GST_STATE_READY);

  /* setting fakesink to PAUSED should not affect pipeline and bin2 */
  gst_element_set_state (fakesink, GST_STATE_PAUSED);
  g_assert (GST_STATE (bin1) == GST_STATE_READY);
  g_assert (GST_STATE (bin2) == GST_STATE_READY);
  g_assert (GST_STATE (fakesrc) == GST_STATE_READY);
  g_assert (GST_STATE (identity) == GST_STATE_READY);
  g_assert (GST_STATE (fakesink) == GST_STATE_READY);

  /* setting fakesrc to PAUSED should not affect bin1 */
  gst_element_set_state (fakesrc, GST_STATE_PAUSED);
  g_assert (GST_STATE (bin1) == GST_STATE_READY);
  g_assert (GST_STATE (bin2) == GST_STATE_READY);
  g_assert (GST_STATE (fakesrc) == GST_STATE_PAUSED);
  g_assert (GST_STATE (identity) == GST_STATE_READY);
  g_assert (GST_STATE (fakesink) == GST_STATE_READY);

  /* setting bin1 to PAUSED, even though it is already, should set
   * identity to PAUSED as well */
  gst_element_set_state (bin1, GST_STATE_PAUSED);
  gst_element_get_state (bin2, NULL, NULL, NULL);
  g_assert (GST_STATE (bin1) == GST_STATE_PAUSED);
  g_assert (GST_STATE (bin2) == GST_STATE_READY);
  g_assert (GST_STATE (fakesrc) == GST_STATE_PAUSED);
  g_assert (GST_STATE (identity) == GST_STATE_PAUSED);
  g_assert (GST_STATE (fakesink) == GST_STATE_PAUSED);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  g_usleep (1000000);

  g_print ("passed.\n");
  return 0;
}
Пример #18
0
static void
gst_insert_bin_do_change (GstInsertBin * self, GstPad * pad)
{
  struct ChangeData *data;

  GST_OBJECT_LOCK (self);

  if (!is_right_direction_for_block (pad)) {
    GST_WARNING_OBJECT (self, "Block pad does not have the expected direction");
    goto next;
  }

  while ((data = g_queue_pop_head (&self->priv->change_queue)) != NULL) {
    GstPad *peer = NULL;
    GstPad *other_peer = NULL;

    GST_OBJECT_UNLOCK (self);


    if (data->action == GST_INSERT_BIN_ACTION_ADD &&
        !validate_element (self, data->element))
      goto error;

    peer = gst_pad_get_peer (pad);

    if (peer == NULL) {
      GST_WARNING_OBJECT (self, "Blocked pad has no peer");
      goto error;
    }

    if (data->action == GST_INSERT_BIN_ACTION_ADD) {
      GstPad *srcpad = NULL, *sinkpad = NULL;
      GstPad *peersrcpad, *peersinkpad;

      /* First let's make sure we have the right pad */
      if (data->sibling) {
        GstElement *parent = NULL;
        GstPad *siblingpad;

        if ((gst_pad_get_direction (pad) == GST_PAD_SRC &&
                data->direction == DIRECTION_BEFORE) ||
            (gst_pad_get_direction (pad) == GST_PAD_SINK &&
                data->direction == DIRECTION_AFTER))
          siblingpad = peer;
        else
          siblingpad = pad;

        parent = gst_pad_get_parent_element (siblingpad);
        if (parent != NULL)
          gst_object_unref (parent);

        if (parent != data->sibling)
          goto retry;
      } else {
        GstObject *parent;
        GstPad *ghost;
        GstPad *proxypad;

        if (data->direction == DIRECTION_BEFORE) {
          ghost = self->priv->srcpad;
          if (gst_pad_get_direction (pad) == GST_PAD_SINK)
            proxypad = pad;
          else
            proxypad = peer;
        } else {
          ghost = self->priv->sinkpad;
          if (gst_pad_get_direction (pad) == GST_PAD_SINK)
            proxypad = peer;
          else
            proxypad = pad;
        }

        if (!GST_IS_PROXY_PAD (proxypad))
          goto retry;
        parent = gst_pad_get_parent (proxypad);
        if (!parent)
          goto retry;
        gst_object_unref (parent);
        if (GST_PAD_CAST (parent) != ghost)
          goto retry;
      }

      if (gst_pad_get_direction (pad) == GST_PAD_SRC) {
        peersrcpad = pad;
        peersinkpad = peer;
      } else {
        peersrcpad = peer;
        peersinkpad = pad;
      }

      if (GST_IS_PROXY_PAD (peersrcpad)) {
        GstObject *parent = gst_pad_get_parent (peersrcpad);

        if (GST_PAD_CAST (parent) == self->priv->sinkpad)
          peersrcpad = NULL;

        if (parent)
          gst_object_unref (parent);
      }

      if (GST_IS_PROXY_PAD (peersinkpad)) {
        GstObject *parent = gst_pad_get_parent (peersinkpad);

        if (GST_PAD_CAST (parent) == self->priv->srcpad)
          peersinkpad = NULL;

        if (parent)
          gst_object_unref (parent);
      }

      if (peersinkpad && peersrcpad) {
        gst_pad_unlink (peersrcpad, peersinkpad);
      } else {
        if (!peersinkpad)
          gst_ghost_pad_set_target (GST_GHOST_PAD (self->priv->srcpad), NULL);
        if (!peersrcpad)
          gst_ghost_pad_set_target (GST_GHOST_PAD (self->priv->sinkpad), NULL);
      }

      srcpad = get_single_pad (data->element, GST_PAD_SRC);
      sinkpad = get_single_pad (data->element, GST_PAD_SINK);

      if (srcpad == NULL || sinkpad == NULL) {
        GST_WARNING_OBJECT (self, "Can not get element src or sink pad");
        goto error;
      }

      if (!gst_bin_add (GST_BIN (self), data->element)) {
        GST_WARNING_OBJECT (self, "Can not add element to bin");
        goto error;
      }

      if (peersrcpad) {
        if (GST_PAD_LINK_FAILED (gst_pad_link (peersrcpad, sinkpad))) {
          GST_WARNING_OBJECT (self, "Can not link sibling's %s:%s pad"
              " to element's %s:%s pad", GST_DEBUG_PAD_NAME (peersrcpad),
              GST_DEBUG_PAD_NAME (sinkpad));
          goto error;
        }
      } else {
        if (!gst_ghost_pad_set_target (GST_GHOST_PAD (self->priv->sinkpad),
                sinkpad)) {
          GST_WARNING_OBJECT (self, "Can not set %s:%s as target for %s:%s",
              GST_DEBUG_PAD_NAME (sinkpad),
              GST_DEBUG_PAD_NAME (self->priv->sinkpad));
          goto error;
        }
      }

      if (peersinkpad) {
        if (GST_PAD_LINK_FAILED (gst_pad_link (srcpad, peersinkpad))) {
          GST_WARNING_OBJECT (self, "Can not link element's %s:%s pad"
              " to sibling's %s:%s pad", GST_DEBUG_PAD_NAME (srcpad),
              GST_DEBUG_PAD_NAME (peersinkpad));
          goto error;
        }
      } else {
        if (!gst_ghost_pad_set_target (GST_GHOST_PAD (self->priv->srcpad),
                srcpad)) {
          GST_WARNING_OBJECT (self, "Can not set %s:%s as target for %s:%s",
              GST_DEBUG_PAD_NAME (srcpad),
              GST_DEBUG_PAD_NAME (self->priv->srcpad));
          goto error;
        }
      }

      gst_object_unref (srcpad);
      gst_object_unref (sinkpad);

      if (!gst_element_sync_state_with_parent (data->element)) {
        GST_WARNING_OBJECT (self, "Can not sync element's state with parent");
        goto error;
      }
    } else {
      GstElement *parent = NULL;
      GstPad *other_pad;
      GstCaps *caps = NULL, *peercaps = NULL;
      gboolean can_intersect;
      gboolean success;

      parent = gst_pad_get_parent_element (peer);
      if (parent != NULL)
        gst_object_unref (parent);

      if (parent != data->element)
        goto retry;

      if (gst_pad_get_direction (peer) == GST_PAD_SRC)
        other_pad = get_single_pad (data->element, GST_PAD_SINK);
      else
        other_pad = get_single_pad (data->element, GST_PAD_SRC);

      if (!other_pad) {
        GST_WARNING_OBJECT (self, "Can not get element's other pad");
        goto error;
      }

      other_peer = gst_pad_get_peer (other_pad);
      gst_object_unref (other_pad);

      if (!other_peer) {
        GST_WARNING_OBJECT (self, "Can not get element's other peer");
        goto error;
      }

      /* Get the negotiated caps for the source pad peer,
       * because renegotiation while the pipeline is playing doesn't work
       * that fast.
       */
      if (gst_pad_get_direction (pad) == GST_PAD_SRC)
        caps = gst_pad_get_current_caps (pad);
      else
        peercaps = gst_pad_get_current_caps (other_peer);
      if (!caps)
        caps = gst_pad_query_caps (pad, NULL);
      if (!peercaps)
        peercaps = gst_pad_query_caps (other_peer, NULL);
      can_intersect = gst_caps_can_intersect (caps, peercaps);
      gst_caps_unref (caps);
      gst_caps_unref (peercaps);

      if (!can_intersect) {
        GST_WARNING_OBJECT (self, "Pads are incompatible without the element");
        goto error;
      }

      if (gst_pad_get_direction (other_peer) == GST_PAD_SRC &&
          gst_pad_is_active (other_peer)) {
        gulong probe_id;

        probe_id = gst_pad_add_probe (other_peer,
            GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM,
            wait_and_drop_eos_cb, NULL, NULL);
        gst_pad_send_event (peer, gst_event_new_eos ());
        gst_pad_remove_probe (other_peer, probe_id);
      }

      gst_element_set_locked_state (data->element, TRUE);
      gst_element_set_state (data->element, GST_STATE_NULL);
      if (!gst_bin_remove (GST_BIN (self), data->element)) {
        GST_WARNING_OBJECT (self, "Element removal rejected");
        goto error;
      }
      gst_element_set_locked_state (data->element, FALSE);

      if (gst_pad_get_direction (pad) == GST_PAD_SRC)
        success = GST_PAD_LINK_SUCCESSFUL (gst_pad_link_full (pad, other_peer,
                GST_PAD_LINK_CHECK_HIERARCHY |
                GST_PAD_LINK_CHECK_TEMPLATE_CAPS));
      else
        success = GST_PAD_LINK_SUCCESSFUL (gst_pad_link_full (other_peer, pad,
                GST_PAD_LINK_CHECK_HIERARCHY |
                GST_PAD_LINK_CHECK_TEMPLATE_CAPS));
      gst_object_unref (other_peer);
      other_peer = NULL;

      if (!success) {
        GST_ERROR_OBJECT (self, "Could not re-link after the element's"
            " removal");
        goto error;
      }
    }


    gst_insert_bin_change_data_complete (self, data, TRUE);
    gst_object_unref (peer);

    GST_OBJECT_LOCK (self);
    continue;
  done:
    if (other_peer != NULL)
      gst_object_unref (other_peer);

    if (peer != NULL)
      gst_object_unref (peer);
    break;
  retry:
    GST_OBJECT_LOCK (self);
    g_queue_push_head (&self->priv->change_queue, data);
    goto done;
  error:
    /* Handle error */
    gst_insert_bin_change_data_complete (self, data, FALSE);
    GST_OBJECT_LOCK (self);
    goto done;
  }

next:
  gst_insert_bin_block_pad_unlock (self);
}
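The wait_and_drop_eos_cb probe installed above with GST_PAD_PROBE_TYPE_EVENT_DOWNSTREAM is not shown; a minimal sketch, assuming it only has to swallow the EOS that is pushed through the element being removed, could be:

/* Hedged sketch (assumed, not the original helper): pass everything except
 * EOS; the caller removes the probe by id after sending the event. */
static GstPadProbeReturn
wait_and_drop_eos_cb (GstPad * pad, GstPadProbeInfo * info, gpointer user_data)
{
  if (GST_EVENT_TYPE (GST_PAD_PROBE_INFO_EVENT (info)) != GST_EVENT_EOS)
    return GST_PAD_PROBE_OK;

  return GST_PAD_PROBE_DROP;
}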
Пример #19
0
// This function creates and initializes some internal variables, and returns a
// pointer to the element that should receive the data flow first
GstElement* MediaPlayerPrivateGStreamerBase::createVideoSink(GstElement* pipeline)
{
    if (!initializeGStreamer())
        return 0;

#if USE(NATIVE_FULLSCREEN_VIDEO)
    m_gstGWorld = GStreamerGWorld::createGWorld(pipeline);
    m_webkitVideoSink = webkitVideoSinkNew(m_gstGWorld.get());
#else
    UNUSED_PARAM(pipeline);
    m_webkitVideoSink = webkitVideoSinkNew();
#endif

    m_repaintHandler = g_signal_connect(m_webkitVideoSink.get(), "repaint-requested", G_CALLBACK(mediaPlayerPrivateRepaintCallback), this);

#if USE(NATIVE_FULLSCREEN_VIDEO)
    // Build a new video sink consisting of a bin containing a tee
    // (meant to distribute data to multiple video sinks) and our
    // internal video sink. For fullscreen we create an autovideosink
    // and initially block the data flow towards it and configure it

    m_videoSinkBin = gst_bin_new("video-sink");

    GstElement* videoTee = gst_element_factory_make("tee", "videoTee");
    GstElement* queue = gst_element_factory_make("queue", 0);

#ifdef GST_API_VERSION_1
    GRefPtr<GstPad> sinkPad = adoptGRef(gst_element_get_static_pad(videoTee, "sink"));
    GST_OBJECT_FLAG_SET(GST_OBJECT(sinkPad.get()), GST_PAD_FLAG_PROXY_ALLOCATION);
#endif

    gst_bin_add_many(GST_BIN(m_videoSinkBin.get()), videoTee, queue, NULL);

    // Link a new src pad from the tee to the queue.
    gst_element_link_pads_full(videoTee, 0, queue, "sink", GST_PAD_LINK_CHECK_NOTHING);
#endif

    GstElement* actualVideoSink = 0;
    m_fpsSink = gst_element_factory_make("fpsdisplaysink", "sink");
    if (m_fpsSink) {
        // The verbose property has been added in -bad 0.10.22. Making
        // this whole code depend on it because we don't want
        // fpsdisplaysink to spit data on stdout.
        GstElementFactory* factory = GST_ELEMENT_FACTORY(GST_ELEMENT_GET_CLASS(m_fpsSink)->elementfactory);
        if (gst_plugin_feature_check_version(GST_PLUGIN_FEATURE(factory), 0, 10, 22)) {
            g_object_set(m_fpsSink, "silent", TRUE , NULL);

            // Turn off text overlay unless logging is enabled.
#if LOG_DISABLED
            g_object_set(m_fpsSink, "text-overlay", FALSE , NULL);
#else
            WTFLogChannel* channel = logChannelByName("Media");
            if (channel->state != WTFLogChannelOn)
                g_object_set(m_fpsSink, "text-overlay", FALSE , NULL);
#endif // LOG_DISABLED

            if (g_object_class_find_property(G_OBJECT_GET_CLASS(m_fpsSink), "video-sink")) {
                g_object_set(m_fpsSink, "video-sink", m_webkitVideoSink.get(), NULL);
#if USE(NATIVE_FULLSCREEN_VIDEO)
                gst_bin_add(GST_BIN(m_videoSinkBin.get()), m_fpsSink);
#endif
                actualVideoSink = m_fpsSink;
            } else
                m_fpsSink = 0;
        } else
            m_fpsSink = 0;
    }

    if (!m_fpsSink) {
#if USE(NATIVE_FULLSCREEN_VIDEO)
        gst_bin_add(GST_BIN(m_videoSinkBin.get()), m_webkitVideoSink.get());
#endif
        actualVideoSink = m_webkitVideoSink.get();
    }

    ASSERT(actualVideoSink);

#if USE(NATIVE_FULLSCREEN_VIDEO)
    // Faster elements linking.
    gst_element_link_pads_full(queue, "src", actualVideoSink, "sink", GST_PAD_LINK_CHECK_NOTHING);

    // Add a ghostpad to the bin so it can proxy to tee.
    GRefPtr<GstPad> pad = adoptGRef(gst_element_get_static_pad(videoTee, "sink"));
    gst_element_add_pad(m_videoSinkBin.get(), gst_ghost_pad_new("sink", pad.get()));

    // Set the bin as video sink of playbin.
    return m_videoSinkBin.get();
#else
    return actualVideoSink;
#endif
}
Пример #20
0
static void
kms_agnostic_bin2_link_to_tee (KmsAgnosticBin2 * self, GstPad * pad,
    GstElement * tee, GstCaps * caps)
{
  GstElement *queue = gst_element_factory_make ("queue", NULL);
  GstPad *target;
  GstProxyPad *proxy;

  gst_bin_add (GST_BIN (self), queue);
  gst_element_sync_state_with_parent (queue);

  if (!(gst_caps_is_any (caps) || gst_caps_is_empty (caps))
      && kms_utils_caps_are_raw (caps)) {
    GstElement *convert = kms_utils_create_convert_for_caps (caps);
    GstElement *rate = kms_utils_create_rate_for_caps (caps);
    GstElement *mediator = kms_utils_create_mediator_element (caps);

    if (kms_utils_caps_are_video (caps)) {
      g_object_set (queue, "leaky", 2, "max-size-time", LEAKY_TIME, NULL);
    }

    remove_element_on_unlinked (convert, "src", "sink");
    if (rate) {
      remove_element_on_unlinked (rate, "src", "sink");
    }
    remove_element_on_unlinked (mediator, "src", "sink");

    if (rate) {
      gst_bin_add (GST_BIN (self), rate);
    }

    gst_bin_add_many (GST_BIN (self), convert, mediator, NULL);

    gst_element_sync_state_with_parent (mediator);
    gst_element_sync_state_with_parent (convert);
    if (rate) {
      gst_element_sync_state_with_parent (rate);
    }

    if (rate) {
      gst_element_link_many (queue, rate, mediator, NULL);
    } else {
      gst_element_link (queue, mediator);
    }

    gst_element_link_many (mediator, convert, NULL);
    target = gst_element_get_static_pad (convert, "src");
  } else {
    target = gst_element_get_static_pad (queue, "src");
  }

  gst_ghost_pad_set_target (GST_GHOST_PAD (pad), target);

  proxy = gst_proxy_pad_get_internal (GST_PROXY_PAD (pad));
  gst_pad_set_query_function (GST_PAD_CAST (proxy),
      proxy_src_pad_query_function);
  g_object_unref (proxy);

  g_object_unref (target);
  link_element_to_tee (tee, queue);
}
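The helpers remove_element_on_unlinked () and link_element_to_tee () belong to the surrounding project and are not part of this excerpt; as a rough, assumed sketch, the latter comes down to requesting a new src pad on the tee and linking it to the queue:

/* Hedged sketch (assumed helper, not the original code): request a src pad
 * on the tee and link it to the queue's sink pad. */
static void
link_element_to_tee (GstElement * tee, GstElement * queue)
{
  GstPad *tee_src = gst_element_get_request_pad (tee, "src_%u");
  GstPad *queue_sink = gst_element_get_static_pad (queue, "sink");

  if (GST_PAD_LINK_FAILED (gst_pad_link (tee_src, queue_sink)))
    GST_ERROR ("Could not link tee src pad to queue sink pad");

  gst_object_unref (queue_sink);
  gst_object_unref (tee_src);
}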
Пример #21
0
/*
 * create_post_tee_bin
 *
 * The following chain is created after the tee for each output from the
 * source:
 *
 *    +-------+   +---------------------+   +-------+
 * ---+ queue +---+ conversion elements +---+ queue +---
 *    +-------+   +---------------------+   +-------+
 */
static GstElement *create_post_tee_bin(OwrMediaSource *media_source, GstElement *source_bin,
    GstCaps *caps, GstPad *ghostpad, guint source_id)
{
    OwrMediaType media_type;
    GstElement *post_tee_bin, *queue_pre, *queue_post, *capsfilter;
    GstPad *bin_pad, *queue_pre_pad, *srcpad;
    GSList *list = NULL;
    gchar *bin_name;

    bin_name = g_strdup_printf("source-post-tee-bin-%u", source_id);
    post_tee_bin = gst_bin_new(bin_name);
    if (!gst_bin_add(GST_BIN(source_bin), post_tee_bin)) {
        GST_ERROR("Failed to add %s to source bin", bin_name);
        g_free(bin_name);
        g_object_unref(post_tee_bin);
        post_tee_bin = NULL;
        goto done;
    }
    g_free(bin_name);
    gst_element_sync_state_with_parent(post_tee_bin);

    CREATE_ELEMENT_WITH_ID(queue_pre, "queue", "source-post-tee-queue", source_id);
    CREATE_ELEMENT_WITH_ID(capsfilter, "capsfilter", "source-output-capsfilter", source_id);
    list = g_slist_append(list, capsfilter);
    CREATE_ELEMENT_WITH_ID(queue_post, "queue", "source-output-queue", source_id);
    list = g_slist_append(list, queue_post);

    g_object_get(media_source, "media-type", &media_type, NULL);
    switch (media_type) {
    case OWR_MEDIA_TYPE_AUDIO:
        {
        GstElement *audioresample, *audioconvert;

        g_object_set(capsfilter, "caps", caps, NULL);

        CREATE_ELEMENT_WITH_ID(audioresample, "audioresample", "source-audio-resample", source_id);
        list = g_slist_prepend(list, audioresample);
        CREATE_ELEMENT_WITH_ID(audioconvert, "audioconvert", "source-audio-convert", source_id);
        list = g_slist_prepend(list, audioconvert);
        list = g_slist_prepend(list, queue_pre);

        gst_bin_add_many(GST_BIN(post_tee_bin),
            queue_pre, audioconvert, audioresample, capsfilter, queue_post, NULL);
        LINK_ELEMENTS(capsfilter, queue_post);
        LINK_ELEMENTS(audioresample, capsfilter);
        LINK_ELEMENTS(audioconvert, audioresample);
        LINK_ELEMENTS(queue_pre, audioconvert);

        break;
        }
    case OWR_MEDIA_TYPE_VIDEO:
        {
        GstElement *videorate, *videoscale, *videoconvert;
        GstCaps *source_caps;
        GstStructure *source_structure;
        gint fps_n = 0, fps_d = 1;

        source_caps = gst_caps_copy(caps);
        source_structure = gst_caps_get_structure(source_caps, 0);
        if (gst_structure_get_fraction(source_structure, "framerate", &fps_n, &fps_d))
            gst_structure_remove_field(source_structure, "framerate");
        g_object_set(capsfilter, "caps", source_caps, NULL);
        gst_caps_unref(source_caps);

        CREATE_ELEMENT_WITH_ID(videoconvert, VIDEO_CONVERT, "source-video-convert", source_id);
        list = g_slist_prepend(list, videoconvert);
        CREATE_ELEMENT_WITH_ID(videoscale, "videoscale", "source-video-scale", source_id);
        list = g_slist_prepend(list, videoscale);
        CREATE_ELEMENT_WITH_ID(videorate, "videorate", "source-video-rate", source_id);
        g_object_set(videorate, "drop-only", TRUE, "max-rate", fps_n / fps_d, NULL);
        list = g_slist_prepend(list, videorate);
        list = g_slist_prepend(list, queue_pre);

        gst_bin_add_many(GST_BIN(post_tee_bin),
            queue_pre, videorate, videoscale, videoconvert, capsfilter, queue_post, NULL);
        LINK_ELEMENTS(capsfilter, queue_post);
        LINK_ELEMENTS(videoconvert, capsfilter);
        LINK_ELEMENTS(videoscale, videoconvert);
        LINK_ELEMENTS(videorate, videoscale);
        LINK_ELEMENTS(queue_pre, videorate);

        break;
        }
    case OWR_MEDIA_TYPE_UNKNOWN:
    default:
        g_assert_not_reached();
        goto done;
    }

    srcpad = gst_element_get_static_pad(queue_post, "src");
    g_assert(srcpad);

    bin_pad = gst_ghost_pad_new("src", srcpad);
    gst_pad_set_active(bin_pad, TRUE);
    gst_element_add_pad(post_tee_bin, bin_pad);
    gst_object_unref(srcpad);

    gst_ghost_pad_set_target(GST_GHOST_PAD(ghostpad), bin_pad);
    gst_pad_set_active(ghostpad, TRUE);
    gst_element_add_pad(source_bin, ghostpad);

    g_slist_foreach(list, sync_to_parent, NULL);

    queue_pre_pad = gst_element_get_static_pad(queue_pre, "sink");
    g_assert(queue_pre_pad);

    bin_pad = gst_ghost_pad_new("sink", queue_pre_pad);
    gst_pad_set_active(bin_pad, TRUE);
    gst_element_add_pad(post_tee_bin, bin_pad);
    gst_object_unref(queue_pre_pad);

done:
    g_slist_free(list);
    list = NULL;

    return post_tee_bin;
}
Пример #22
0
void EntradaFitxer::crea(int k, GstElement *pipeline, QString nom_fitxer)
{
    //Elements for the file input source
    QString sbin("bin_font%1"),  ssource("source_%1"), sdec("decoder%1"), svolumen_m("volumen_mix%1"), squeue("audio_queue%1");
    QString saconv("audio_conv_%1"), sabin("bin_audio_%1"), sconv("video_conv_%1"), ssink("video_sink_%1");

    //Create the file input and the decodebin, add them to the pipeline and link them.
    bin_font = gst_bin_new ((char*)sbin.arg(k).toStdString().c_str());

    source = gst_element_factory_make ("filesrc",   (char*)ssource.arg(k).toStdString().c_str());
    dec = gst_element_factory_make ("decodebin2",   (char*)sdec.arg(k).toStdString().c_str());

    //Check that all the input elements could be created
    if(!bin_font || !source || !dec){
      g_printerr ("One of the file input elements could not be created. Exiting.\n");
    }

    g_signal_connect (dec, "new-decoded-pad", G_CALLBACK (cb_newpad_audio), this);
    g_signal_connect (dec, "new-decoded-pad", G_CALLBACK (cb_newpad_video), this);
    gst_bin_add_many (GST_BIN (bin_font), source, dec, NULL);
    gst_element_link (source, dec);

    //Create the audio input
    a.bin = gst_bin_new ((char*)sabin.arg(k).toStdString().c_str());
    conv_audio =    gst_element_factory_make("audioconvert",    (char*)saconv.arg(k).toStdString().c_str());
    audiopad =      gst_element_get_static_pad (conv_audio, "sink");
    a.queue_mix=    gst_element_factory_make("queue2",          (char*)squeue.arg(k).toStdString().c_str());
    a.volume_mix =  gst_element_factory_make("volume",          (char*)svolumen_m.arg(k).toStdString().c_str());

    //Check that all the input elements could be created
    if(!a.bin || !conv_audio || !audiopad || !a.queue_mix || !a.volume_mix){
      g_printerr ("One of the audio file input elements could not be created. Exiting.\n");
    }

    gst_bin_add_many (GST_BIN (a.bin), conv_audio, a.queue_mix, a.volume_mix, NULL);
    gst_element_link_many (conv_audio, a.queue_mix, a.volume_mix, NULL);
    gst_element_add_pad (a.bin, gst_ghost_pad_new ("sink", audiopad));
    gst_object_unref (audiopad);
    gst_bin_add (GST_BIN (bin_font), a.bin);

    //Create the video input
    v.creacomuns(k,"video_fitxer");
    v.creatransformadors(k);
    conv_video =    gst_element_factory_make ("ffmpegcolorspace",   (char*)sconv.arg(k).toStdString().c_str());
    videopad =      gst_element_get_static_pad (conv_video,         "sink");
    v.sink =        gst_element_factory_make ("xvimagesink",        (char*)ssink.arg(k).toStdString().c_str());

    //Check that all the input elements could be created
    if( !videopad || !conv_video || !v.sink){
      g_printerr ("One of the video file input elements could not be created. Exiting.\n");
    }

    gst_bin_add_many (GST_BIN (v.bin), conv_video, v.tee, v.queue, v.scale, v.sink, v.queue_mix, v.color_conv, v.scale_mix, NULL);
    gst_element_link_many (conv_video, v.tee, v.queue, v.scale, v.sink, NULL);
    gst_element_add_pad (v.bin, gst_ghost_pad_new ("sink", videopad));
    gst_object_unref (videopad);
    gst_bin_add (GST_BIN (bin_font), v.bin);

    //Select the input file. Pass the temporary's c_str() directly to g_object_set,
    //which copies the string; keeping the raw pointer around would dangle once the
    //temporary std::string is destroyed.
    g_object_set (G_OBJECT (source), "location", nom_fitxer.toStdString().c_str(), NULL);
    gst_element_set_state(v.sink, GST_STATE_READY);

    //Add the source bin (bin_font) to the pipeline
    gst_bin_add (GST_BIN (pipeline),bin_font);
}
Пример #23
0
void test_queries()
{
  GstBin *bin;
  GstElement *src, *sink;
  GstStateChangeReturn ret;
  GstPad *pad;
  GstQuery *dur, *pos;
  xmlfile = "gstquery_test_queries";
  std_log(LOG_FILENAME_LINE, "Test Started test_queries");
  fail_unless ((bin = (GstBin *) gst_pipeline_new (NULL)) != NULL,
      "Could not create pipeline");
  fail_unless ((src = gst_element_factory_make ("fakesrc", NULL)) != NULL,
      "Could not create fakesrc");
  g_object_set (src, "datarate", 200, "sizetype", 2, NULL);

  fail_unless ((sink = gst_element_factory_make ("fakesink", NULL)) != NULL,
      "Could not create fakesink");
  g_object_set (sink, "sync", TRUE, NULL);
  fail_unless ((dur = gst_query_new_duration (GST_FORMAT_BYTES)) != NULL,
      "Could not prepare duration query");
  fail_unless ((pos = gst_query_new_position (GST_FORMAT_BYTES)) != NULL,
      "Could not prepare position query");

  fail_unless (gst_bin_add (bin, src), "Could not add src to bin");
  fail_unless (gst_bin_add (bin, sink), "Could not add sink to bin");
  fail_unless (gst_element_link (src, sink), "could not link src and sink");

  ret = gst_element_set_state (GST_ELEMENT (bin), GST_STATE_PLAYING);
  fail_if (ret == GST_STATE_CHANGE_FAILURE, "Failed to set pipeline PLAYING");
  if (ret == GST_STATE_CHANGE_ASYNC)
    gst_element_get_state (GST_ELEMENT (bin), NULL, NULL, GST_CLOCK_TIME_NONE);

  /* Query the bin */
  fail_unless (gst_element_query (GST_ELEMENT (bin), pos),
      "Could not query pipeline position");
  fail_unless (gst_element_query (GST_ELEMENT (bin), dur),
      "Could not query pipeline duration");

  /* Query elements */
  fail_unless (gst_element_query (GST_ELEMENT (src), pos),
      "Could not query position of fakesrc");
  fail_unless (gst_element_query (GST_ELEMENT (src), dur),
      "Could not query duration of fakesrc");

  fail_unless (gst_element_query (GST_ELEMENT (sink), pos),
      "Could not query position of fakesink");
  fail_unless (gst_element_query (GST_ELEMENT (sink), dur),
      "Could not query duration of fakesink");

  /* Query pads */
  fail_unless ((pad = gst_element_get_pad (src, "src")) != NULL,
      "Could not get source pad of fakesrc");
  fail_unless (gst_pad_query (pad, pos),
      "Could not query position of fakesrc src pad");
  fail_unless (gst_pad_query (pad, dur),
      "Could not query duration of fakesrc src pad");
  gst_object_unref (pad);

  /* We don't query the sink pad of fakesink, it doesn't 
   * handle downstream queries atm, but it might later, who knows? */

  ret = gst_element_set_state (GST_ELEMENT (bin), GST_STATE_NULL);
  fail_if (ret == GST_STATE_CHANGE_FAILURE, "Failed to set pipeline NULL");
  if (ret == GST_STATE_CHANGE_ASYNC)
    gst_element_get_state (GST_ELEMENT (bin), NULL, NULL, GST_CLOCK_TIME_NONE);

  gst_query_unref (dur);
  gst_query_unref (pos);
  gst_object_unref (bin);
	std_log(LOG_FILENAME_LINE, "Test Successful");
	create_xml(0);

}
Пример #24
0
static void
pad_blocked_cb (GstPad * pad, gboolean blocked, GstPlaySinkAudioConvert * self)
{
  GstPad *peer;
  GstCaps *caps;
  gboolean raw;

  GST_PLAY_SINK_AUDIO_CONVERT_LOCK (self);
  self->sink_proxypad_blocked = blocked;
  GST_DEBUG_OBJECT (self, "Pad blocked: %d", blocked);
  if (!blocked)
    goto done;

  /* There must be a peer at this point */
  peer = gst_pad_get_peer (self->sinkpad);
  caps = gst_pad_get_negotiated_caps (peer);
  if (!caps)
    caps = gst_pad_get_caps_reffed (peer);
  gst_object_unref (peer);

  raw = is_raw_caps (caps);
  GST_DEBUG_OBJECT (self, "Caps %" GST_PTR_FORMAT " are raw: %d", caps, raw);
  gst_caps_unref (caps);

  if (raw == self->raw)
    goto unblock;
  self->raw = raw;

  if (raw) {
    GstBin *bin = GST_BIN_CAST (self);
    GstElement *head = NULL, *prev = NULL;
    GstPad *pad;

    GST_DEBUG_OBJECT (self, "Creating raw conversion pipeline");

    gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (self->sinkpad), NULL);
    gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (self->srcpad), NULL);

    if (self->use_converters) {
      self->conv = gst_element_factory_make ("audioconvert", "conv");
      if (self->conv == NULL) {
        post_missing_element_message (self, "audioconvert");
        GST_ELEMENT_WARNING (self, CORE, MISSING_PLUGIN,
            (_("Missing element '%s' - check your GStreamer installation."),
                "audioconvert"), ("audio rendering might fail"));
      } else {
        gst_bin_add (bin, self->conv);
        gst_element_sync_state_with_parent (self->conv);
        distribute_running_time (self->conv, &self->segment);
        prev = head = self->conv;
      }

      self->resample = gst_element_factory_make ("audioresample", "resample");
      if (self->resample == NULL) {
        post_missing_element_message (self, "audioresample");
        GST_ELEMENT_WARNING (self, CORE, MISSING_PLUGIN,
            (_("Missing element '%s' - check your GStreamer installation."),
                "audioresample"), ("possibly a liboil version mismatch?"));
      } else {
        gst_bin_add (bin, self->resample);
        gst_element_sync_state_with_parent (self->resample);
        distribute_running_time (self->resample, &self->segment);
        if (prev) {
          if (!gst_element_link_pads_full (prev, "src", self->resample, "sink",
                  GST_PAD_LINK_CHECK_TEMPLATE_CAPS))
            goto link_failed;
        } else {
          head = self->resample;
        }
        prev = self->resample;
      }
    }

    if (self->use_volume && self->volume) {
      gst_bin_add (bin, gst_object_ref (self->volume));
      gst_element_sync_state_with_parent (self->volume);
      distribute_running_time (self->volume, &self->segment);
      if (prev) {
        if (!gst_element_link_pads_full (prev, "src", self->volume, "sink",
                GST_PAD_LINK_CHECK_TEMPLATE_CAPS))
          goto link_failed;
      } else {
        head = self->volume;
      }
      prev = self->volume;
    }

    if (head) {
      pad = gst_element_get_static_pad (head, "sink");
      gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (self->sinkpad), pad);
      gst_object_unref (pad);
    }

    if (prev) {
      pad = gst_element_get_static_pad (prev, "src");
      gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (self->srcpad), pad);
      gst_object_unref (pad);
    }

    if (!head && !prev) {
      gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (self->srcpad),
          self->sink_proxypad);
    }

    GST_DEBUG_OBJECT (self, "Raw conversion pipeline created");
  } else {
    GstBin *bin = GST_BIN_CAST (self);

    GST_DEBUG_OBJECT (self, "Removing raw conversion pipeline");

    gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (self->sinkpad), NULL);
    gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (self->srcpad), NULL);

    if (self->conv) {
      gst_element_set_state (self->conv, GST_STATE_NULL);
      gst_bin_remove (bin, self->conv);
      self->conv = NULL;
    }
    if (self->resample) {
      gst_element_set_state (self->resample, GST_STATE_NULL);
      gst_bin_remove (bin, self->resample);
      self->resample = NULL;
    }
    if (self->volume) {
      gst_element_set_state (self->volume, GST_STATE_NULL);
      if (GST_OBJECT_PARENT (self->volume) == GST_OBJECT_CAST (self)) {
        gst_bin_remove (GST_BIN_CAST (self), self->volume);
      }
    }

    gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (self->srcpad),
        self->sink_proxypad);

    GST_DEBUG_OBJECT (self, "Raw conversion pipeline removed");
  }

unblock:
  gst_pad_set_blocked_async_full (self->sink_proxypad, FALSE,
      (GstPadBlockCallback) pad_blocked_cb, gst_object_ref (self),
      (GDestroyNotify) gst_object_unref);

done:
  GST_PLAY_SINK_AUDIO_CONVERT_UNLOCK (self);
  return;

link_failed:
  {
    GST_ELEMENT_ERROR (self, CORE, PAD,
        (NULL), ("Failed to configure the audio converter."));
    gst_ghost_pad_set_target (GST_GHOST_PAD_CAST (self->srcpad),
        self->sink_proxypad);
    gst_pad_set_blocked_async_full (self->sink_proxypad, FALSE,
        (GstPadBlockCallback) pad_blocked_cb, gst_object_ref (self),
        (GDestroyNotify) gst_object_unref);
    return;
  }
}
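Helpers such as is_raw_caps (), post_missing_element_message () and distribute_running_time () come from the surrounding playsink code and are not shown; for is_raw_caps () a minimal sketch, assuming it only checks the first structure's name against the uncompressed audio formats (0.10 naming), could be:

/* Hedged sketch (assumed, not the original playsink helper): caps count as
 * raw when the first structure is an uncompressed audio format such as
 * audio/x-raw-int or audio/x-raw-float. */
static gboolean
is_raw_caps (GstCaps * caps)
{
  GstStructure *s = gst_caps_get_structure (caps, 0);

  return g_str_has_prefix (gst_structure_get_name (s), "audio/x-raw");
}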
Пример #25
0
static GstElement *
gst_auto_convert_add_element (GstAutoConvert * autoconvert,
    GstElementFactory * factory)
{
  GstElement *element = NULL;
  GstPad *internal_sinkpad = NULL;
  GstPad *internal_srcpad = NULL;
  GstPad *sinkpad;
  GstPad *srcpad;
  GstPadLinkReturn padlinkret;

  GST_DEBUG_OBJECT (autoconvert, "Adding element %s to the autoconvert bin",
      gst_plugin_feature_get_name (GST_PLUGIN_FEATURE (factory)));

  element = gst_element_factory_create (factory, NULL);
  if (!element)
    return NULL;

  if (!gst_bin_add (GST_BIN (autoconvert), element)) {
    GST_ERROR_OBJECT (autoconvert, "Could not add element %s to the bin",
        GST_OBJECT_NAME (element));
    gst_object_unref (element);
    return NULL;
  }

  srcpad = get_pad_by_direction (element, GST_PAD_SRC);
  if (!srcpad) {
    GST_ERROR_OBJECT (autoconvert, "Could not find source in %s",
        GST_OBJECT_NAME (element));
    goto error;
  }

  sinkpad = get_pad_by_direction (element, GST_PAD_SINK);
  if (!sinkpad) {
    GST_ERROR_OBJECT (autoconvert, "Could not find sink in %s",
        GST_OBJECT_NAME (element));
    goto error;
  }

  internal_sinkpad =
      gst_pad_new_from_static_template (&sink_internal_template,
      "sink_internal");
  internal_srcpad =
      gst_pad_new_from_static_template (&src_internal_template, "src_internal");

  if (!internal_sinkpad || !internal_srcpad) {
    GST_ERROR_OBJECT (autoconvert, "Could not create internal pads");
    goto error;
  }

  g_object_weak_ref (G_OBJECT (element), (GWeakNotify) gst_object_unref,
      internal_sinkpad);
  g_object_weak_ref (G_OBJECT (element), (GWeakNotify) gst_object_unref,
      internal_srcpad);

  gst_pad_set_active (internal_sinkpad, TRUE);
  gst_pad_set_active (internal_srcpad, TRUE);

  g_object_set_qdata (G_OBJECT (internal_srcpad), parent_quark, autoconvert);
  g_object_set_qdata (G_OBJECT (internal_sinkpad), parent_quark, autoconvert);

  gst_pad_set_chain_function (internal_sinkpad,
      GST_DEBUG_FUNCPTR (gst_auto_convert_internal_sink_chain));
  gst_pad_set_event_function (internal_sinkpad,
      GST_DEBUG_FUNCPTR (gst_auto_convert_internal_sink_event));
  gst_pad_set_query_function (internal_sinkpad,
      GST_DEBUG_FUNCPTR (gst_auto_convert_internal_sink_query));
  gst_pad_set_query_type_function (internal_sinkpad,
      GST_DEBUG_FUNCPTR (gst_auto_convert_internal_sink_query_type));
  gst_pad_set_getcaps_function (internal_sinkpad,
      GST_DEBUG_FUNCPTR (gst_auto_convert_internal_sink_getcaps));
  gst_pad_set_bufferalloc_function (internal_sinkpad,
      GST_DEBUG_FUNCPTR (gst_auto_convert_internal_sink_buffer_alloc));
  gst_pad_set_fixatecaps_function (internal_sinkpad,
      GST_DEBUG_FUNCPTR (gst_auto_convert_internal_sink_fixatecaps));

  gst_pad_set_event_function (internal_srcpad,
      GST_DEBUG_FUNCPTR (gst_auto_convert_internal_src_event));
  gst_pad_set_query_function (internal_srcpad,
      GST_DEBUG_FUNCPTR (gst_auto_convert_internal_src_query));
  gst_pad_set_query_type_function (internal_srcpad,
      GST_DEBUG_FUNCPTR (gst_auto_convert_internal_src_query_type));

  padlinkret = gst_pad_link (internal_srcpad, sinkpad);
  if (GST_PAD_LINK_FAILED (padlinkret)) {
    GST_WARNING_OBJECT (autoconvert, "Could not links pad %s:%s to %s:%s"
        " for reason %d",
        GST_DEBUG_PAD_NAME (internal_srcpad),
        GST_DEBUG_PAD_NAME (sinkpad), padlinkret);
    goto error;
  }

  padlinkret = gst_pad_link (srcpad, internal_sinkpad);
  if (GST_PAD_LINK_FAILED (padlinkret)) {
    GST_WARNING_OBJECT (autoconvert, "Could not links pad %s:%s to %s:%s"
        " for reason %d",
        GST_DEBUG_PAD_NAME (internal_srcpad),
        GST_DEBUG_PAD_NAME (sinkpad), padlinkret);
    goto error;
  }

  g_object_set_qdata (G_OBJECT (element),
      internal_srcpad_quark, internal_srcpad);
  g_object_set_qdata (G_OBJECT (element),
      internal_sinkpad_quark, internal_sinkpad);

  /* Iffy */
  gst_element_sync_state_with_parent (element);

  /* Increment the reference count we will return to the caller */
  gst_object_ref (element);

  return element;

error:
  gst_bin_remove (GST_BIN (autoconvert), element);

  return NULL;
}
Пример #26
0
static void
brasero_transcode_new_decoded_pad_cb (GstElement *decode,
				      GstPad *pad,
				      gboolean arg2,
				      BraseroTranscode *transcode)
{
	GstCaps *caps;
	GstStructure *structure;
	BraseroTranscodePrivate *priv;

	priv = BRASERO_TRANSCODE_PRIVATE (transcode);

	BRASERO_JOB_LOG (transcode, "New pad");

	/* make sure we only have audio */
	caps = gst_pad_get_caps (pad);
	if (!caps)
		return;

	structure = gst_caps_get_structure (caps, 0);
	if (structure) {
		if (g_strrstr (gst_structure_get_name (structure), "audio")) {
			GstPad *sink;
			GstElement *queue;
			GstPadLinkReturn res;

			/* before linking pads (before any data reach grvolume), send tags */
			brasero_transcode_send_volume_event (transcode);

			/* This is necessary in case there is a video stream
			 * (see brasero-metadata.c). we need to queue to avoid
			 * a deadlock. */
			queue = gst_element_factory_make ("queue", NULL);
			gst_bin_add (GST_BIN (priv->pipeline), queue);
			if (!gst_element_link (queue, priv->link)) {
				brasero_transcode_error_on_pad_linking (transcode, "Sent by brasero_transcode_new_decoded_pad_cb");
				goto end;
			}

			sink = gst_element_get_pad (queue, "sink");
			if (GST_PAD_IS_LINKED (sink)) {
				brasero_transcode_error_on_pad_linking (transcode, "Sent by brasero_transcode_new_decoded_pad_cb");
				goto end;
			}

			res = gst_pad_link (pad, sink);
			if (res == GST_PAD_LINK_OK)
				gst_element_set_state (queue, GST_STATE_PLAYING);
			else
				brasero_transcode_error_on_pad_linking (transcode, "Sent by brasero_transcode_new_decoded_pad_cb");

			gst_object_unref (sink);
		}
		/* For video streams add a fakesink (see brasero-metadata.c) */
		else if (g_strrstr (gst_structure_get_name (structure), "video")) {
			GstPad *sink;
			GstElement *fakesink;
			GstPadLinkReturn res;

			BRASERO_JOB_LOG (transcode, "Adding a fakesink for video stream");

			fakesink = gst_element_factory_make ("fakesink", NULL);
			if (!fakesink) {
				brasero_transcode_error_on_pad_linking (transcode, "Sent by brasero_transcode_new_decoded_pad_cb");
				goto end;
			}

			sink = gst_element_get_static_pad (fakesink, "sink");
			if (!sink) {
				brasero_transcode_error_on_pad_linking (transcode, "Sent by brasero_transcode_new_decoded_pad_cb");
				gst_object_unref (fakesink);
				goto end;
			}

			gst_bin_add (GST_BIN (priv->pipeline), fakesink);
			res = gst_pad_link (pad, sink);

			if (res == GST_PAD_LINK_OK)
				gst_element_set_state (fakesink, GST_STATE_PLAYING);
			else
				brasero_transcode_error_on_pad_linking (transcode, "Sent by brasero_transcode_new_decoded_pad_cb");

			gst_object_unref (sink);
		}
	}

end:
	gst_caps_unref (caps);
}
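A "new-decoded-pad" handler like the one above is connected when the decodebin element is created elsewhere in the transcode job; a minimal sketch of that wiring (GStreamer 0.10, with "decode" as an assumed variable name for the decodebin) could be:

/* Hedged sketch: route decodebin's newly decoded pads into the handler above. */
g_signal_connect (G_OBJECT (decode), "new-decoded-pad",
    G_CALLBACK (brasero_transcode_new_decoded_pad_cb), transcode);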
Пример #27
static void
fs_rtp_sub_stream_constructed (GObject *object)
{
  FsRtpSubStream *self = FS_RTP_SUB_STREAM (object);
  GstPad *valve_sink_pad = NULL;
  GstPadLinkReturn linkret;
  gchar *tmp;

  GST_DEBUG ("New substream in session %u for ssrc %x and pt %u",
      self->priv->session->id, self->ssrc, self->pt);

  if (!self->priv->conference) {
    self->priv->construction_error = g_error_new (FS_ERROR,
      FS_ERROR_INVALID_ARGUMENTS, "A Substream needs a conference object");
    return;
  }

  self->priv->rtpbin_unlinked_sig = g_signal_connect_object (
      self->priv->rtpbin_pad, "unlinked", G_CALLBACK (rtpbin_pad_unlinked),
      self, 0);

  tmp = g_strdup_printf ("output_recv_valve_%d_%d_%d", self->priv->session->id,
      self->ssrc, self->pt);
  self->priv->output_valve = gst_element_factory_make ("valve", tmp);
  g_free (tmp);

  if (!self->priv->output_valve) {
    self->priv->construction_error = g_error_new (FS_ERROR,
      FS_ERROR_CONSTRUCTION, "Could not create a valve element for"
      " session substream with ssrc: %u and pt:%d", self->ssrc,
      self->pt);
    return;
  }

  if (!gst_bin_add (GST_BIN (self->priv->conference), self->priv->output_valve))
  {
    self->priv->construction_error = g_error_new (FS_ERROR,
      FS_ERROR_CONSTRUCTION, "Could not add the valve element for session"
      " substream with ssrc: %u and pt:%d to the conference bin",
      self->ssrc, self->pt);
    return;
  }

  /* We set the valve to dropping; the stream will unblock it when it's linked */
  g_object_set (self->priv->output_valve, "drop", TRUE, NULL);

  if (gst_element_set_state (self->priv->output_valve, GST_STATE_PLAYING) ==
    GST_STATE_CHANGE_FAILURE) {
    self->priv->construction_error = g_error_new (FS_ERROR,
      FS_ERROR_CONSTRUCTION, "Could not set the valve element for session"
      " substream with ssrc: %u and pt:%d to the playing state",
      self->ssrc, self->pt);
    return;
  }

  tmp = g_strdup_printf ("recv_capsfilter_%d_%d_%d", self->priv->session->id,
      self->ssrc, self->pt);
  self->priv->capsfilter = gst_element_factory_make ("capsfilter", tmp);
  g_free (tmp);

  if (!self->priv->capsfilter) {
    self->priv->construction_error = g_error_new (FS_ERROR,
      FS_ERROR_CONSTRUCTION, "Could not create a capsfilter element for"
      " session substream with ssrc: %u and pt:%d", self->ssrc,
      self->pt);
    return;
  }

  if (!gst_bin_add (GST_BIN (self->priv->conference), self->priv->capsfilter)) {
    self->priv->construction_error = g_error_new (FS_ERROR,
      FS_ERROR_CONSTRUCTION, "Could not add the capsfilter element for session"
      " substream with ssrc: %u and pt:%d to the conference bin",
      self->ssrc, self->pt);
    return;
  }

  if (gst_element_set_state (self->priv->capsfilter, GST_STATE_PLAYING) ==
    GST_STATE_CHANGE_FAILURE) {
    self->priv->construction_error = g_error_new (FS_ERROR,
      FS_ERROR_CONSTRUCTION, "Could not set the capsfilter element for session"
      " substream with ssrc: %u and pt:%d to the playing state",
      self->ssrc, self->pt);
    return;
  }

  tmp = g_strdup_printf ("input_recv_valve_%d_%d_%d", self->priv->session->id,
      self->ssrc, self->pt);
  self->priv->input_valve = gst_element_factory_make ("valve", tmp);
  g_free (tmp);

  if (!self->priv->input_valve) {
    self->priv->construction_error = g_error_new (FS_ERROR,
      FS_ERROR_CONSTRUCTION, "Could not create a valve element for"
      " session substream with ssrc: %u and pt:%d", self->ssrc,
      self->pt);
    return;
  }

  if (!gst_bin_add (GST_BIN (self->priv->conference), self->priv->input_valve))
  {
    self->priv->construction_error = g_error_new (FS_ERROR,
      FS_ERROR_CONSTRUCTION, "Could not add the valve element for session"
      " substream with ssrc: %u and pt:%d to the conference bin",
      self->ssrc, self->pt);
    return;
  }

  if (gst_element_set_state (self->priv->input_valve, GST_STATE_PLAYING) ==
    GST_STATE_CHANGE_FAILURE) {
    self->priv->construction_error = g_error_new (FS_ERROR,
      FS_ERROR_CONSTRUCTION, "Could not set the valve element for session"
      " substream with ssrc: %u and pt:%d to the playing state",
      self->ssrc, self->pt);
    return;
  }

  if (!gst_element_link (self->priv->input_valve, self->priv->capsfilter))
  {
    self->priv->construction_error = g_error_new (FS_ERROR,
        FS_ERROR_CONSTRUCTION, "Could not link the input valve"
        " and the capsfilter");
    return;
  }

  valve_sink_pad = gst_element_get_static_pad (self->priv->input_valve,
      "sink");
  if (!valve_sink_pad)
  {
    self->priv->construction_error = g_error_new (FS_ERROR,
        FS_ERROR_CONSTRUCTION,
        "Could not get the valve's sink pad");
    return;
  }

  linkret = gst_pad_link (self->priv->rtpbin_pad, valve_sink_pad);

  gst_object_unref (valve_sink_pad);

  if (GST_PAD_LINK_FAILED (linkret))
  {
    self->priv->construction_error = g_error_new (FS_ERROR,
        FS_ERROR_CONSTRUCTION,
        "Could not link the rtpbin to the codec bin (%d)", linkret);
    return;
  }

  if (self->no_rtcp_timeout > 0)
    if (!fs_rtp_sub_stream_start_no_rtcp_timeout_thread (self,
            &self->priv->construction_error))
      return;

  GST_CALL_PARENT (G_OBJECT_CLASS, constructed, (object));
}
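For context, the output valve that is set to drop above is typically reopened later simply by flipping its "drop" property back; a hedged sketch with an assumed helper name, not taken from the original file:

static void
fs_rtp_sub_stream_unblock_output (FsRtpSubStream * self)
{
  /* let buffers flow again once the downstream link is in place */
  g_object_set (self->priv->output_valve, "drop", FALSE, NULL);
}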
Example #28
static gboolean
brasero_transcode_create_pipeline (BraseroTranscode *transcode,
				   GError **error)
{
	gchar *uri;
	gboolean keep_dts;
	GstElement *decode;
	GstElement *source;
	GstBus *bus = NULL;
	GstCaps *filtercaps;
	GValue *value = NULL;
	GstElement *pipeline;
	GstElement *sink = NULL;
	BraseroJobAction action;
	GstElement *filter = NULL;
	GstElement *volume = NULL;
	GstElement *convert = NULL;
	BraseroTrack *track = NULL;
	GstElement *resample = NULL;
	BraseroTranscodePrivate *priv;

	priv = BRASERO_TRANSCODE_PRIVATE (transcode);

	BRASERO_JOB_LOG (transcode, "Creating new pipeline");

	priv->set_active_state = 0;

	/* free the possible current pipeline and create a new one */
	if (priv->pipeline) {
		gst_element_set_state (priv->pipeline, GST_STATE_NULL);
		gst_object_unref (G_OBJECT (priv->pipeline));
		priv->link = NULL;
		priv->sink = NULL;
		priv->source = NULL;
		priv->convert = NULL;
		priv->pipeline = NULL;
	}

	/* create three types of pipeline according to the needs: (possibly adding grvolume)
	 * - filesrc ! decodebin ! audioconvert ! fakesink (find size) and filesrc!mp3parse!fakesink for mp3s
	 * - filesrc ! decodebin ! audioresample ! audioconvert ! audio/x-raw-int,rate=44100,width=16,depth=16,endianness=4321,signed ! filesink
	 * - filesrc ! decodebin ! audioresample ! audioconvert ! audio/x-raw-int,rate=44100,width=16,depth=16,endianness=4321,signed ! fdsink
	 */
	pipeline = gst_pipeline_new (NULL);

	bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
	gst_bus_add_watch (bus,
			   (GstBusFunc) brasero_transcode_bus_messages,
			   transcode);
	gst_object_unref (bus);

	/* source */
	brasero_job_get_current_track (BRASERO_JOB (transcode), &track);
	uri = brasero_track_stream_get_source (BRASERO_TRACK_STREAM (track), TRUE);
	source = gst_element_make_from_uri (GST_URI_SRC, uri, NULL);
	g_free (uri);

	if (source == NULL) {
		g_set_error (error,
			     BRASERO_BURN_ERROR,
			     BRASERO_BURN_ERROR_GENERAL,
			     /* Translators: %s is the name of the object (as in
			      * GObject) from the Gstreamer library that could
			      * not be created */
			     _("%s element could not be created"),
			     "\"Source\"");
		goto error;
	}
	gst_bin_add (GST_BIN (pipeline), source);
	g_object_set (source,
		      "typefind", FALSE,
		      NULL);

	/* sink */
	brasero_job_get_action (BRASERO_JOB (transcode), &action);
	switch (action) {
	case BRASERO_JOB_ACTION_SIZE:
		if (priv->mp3_size_pipeline)
			return brasero_transcode_create_pipeline_size_mp3 (transcode, pipeline, source, error);

		sink = gst_element_factory_make ("fakesink", NULL);
		break;

	case BRASERO_JOB_ACTION_IMAGE:
		volume = brasero_transcode_create_volume (transcode, track);

		if (brasero_job_get_fd_out (BRASERO_JOB (transcode), NULL) != BRASERO_BURN_OK) {
			gchar *output;

			brasero_job_get_image_output (BRASERO_JOB (transcode),
						      &output,
						      NULL);
			sink = gst_element_factory_make ("filesink", NULL);
			g_object_set (sink,
				      "location", output,
				      NULL);
			g_free (output);
		}
		else {
			int fd;

			brasero_job_get_fd_out (BRASERO_JOB (transcode), &fd);
			sink = gst_element_factory_make ("fdsink", NULL);
			g_object_set (sink,
				      "fd", fd,
				      NULL);
		}
		break;

	default:
		goto error;
	}

	if (!sink) {
		g_set_error (error,
			     BRASERO_BURN_ERROR,
			     BRASERO_BURN_ERROR_GENERAL,
			     _("%s element could not be created"),
			     "\"Sink\"");
		goto error;
	}

	gst_bin_add (GST_BIN (pipeline), sink);
	g_object_set (sink,
		      "sync", FALSE,
		      NULL);

	brasero_job_tag_lookup (BRASERO_JOB (transcode),
				BRASERO_SESSION_STREAM_AUDIO_FORMAT,
				&value);
	if (value)
		keep_dts = (g_value_get_int (value) & BRASERO_AUDIO_FORMAT_DTS) != 0;
	else
		keep_dts = FALSE;

	if (keep_dts
	&&  action == BRASERO_JOB_ACTION_IMAGE
	&& (brasero_track_stream_get_format (BRASERO_TRACK_STREAM (track)) & BRASERO_AUDIO_FORMAT_DTS) != 0) {
		GstElement *wavparse;
		GstPad *sinkpad;

		BRASERO_JOB_LOG (transcode, "DTS wav pipeline");

		/* FIXME: volume normalization won't work here. We'd need to 
		 * reencode it afterwards otherwise. */
		/* This is a special case. This is DTS wav. So we only decode wav. */
		wavparse = gst_element_factory_make ("wavparse", NULL);
		if (wavparse == NULL) {
			g_set_error (error,
				     BRASERO_BURN_ERROR,
				     BRASERO_BURN_ERROR_GENERAL,
				     _("%s element could not be created"),
				     "\"Wavparse\"");
			goto error;
		}
		gst_bin_add (GST_BIN (pipeline), wavparse);

		if (!gst_element_link (source, wavparse)) {
			g_set_error (error,
				     BRASERO_BURN_ERROR,
				     BRASERO_BURN_ERROR_GENERAL,
			             _("Impossible to link plugin pads"));
			goto error;
		}

		g_signal_connect (wavparse,
		                  "pad-added",
		                  G_CALLBACK (brasero_transcode_wavparse_pad_added_cb),
		                  transcode);

		/* This is an ugly workaround for the lack of accuracy with
		 * gstreamer. Yet this is unfortunately a necessary evil. */
		priv->pos = 0;
		priv->size = 0;
		sinkpad = gst_element_get_static_pad (sink, "sink");
		priv->probe = gst_pad_add_buffer_probe (sinkpad,
							G_CALLBACK (brasero_transcode_buffer_handler),
							transcode);
		gst_object_unref (sinkpad);


		priv->link = NULL;
		priv->sink = sink;
		priv->decode = NULL;
		priv->source = source;
		priv->convert = NULL;
		priv->pipeline = pipeline;

		gst_element_set_state (pipeline, GST_STATE_PLAYING);
		return TRUE;
	}

	/* audioconvert */
	convert = gst_element_factory_make ("audioconvert", NULL);
	if (convert == NULL) {
		g_set_error (error,
			     BRASERO_BURN_ERROR,
			     BRASERO_BURN_ERROR_GENERAL,
			     _("%s element could not be created"),
			     "\"Audioconvert\"");
		goto error;
	}
	gst_bin_add (GST_BIN (pipeline), convert);

	if (action == BRASERO_JOB_ACTION_IMAGE) {
		BraseroStreamFormat session_format;
		BraseroTrackType *output_type;

		output_type = brasero_track_type_new ();
		brasero_job_get_output_type (BRASERO_JOB (transcode), output_type);
		session_format = brasero_track_type_get_stream_format (output_type);
		brasero_track_type_free (output_type);

		/* audioresample */
		resample = gst_element_factory_make ("audioresample", NULL);
		if (resample == NULL) {
			g_set_error (error,
				     BRASERO_BURN_ERROR,
				     BRASERO_BURN_ERROR_GENERAL,
				     _("%s element could not be created"),
				     "\"Audioresample\"");
			goto error;
		}
		gst_bin_add (GST_BIN (pipeline), resample);

		/* filter */
		filter = gst_element_factory_make ("capsfilter", NULL);
		if (!filter) {
			g_set_error (error,
				     BRASERO_BURN_ERROR,
				     BRASERO_BURN_ERROR_GENERAL,
				     _("%s element could not be created"),
				     "\"Filter\"");
			goto error;
		}
		gst_bin_add (GST_BIN (pipeline), filter);
		filtercaps = gst_caps_new_full (gst_structure_new ("audio/x-raw-int",
								   "channels", G_TYPE_INT, 2,
								   "width", G_TYPE_INT, 16,
								   "depth", G_TYPE_INT, 16,
								   /* NOTE: we use little endianness only for libburn which requires little */
								   "endianness", G_TYPE_INT, (session_format & BRASERO_AUDIO_FORMAT_RAW_LITTLE_ENDIAN) != 0 ? 1234:4321,
								   "rate", G_TYPE_INT, 44100,
								   "signed", G_TYPE_BOOLEAN, TRUE,
								   NULL),
						NULL);
		g_object_set (GST_OBJECT (filter), "caps", filtercaps, NULL);
		gst_caps_unref (filtercaps);
	}

	/* decode */
	decode = gst_element_factory_make ("decodebin", NULL);
	if (decode == NULL) {
		g_set_error (error,
			     BRASERO_BURN_ERROR,
			     BRASERO_BURN_ERROR_GENERAL,
			     _("%s element could not be created"),
			     "\"Decodebin\"");
		goto error;
	}
	gst_bin_add (GST_BIN (pipeline), decode);

	if (action == BRASERO_JOB_ACTION_IMAGE) {
		GstPad *sinkpad;
		gboolean res;

		if (!gst_element_link (source, decode)) {
			BRASERO_JOB_LOG (transcode, "Impossible to link plugin pads");
			g_set_error (error,
				     BRASERO_BURN_ERROR,
				     BRASERO_BURN_ERROR_GENERAL,
			             _("Impossible to link plugin pads"));
			goto error;
		}

		priv->link = resample;
		g_signal_connect (G_OBJECT (decode),
				  "new-decoded-pad",
				  G_CALLBACK (brasero_transcode_new_decoded_pad_cb),
				  transcode);

		if (volume) {
			gst_bin_add (GST_BIN (pipeline), volume);
			res = gst_element_link_many (resample,
			                             volume,
						     convert,
			                             filter,
			                             sink,
			                             NULL);
		}
		else
			res = gst_element_link_many (resample,
			                             convert,
			                             filter,
			                             sink,
			                             NULL);

		if (!res) {
			BRASERO_JOB_LOG (transcode, "Impossible to link plugin pads");
			g_set_error (error,
				     BRASERO_BURN_ERROR,
				     BRASERO_BURN_ERROR_GENERAL,
				     _("Impossible to link plugin pads"));
			goto error;
		}

		/* This is an ugly workaround for the lack of accuracy with
		 * gstreamer. Yet this is unfortunately a necessary evil. */
		priv->pos = 0;
		priv->size = 0;
		sinkpad = gst_element_get_static_pad (sink, "sink");
		priv->probe = gst_pad_add_buffer_probe (sinkpad,
							G_CALLBACK (brasero_transcode_buffer_handler),
							transcode);
		gst_object_unref (sinkpad);
	}
	else {
		if (!gst_element_link (source, decode)
		||  !gst_element_link (convert, sink)) {
			BRASERO_JOB_LOG (transcode, "Impossible to link plugin pads");
			g_set_error (error,
				     BRASERO_BURN_ERROR,
				     BRASERO_BURN_ERROR_GENERAL,
				     _("Impossible to link plugin pads"));
			goto error;
		}

		priv->link = convert;
		g_signal_connect (G_OBJECT (decode),
				  "new-decoded-pad",
				  G_CALLBACK (brasero_transcode_new_decoded_pad_cb),
				  transcode);
	}

	priv->sink = sink;
	priv->decode = decode;
	priv->source = source;
	priv->convert = convert;
	priv->pipeline = pipeline;

	gst_element_set_state (pipeline, GST_STATE_PLAYING);
	return TRUE;

error:

	if (error && (*error))
		BRASERO_JOB_LOG (transcode,
				 "can't create object : %s \n",
				 (*error)->message);

	gst_object_unref (GST_OBJECT (pipeline));
	return FALSE;
}
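The probe installed on the sink pad above (brasero_transcode_buffer_handler, not shown in this excerpt) has the GStreamer 0.10 buffer-probe signature; a hedged sketch of what such a handler could look like, assuming it merely accumulates the bytes that reached the sink into priv->pos:

static gboolean
transcode_buffer_probe_sketch (GstPad *pad,
			       GstBuffer *buffer,
			       gpointer user_data)
{
	/* illustrative only; the real brasero_transcode_buffer_handler may differ */
	BraseroTranscode *transcode = BRASERO_TRANSCODE (user_data);
	BraseroTranscodePrivate *priv = BRASERO_TRANSCODE_PRIVATE (transcode);

	/* track how much data actually reached the sink */
	priv->pos += GST_BUFFER_SIZE (buffer);

	/* returning TRUE lets the buffer pass through to the sink */
	return TRUE;
}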
Example #29
	bool GSPipe::init_stream() {
		if(!gst_is_initialized()) {
		  // Initialize gstreamer pipeline
		  ROS_DEBUG_STREAM( "Initializing gstreamer..." );
		  gst_init(0,0);
		}

		ROS_DEBUG_STREAM( "Gstreamer Version: " << gst_version_string() );

		GError *error = NULL; // must be NULL before passing its address to gst_parse_launch()

		//pipeline_ = gst_parse_launch(gsconfig_.c_str(), &error);
		pipeline_ = gst_parse_launch(pipeline_str.c_str(), &error);
		if (pipeline_ == NULL) {
		  ROS_FATAL_STREAM( error->message );
		  return false;
		}

		sink_ = gst_element_factory_make("appsink",NULL);
		GstCaps * caps = image_encoding_ == sensor_msgs::image_encodings::RGB8 ?
			gst_caps_new_simple("video/x-raw-rgb", NULL) :
			gst_caps_new_simple("video/x-raw-gray", NULL);
		gst_app_sink_set_caps(GST_APP_SINK(sink_), caps);
		gst_caps_unref(caps);

		gst_base_sink_set_sync(
		        GST_BASE_SINK(sink_),
		        (sync_sink_) ? TRUE : FALSE);

		if(GST_IS_PIPELINE(pipeline_)) {
		  GstPad *outpad = gst_bin_find_unlinked_pad(GST_BIN(pipeline_), GST_PAD_SRC);
		  g_assert(outpad);

		  GstElement *outelement = gst_pad_get_parent_element(outpad);
		  g_assert(outelement);
		  gst_object_unref(outpad);

		  if(!gst_bin_add(GST_BIN(pipeline_), sink_)) {
			ROS_FATAL("gst_bin_add() failed");
			gst_object_unref(outelement);
			gst_object_unref(pipeline_);
			return false;
		  }

		  if(!gst_element_link(outelement, sink_)) {
			ROS_FATAL("GStreamer: cannot link outelement(\"%s\") -> sink\n", gst_element_get_name(outelement));
			gst_object_unref(outelement);
			gst_object_unref(pipeline_);
			return false;
		  }

		  gst_object_unref(outelement);
		} else {
		  GstElement* launchpipe = pipeline_;
		  pipeline_ = gst_pipeline_new(NULL);
		  g_assert(pipeline_);

		  gst_object_unparent(GST_OBJECT(launchpipe));

		  gst_bin_add_many(GST_BIN(pipeline_), launchpipe, sink_, NULL);

		  if(!gst_element_link(launchpipe, sink_)) {
			ROS_FATAL("GStreamer: cannot link launchpipe -> sink");
			gst_object_unref(pipeline_);
			return false;
		  }
		}

		gst_element_set_state(pipeline_, GST_STATE_PAUSED);

		if (gst_element_get_state(pipeline_, NULL, NULL, -1) == GST_STATE_CHANGE_FAILURE) {
		  ROS_FATAL("Failed to PAUSE stream, check your gstreamer configuration.");
		  return false;
		} else {
		  ROS_DEBUG_STREAM("Stream is PAUSED.");
		}
		// Create ROS camera interface
		camera_pub_ = image_transport_.advertiseCamera("camera/image_raw", 1);

		return true;
	}
static GstElement *
fs_rtp_dtmf_sound_source_build (FsRtpSpecialSource *source,
    GList *negotiated_codecs,
    FsCodec *selected_codec,
    GError **error)
{
  FsCodec *telephony_codec = NULL;
  GstCaps *caps = NULL;
  GstPad *pad = NULL;
  GstElement *dtmfsrc = NULL;
  GstElement *capsfilter = NULL;
  GstPad *ghostpad = NULL;
  GstElement *bin = NULL;
  GstElement *encoder = NULL;
  GstElement *payloader = NULL;
  gchar *encoder_name = NULL;
  gchar *payloader_name = NULL;

  telephony_codec = get_pcm_law_sound_codec (negotiated_codecs,
      &encoder_name, &payloader_name);

  if (!telephony_codec)
  {
    g_set_error (error, FS_ERROR, FS_ERROR_INTERNAL,
        "Could not find a pcma/pcmu to send dtmf on");
    return NULL;
  }

  bin = gst_bin_new (NULL);

  dtmfsrc = gst_element_factory_make ("dtmfsrc", NULL);
  if (!dtmfsrc)
  {
    g_set_error (error, FS_ERROR, FS_ERROR_CONSTRUCTION,
        "Could not make rtpdtmfsrc");
    goto error;
  }
  if (!gst_bin_add (GST_BIN (bin), dtmfsrc))
  {
    g_set_error (error, FS_ERROR, FS_ERROR_CONSTRUCTION,
        "Could not add rtpdtmfsrc to bin");
    gst_object_unref (dtmfsrc);
    goto error;
  }

  encoder = gst_element_factory_make (encoder_name, NULL);
  if (!encoder)
  {
    g_set_error (error, FS_ERROR, FS_ERROR_CONSTRUCTION,
        "Could not make %s", encoder_name);
    goto error;
  }
  if (!gst_bin_add (GST_BIN (bin), encoder))
  {
    g_set_error (error, FS_ERROR, FS_ERROR_CONSTRUCTION,
        "Could not add %s to bin", encoder_name);
    gst_object_unref (encoder);
    goto error;
  }

  if (!gst_element_link_pads (dtmfsrc, "src", encoder, "sink"))
  {
    g_set_error (error, FS_ERROR, FS_ERROR_CONSTRUCTION,
        "Could not link the rtpdtmfsrc and %s", encoder_name);
    goto error;
  }

  payloader = gst_element_factory_make (payloader_name, NULL);
  if (!payloader)
  {
    g_set_error (error, FS_ERROR, FS_ERROR_CONSTRUCTION,
        "Could not make %s", payloader_name);
    goto error;
  }
  if (!gst_bin_add (GST_BIN (bin), payloader))
  {
    g_set_error (error, FS_ERROR, FS_ERROR_CONSTRUCTION,
        "Could not add %s to bin", payloader_name);
    gst_object_unref (payloader);
    goto error;
  }

  if (!gst_element_link_pads (encoder, "src", payloader, "sink"))
  {
    g_set_error (error, FS_ERROR, FS_ERROR_CONSTRUCTION,
        "Could not link the %s and %s", encoder_name, payloader_name);
    goto error;
  }

  capsfilter = gst_element_factory_make ("capsfilter", NULL);
  if (!capsfilter)
  {
    g_set_error (error, FS_ERROR, FS_ERROR_CONSTRUCTION,
        "Could not make capsfilter");
    goto error;
  }
  if (!gst_bin_add (GST_BIN (bin), capsfilter))
  {
    g_set_error (error, FS_ERROR, FS_ERROR_CONSTRUCTION,
        "Could not add capsfilter to bin");
    gst_object_unref (capsfilter);
    goto error;
  }

  caps = fs_codec_to_gst_caps (telephony_codec);
  g_object_set (capsfilter, "caps", caps, NULL);
  {
    gchar *str = gst_caps_to_string (caps);
    GST_DEBUG ("Using caps %s for dtmf", str);
    g_free (str);
  }
  gst_caps_unref (caps);

  if (!gst_element_link_pads (payloader, "src", capsfilter, "sink"))
  {
    g_set_error (error, FS_ERROR, FS_ERROR_CONSTRUCTION,
        "Could not link the %s and its capsfilter", payloader_name);
    goto error;
  }

  pad = gst_element_get_static_pad (capsfilter, "src");
  if (!pad)
  {
    g_set_error (error, FS_ERROR, FS_ERROR_CONSTRUCTION,
        "Could not get \"src\" pad from capsfilter");
    goto error;
  }
  ghostpad = gst_ghost_pad_new ("src", pad);
  if (!ghostpad)
  {
    g_set_error (error, FS_ERROR, FS_ERROR_CONSTRUCTION,
        "Could not create a ghostpad for capsfilter src pad for dtmfsrc");
    goto error;
  }
  if (!gst_element_add_pad (bin, ghostpad))
  {
    g_set_error (error, FS_ERROR, FS_ERROR_CONSTRUCTION,
        "Could not get \"src\" ghostpad to dtmf sound source bin");
    gst_object_unref (pad);
    goto error;
  }
  gst_object_unref (pad);

  return bin;

 error:
  gst_object_unref (bin);

  return NULL;
}
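A hedged caller-side sketch (assumed names, not from the original file) showing how the bin returned above, which exposes a single ghosted "src" pad, might be dropped into a pipeline and linked like any other element:

static gboolean
attach_dtmf_source (GstBin * pipeline, GstElement * dtmf_bin,
    GstPad * mux_sinkpad)
{
  GstPad *srcpad;
  gboolean ok;

  if (!gst_bin_add (pipeline, dtmf_bin))
    return FALSE;

  /* link the bin's ghosted "src" pad to the downstream sink pad */
  srcpad = gst_element_get_static_pad (dtmf_bin, "src");
  ok = GST_PAD_LINK_SUCCESSFUL (gst_pad_link (srcpad, mux_sinkpad));
  gst_object_unref (srcpad);

  if (ok)
    ok = gst_element_sync_state_with_parent (dtmf_bin);

  return ok;
}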