Example #1
bool GstEnginePipeline::InitFromString(const QString& pipeline) {
    pipeline_ = gst_pipeline_new("pipeline");

    GstElement* new_bin =
        CreateDecodeBinFromString(pipeline.toAscii().constData());
    if (!new_bin) {
        return false;
    }

    if (!ReplaceDecodeBin(new_bin)) return false;

    if (!Init()) return false;
    return gst_element_link(new_bin, audiobin_);
}
Example #2
CAMLprim value ocaml_gstreamer_element_link(value _src, value _dst)
{
  CAMLparam2(_src, _dst);
  GstElement *src = Element_val(_src);
  GstElement *dst = Element_val(_dst);
  gboolean ret;

  caml_release_runtime_system();
  ret = gst_element_link(src, dst);
  caml_acquire_runtime_system();

  if (!ret) caml_raise_constant(*caml_named_value("gstreamer_exn_failure"));
  CAMLreturn(Val_unit);
}
Example #3
static void
fps_display_sink_start (GstFPSDisplaySink * self)
{
  GstPad *target_pad = NULL;

  /* Init counters */
  self->frames_rendered = 0;
  self->frames_dropped = 0;
  self->last_frames_rendered = G_GUINT64_CONSTANT (0);
  self->last_frames_dropped = G_GUINT64_CONSTANT (0);
  self->max_fps = -1;
  self->min_fps = -1;

  /* init time stamps */
  self->last_ts = self->start_ts = self->interval_ts = GST_CLOCK_TIME_NONE;

  GST_DEBUG_OBJECT (self, "Use text-overlay? %d", self->use_text_overlay);

  if (self->use_text_overlay) {
    if (!self->text_overlay) {
      self->text_overlay =
          gst_element_factory_make ("textoverlay", "fps-display-text-overlay");
      if (!self->text_overlay) {
        GST_WARNING_OBJECT (self, "text-overlay element could not be created");
        self->use_text_overlay = FALSE;
        goto no_text_overlay;
      }
      gst_object_ref (self->text_overlay);
      g_object_set (self->text_overlay,
          "font-desc", DEFAULT_FONT, "silent", FALSE, NULL);
      gst_bin_add (GST_BIN (self), self->text_overlay);

      if (!gst_element_link (self->text_overlay, self->video_sink)) {
        GST_ERROR_OBJECT (self, "Could not link elements");
      }
    }
    target_pad = gst_element_get_static_pad (self->text_overlay, "video_sink");
  }
no_text_overlay:
  if (!self->use_text_overlay) {
    if (self->text_overlay) {
      gst_element_unlink (self->text_overlay, self->video_sink);
      gst_bin_remove (GST_BIN (self), self->text_overlay);
      self->text_overlay = NULL;
    }
    target_pad = gst_element_get_static_pad (self->video_sink, "sink");
  }
  gst_ghost_pad_set_target (GST_GHOST_PAD (self->ghost_pad), target_pad);
  gst_object_unref (target_pad);
}
Example #4
static GstElement *
setup_pipeline (GstElement * adder, gint num_srcs)
{
  GstElement *pipeline, *src, *sink;
  gint i;

  pipeline = gst_pipeline_new ("pipeline");
  if (!adder) {
    adder = gst_element_factory_make ("adder", "adder");
  }

  sink = gst_element_factory_make ("fakesink", "sink");
  gst_bin_add_many (GST_BIN (pipeline), adder, sink, NULL);
  gst_element_link (adder, sink);

  for (i = 0; i < num_srcs; i++) {
    src = gst_element_factory_make ("audiotestsrc", NULL);
    g_object_set (src, "wave", 4, NULL);        /* silence */
    gst_bin_add (GST_BIN (pipeline), src);
    gst_element_link (src, adder);
  }
  return pipeline;
}
Example #5
static void
kms_filter_element_connect_filter (KmsFilterElement * self,
    KmsElementPadType type, GstElement * filter, GstPad * target,
    GstElement * agnosticbin)
{
  gst_bin_add (GST_BIN (self), filter);

  self->priv->filter = filter;

  gst_element_link (filter, agnosticbin);
  gst_element_sync_state_with_parent (filter);

  kms_element_connect_sink_target (KMS_ELEMENT (self), target, type);
}
Example #6
int main (int argc, char *argv[])
{
    
    
    xmlfile = "launch_logs";
    std_log(LOG_FILENAME_LINE, "Test Started launch");
 
    if (argc != 2) {
      g_print ("usage: %s <mp3 file>\n", argv[0]);
      std_log(LOG_FILENAME_LINE, "Test Failed");
      create_xml(1); 
      exit (-1);
    }
    
    gst_init (&argc, &argv);
    loop = g_main_loop_new (NULL, FALSE);
    
    /* create elements */
    pipeline = gst_pipeline_new ("audio-player");
    source = gst_element_factory_make ("filesrc", "file-source");
    decoder = gst_element_factory_make ("wavparse", "wavparse-decoder");
    sink = gst_element_factory_make ("devsoundsink", "sink");
    if (!pipeline || !source || !decoder || !sink) {
      g_print ("One element could not be created\n");
      return -1;
    }
    /* set the filename property on the file source; also add a message handler */
    g_object_set (G_OBJECT (source), "location", argv[1], NULL);
    /* put all elements in a bin */
    gst_bin_add_many (GST_BIN (pipeline), source, decoder, sink, NULL);
    /* link together - note that we cannot link the parser and decoder yet,
     * because the parser uses dynamic pads; for that, we set a pad-added
     * signal handler */
    gst_element_link (source, decoder);
    gst_bus_add_watch (gst_pipeline_get_bus (GST_PIPELINE (pipeline)), bus_call, loop);
    g_signal_connect (decoder, "pad-added", G_CALLBACK (new_pad_cb), pipeline);
    /* Now set to playing and iterate. */
    g_print ("Setting to PLAYING\n");
    gst_element_set_state (pipeline, GST_STATE_PLAYING);
    g_print ("Running\n");
    g_main_loop_run (loop);
    /* clean up nicely */
    g_print ("Returned, stopping playback\n");
    gst_element_set_state (pipeline, GST_STATE_NULL);
    g_print ("Deleting pipeline\n");
    gst_object_unref (GST_OBJECT (pipeline));

    g_print ("completed playing audio\n");
    //std_log(LOG_FILENAME_LINE, "Test Successful");
    //create_xml(0); 
    return 0;
}
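The main() above installs bus_call as a bus watch and new_pad_cb as a pad-added handler without showing either. A minimal sketch of such a bus watch follows; the quit-on-EOS/error behaviour and the names are assumptions for illustration, not taken from the original listing (it assumes <gst/gst.h> and the GMainLoop used above).

/* Hypothetical sketch of the bus watch referenced above: quits the main
 * loop on end-of-stream or on an error message. */
static gboolean
bus_call (GstBus *bus, GstMessage *msg, gpointer data)
{
    GMainLoop *loop = (GMainLoop *) data;

    switch (GST_MESSAGE_TYPE (msg)) {
    case GST_MESSAGE_EOS:
        g_print ("End of stream\n");
        g_main_loop_quit (loop);
        break;
    case GST_MESSAGE_ERROR: {
        GError *err = NULL;
        gchar *debug = NULL;

        gst_message_parse_error (msg, &err, &debug);
        g_printerr ("Error: %s\n", err ? err->message : "unknown");
        if (err)
            g_error_free (err);
        g_free (debug);
        g_main_loop_quit (loop);
        break;
    }
    default:
        break;
    }
    return TRUE;   /* keep the watch installed */
}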
Example #7
bool GstEnginePipeline::ReplaceDecodeBin(const QUrl& url) {
  GstElement* new_bin = nullptr;

#ifdef HAVE_SPOTIFY
  if (url.scheme() == "spotify") {
    new_bin = gst_bin_new("spotify_bin");

    // Create elements
    GstElement* src = engine_->CreateElement("tcpserversrc", new_bin);
    if (!src) return false;
    GstElement* gdp = engine_->CreateElement("gdpdepay", new_bin);
    if (!gdp) return false;

    // Pick a port number
    const int port = Utilities::PickUnusedPort();
    g_object_set(G_OBJECT(src), "host", "127.0.0.1", nullptr);
    g_object_set(G_OBJECT(src), "port", port, nullptr);

    // Link the elements
    gst_element_link(src, gdp);

    // Add a ghost pad
    GstPad* pad = gst_element_get_static_pad(gdp, "src");
    gst_element_add_pad(GST_ELEMENT(new_bin), gst_ghost_pad_new("src", pad));
    gst_object_unref(GST_OBJECT(pad));

    // Tell spotify to start sending data to us.
    SpotifyServer* spotify_server =
        InternetModel::Service<SpotifyService>()->server();
    // Need to schedule this in the spotify server's thread
    QMetaObject::invokeMethod(
        spotify_server, "StartPlayback", Qt::QueuedConnection,
        Q_ARG(QString, url.toString()), Q_ARG(quint16, port));
  } else {
#endif
    new_bin = engine_->CreateElement("uridecodebin");
    if (!new_bin) return false;
    g_object_set(G_OBJECT(new_bin), "uri", url.toEncoded().constData(),
                 nullptr);
    CHECKED_GCONNECT(G_OBJECT(new_bin), "drained", &SourceDrainedCallback,
                     this);
    CHECKED_GCONNECT(G_OBJECT(new_bin), "pad-added", &NewPadCallback, this);
    CHECKED_GCONNECT(G_OBJECT(new_bin), "notify::source", &SourceSetupCallback,
                     this);
#ifdef HAVE_SPOTIFY
  }
#endif

  return ReplaceDecodeBin(new_bin);
}
Example #8
/* Creates a simple bin that plays a background JPEG image or sequence
 * at 30 fps. Should be used as the first input of the video mixer.
 * Scaling should be set when linked to the mixer element!
 * bin elements: multifilesrc ! jpegdec ! videoscale ! imagefreeze ! queue
 * a src ghost pad is added as the output of the bin. */
GstElement* bkgbin_new(CustomData *data)
{
   GstElement *bkgbin,*bkgsrc,*bkgdec,*bkgscale,*bkgqueue,*bkgfreeze;
   GstCaps *freeze_caps,*scale_caps;
   GstPad *pad,*dec_pad_sink;
   //Create bin, elements, caps and link everything.
   bkgbin=gst_bin_new("bkgbin");
   bkgsrc=gst_element_factory_make("multifilesrc","bkgsrc");
   bkgdec=gst_element_factory_make("jpegdec","bkgdec");
   bkgfreeze=gst_element_factory_make("imagefreeze","bkgfreeze");
   bkgscale=gst_element_factory_make("videoscale","bkgscale");
   bkgqueue=gst_element_factory_make("queue","bkgqueue");
   gst_bin_add_many(GST_BIN(bkgbin),bkgsrc,bkgdec,bkgscale,bkgqueue,bkgfreeze,NULL);
   freeze_caps=gst_caps_new_simple("video/x-raw",
                                "framerate",GST_TYPE_FRACTION,FRAMES_PER_SEC,1,
                                NULL);
   scale_caps=gst_caps_new_simple("video/x-raw",
//                                "format",G_TYPE_STRING,"YUV",
//                                "alpha",G_TYPE_INT,0,
//                                "framerate",GST_TYPE_FRACTION,FRAMES_PER_SEC,1,
                                "width",G_TYPE_INT,CAMERA_RES_WIDTH,
                                "height",G_TYPE_INT,CAMERA_RES_HEIGHT,
                                NULL);
//   gst_element_link(bkgsrc,bkgdec);
   gst_element_link_many(bkgsrc,bkgdec,NULL);
   /* decodebin's src pad is a sometimes pad - it gets created dynamically */
//   g_signal_connect(bkgdec,"pad-added",G_CALLBACK(on_new_decoded_pad),bkgscale);
   gst_element_link(bkgdec,bkgscale);
   gst_element_link_filtered(bkgscale,bkgfreeze,scale_caps);
   gst_element_link_filtered(bkgfreeze,bkgqueue,freeze_caps);
//   gst_element_link_filtered(bkgscale,bkgqueue,scale_caps);
   gst_caps_unref(scale_caps);
   gst_caps_unref(freeze_caps);
   //Create the ghost src pad for the bin.
   pad=gst_element_get_static_pad(bkgqueue,"src");
   gst_element_add_pad(bkgbin,gst_ghost_pad_new("src",pad));
   gst_object_unref(pad);
   
   /* set initial parameters */
   g_object_set(G_OBJECT(bkgsrc),"location",data->config[data->selected_config].background,
          "loop",TRUE,NULL);
//   g_object_set(G_OBJECT(bkgqueue),"leaky",2,NULL);
   
   /* set eos handler function */
   dec_pad_sink=gst_element_get_static_pad(bkgdec,"sink");
//   gst_pad_set_event_function(dec_pad_sink,eos_callback);

   return bkgbin;
}
Example #9
/*
 * Helper function to create elements, add to the bin and link it
 * to another element.
 */
static GstElement *gst_a2dp_sink_init_element(GstA2dpSink *self,
			const gchar *elementname, const gchar *name,
			GstElement *link_to)
{
	GstElement *element;
	GstState state;

	GST_LOG_OBJECT(self, "Initializing %s", elementname);

	element = gst_element_factory_make(elementname, name);
	if (element == NULL) {
		GST_DEBUG_OBJECT(self, "Couldn't create %s", elementname);
		return NULL;
	}

	if (!gst_bin_add(GST_BIN(self), element)) {
		GST_DEBUG_OBJECT(self, "failed to add %s to the bin",
						elementname);
		goto cleanup_and_fail;
	}

	state = gst_a2dp_sink_get_state(self);
	if (gst_element_set_state(element, state) ==
			GST_STATE_CHANGE_FAILURE) {
		GST_DEBUG_OBJECT(self, "%s failed to go to playing",
						elementname);
		goto remove_element_and_fail;
	}

	if (link_to != NULL)
		if (!gst_element_link(link_to, element)) {
			GST_DEBUG_OBJECT(self, "couldn't link %s",
					elementname);
			goto remove_element_and_fail;
		}

	return element;

remove_element_and_fail:
	gst_element_set_state(element, GST_STATE_NULL);
	gst_bin_remove(GST_BIN(self), element);
	return NULL;

cleanup_and_fail:
	if (element != NULL)
		g_object_unref(G_OBJECT(element));

	return NULL;
}
Example #10
static void
kms_send_data_connect_data (KmsSendData * self, GstElement * tee)
{
  GstCaps *caps;

  caps = gst_caps_from_string (KMS_AGNOSTIC_DATA_CAPS);
  self->priv->appsrc = gst_element_factory_make ("appsrc", NULL);
  gst_bin_add (GST_BIN (self), self->priv->appsrc);
  g_object_set (G_OBJECT (self->priv->appsrc), "is-live", TRUE,
      "caps", caps, "emit-signals", TRUE, "stream-type", 0,
      "format", GST_FORMAT_TIME, NULL);
  gst_caps_unref (caps);

  gst_element_link (self->priv->appsrc, tee);
}
static void
kms_show_data_connect_data (KmsShowData * self, GstElement * tee)
{
  GstElement *identity = gst_element_factory_make ("identity", NULL);
  GstPad *identity_sink = gst_element_get_static_pad (identity, "sink");

  gst_bin_add (GST_BIN (self), identity);

  kms_element_connect_sink_target (KMS_ELEMENT (self), identity_sink, KMS_ELEMENT_PAD_TYPE_DATA);
  gst_element_link (identity, tee);

  g_signal_connect (identity, "handoff", G_CALLBACK (new_data), self);

  g_object_unref (identity_sink);
}
static gboolean
link_again (gpointer data)
{
  GstElement *decoder = (GstElement *) data;
  GstElement *agnostic = g_object_get_data (G_OBJECT (data), AGNOSTIC_KEY);
  GstElement *fakesink = g_object_get_data (G_OBJECT (decoder), FAKESINK_KEY);

  GST_DEBUG ("Linking again %" GST_PTR_FORMAT ", %" GST_PTR_FORMAT, agnostic,
      decoder);

  g_object_set (G_OBJECT (fakesink), "signal-handoffs", TRUE, NULL);
  gst_element_link (agnostic, decoder);

  return FALSE;
}
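kms_show_data_connect_data() above wires identity's "handoff" signal to a new_data callback that the listing omits. A hedged sketch of what such a handler might look like follows; the logging is purely illustrative and is an assumption, the real Kurento callback presumably does more with the buffer.

/* Illustrative sketch only: identity's "handoff" signal delivers each
 * buffer flowing through the element. */
static void
new_data (GstElement *identity, GstBuffer *buffer, gpointer user_data)
{
  GST_DEBUG ("data buffer received, size %" G_GSIZE_FORMAT,
      gst_buffer_get_size (buffer));
}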
Example #13
static void hotLinkingToggleSub (GtkWidget *widget, GstElement* pipeline)
{
    /*
     * toggle subtitles
     *
     * Could have been done simply by toggling the "silent" property of subtitleOverlay cf toggleSub(...)
     * I just wanted to see how to hotlink elements ^^
     */

    GstElement *subParser = gst_bin_get_by_name(GST_BIN (pipeline), "sub-parser");
    GstElement *subOverlay = gst_bin_get_by_name(GST_BIN (pipeline), "sub-overlay");
    GstElement *videoSink = gst_bin_get_by_name(GST_BIN (pipeline), "video-output");
    GstElement *videoDecoder= gst_bin_get_by_name(GST_BIN (pipeline), "theora-decoder");

    if (hotlinkSubState==true) // subtitles enabled => need to disable them
    {
        gst_element_unlink(subParser, subOverlay);
        gst_element_unlink(videoDecoder, subOverlay);
        gst_element_unlink(subOverlay,videoSink);
        gst_element_link(videoDecoder,videoSink);

        g_print("Subtitles disabled (Hotlinking Method)\n");
        hotlinkSubState=false;
        return;
    }
    else // subtitles disabled => need to enable them
    {
        gst_element_unlink(videoDecoder,videoSink);
        gst_element_link(subParser, subOverlay);
        gst_element_link(videoDecoder, subOverlay);
        gst_element_link(subOverlay,videoSink);
        g_print("Subtitles enabled (Hotlinking Method)\n");
        hotlinkSubState=true;
        return;
    }
}
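The comment in hotLinkingToggleSub() mentions the simpler route of toggling the overlay's "silent" property instead of relinking elements. A minimal sketch of that variant is below; it is hypothetical (not the toggleSub() the comment refers to) and assumes the same "sub-overlay" element name and that the overlay exposes textoverlay's boolean "silent" property.

/* Hypothetical simpler variant: flip the overlay's "silent" property
 * instead of unlinking/relinking elements. */
static void toggleSubSimple (GtkWidget *widget, GstElement *pipeline)
{
    GstElement *subOverlay = gst_bin_get_by_name (GST_BIN (pipeline), "sub-overlay");
    gboolean silent = FALSE;

    g_object_get (subOverlay, "silent", &silent, NULL);
    g_object_set (subOverlay, "silent", !silent, NULL);
    gst_object_unref (subOverlay);
}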
Example #14
/*
 Changes webcam's sink
*/
static void
acam_webcam_change_sink (acam_webcam_device_s *acam_webcam_device, GstElement *src, GstElement *new_sink, GstElement *old_sink)
{
	/* Stop webcam's video */
	acam_webcam_device_stop (acam_webcam_device);

	gst_element_unlink (src, old_sink);
	gst_object_ref (old_sink);
	gst_bin_remove (GST_BIN (acam_webcam_device->video_pipeline), old_sink);

	gst_bin_add (GST_BIN (acam_webcam_device->video_pipeline), new_sink);
	gst_element_link (src, new_sink);

	/* Play webcam's video */
	acam_webcam_device_play (acam_webcam_device);
}
Example #15
void HTTPSDPDec::uri_to_shmdata() {
  destroy_httpsdpdec();
  prune_tree(".shmdata.writer");
  init_httpsdpdec();
  g_object_set_data(
      G_OBJECT(sdpdemux_.get_raw()), "on-error-gsource", (gpointer)on_error_.back().get());
  g_debug("httpsdpdec: to_shmdata set uri %s", uri_.c_str());
  if (!is_dataurisrc_)  // for souphttpsrc
    g_object_set(G_OBJECT(souphttpsrc_.get_raw()), "location", uri_.c_str(), nullptr);
  else  // for dataurisrc
    g_object_set(G_OBJECT(souphttpsrc_.get_raw()), "uri", uri_.c_str(), nullptr);
  gst_bin_add_many(
      GST_BIN(gst_pipeline_->get_pipeline()), souphttpsrc_.get_raw(), sdpdemux_.get_raw(), nullptr);
  gst_element_link(souphttpsrc_.get_raw(), sdpdemux_.get_raw());
  gst_pipeline_->play(true);
}
static gboolean
link_source (gpointer data)
{
  GstElement *pipeline = data;
  GstElement *agnosticbin =
      gst_bin_get_by_name (GST_BIN (pipeline), "agnosticbin");
  GstElement *videosrc = gst_element_factory_make ("videotestsrc", NULL);

  gst_bin_add_many (GST_BIN (pipeline), videosrc, NULL);
  gst_element_sync_state_with_parent (videosrc);
  gst_element_link (videosrc, agnosticbin);

  g_object_unref (agnosticbin);

  return FALSE;
}
Example #17
static void
gst_vaapi_decode_bin_init (GstVaapiDecodeBin * vaapidecbin)
{
  GstPad *pad, *ghostpad;

  vaapidecbin->deinterlace_method = DEFAULT_DEINTERLACE_METHOD;
  vaapidecbin->disable_vpp = (g_getenv ("GST_VAAPI_DISABLE_VPP") != NULL);

  /* create the decoder */
  vaapidecbin->decoder =
      g_object_new (g_type_from_name ("GstVaapiDecode"), NULL);
  g_assert (vaapidecbin->decoder);

  /* create the queue */
  vaapidecbin->queue = gst_element_factory_make ("queue", "vaapi-queue");
  if (!vaapidecbin->queue) {
    g_clear_object (&vaapidecbin->decoder);
    post_missing_element_message (vaapidecbin, "queue");
    return;
  }

  gst_bin_add_many (GST_BIN (vaapidecbin), vaapidecbin->decoder,
      vaapidecbin->queue, NULL);

  if (!gst_element_link (vaapidecbin->decoder, vaapidecbin->queue)) {
    g_clear_object (&vaapidecbin->decoder);
    g_clear_object (&vaapidecbin->queue);
    g_critical ("failed to link decoder and queue");
    return;
  }

  /* create ghost pad sink */
  pad = gst_element_get_static_pad (vaapidecbin->decoder, "sink");
  ghostpad = gst_ghost_pad_new_from_template ("sink", pad,
      GST_PAD_PAD_TEMPLATE (pad));
  gst_object_unref (pad);
  if (!gst_element_add_pad (GST_ELEMENT (vaapidecbin), ghostpad))
    g_critical ("failed to add decoder sink pad to bin");

  /* create ghost pad src */
  pad = gst_element_get_static_pad (GST_ELEMENT (vaapidecbin->queue), "src");
  ghostpad = gst_ghost_pad_new_from_template ("src", pad,
      GST_PAD_PAD_TEMPLATE (pad));
  gst_object_unref (pad);
  if (!gst_element_add_pad (GST_ELEMENT (vaapidecbin), ghostpad))
    g_critical ("failed to add queue source pad to bin");
}
Example #18
void
gst_video_editor_start (GstVideoEditor * gve)
{
  GError *error = NULL;
  GstPad *pad;

  g_return_if_fail (GST_IS_VIDEO_EDITOR (gve));

  GST_INFO_OBJECT (gve, "Starting. output file: %s", gve->priv->output_file);

  /* Create elements */
  gve->priv->muxer = lgm_create_muxer (gve->priv->muxer_type, GVE_ERROR,
      &error);
  if (error) {
    g_signal_emit (gve, gve_signals[SIGNAL_ERROR], 0, error->message);
    g_error_free (error);
    return;
  }
  gve->priv->file_sink = gst_element_factory_make ("filesink", "filesink");
  gve_create_video_encode_bin (gve);

  /* Set elements properties */
  g_object_set (G_OBJECT (gve->priv->file_sink), "location",
      gve->priv->output_file, NULL);

  /* Link elements */
  gst_bin_add_many (GST_BIN (gve->priv->main_pipeline),
      GST_ELEMENT (gve->priv->nle_source),
      gve->priv->vencode_bin, gve->priv->muxer, gve->priv->file_sink, NULL);

  gst_element_link_many (gve->priv->vencode_bin,
      gve->priv->muxer, gve->priv->file_sink, NULL);

  if (gve->priv->audio_enabled) {
    gve_create_audio_encode_bin (gve);
    gst_bin_add (GST_BIN (gve->priv->main_pipeline), gve->priv->aencode_bin);
    gst_element_link (gve->priv->aencode_bin, gve->priv->muxer);
  }

  gve->priv->last_pos = 0;
  pad = gst_element_get_static_pad (gve->priv->file_sink, "sink");
  gst_pad_add_buffer_probe (pad, (GCallback) gve_on_buffer_cb, gve);
  gst_object_unref (pad);

  gst_element_set_state (gve->priv->main_pipeline, GST_STATE_PLAYING);
  g_signal_emit (gve, gve_signals[SIGNAL_PERCENT_COMPLETED], 0, (gfloat) 0);
}
Example #19
int main(int argc, char** argv)
{
    GMainLoop *loop;
    GstElement *pipeline, *source, *demuxer, *decoder, *conv, *sink;
    GstBus *bus;

    gst_init(&argc, &argv);
    loop = g_main_loop_new(NULL, FALSE);
    if(argc != 2)
    {
        g_printerr("Usage: %s <Ogg/Vorbis filename>\n", argv[0]);
        return -1;
    }
    pipeline = gst_pipeline_new("audio-player");
    source = gst_element_factory_make("filesrc", "file-source");
    demuxer = gst_element_factory_make("oggdemux", "ogg-demuxer");
    decoder = gst_element_factory_make("vorbisdec", "vorbis-decoder");
    conv = gst_element_factory_make("audioconvert", "converter");
    sink = gst_element_factory_make("autoaudiosink", "audio-output");
    if(!pipeline || !source || !demuxer || !decoder || !conv || !sink)
    {
        g_printerr("One element could not be created. Exiting\n");
        return -1;
    }
    g_object_set(G_OBJECT(source), "location", argv[1], NULL);
    bus = gst_pipeline_get_bus(GST_PIPELINE(pipeline));
    gst_bus_add_watch(bus, bus_call, loop);
    gst_object_unref(bus);

    gst_bin_add_many(GST_BIN(pipeline), source, demuxer, decoder, conv, sink, NULL);
    gst_element_link(source, demuxer);
    gst_element_link_many(decoder, conv, sink, NULL);
    g_signal_connect(demuxer, "pad-added", G_CALLBACK(on_pad_added), decoder);

    g_print("Now playing: %s\n", argv[1]);
    gst_element_set_state(pipeline, GST_STATE_PLAYING);

    g_print("Running...\n");
    g_main_loop_run(loop);

    g_print("Returned, stopping playback\n");
    gst_element_set_state(pipeline, GST_STATE_NULL);

    g_print("Deteling pipeline\n");
    gst_object_unref(GST_OBJECT(pipeline));

    return 0;
}
static gboolean
create_sink (GstSplitMuxSink * splitmux)
{
  GstElement *provided_sink = NULL;

  g_return_val_if_fail (splitmux->active_sink == NULL, TRUE);

  GST_OBJECT_LOCK (splitmux);
  if (splitmux->provided_sink != NULL)
    provided_sink = gst_object_ref (splitmux->provided_sink);
  GST_OBJECT_UNLOCK (splitmux);

  if (provided_sink == NULL) {
    if ((splitmux->sink =
            create_element (splitmux, DEFAULT_SINK, "sink")) == NULL)
      goto fail;
    splitmux->active_sink = splitmux->sink;
  } else {
    if (!gst_bin_add (GST_BIN (splitmux), provided_sink)) {
      g_warning ("Could not add sink elements - splitmuxsink will not work");
      gst_object_unref (provided_sink);
      goto fail;
    }

    splitmux->active_sink = provided_sink;

    /* The bin holds a ref now, we can drop our tmp ref */
    gst_object_unref (provided_sink);

    /* Find the sink element */
    splitmux->sink = find_sink (splitmux->active_sink);
    if (splitmux->sink == NULL) {
      g_warning
          ("Could not locate sink element in provided sink - splitmuxsink will not work");
      goto fail;
    }
  }

  if (!gst_element_link (splitmux->muxer, splitmux->active_sink)) {
    g_warning ("Failed to link muxer and sink- splitmuxsink will not work");
    goto fail;
  }

  return TRUE;
fail:
  return FALSE;
}
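The audio-player main() in this example connects an on_pad_added handler for oggdemux's dynamically created pads but does not show it. A minimal sketch follows; it assumes the decoder passed as user data has a static "sink" pad (as vorbisdec does), which is the usual tutorial pattern rather than something taken from this listing.

/* Hypothetical sketch of the pad-added handler used above: link the
 * demuxer's new src pad to the decoder's sink pad. */
static void
on_pad_added (GstElement *element, GstPad *pad, gpointer data)
{
    GstElement *decoder = (GstElement *) data;
    GstPad *sinkpad = gst_element_get_static_pad (decoder, "sink");

    if (gst_pad_link (pad, sinkpad) != GST_PAD_LINK_OK)
        g_printerr ("Failed to link demuxer pad to decoder\n");
    gst_object_unref (sinkpad);
}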
Example #21
static gpointer
rb_audiocd_load_songs (RBAudioCdSource *source)
{
	RBAudioCdSourcePrivate *priv = AUDIOCD_SOURCE_GET_PRIVATE (source);
	RhythmDB *db;
	GVolume *volume;

	g_object_get (source, "volume", &volume, NULL);
	priv->device_path = g_volume_get_identifier (volume,
						     G_VOLUME_IDENTIFIER_KIND_UNIX_DEVICE);
	g_object_unref (volume);

	db = get_db_for_source (source);

	rb_debug ("loading Audio CD from %s", priv->device_path);
	/* create a cdda gstreamer element, to get cd info from */
	priv->cdda = gst_element_make_from_uri (GST_URI_SRC, "cdda://", NULL);
	if (!priv->cdda) {
		gdk_threads_enter ();
		rb_error_dialog (NULL, _("Couldn't load Audio CD"),
					_("Rhythmbox could not get access to the CD device."));
		gdk_threads_leave ();
		goto error_out;
	}

	rb_debug ("cdda longname: %s", gst_element_factory_get_longname (gst_element_get_factory (priv->cdda)));
	g_object_set (G_OBJECT (priv->cdda), "device", priv->device_path, NULL);
	priv->pipeline = gst_pipeline_new ("pipeline");
	priv->fakesink = gst_element_factory_make ("fakesink", "fakesink");
	gst_bin_add_many (GST_BIN (priv->pipeline), priv->cdda, priv->fakesink, NULL);
	gst_element_link (priv->cdda, priv->fakesink);

	/* disable paranoia (if using cdparanoia) since we're only reading track information here.
	 * this reduces cdparanoia's cache size, so the process is much faster.
	 */
	if (g_object_class_find_property (G_OBJECT_GET_CLASS (source), "paranoia-mode"))
		g_object_set (source, "paranoia-mode", 0, NULL);

	if (rb_audiocd_scan_songs (source, db))
		rb_audiocd_load_metadata (source, db);

error_out:
	g_object_unref (db);
	g_object_unref (source);

	return NULL;
}
Example #22
void CQMedia::_updateDemux()
{
    GstElement *aiurdemux = gst_bin_get_by_name((GstBin*)pipeline,"aiurdemux");
    GstStateChangeReturn ret=gst_element_set_state(aiurdemux,GST_STATE_NULL);
    if(GST_STATE_CHANGE_SUCCESS==ret)
    {
        qDebug("set aiurdemux to null SUCCESS!!");
        if(gst_bin_remove(GST_BIN(pipeline),aiurdemux))
        {
            qDebug("remove aiurdemux success!!");
            gst_object_unref(aiurdemux);
            aiurdemux =  gst_element_factory_make("aiurdemux","aiurdemux");
            if(devType == ROOM || devType == SUBVIDEO)
                g_object_set(G_OBJECT(aiurdemux),"streaming_latency",(guint64)3000,NULL);
            else
                g_object_set(G_OBJECT(aiurdemux),"streaming_latency",(guint64)3000,NULL);

            GstElement *queue1 = gst_bin_get_by_name((GstBin*)pipeline,"queue1");
            GstElement *queue2 = gst_bin_get_by_name((GstBin*)pipeline,"queue2");
            GstElement *queue3 = gst_bin_get_by_name((GstBin*)pipeline,"queue3");
            //qDebug("queue3 is %p",queue3);
            gst_bin_add_many (GST_BIN (pipeline),aiurdemux,NULL);
            if(gst_element_link ( queue1,aiurdemux))
            {
                qDebug("link aiurdemux  SUCCESS!!");
            }
            else
            {
                qDebug("link aiurdemux  FAILED!!");
            }
            g_signal_connect (aiurdemux, "pad-added", G_CALLBACK (pad_added_handler), queue2);
            g_signal_connect (aiurdemux, "pad-added", G_CALLBACK (pad_added_handler), queue3);

            gst_object_unref(queue1);
            gst_object_unref(queue2);
            gst_object_unref(queue3);
        }
        else
        {
            qDebug("remove aiurdemux failed!!");
        }
    }
    else
    {
        qDebug("set aiurdemux to null FAILED!!");
    }
}
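_updateDemux() above reconnects aiurdemux's "pad-added" signal to a pad_added_handler with a queue as user data, but the handler itself is not part of the listing. A hedged, simplified sketch is below; the real handler presumably also inspects the new pad's caps to pick between the audio and video queues, which this sketch does not do.

/* Illustrative sketch: link the demuxer's new pad to the sink pad of the
 * queue passed as user data, if that sink pad is still unlinked. */
static void
pad_added_handler (GstElement *src, GstPad *new_pad, gpointer data)
{
    GstElement *queue = GST_ELEMENT (data);
    GstPad *sink_pad = gst_element_get_static_pad (queue, "sink");

    if (!gst_pad_is_linked (sink_pad)) {
        if (gst_pad_link (new_pad, sink_pad) != GST_PAD_LINK_OK)
            g_printerr ("linking demuxer pad to queue failed\n");
    }
    gst_object_unref (sink_pad);
}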
Example #23
void test_buffer_probe_n_times()
{
  GstElement *pipeline, *fakesrc, *fakesink;
  GstBus *bus;
  GstMessage *message;
  GstPad *pad;
  xmlfile = "gstutils_test_buffer_probe_n_times";
  std_log(LOG_FILENAME_LINE, "Test Started gstutils_test_buffer_probe_n_times");

  pipeline = gst_element_factory_make ("pipeline", NULL);
  fakesrc = gst_element_factory_make ("fakesrc", NULL);
  fakesink = gst_element_factory_make ("fakesink", NULL);

  g_object_set (fakesrc, "num-buffers", (int) 10, NULL);
  gst_bin_add_many (GST_BIN (pipeline), fakesrc, fakesink, NULL);
  gst_element_link (fakesrc, fakesink);

  pad = gst_element_get_pad (fakesink, "sink");
  gst_pad_add_data_probe (pad, G_CALLBACK (data_probe), SPECIAL_POINTER (0));
  gst_pad_add_buffer_probe (pad, G_CALLBACK (buffer_probe),
      SPECIAL_POINTER (1));
  gst_pad_add_event_probe (pad, G_CALLBACK (event_probe), SPECIAL_POINTER (2));
  gst_object_unref (pad);

  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  bus = gst_element_get_bus (pipeline);
  message = gst_bus_poll (bus, GST_MESSAGE_EOS, -1);
  gst_message_unref (message);
  gst_object_unref (bus);

  g_assert (n_buffer_probes == 10);     /* one for every buffer */
  g_assert (n_event_probes == 3);       /* new segment, latency and eos */
  g_assert (n_data_probes == 13);       /* duh */

  gst_element_set_state (pipeline, GST_STATE_NULL);
  gst_object_unref (pipeline);

  /* make sure nothing was sent in addition to the above when shutting down */
  g_assert (n_buffer_probes == 10);     /* one for every buffer */
  g_assert (n_event_probes == 3);       /* new segment, latency and eos */
  g_assert (n_data_probes == 13);       /* duh */
  
  std_log(LOG_FILENAME_LINE, "Test Successful");
  create_xml(0);
}
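The counters asserted above are incremented by data_probe, buffer_probe and event_probe callbacks that the listing does not include. In the GStreamer 0.10 probe API used here they would look roughly like the sketch below; the exact signatures and the counter globals (declared elsewhere in the test file) are assumptions.

/* Rough 0.10-style probe callbacks; returning TRUE lets the data pass. */
static gboolean
data_probe (GstPad *pad, GstMiniObject *obj, gpointer data)
{
  n_data_probes++;              /* fires for buffers and events alike */
  return TRUE;
}

static gboolean
buffer_probe (GstPad *pad, GstBuffer *buffer, gpointer data)
{
  n_buffer_probes++;
  return TRUE;
}

static gboolean
event_probe (GstPad *pad, GstEvent *event, gpointer data)
{
  n_event_probes++;
  return TRUE;
}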
Example #24
void test_element_unlink()
{
  GstElement *src, *sink;
  
  xmlfile = "gstutils_test_element_unlink";
  std_log(LOG_FILENAME_LINE, "Test Started gstutils_test_element_unlink");

  src = gst_element_factory_make ("fakesrc", NULL);
  sink = gst_element_factory_make ("fakesink", NULL);
  fail_unless (gst_element_link (src, sink) != FALSE);
  gst_element_unlink (src, sink);
  gst_object_unref (src);
  gst_object_unref (sink);
  
  std_log(LOG_FILENAME_LINE, "Test Successful");
  create_xml(0);
}
Example #25
static void
pipeline_add_video_sink (GstElement * pipeline, GstPad * srcpad)
{
	GstElement *q, *s;
	GstPad *sinkpad;

	q = gst_element_factory_make ("queue", NULL);
	s = gst_element_factory_make ("autovideosink", NULL);
	gst_bin_add_many (GST_BIN (pipeline), q, s, NULL);

	sinkpad = gst_element_get_static_pad (q, "sink");
	gst_pad_link (srcpad, sinkpad);
	gst_element_link (q, s);

	gst_element_sync_state_with_parent (q);
	gst_element_sync_state_with_parent (s);
	gst_object_unref (sinkpad);
}
Example #26
static void
new_pad (GstElement *element,
	 GstPad     *pad,
	 gpointer    data)
{
  GstPad *sinkpad;
  /* We can now link this pad with the audio decoder */
  g_print ("Dynamic pad created, linking parser/decoder\n");
  gst_element_set_state (pipeline, GST_STATE_PAUSED);
  sink = gst_element_factory_make ("devsoundsink", "devoutput");
  gst_bin_add_many (GST_BIN (pipeline),sink,  NULL);
  // gst_element_get_pad is deprecated; gst_element_get_request_pad would replace it
  //sinkpad = gst_element_get_pad (decoder, "sink");
  //sinkpad = gst_element_get_request_pad (decoder, "sink");
  //gst_pad_link (pad, sinkpad);
  gst_element_link (element, sink);
  gst_element_set_state (pipeline, GST_STATE_PLAYING);
}
int
main (int argc, char *argv[])
{
    GstElement *bin, *fakesrc, *fakesink;
    GstBus *bus;

    gst_init (&argc, &argv);

    /* create a new bin to hold the elements */
    bin = gst_pipeline_new ("pipeline");
    g_assert (bin);

    /* create a source */
    fakesrc = gst_element_factory_make ("fakesrc", "fakesrc");
    g_assert (fakesrc);
    g_object_set (fakesrc, "num-buffers", 50, NULL);

    /* and a sink */
    fakesink = gst_element_factory_make ("fakesink", "fakesink");
    g_assert (fakesink);

    /* add objects to the main pipeline */
    gst_bin_add_many (GST_BIN (bin), fakesrc, fakesink, NULL);

    /* link the elements */
    gst_element_link (fakesrc, fakesink);

    /* get the bus, we need to install a sync handler */
    bus = gst_pipeline_get_bus (GST_PIPELINE (bin));
    gst_bus_set_sync_handler (bus, (GstBusSyncHandler) sync_bus_handler, bin,
                              NULL);

    /* start playing */
    gst_element_set_state (bin, GST_STATE_PLAYING);

    /* Run event loop listening for bus messages until EOS or ERROR */
    event_loop (bus, bin);

    /* stop the bin */
    gst_element_set_state (bin, GST_STATE_NULL);
    gst_object_unref (bus);

    exit (0);
}
Example #28
void Pipeline::setupEffectBins()
{
    GstCaps *caps;
    GstPad *pad;

    // internal bin and elements
    effectInternalBin = gst_bin_new(NULL);
    effectPreCS = gst_element_factory_make("ffmpegcolorspace", NULL);
    effectPostCS = gst_element_factory_make("ffmpegcolorspace", NULL);
    effect = gst_element_factory_make("identity", NULL);

    // capsfilter used to force rgb in the effect pipeline
    effectCapsFilter = gst_element_factory_make("capsfilter", NULL);
    caps = gst_caps_from_string("video/x-raw-rgb");
    g_object_set(effectCapsFilter, "caps", caps, NULL);

    gst_bin_add_many(GST_BIN(effectInternalBin), effectPreCS, effectCapsFilter, effect, effectPostCS, NULL);
    gst_element_link_many(effectPreCS, effectCapsFilter, effect, effectPostCS, NULL);

    // ghost pads to the internal bin
    pad = gst_element_get_static_pad(effectPreCS, "sink");
    gst_element_add_pad(effectInternalBin, gst_ghost_pad_new("sink", pad));
    gst_object_unref(GST_OBJECT(pad));
    pad = gst_element_get_static_pad(effectPostCS, "src");
    gst_element_add_pad(effectInternalBin, gst_ghost_pad_new("src", pad));
    gst_object_unref(GST_OBJECT(pad));

    // main bin and valve
    effectBin = gst_bin_new(NULL);
    effectValve = gst_element_factory_make("valve", NULL);

    gst_bin_add_many(GST_BIN(effectBin), effectValve, effectInternalBin, NULL);
    gst_element_link(effectValve, effectInternalBin);

    // ghost pads to the main bin
    pad = gst_element_get_static_pad(effectValve, "sink");
    gst_element_add_pad(effectBin, gst_ghost_pad_new("sink", pad));
    gst_object_unref(GST_OBJECT(pad));
    pad = gst_element_get_static_pad(effectInternalBin, "src");
    gst_element_add_pad(effectBin, gst_ghost_pad_new("src", pad));
    gst_object_unref(GST_OBJECT(pad));

    g_object_set(camerabin, "video-source-filter", effectBin, NULL);
}
Example #29
RygelMP3TranscoderBin* rygel_mp3_transcoder_bin_construct (GType object_type, GstElement* src, RygelMP3Transcoder* transcoder, GError** error) {
	GError * _inner_error_;
	RygelMP3TranscoderBin * self;
	GstElement* decodebin;
	GstElement* _tmp0_;
	GstElement* _tmp1_;
	GstElement* _tmp4_;
	GstElement* _tmp3_;
	GstElement* _tmp2_;
	GstPad* src_pad;
	GstGhostPad* ghost;
	GstPad* _tmp5_;
	g_return_val_if_fail (src != NULL, NULL);
	g_return_val_if_fail (transcoder != NULL, NULL);
	_inner_error_ = NULL;
	self = g_object_newv (object_type, 0, NULL);
	decodebin = rygel_gst_utils_create_element (RYGEL_MP3_TRANSCODER_BIN_DECODEBIN, RYGEL_MP3_TRANSCODER_BIN_DECODEBIN, &_inner_error_);
	if (_inner_error_ != NULL) {
		g_propagate_error (error, _inner_error_);
		return NULL;
	}
	_tmp0_ = rygel_mp3_transcoder_create_encoder (transcoder, RYGEL_MP3_TRANSCODER_BIN_AUDIO_SRC_PAD, RYGEL_MP3_TRANSCODER_BIN_AUDIO_SINK_PAD, &_inner_error_);
	if (_inner_error_ != NULL) {
		g_propagate_error (error, _inner_error_);
		(decodebin == NULL) ? NULL : (decodebin = (gst_object_unref (decodebin), NULL));
		return NULL;
	}
	_tmp1_ = NULL;
	self->priv->audio_enc = (_tmp1_ = _tmp0_, (self->priv->audio_enc == NULL) ? NULL : (self->priv->audio_enc = (gst_object_unref (self->priv->audio_enc), NULL)), _tmp1_);
	_tmp4_ = NULL;
	_tmp3_ = NULL;
	_tmp2_ = NULL;
	gst_bin_add_many ((GstBin*) self, (_tmp2_ = src, (_tmp2_ == NULL) ? NULL : gst_object_ref (_tmp2_)), (_tmp3_ = decodebin, (_tmp3_ == NULL) ? NULL : gst_object_ref (_tmp3_)), (_tmp4_ = self->priv->audio_enc, (_tmp4_ == NULL) ? NULL : gst_object_ref (_tmp4_)), NULL);
	gst_element_link (src, decodebin);
	src_pad = gst_element_get_static_pad (self->priv->audio_enc, RYGEL_MP3_TRANSCODER_BIN_AUDIO_SRC_PAD);
	ghost = (GstGhostPad*) gst_ghost_pad_new (NULL, src_pad);
	_tmp5_ = NULL;
	gst_element_add_pad ((GstElement*) self, (_tmp5_ = (GstPad*) ghost, (_tmp5_ == NULL) ? NULL : gst_object_ref (_tmp5_)));
	g_signal_connect_object (decodebin, "pad-added", (GCallback) _rygel_mp3_transcoder_bin_decodebin_pad_added_gst_element_pad_added, self, 0);
	(decodebin == NULL) ? NULL : (decodebin = (gst_object_unref (decodebin), NULL));
	(src_pad == NULL) ? NULL : (src_pad = (gst_object_unref (src_pad), NULL));
	(ghost == NULL) ? NULL : (ghost = (gst_object_unref (ghost), NULL));
	return self;
}
Example #30
void QGstreamerPlayerSession::setVideoRenderer(QObject *videoOutput)
{
    QGstreamerVideoRendererInterface* renderer = qobject_cast<QGstreamerVideoRendererInterface*>(videoOutput);

    if (m_renderer == renderer)
        return;

#ifdef DEBUG_VO_BIN_DUMP
    dumpNum++;

    _gst_debug_bin_to_dot_file(GST_BIN(m_videoOutputBin),
                                  GstDebugGraphDetails(GST_DEBUG_GRAPH_SHOW_ALL /* GST_DEBUG_GRAPH_SHOW_MEDIA_TYPE | GST_DEBUG_GRAPH_SHOW_NON_DEFAULT_PARAMS | GST_DEBUG_GRAPH_SHOW_STATES*/),
                                  QString("video_output_change_%1_set").arg(dumpNum).toAscii().constData());
#endif

    m_renderer = renderer;

    GstElement *videoSink = m_renderer ? m_renderer->videoSink() : m_nullVideoSink;

    if (m_state == QMediaPlayer::StoppedState) {
        m_pendingVideoSink = 0;
        gst_element_unlink(m_videoScale, m_videoSink);

        gst_bin_remove(GST_BIN(m_videoOutputBin), m_videoSink);

        m_videoSink = videoSink;

        gst_bin_add(GST_BIN(m_videoOutputBin), m_videoSink);
        gst_element_link(m_videoScale, m_videoSink);

    } else {
        if (m_pendingVideoSink) {
            m_pendingVideoSink = videoSink;
            return;
        }

        m_pendingVideoSink = videoSink;

        //block pads, async to avoid locking in paused state
        GstPad *srcPad = gst_element_get_static_pad(m_videoIdentity, "src");
        gst_pad_set_blocked_async(srcPad, true, &block_pad_cb, this);
        gst_object_unref(GST_OBJECT(srcPad));
    }
}