Example #1
void CvCapture_GStreamer::restartPipeline()
{
    CV_FUNCNAME("icvRestartPipeline");

    __BEGIN__;

    printf("restarting pipeline, going to ready\n");

    if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_READY) ==
            GST_STATE_CHANGE_FAILURE) {
        CV_ERROR(CV_StsError, "GStreamer: unable to start pipeline\n");
        return;
    }

    printf("ready, relinking\n");

    gst_element_unlink(uridecodebin, color);
    printf("filtering with %s\n", gst_caps_to_string(caps));
    gst_element_link_filtered(uridecodebin, color, caps);

    printf("relinked, pausing\n");

    if(gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING) ==
            GST_STATE_CHANGE_FAILURE) {
        CV_ERROR(CV_StsError, "GStreamer: unable to start pipeline\n");
        return;
    }

    printf("state now paused\n");

    __END__;
}
static void
fps_display_sink_stop (GstFPSDisplaySink * self)
{
  if (self->text_overlay) {
    gst_element_unlink (self->text_overlay, self->video_sink);
    gst_bin_remove (GST_BIN (self), self->text_overlay);
    gst_object_unref (self->text_overlay);
    self->text_overlay = NULL;
  }

  if (!self->silent) {
    gchar *str;

    /* print the max and minimum fps values */
    str =
        g_strdup_printf ("Max-fps: %0.2f, Min-fps: %0.2f", self->max_fps,
        self->min_fps);
    GST_OBJECT_LOCK (self);
    g_free (self->last_message);
    self->last_message = str;
    GST_OBJECT_UNLOCK (self);
    g_object_notify_by_pspec ((GObject *) self, pspec_last_message);
  }

  GST_OBJECT_LOCK (self);
  g_free (self->last_message);
  self->last_message = NULL;
  GST_OBJECT_UNLOCK (self);
}
static gboolean
remove_elements_from_pipeline (KmsAlphaBlendingData * port_data)
{
  KmsAlphaBlending *self = port_data->mixer;
  GstElement *videoconvert, *videoscale, *videorate, *capsfilter, *queue,
      *videobox;

  KMS_ALPHA_BLENDING_LOCK (self);

  videobox = port_data->videobox;
  gst_element_unlink (videobox, self->priv->videomixer);

  if (port_data->video_mixer_pad != NULL) {
    gst_element_release_request_pad (self->priv->videomixer,
        port_data->video_mixer_pad);
    g_object_unref (port_data->video_mixer_pad);
    port_data->video_mixer_pad = NULL;
  }

  videoconvert = g_object_ref (port_data->videoconvert);
  videorate = g_object_ref (port_data->videorate);
  queue = g_object_ref (port_data->queue);
  videoscale = g_object_ref (port_data->videoscale);
  capsfilter = g_object_ref (port_data->capsfilter);
  g_object_ref (videobox);

  g_object_unref (port_data->videoconvert_sink_pad);

  port_data->videoconvert_sink_pad = NULL;
  port_data->videoconvert = NULL;
  port_data->videorate = NULL;
  port_data->queue = NULL;
  port_data->videoscale = NULL;
  port_data->capsfilter = NULL;
  port_data->videobox = NULL;

  gst_bin_remove_many (GST_BIN (self), videoconvert, videoscale, capsfilter,
      videorate, queue, videobox, NULL);

  kms_base_hub_unlink_video_src (KMS_BASE_HUB (self), port_data->id);

  KMS_ALPHA_BLENDING_UNLOCK (self);

  gst_element_set_state (videoconvert, GST_STATE_NULL);
  gst_element_set_state (videoscale, GST_STATE_NULL);
  gst_element_set_state (videorate, GST_STATE_NULL);
  gst_element_set_state (capsfilter, GST_STATE_NULL);
  gst_element_set_state (queue, GST_STATE_NULL);
  gst_element_set_state (videobox, GST_STATE_NULL);

  g_object_unref (videoconvert);
  g_object_unref (videoscale);
  g_object_unref (videorate);
  g_object_unref (capsfilter);
  g_object_unref (queue);
  g_object_unref (videobox);

  return G_SOURCE_REMOVE;
}
/*
 * Method: unlink(element)
 * element: a Gst::Element object.
 *
 * Unlinks this element (source) from the provided element (destination).
 *
 * The method looks for all source pads of the source element that are
 * linked to the destination element and unlinks them.
 *
 */
static VALUE
rg_unlink(VALUE self, VALUE other_element)
{
    GstElement *element1, *element2;

    element1 = SELF(self);
    element2 = SELF(other_element);
    gst_element_unlink(element1, element2);
    return self;
}
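For context, the binding above delegates straight to the C-level call. A minimal
sketch of the same link/unlink round trip in plain C, assuming GStreamer has been
initialized (fakesrc and fakesink ship with GStreamer core):

#include <gst/gst.h>

int
main (int argc, char *argv[])
{
  GstElement *src, *sink;

  gst_init (&argc, &argv);

  src = gst_element_factory_make ("fakesrc", NULL);
  sink = gst_element_factory_make ("fakesink", NULL);

  /* link src -> sink, then undo it; unlinking breaks the pad
   * connections but leaves both elements alive and owned by us */
  if (gst_element_link (src, sink))
    gst_element_unlink (src, sink);

  gst_object_unref (src);
  gst_object_unref (sink);
  return 0;
}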
Example #5
static void gate_block_async_cb(GstPad * pad, gboolean blocked,
	gpointer user_data)
    {
    GstRTSPMedia *media;

    SMC::RTSPserverGate *object = static_cast<SMC::RTSPserverGate*> (user_data);
    media = static_cast<GstRTSPMedia*> (g_hash_table_lookup(
	    object->factory->medias, object->factory->key));
    GstElement * pipeline = (GstElement *) media->element;
    GstState rtspstate;
    GstElement *source, *rtspsrc, *buffer;
    buffer = gst_bin_get_by_name(GST_BIN(pipeline), "buffer");
    source = gst_bin_get_by_name(GST_BIN(pipeline), "gate");

    gst_element_set_state(pipeline, GST_STATE_PAUSED);

    gst_element_unlink(source, buffer);
    if (TRUE == gst_bin_remove(GST_BIN(pipeline), source))
	std::clog << "########## Removed" << std::endl;
    else
	std::clog << "########## Not Removed" << std::endl;

    gst_element_get_state(source, &rtspstate, NULL, GST_CLOCK_TIME_NONE);
    std::clog << "########## FIRST rtsp: " << gst_element_state_get_name(
	    rtspstate) << " ::: " << object->getState().c_str() << std::endl;

    if (object->getState() == "gate")
	object->setState("gate1");
    else
	object->setState("gate");

    rtspsrc = gst_element_factory_make("rtspsrc", "gate");

    std::clog << "########## LAST rtsp" << object->getState().c_str()
	    << std::endl;

    std::clog << "########## Flusso:" << object->getLocation().c_str()
	    << std::endl;

    /* "tcp-timeout" is a guint64 property, so the value must be cast
     * explicitly when passed through g_object_set()'s varargs */
    g_object_set(rtspsrc, "location", object->getLocation().c_str(),
            "tcp-timeout", (guint64) 600000, NULL);

    gst_bin_add(GST_BIN(pipeline), rtspsrc);

    g_signal_connect(rtspsrc, "pad-added", G_CALLBACK(gate_on_rtsppad_added),
	    object);
    gst_element_set_state(rtspsrc, GST_STATE_PLAYING);
    gst_element_set_state(pipeline, GST_STATE_PLAYING);

    g_idle_add((GSourceFunc) stop_data, source);
    std::clog << "########## start rtsp:" << object->getLocation().c_str()
	    << std::endl;
    std::clog << "########## Playing Pause:" << std::endl;
    }
Example #6
/**
 * Handler for the end-of-stream (EOS) message, invoked when
 * playback of the sound file has finished.
 */
void SoundSystem::EosMessageOccur(SoundSystem *sndsys)
{
        GstElement *pipeline, *decode, *volume;

        pipeline = GST_ELEMENT(g_datalist_get_data(&sndsys->eltset, "pipeline-element"));
        gst_element_set_state(pipeline, GST_STATE_READY);
        decode = GST_ELEMENT(g_datalist_get_data(&sndsys->eltset, "decode-element"));
        volume = GST_ELEMENT(g_datalist_get_data(&sndsys->eltset, "volume-element"));
        gst_element_unlink(decode, volume);
        sndsys->persist = false;
}
static gboolean
remove_elements_from_pipeline (KmsCompositeMixerData * port_data)
{
  KmsCompositeMixer *self = port_data->mixer;

  KMS_COMPOSITE_MIXER_LOCK (self);

  gst_element_unlink (port_data->capsfilter, self->priv->videomixer);

  if (port_data->video_mixer_pad != NULL) {
    gst_element_release_request_pad (self->priv->videomixer,
        port_data->video_mixer_pad);
    g_object_unref (port_data->video_mixer_pad);
    port_data->video_mixer_pad = NULL;
  }

  g_object_unref (port_data->videoconvert_sink_pad);

  gst_bin_remove_many (GST_BIN (self),
      g_object_ref (port_data->input_capsfilter),
      g_object_ref (port_data->videoconvert),
      g_object_ref (port_data->videoscale),
      g_object_ref (port_data->capsfilter), g_object_ref (port_data->videorate),
      g_object_ref (port_data->queue), NULL);

  kms_base_hub_unlink_video_src (KMS_BASE_HUB (self), port_data->id);

  KMS_COMPOSITE_MIXER_UNLOCK (self);

  gst_element_set_state (port_data->input_capsfilter, GST_STATE_NULL);
  gst_element_set_state (port_data->videoconvert, GST_STATE_NULL);
  gst_element_set_state (port_data->videoscale, GST_STATE_NULL);
  gst_element_set_state (port_data->videorate, GST_STATE_NULL);
  gst_element_set_state (port_data->capsfilter, GST_STATE_NULL);
  gst_element_set_state (port_data->queue, GST_STATE_NULL);

  g_object_unref (port_data->input_capsfilter);
  g_object_unref (port_data->videoconvert);
  g_object_unref (port_data->videoscale);
  g_object_unref (port_data->videorate);
  g_object_unref (port_data->capsfilter);
  g_object_unref (port_data->queue);

  port_data->videoconvert_sink_pad = NULL;
  port_data->input_capsfilter = NULL;
  port_data->videoconvert = NULL;
  port_data->videoscale = NULL;
  port_data->capsfilter = NULL;
  port_data->videorate = NULL;
  port_data->queue = NULL;

  return G_SOURCE_REMOVE;
}
Example #8
static void
fps_display_sink_stop (GstFPSDisplaySink * self)
{
  if (self->text_overlay) {
    gst_element_unlink (self->text_overlay, self->video_sink);
    gst_bin_remove (GST_BIN (self), self->text_overlay);
    gst_object_unref (self->text_overlay);
    self->text_overlay = NULL;
  } else {
    /* print the max and minimum fps values */
    g_print ("Max-fps: %0.2f\nMin-fps: %0.2f\n", self->max_fps, self->min_fps);
  }
}
Example #9
GstPadProbeReturn MediaPlayer::cb_event_probe_toggle_splitter(GstPad *pad, GstPadProbeInfo *info, gpointer user_data)
{
    MediaPlayer * self = reinterpret_cast<MediaPlayer*>( user_data );

    // remove the probe first
    gst_pad_remove_probe( pad, GST_PAD_PROBE_INFO_ID (info) );

    // Is the element already in the bin?
    if ( self->m_gst_audio_karaokesplitter == 0 )
    {
        Logger::debug( "GstMediaPlayer:  karaokesplitter is not enabled, enabling");
        self->m_gst_audio_karaokesplitter = self->createElement ("audiokaraoke", "karaoke", false );

        // This might happen if the player requested it despite us returning no such capability
        if ( !self->m_gst_audio_karaokesplitter )
            return GST_PAD_PROBE_OK;

        // Add splitter into the bin
        gst_bin_add( GST_BIN (self->m_gst_pipeline), self->m_gst_audio_karaokesplitter );

        // Unlink the place for the splitter
        gst_element_unlink( self->m_gst_audioconverter, self->m_gst_audio_volume );

        // Link it in
        gst_element_link_many( self->m_gst_audioconverter, self->m_gst_audio_karaokesplitter, self->m_gst_audio_volume, NULL );

        // And start playing it
        gst_element_set_state( self->m_gst_audio_karaokesplitter, GST_STATE_PLAYING );

        Logger::debug( "GstMediaPlayer: karaoke splitter enabled");
    }
    else
    {
        Logger::debug( "GstMediaPlayer: karaokesplitter is enabled, disabling");

        // Stop the splitter
        gst_element_set_state( self->m_gst_audio_karaokesplitter, GST_STATE_NULL );

        // Remove splitter from the bin (it unlinks it too)
        gst_bin_remove( GST_BIN (self->m_gst_pipeline), self->m_gst_audio_karaokesplitter );
        self->m_gst_audio_karaokesplitter = 0;

        // And link the disconnected elements again
        gst_element_link_many( self->m_gst_audioconverter, self->m_gst_audio_volume, NULL );

        Logger::debug( "GstMediaPlayer: karaoke splitter disabled");
    }

    return GST_PAD_PROBE_OK;
}
Example #10
static void
fps_display_sink_start (GstFPSDisplaySink * self)
{
  GstPad *target_pad = NULL;

  /* Init counters */
  self->next_ts = GST_CLOCK_TIME_NONE;
  self->last_ts = GST_CLOCK_TIME_NONE;
  self->frames_rendered = G_GUINT64_CONSTANT (0);
  self->frames_dropped = G_GUINT64_CONSTANT (0);

  GST_DEBUG_OBJECT (self, "Use text-overlay? %d", self->use_text_overlay);

  if (self->use_text_overlay) {
    if (!self->text_overlay) {
      self->text_overlay =
          gst_element_factory_make ("textoverlay", "fps-display-text-overlay");
      if (!self->text_overlay) {
        GST_WARNING_OBJECT (self, "text-overlay element could not be created");
        self->use_text_overlay = FALSE;
        goto no_text_overlay;
      }
      gst_object_ref (self->text_overlay);
      g_object_set (self->text_overlay,
          "font-desc", DEFAULT_FONT, "silent", FALSE, NULL);
      gst_bin_add (GST_BIN (self), self->text_overlay);

      if (!gst_element_link (self->text_overlay, self->video_sink)) {
        GST_ERROR_OBJECT (self, "Could not link elements");
      }
    }
    target_pad = gst_element_get_static_pad (self->text_overlay, "video_sink");
  }
no_text_overlay:
  if (!self->use_text_overlay) {
    if (self->text_overlay) {
      gst_element_unlink (self->text_overlay, self->video_sink);
      gst_bin_remove (GST_BIN (self), self->text_overlay);
      self->text_overlay = NULL;
    }
    target_pad = gst_element_get_static_pad (self->video_sink, "sink");
  }
  gst_ghost_pad_set_target (GST_GHOST_PAD (self->ghost_pad), target_pad);
  gst_object_unref (target_pad);

  /* Set a timeout for the fps display */
  self->timeout_id =
      g_timeout_add (FPS_DISPLAY_INTERVAL_MS,
      display_current_fps, (gpointer) self);
}
static void hotLinkingToggleSub (GtkWidget *widget, GstElement* pipeline)
{
    /*
     * toggle subtitles
     *
     * Could have been done simply by toggling the "silent" property of subtitleOverlay cf toggleSub(...)
     * I just wanted to see how to hotlink elements ^^
     */

    GstElement *subParser = gst_bin_get_by_name(GST_BIN (pipeline), "sub-parser");
    GstElement *subOverlay = gst_bin_get_by_name(GST_BIN (pipeline), "sub-overlay");
    GstElement *videoSink = gst_bin_get_by_name(GST_BIN (pipeline), "video-output");
    GstElement *videoDecoder= gst_bin_get_by_name(GST_BIN (pipeline), "theora-decoder");

    if (hotlinkSubState==true) // subtitles enabled => need to disable them
    {
        gst_element_unlink(subParser, subOverlay);
        gst_element_unlink(videoDecoder, subOverlay);
        gst_element_unlink(subOverlay, videoSink);
        gst_element_link(videoDecoder, videoSink);

        g_print("Subtitles disabled (Hotlinking Method)\n");
        hotlinkSubState=false;
        return;
    }
    else // subtitles disabled => need to enable them
    {
        gst_element_unlink(videoDecoder, videoSink);
        gst_element_link(subParser, subOverlay);
        gst_element_link(videoDecoder, subOverlay);
        gst_element_link(subOverlay, videoSink);
        g_print("Subtitles enabled (Hotlinking Method)\n");
        hotlinkSubState=true;
        return;
    }
}
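For comparison, the "silent"-property approach mentioned in the comment above needs
no hotlinking at all. A hypothetical sketch (not the project's actual toggleSub),
assuming sub-overlay is a textoverlay-style element exposing a gboolean "silent"
property, as the comment implies:

static void toggleSubViaProperty (GtkWidget *widget, GstElement *pipeline)
{
    GstElement *subOverlay = gst_bin_get_by_name(GST_BIN (pipeline), "sub-overlay");
    gboolean silent;

    /* flip the overlay's "silent" flag instead of unlinking/relinking */
    g_object_get(subOverlay, "silent", &silent, NULL);
    g_object_set(subOverlay, "silent", !silent, NULL);

    /* gst_bin_get_by_name() returns a new reference */
    gst_object_unref(subOverlay);
}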
static void
destroy_pad (PadInfos * infos)
{
  if (G_LIKELY (infos->bin)) {
    gst_element_set_state (infos->bin, GST_STATE_NULL);
    gst_element_unlink (infos->bin, infos->self->adder);
    gst_bin_remove (GST_BIN (infos->self), infos->bin);
  }

  if (infos->adder_pad) {
    gst_element_release_request_pad (infos->self->adder, infos->adder_pad);
    gst_object_unref (infos->adder_pad);
  }
  g_slice_free (PadInfos, infos);
}
void AudioSourceProviderGStreamer::handleRemovedDeinterleavePad(GstPad* pad)
{
    m_deinterleaveSourcePads--;

    // Remove the queue ! appsink chain downstream of deinterleave.
    GQuark quark = g_quark_from_static_string("peer");
    GstPad* sinkPad = reinterpret_cast<GstPad*>(g_object_get_qdata(G_OBJECT(pad), quark));
    GRefPtr<GstElement> queue = adoptGRef(gst_pad_get_parent_element(sinkPad));
    GRefPtr<GstPad> queueSrcPad = adoptGRef(gst_element_get_static_pad(queue.get(), "src"));
    GRefPtr<GstPad> appsinkSinkPad = adoptGRef(gst_pad_get_peer(queueSrcPad.get()));
    GRefPtr<GstElement> sink = adoptGRef(gst_pad_get_parent_element(appsinkSinkPad.get()));
    gst_element_set_state(sink.get(), GST_STATE_NULL);
    gst_element_set_state(queue.get(), GST_STATE_NULL);
    gst_element_unlink(queue.get(), sink.get());
    gst_bin_remove_many(GST_BIN(m_audioSinkBin.get()), queue.get(), sink.get(), nullptr);
}
Example #14
/*
 Changes webcam's sink
*/
static void
acam_webcam_change_sink (acam_webcam_device_s *acam_webcam_device, GstElement *src, GstElement *new_sink, GstElement *old_sink)
{
	/* Stop webcam's video */
	acam_webcam_device_stop (acam_webcam_device);

	gst_element_unlink (src, old_sink);
	gst_object_ref (old_sink);
	gst_bin_remove (GST_BIN (acam_webcam_device->video_pipeline), old_sink);

	gst_bin_add (GST_BIN (acam_webcam_device->video_pipeline), new_sink);
	gst_element_link (src, new_sink);

	/* Play webcam's video */
	acam_webcam_device_play (acam_webcam_device);
}
Example #15
static void
fps_display_sink_stop (GstFPSDisplaySink * self)
{
  /* remove the timeout */
  if (self->timeout_id) {
    g_source_remove (self->timeout_id);
    self->timeout_id = 0;
  }

  if (self->text_overlay) {
    gst_element_unlink (self->text_overlay, self->video_sink);
    gst_bin_remove (GST_BIN (self), self->text_overlay);
    gst_object_unref (self->text_overlay);
    self->text_overlay = NULL;
  }
}
Example #16
void test_element_unlink()
{
  GstElement *src, *sink;
  
  xmlfile = "gstutils_test_element_unlink";
  std_log(LOG_FILENAME_LINE, "Test Started gstutils_test_element_unlink");

  src = gst_element_factory_make ("fakesrc", NULL);
  sink = gst_element_factory_make ("fakesink", NULL);
  fail_unless (gst_element_link (src, sink) != FALSE);
  gst_element_unlink (src, sink);
  gst_object_unref (src);
  gst_object_unref (sink);
  
  std_log(LOG_FILENAME_LINE, "Test Successful");
  create_xml(0);
}
static void
uridecodebin_pad_removed_cb (GstElement * uridecodebin, GstPad * pad,
    GstDiscoverer * dc)
{
  GList *tmp;
  PrivateStream *ps;
  GstPad *sinkpad;

  GST_DEBUG_OBJECT (dc, "pad %s:%s", GST_DEBUG_PAD_NAME (pad));

  /* Find the PrivateStream */
  DISCO_LOCK (dc);
  for (tmp = dc->priv->streams; tmp; tmp = tmp->next) {
    ps = (PrivateStream *) tmp->data;
    if (ps->pad == pad)
      break;
  }

  if (tmp == NULL) {
    DISCO_UNLOCK (dc);
    GST_DEBUG ("The removed pad wasn't controlled by us !");
    return;
  }

  dc->priv->streams = g_list_delete_link (dc->priv->streams, tmp);
  DISCO_UNLOCK (dc);

  gst_element_set_state (ps->sink, GST_STATE_NULL);
  gst_element_set_state (ps->queue, GST_STATE_NULL);
  gst_element_unlink (ps->queue, ps->sink);

  sinkpad = gst_element_get_static_pad (ps->queue, "sink");
  gst_pad_unlink (pad, sinkpad);
  gst_object_unref (sinkpad);

  /* references removed here */
  gst_bin_remove_many (dc->priv->pipeline, ps->sink, ps->queue, NULL);

  if (ps->tags) {
    gst_tag_list_free (ps->tags);
  }

  g_slice_free (PrivateStream, ps);

  GST_DEBUG ("Done handling pad");
}
Example #18
static gboolean
remove_elements_from_pipeline (KmsCompositeMixerData * port_data)
{
  KmsCompositeMixer *self = port_data->mixer;

  KMS_COMPOSITE_MIXER_LOCK (self);

  gst_element_unlink (port_data->capsfilter, self->priv->videomixer);

  if (port_data->latency_probe_id > 0) {
    gst_pad_remove_probe (port_data->video_mixer_pad,
        port_data->latency_probe_id);
    port_data->latency_probe_id = 0;
  }

  if (port_data->video_mixer_pad != NULL) {
    gst_element_release_request_pad (self->priv->videomixer,
        port_data->video_mixer_pad);
    g_object_unref (port_data->video_mixer_pad);
    port_data->video_mixer_pad = NULL;
  }

  gst_bin_remove_many (GST_BIN (self),
      g_object_ref (port_data->capsfilter),
      g_object_ref (port_data->tee), g_object_ref (port_data->fakesink), NULL);

  kms_base_hub_unlink_video_src (KMS_BASE_HUB (self), port_data->id);

  KMS_COMPOSITE_MIXER_UNLOCK (self);

  gst_element_set_state (port_data->capsfilter, GST_STATE_NULL);
  gst_element_set_state (port_data->tee, GST_STATE_NULL);
  gst_element_set_state (port_data->fakesink, GST_STATE_NULL);

  g_object_unref (port_data->capsfilter);
  g_object_unref (port_data->tee);
  g_object_unref (port_data->fakesink);
  g_object_unref (port_data->tee_sink_pad);

  port_data->tee_sink_pad = NULL;
  port_data->capsfilter = NULL;
  port_data->tee = NULL;
  port_data->fakesink = NULL;

  return G_SOURCE_REMOVE;
}
Example #19
void QGstreamerPlayerSession::setVideoRenderer(QObject *videoOutput)
{
    QGstreamerVideoRendererInterface* renderer = qobject_cast<QGstreamerVideoRendererInterface*>(videoOutput);

    if (m_renderer == renderer)
        return;

#ifdef DEBUG_VO_BIN_DUMP
    dumpNum++;

    _gst_debug_bin_to_dot_file(GST_BIN(m_videoOutputBin),
                                  GstDebugGraphDetails(GST_DEBUG_GRAPH_SHOW_ALL /* GST_DEBUG_GRAPH_SHOW_MEDIA_TYPE | GST_DEBUG_GRAPH_SHOW_NON_DEFAULT_PARAMS | GST_DEBUG_GRAPH_SHOW_STATES*/),
                                  QString("video_output_change_%1_set").arg(dumpNum).toAscii().constData());
#endif

    m_renderer = renderer;

    GstElement *videoSink = m_renderer ? m_renderer->videoSink() : m_nullVideoSink;

    if (m_state == QMediaPlayer::StoppedState) {
        m_pendingVideoSink = 0;
        gst_element_unlink(m_videoScale, m_videoSink);

        gst_bin_remove(GST_BIN(m_videoOutputBin), m_videoSink);

        m_videoSink = videoSink;

        gst_bin_add(GST_BIN(m_videoOutputBin), m_videoSink);
        gst_element_link(m_videoScale, m_videoSink);

    } else {
        if (m_pendingVideoSink) {
            m_pendingVideoSink = videoSink;
            return;
        }

        m_pendingVideoSink = videoSink;

        //block pads, async to avoid locking in paused state
        GstPad *srcPad = gst_element_get_static_pad(m_videoIdentity, "src");
        gst_pad_set_blocked_async(srcPad, true, &block_pad_cb, this);
        gst_object_unref(GST_OBJECT(srcPad));
    }
}
Example #20
// ----------------------------------------------------------------------------
// Process the "pad-removed" signal from uridecodebin
void
GStreamerImportFileHandle::OnPadRemoved(GstPad *pad)
{
   GStreamContext *c = GETCTX(pad);

   // Set audioconvert and appsink states to NULL
   gst_element_set_state(c->mSink, GST_STATE_NULL);
   gst_element_set_state(c->mConv, GST_STATE_NULL);

   // Unlink audioconvert -> appsink
   gst_element_unlink(c->mConv, c->mSink);

   // Remove the elements from the pipeline
   gst_bin_remove_many(GST_BIN(mPipeline), c->mConv, c->mSink, NULL);

   // And reset context
   c->mConv = NULL;
   c->mSink = NULL;

   return;
}
Example #21
static void 
cb_change_track(gpointer *mdata)
{
    struct srcbin_pad_struct *cbs;
    GstElement *ns;

    cbs = (struct srcbin_pad_struct *) mdata;
    /* first get rid of the old element */
    gst_element_set_state(cbs->mbin, GST_STATE_READY);
    gst_element_unlink(cbs->srcelem, cbs->decelem);
    gst_element_set_state(cbs->srcelem, GST_STATE_NULL);
    gst_bin_remove(GST_BIN(cbs->mbin),cbs->srcelem);

    /* allocate the new element and restart
     * the whole bin 
     */ 
    cbs->srcelem = get_file_source(NULL);
    gst_bin_add(GST_BIN(cbs->mbin), cbs->srcelem);
    gst_element_link(cbs->srcelem, cbs->decelem);
    gst_element_set_state(cbs->mbin, GST_STATE_PLAYING);

}
Example #22
void PlayerGst::setLink(int l, QUrl &url)
{
	GstElement *audio = gst_bin_get_by_name(GST_BIN(pipeline), "audiobin");
	GstElement *dec = gst_bin_get_by_name(GST_BIN(pipeline), "decoder");
	GstElement *l_src = gst_bin_get_by_name(GST_BIN(pipeline), "localsrc");
	GstElement *http_src = gst_bin_get_by_name(GST_BIN(pipeline), "httpsrc");
	GstElement *playbin;
	if(canUsePlaybin) playbin = gst_bin_get_by_name(GST_BIN(pipeline), "playbin");

	if(l != link) {
		switch(link) {
		case 2: // http
			gst_element_unlink (http_src, dec);
			gst_element_set_state (http_src, GST_STATE_NULL);
			gst_element_set_locked_state (http_src, TRUE);
			break;
		case 1: // file
			if(canUsePlaybin) {
				gst_element_set_state(playbin, GST_STATE_NULL);
				gst_element_set_locked_state(playbin, TRUE);
				usePlaybin = false;
				gst_element_set_locked_state(audio, FALSE);
				gst_element_set_locked_state(dec, FALSE);
			} else {
				gst_element_unlink (l_src, dec);
				gst_element_set_state (l_src, GST_STATE_NULL);
				gst_element_set_locked_state (l_src, TRUE);
			}
			break;
		case 0:
		default:
			{}
		}
		switch(l) {
		case 2: // http
			//g_object_set (G_OBJECT (http_src), "location", (const char*)url.toString().toLocal8Bit(), NULL);
			gst_element_link (http_src, dec);
			gst_element_set_locked_state (http_src, FALSE);
			break;
		case 1: // file
			if(canUsePlaybin) {
				gst_element_set_locked_state(playbin, FALSE);
				usePlaybin = true;
				gst_element_set_state(audio, GST_STATE_NULL);
				gst_element_set_locked_state(audio, TRUE);
				gst_element_set_state(dec, GST_STATE_NULL);
				gst_element_set_locked_state(dec, TRUE);
			} else {
				//g_object_set (G_OBJECT (l_src), "location", (const char*)url.toLocalFile().toLocal8Bit(), NULL);
				gst_element_link (l_src, dec);
				gst_element_set_locked_state (l_src, FALSE);
			}
			break;
		case 0:
		default:
			{}
		}
		link = l;
	}
	switch(link) {
	case 2: {// http
		g_object_set (G_OBJECT (http_src), "location", (const char*)url.toEncoded(), NULL);
		QString proxyStr;
		// if(proxy->hasVariable("proxyEnabled") && proxy->getVariable("proxyEnabled") == "true") 
		// 	proxyStr = proxy->getVariable("proxyHost") + ":" + proxy->getVariable("proxyPort");
		// g_object_set (G_OBJECT (http_src), "proxy", (const char*)proxyStr.toLocal8Bit(), NULL);
		break;
	}
	case 1: // file
		if(canUsePlaybin) {
			g_object_set (playbin, "uri", (const char*)ToLocalFile(url).toLocal8Bit(), NULL);
		} else {
			g_object_set (G_OBJECT (l_src), "location", (const char*)ToLocalFile(url).toLocal8Bit(), NULL);
		}
		break;
	case 0:
	default:
		{}
	}
	if(canUsePlaybin) gst_object_unref(playbin);
	gst_object_unref(l_src);
	gst_object_unref(http_src);
	gst_object_unref(dec);
	gst_object_unref(audio);
}
Example #23
// ----------------------------------------------------------------------------
// Import streams
int
GStreamerImportFileHandle::Import(TrackFactory *trackFactory,
                                  Track ***outTracks,
                                  int *outNumTracks,
                                  Tags *tags)
{
   // Save track factory pointer
   mTrackFactory = trackFactory;

   // Create the progress dialog
   CreateProgress();

   // Block streams that are to be bypassed
   g_mutex_lock(&mStreamsLock);
   bool haveStreams = false;
   for (guint i = 0; i < mStreams->len; i++)
   {
      GStreamContext *c = (GStreamContext *) g_ptr_array_index(mStreams, i);

      // Did the user choose to skip this stream?
      if (!c->mUse)
      {
         // Get the audioconvert sink pad and unlink
         GstPad *convsink = gst_element_get_static_pad(c->mConv, "sink");
         GstPad *convpeer = gst_pad_get_peer(convsink);
         gst_pad_unlink(convpeer, convsink);
         gst_object_unref(convpeer);

         // Set bitbucket callbacks so the prerolled sample won't get processed
         // when we change the state to PLAYING
         gst_app_sink_set_callbacks(GST_APP_SINK(c->mSink), &AppSinkBitBucket, this, NULL);

         // Set state to playing for conv and sink so EOS gets processed
         gst_element_set_state(c->mConv, GST_STATE_PLAYING);
         gst_element_set_state(c->mSink, GST_STATE_PLAYING);

         // Send an EOS event to the pad to force them to drain
         gst_pad_send_event(convsink, gst_event_new_eos());

         // Resync state with pipeline
         gst_element_sync_state_with_parent(c->mConv);
         gst_element_sync_state_with_parent(c->mSink);

         // Done with the pad
         gst_object_unref(convsink);

         // Unlink audioconvert and appsink
         gst_element_unlink(c->mConv, c->mSink);

         // Remove them from the bin
         gst_bin_remove_many(GST_BIN(mPipeline), c->mConv, c->mSink, NULL);

         // All done with them
         c->mConv = NULL;
         c->mSink = NULL;

         continue;
      }

      // We have a stream to process
      haveStreams = true;
   }
   g_mutex_unlock(&mStreamsLock);

   // Can't do much if we don't have any streams to process
   if (!haveStreams)
   {
      wxMessageBox(wxT("File doesn't contain any audio streams."),
                   wxT("GStreamer Importer"));
      return eProgressFailed;
   }

   // Get the ball rolling...
   GstStateChangeReturn state = gst_element_set_state(mPipeline, GST_STATE_PLAYING);
   if (state == GST_STATE_CHANGE_FAILURE)
   {
      wxMessageBox(wxT("Unable to import file, state change failed."),
                   wxT("GStreamer Importer"));
      return eProgressFailed;
   }

   // Get the duration of the stream
   gint64 duration;
   gst_element_query_duration(mPipeline, GST_FORMAT_TIME, &duration);

   // Handle bus messages and update progress while the file is importing
   bool success = true;
   int updateResult = eProgressSuccess;
   while (ProcessBusMessage(success) && success && updateResult == eProgressSuccess)
   {
      gint64 position;

      // Update progress indicator and give user chance to abort
      if (gst_element_query_position(mPipeline, GST_FORMAT_TIME, &position))
      {
         updateResult = mProgress->Update((wxLongLong_t) position,
                                          (wxLongLong_t) duration);
      }
   }

   // Disable pipeline
   gst_element_set_state(mPipeline, GST_STATE_NULL);

   // Something bad happened
   if (!success || updateResult == eProgressFailed || updateResult == eProgressCancelled)
   {
      return updateResult;
   }

   // Grab the streams lock
   g_mutex_lock(&mStreamsLock);

   // Count the total number of tracks collected
   *outNumTracks = 0;
   for (guint s = 0; s < mStreams->len; s++)
   {
      GStreamContext *c = (GStreamContext*)g_ptr_array_index(mStreams, s);
      if (c->mChannels)
      {
         *outNumTracks += c->mNumChannels;
      }
   }

   // Create new tracks
   *outTracks = new Track *[*outNumTracks];

   // Copy audio from mChannels to newly created tracks (destroying mChannels in the process)
   int trackindex = 0;
   for (guint s = 0; s < mStreams->len; s++)
   {
      GStreamContext *c = (GStreamContext*)g_ptr_array_index(mStreams, s);
      if (c->mChannels)
      {
         for (int ch = 0; ch < c->mNumChannels; ch++)
         {
            c->mChannels[ch]->Flush();
            (*outTracks)[trackindex++] = c->mChannels[ch];
         }

         delete [] c->mChannels;
         c->mChannels = NULL;
      }
   }
   g_mutex_unlock(&mStreamsLock);

   // Set any tags found in the stream
   *tags = mTags;

   return updateResult;
}
void QGstreamerPlayerSession::setVideoRenderer(QObject *videoOutput)
{
    if (m_videoOutput != videoOutput) {
        if (m_videoOutput) {
            disconnect(m_videoOutput, SIGNAL(sinkChanged()),
                       this, SLOT(updateVideoRenderer()));
            disconnect(m_videoOutput, SIGNAL(readyChanged(bool)),
                   this, SLOT(updateVideoRenderer()));
        }

        if (videoOutput) {
            connect(videoOutput, SIGNAL(sinkChanged()),
                    this, SLOT(updateVideoRenderer()));
            connect(videoOutput, SIGNAL(readyChanged(bool)),
                   this, SLOT(updateVideoRenderer()));
        }

        m_videoOutput = videoOutput;
    }

    QGstreamerVideoRendererInterface* renderer = qobject_cast<QGstreamerVideoRendererInterface*>(videoOutput);   

    m_renderer = renderer;

#ifdef DEBUG_VO_BIN_DUMP
    dumpNum++;

    _gst_debug_bin_to_dot_file(GST_BIN(m_playbin),
                                  GstDebugGraphDetails(GST_DEBUG_GRAPH_SHOW_ALL /* GST_DEBUG_GRAPH_SHOW_MEDIA_TYPE | GST_DEBUG_GRAPH_SHOW_NON_DEFAULT_PARAMS | GST_DEBUG_GRAPH_SHOW_STATES*/),
                                  QString("playbin_%1_set").arg(dumpNum).toAscii().constData());
#endif

    GstElement *videoSink = m_renderer ? m_renderer->videoSink() : m_nullVideoSink;
    if (!videoSink)
        videoSink = m_nullVideoSink;

#ifdef DEBUG_PLAYBIN
    qDebug() << "Set video output:" << videoOutput;
    qDebug() << "Current sink:" << (m_videoSink ? GST_ELEMENT_NAME(m_videoSink) : "") <<  m_videoSink
             << "pending:" << (m_pendingVideoSink ? GST_ELEMENT_NAME(m_pendingVideoSink) : "") << m_pendingVideoSink
             << "new sink:" << (videoSink ? GST_ELEMENT_NAME(videoSink) : "") << videoSink;
#endif

    if (m_pendingVideoSink == videoSink ||
        (m_pendingVideoSink == 0 && m_videoSink == videoSink)) {
#ifdef DEBUG_PLAYBIN
        qDebug() << "Video sink has not changed, skip video output reconfiguration";
#endif
        return;
    }

#ifdef DEBUG_PLAYBIN
    qDebug() << "Reconfigure video output";
#endif

    if (m_state == QMediaPlayer::StoppedState) {
#ifdef DEBUG_PLAYBIN
        qDebug() << "The pipeline has not started yet, pending state:" << m_pendingState;
#endif
        //the pipeline has not started yet
        m_pendingVideoSink = 0;        
        gst_element_set_state(m_videoSink, GST_STATE_NULL);
        gst_element_set_state(m_playbin, GST_STATE_NULL);
        gst_element_unlink(m_videoIdentity, m_videoSink);

        gst_bin_remove(GST_BIN(m_videoOutputBin), m_videoSink);

        m_videoSink = videoSink;

        gst_bin_add(GST_BIN(m_videoOutputBin), m_videoSink);
        gst_element_link(m_videoIdentity, m_videoSink);

        switch (m_pendingState) {
        case QMediaPlayer::PausedState:
            gst_element_set_state(m_playbin, GST_STATE_PAUSED);
            break;
        case QMediaPlayer::PlayingState:
            gst_element_set_state(m_playbin, GST_STATE_PLAYING);
            break;
        default:
            break;
        }
    } else {
        if (m_pendingVideoSink) {
#ifdef DEBUG_PLAYBIN
            qDebug() << "already waiting for pad to be blocked, just change the pending sink";
#endif
            m_pendingVideoSink = videoSink;
            return;
        }

        m_pendingVideoSink = videoSink;

#ifdef DEBUG_PLAYBIN
        qDebug() << "Blocking the video output pad...";
#endif

        {
#ifdef DEBUG_PLAYBIN
            qDebug() << "send the last new segment event to the video output...";
#endif
            GstEvent *event = gst_event_new_new_segment(TRUE,
                                                        m_segment.rate,
                                                        m_segment.format,
                                                        m_segment.last_stop, //start
                                                        m_segment.stop,
                                                        m_segment.last_stop);//position

            GstPad *pad = gst_element_get_static_pad(videoSink, "sink");
            //gst_pad_send_event(pad, m_lastSegmentEvent);
            gst_pad_send_event(pad, event);
            gst_object_unref(GST_OBJECT(pad));
        }

        //block pads, async to avoid locking in paused state
        GstPad *srcPad = gst_element_get_static_pad(m_videoIdentity, "src");
        gst_pad_set_blocked_async(srcPad, true, &block_pad_cb, this);
        gst_object_unref(GST_OBJECT(srcPad));
    }
}
Example #25
static void
create_pipeline (void)
{
  GstElement *src;
  SineSrc *sinesrc;
  GstElement *alsasink;

  pipeline = gst_pipeline_new ("pipeline");
  src = sinesrc_new ();
  alsasink = gst_element_factory_make ("alsasink", "alsasink");

  gst_bin_add_many (GST_BIN (pipeline), src, alsasink, NULL);
  gst_element_link (src, alsasink);

  /* prepare our sinesrc */
  sinesrc = (SineSrc *) src;
  sinesrc->pre_get_func = pre_get_func;
  sinesrc->newcaps = TRUE;
  /* int tests */
  if (last < NUMBER_OF_INT_TESTS) {
    sinesrc->type = SINE_SRC_INT;
    sinesrc->sign = ((last % 2) == 0) ? TRUE : FALSE;
    sinesrc->endianness =
        ((last / 2) % 2 == 0) ? G_LITTLE_ENDIAN : G_BIG_ENDIAN;
    switch ((last / 4) % 8) {
      case 0:
        sinesrc->depth = 8;
        sinesrc->width = 8;
        break;
      case 1:
        sinesrc->depth = 16;
        sinesrc->width = 16;
        break;
      case 2:
        sinesrc->depth = 24;
        sinesrc->width = 32;
        break;
      case 3:
        sinesrc->depth = 32;
        sinesrc->width = 32;
        break;
        /* no more tests below until I know what 24-bit width means to alsa wrt endianness */
      case 4:
        sinesrc->depth = 24;
        sinesrc->width = 24;
        break;
      case 5:
        sinesrc->depth = 20;
        sinesrc->width = 24;
        break;
      case 6:
        sinesrc->depth = 18;
        sinesrc->width = 24;
        break;
      case 7:
        /* not used yet */
        sinesrc->depth = 8;
        sinesrc->width = 8;
        break;
      default:
        g_assert_not_reached ();
    }

    g_print ("Setting format to: format:     \"int\"\n"
        "                   sign:       %s\n"
        "                   endianness: %d\n"
        "                   width:      %d\n"
        "                   depth:      %d\n",
        sinesrc->sign ? "TRUE" : "FALSE", sinesrc->endianness,
        sinesrc->width, sinesrc->depth);
  } else if (last < NUMBER_OF_INT_TESTS + NUMBER_OF_FLOAT_TESTS) {
    gint temp = last - NUMBER_OF_INT_TESTS;

    sinesrc->type = SINE_SRC_FLOAT;
    switch (temp) {
      case 0:
        sinesrc->width = 32;
        break;
      case 1:
        sinesrc->width = 64;
        break;
      default:
        g_assert_not_reached ();
    }
    g_print ("Setting format to float width %d\n", sinesrc->width);
  } else if (last <
      NUMBER_OF_INT_TESTS + NUMBER_OF_FLOAT_TESTS + NUMBER_OF_LAW_TESTS) {
    gint temp = last - NUMBER_OF_INT_TESTS - NUMBER_OF_FLOAT_TESTS;
    GstElement *law;

    sinesrc->type = SINE_SRC_INT;
    sinesrc->sign = TRUE;
    sinesrc->endianness = G_BYTE_ORDER;
    sinesrc->depth = 16;
    sinesrc->width = 16;

    if (temp == 0) {
      law = gst_element_factory_make ("mulawenc", "mulaw");
    } else {
      law = gst_element_factory_make ("alawenc", "alaw");
    }
    g_assert (law);
    gst_element_unlink (src, alsasink);
    gst_bin_add (GST_BIN (pipeline), law);
    gst_element_link_many (src, law, alsasink, NULL);
    if (temp == 0) {
      g_print ("Setting format to: format:     \"MU law\"\n");
    } else {
      g_print ("Setting format to: format:     \"A law\"\n");
    }
  } else {
    g_print ("All formats work like a charm.\n");
    exit (0);
  }
  gst_element_set_state (pipeline, GST_STATE_PLAYING);
}
Example #26
static void
kms_composite_mixer_port_data_destroy (gpointer data)
{
  KmsCompositeMixerData *port_data = (KmsCompositeMixerData *) data;
  KmsCompositeMixer *self = port_data->mixer;
  GstPad *audiosink;
  gchar *padname;

  KMS_COMPOSITE_MIXER_LOCK (self);

  port_data->removing = TRUE;

  kms_base_hub_unlink_video_sink (KMS_BASE_HUB (self), port_data->id);
  kms_base_hub_unlink_audio_sink (KMS_BASE_HUB (self), port_data->id);

  if (port_data->input) {
    GstEvent *event;
    gboolean result;
    GstPad *pad;

    if (port_data->capsfilter == NULL) {
      KMS_COMPOSITE_MIXER_UNLOCK (self);
      return;
    }

    pad = gst_element_get_static_pad (port_data->capsfilter, "sink");

    if (pad == NULL) {
      KMS_COMPOSITE_MIXER_UNLOCK (self);
      return;
    }

    if (!GST_OBJECT_FLAG_IS_SET (pad, GST_PAD_FLAG_EOS)) {

      if (GST_PAD_IS_FLUSHING (pad)) {
        gst_pad_send_event (pad, gst_event_new_flush_stop (FALSE));
      }

      event = gst_event_new_eos ();
      result = gst_pad_send_event (pad, event);

      if (port_data->input && self->priv->n_elems > 0) {
        port_data->input = FALSE;
        self->priv->n_elems--;
        kms_composite_mixer_recalculate_sizes (self);
      }
      KMS_COMPOSITE_MIXER_UNLOCK (self);

      if (!result) {
        GST_WARNING ("EOS event did not send");
      }
    } else {
      gboolean remove = FALSE;

      /* EOS callback was triggered before we could remove the port data */
      /* so we have to remove elements to avoid memory leaks. */
      remove = port_data->eos_managed;
      KMS_COMPOSITE_MIXER_UNLOCK (self);

      if (remove) {
        /* Remove pipeline without holding the mutex */
        kms_loop_idle_add_full (self->priv->loop, G_PRIORITY_DEFAULT,
            (GSourceFunc) remove_elements_from_pipeline,
            KMS_COMPOSITE_MIXER_REF (port_data),
            (GDestroyNotify) kms_ref_struct_unref);
      }
    }
    gst_element_unlink (port_data->capsfilter, port_data->tee);
    g_object_unref (pad);
  } else {
    if (port_data->probe_id > 0) {
      gst_pad_remove_probe (port_data->video_mixer_pad, port_data->probe_id);
    }

    if (port_data->latency_probe_id > 0) {
      gst_pad_remove_probe (port_data->video_mixer_pad,
          port_data->latency_probe_id);
    }

    if (port_data->link_probe_id > 0) {
      gst_pad_remove_probe (port_data->tee_sink_pad, port_data->link_probe_id);
    }
    KMS_COMPOSITE_MIXER_UNLOCK (self);

    gst_element_unlink (port_data->capsfilter, port_data->tee);
    gst_element_unlink (port_data->tee, port_data->fakesink);

    gst_bin_remove (GST_BIN (self), g_object_ref (port_data->capsfilter));
    gst_element_set_state (port_data->capsfilter, GST_STATE_NULL);
    g_object_unref (port_data->capsfilter);
    port_data->capsfilter = NULL;

    gst_bin_remove (GST_BIN (self), g_object_ref (port_data->tee));
    gst_element_set_state (port_data->tee, GST_STATE_NULL);
    g_object_unref (port_data->tee);
    port_data->tee = NULL;

    gst_bin_remove (GST_BIN (self), g_object_ref (port_data->fakesink));
    gst_element_set_state (port_data->fakesink, GST_STATE_NULL);
    g_object_unref (port_data->fakesink);
    port_data->fakesink = NULL;
  }

  padname = g_strdup_printf (AUDIO_SINK_PAD, port_data->id);
  audiosink = gst_element_get_static_pad (self->priv->audiomixer, padname);
  gst_element_release_request_pad (self->priv->audiomixer, audiosink);
  gst_object_unref (audiosink);
  g_free (padname);
}
static void
kms_alpha_blending_port_data_destroy (KmsAlphaBlendingData * port_data)
{
  KmsAlphaBlending *self = port_data->mixer;
  GstPad *audiosink;
  gchar *padname;

  KMS_ALPHA_BLENDING_LOCK (self);

  port_data->removing = TRUE;

  kms_base_hub_unlink_video_sink (KMS_BASE_HUB (self), port_data->id);
  kms_base_hub_unlink_audio_sink (KMS_BASE_HUB (self), port_data->id);

  if (port_data->input) {
    GstEvent *event;
    gboolean result;
    GstPad *pad;

    if (port_data->videorate == NULL) {
      KMS_ALPHA_BLENDING_UNLOCK (self);
      return;
    }

    pad = gst_element_get_static_pad (port_data->videorate, "sink");

    if (pad == NULL) {
      KMS_ALPHA_BLENDING_UNLOCK (self);
      return;
    }

    if (!GST_OBJECT_FLAG_IS_SET (pad, GST_PAD_FLAG_EOS)) {

      event = gst_event_new_eos ();
      result = gst_pad_send_event (pad, event);

      if (port_data->input && self->priv->n_elems > 0) {
        port_data->input = FALSE;
        self->priv->n_elems--;
      }

      if (!result) {
        GST_WARNING ("EOS event did not send");
      }

      gst_element_unlink (port_data->videoconvert, port_data->videorate);
      g_object_unref (pad);

      KMS_ALPHA_BLENDING_UNLOCK (self);
    } else {
      gboolean remove = FALSE;

      /* EOS callback was triggered before we could remove the port data */
      /* so we have to remove elements to avoid memory leaks. */
      remove = port_data->eos_managed;

      gst_element_unlink (port_data->videoconvert, port_data->videorate);
      g_object_unref (pad);

      KMS_ALPHA_BLENDING_UNLOCK (self);

      if (remove) {
        /* Remove pipeline without holding the mutex */
        kms_loop_idle_add_full (self->priv->loop, G_PRIORITY_DEFAULT,
            (GSourceFunc) remove_elements_from_pipeline,
            KMS_ALPHA_BLENDING_REF (port_data),
            (GDestroyNotify) kms_ref_struct_unref);
      }
    }
  } else {
    GstElement *videoconvert;

    videoconvert = g_object_ref (port_data->videoconvert);
    port_data->videoconvert = NULL;

    if (port_data->probe_id > 0) {
      gst_pad_remove_probe (port_data->video_mixer_pad, port_data->probe_id);
    }

    if (port_data->link_probe_id > 0) {
      gst_pad_remove_probe (port_data->videoconvert_sink_pad,
          port_data->link_probe_id);
    }
    KMS_ALPHA_BLENDING_UNLOCK (self);

    gst_bin_remove (GST_BIN (self), videoconvert);
    gst_element_set_state (videoconvert, GST_STATE_NULL);
    g_object_unref (videoconvert);
  }

  padname = g_strdup_printf (AUDIO_SINK_PAD, port_data->id);
  audiosink = gst_element_get_static_pad (self->priv->audiomixer, padname);

  gst_element_release_request_pad (self->priv->audiomixer, audiosink);

  gst_object_unref (audiosink);
  g_free (padname);

  KMS_ALPHA_BLENDING_UNREF (port_data);
}
/**
 * gst_wrapper_camera_bin_src_construct_pipeline:
 * @bcamsrc: camerasrc object
 *
 * This function creates and links the elements of the camerasrc bin
 * videosrc ! cspconv ! srcfilter ! cspconv ! capsfilter ! crop ! scale ! \
 * capsfilter ! tee name=t
 *    t. ! ... (viewfinder pad)
 *    t. ! output-selector name=outsel
 *        outsel. ! (image pad)
 *        outsel. ! (video pad)
 *
 * Returns: TRUE, if elements were successfully created, FALSE otherwise
 */
static gboolean
gst_wrapper_camera_bin_src_construct_pipeline (GstBaseCameraSrc * bcamsrc)
{
  GstWrapperCameraBinSrc *self = GST_WRAPPER_CAMERA_BIN_SRC (bcamsrc);
  GstBin *cbin = GST_BIN (bcamsrc);
  GstElement *tee;
  GstElement *filter_csp;
  GstElement *src_csp;
  GstElement *capsfilter;
  gboolean ret = FALSE;
  GstPad *vf_pad;
  GstPad *tee_capture_pad;
  GstPad *src_caps_src_pad;

  /* checks and adds a new video src if needed */
  if (!check_and_replace_src (self))
    goto done;

  if (!self->elements_created) {

    GST_DEBUG_OBJECT (self, "constructing pipeline");

    if (!gst_camerabin_create_and_add_element (cbin, "videoconvert",
            "src-videoconvert"))
      goto done;

    if (self->app_vid_filter) {
      self->video_filter = gst_object_ref (self->app_vid_filter);

      if (!gst_camerabin_add_element (cbin, self->video_filter))
        goto done;
      if (!gst_camerabin_create_and_add_element (cbin, "videoconvert",
              "filter-videoconvert"))
        goto done;
    }

    if (!(self->src_filter =
            gst_camerabin_create_and_add_element (cbin, "capsfilter",
                "src-capsfilter")))
      goto done;

    /* attach to notify::caps on the first capsfilter and use a callback
     * to recalculate the zoom properties when these caps change and to
     * propagate the caps to the second capsfilter */
    src_caps_src_pad = gst_element_get_static_pad (self->src_filter, "src");
    g_signal_connect (src_caps_src_pad, "notify::caps",
        G_CALLBACK (gst_wrapper_camera_bin_src_caps_cb), self);
    gst_object_unref (src_caps_src_pad);

    if (!(self->src_zoom_crop =
            gst_camerabin_create_and_add_element (cbin, "videocrop",
                "zoom-crop")))
      goto done;
    if (!(self->src_zoom_scale =
            gst_camerabin_create_and_add_element (cbin, "videoscale",
                "zoom-scale")))
      goto done;
    if (!(self->src_zoom_filter =
            gst_camerabin_create_and_add_element (cbin, "capsfilter",
                "zoom-capsfilter")))
      goto done;

    if (!(tee =
            gst_camerabin_create_and_add_element (cbin, "tee",
                "camerasrc-tee")))
      goto done;

    /* viewfinder pad */
    vf_pad = gst_element_get_request_pad (tee, "src_%u");
    g_object_set (tee, "alloc-pad", vf_pad, NULL);
    gst_ghost_pad_set_target (GST_GHOST_PAD (self->vfsrc), vf_pad);
    gst_object_unref (vf_pad);

    /* image/video pad from tee */
    tee_capture_pad = gst_element_get_request_pad (tee, "src_%u");

    self->output_selector =
        gst_element_factory_make ("output-selector", "outsel");
    g_object_set (self->output_selector, "pad-negotiation-mode", 2, NULL);
    gst_bin_add (GST_BIN (self), self->output_selector);
    {
      GstPad *pad = gst_element_get_static_pad (self->output_selector, "sink");

      /* check return TODO */
      gst_pad_link (tee_capture_pad, pad);
      gst_object_unref (pad);
    }
    gst_object_unref (tee_capture_pad);

    /* Create the 2 output pads for video and image */
    self->outsel_vidpad =
        gst_element_get_request_pad (self->output_selector, "src_%u");
    self->outsel_imgpad =
        gst_element_get_request_pad (self->output_selector, "src_%u");

    g_assert (self->outsel_vidpad != NULL);
    g_assert (self->outsel_imgpad != NULL);

    gst_pad_add_probe (self->outsel_imgpad, GST_PAD_PROBE_TYPE_BUFFER,
        gst_wrapper_camera_bin_src_imgsrc_probe, gst_object_ref (self),
        gst_object_unref);
    gst_pad_add_probe (self->outsel_vidpad, GST_PAD_PROBE_TYPE_BUFFER,
        gst_wrapper_camera_bin_src_vidsrc_probe, gst_object_ref (self),
        gst_object_unref);
    gst_ghost_pad_set_target (GST_GHOST_PAD (self->imgsrc),
        self->outsel_imgpad);
    gst_ghost_pad_set_target (GST_GHOST_PAD (self->vidsrc),
        self->outsel_vidpad);

    if (bcamsrc->mode == MODE_IMAGE) {
      g_object_set (self->output_selector, "active-pad", self->outsel_imgpad,
          NULL);
    } else {
      g_object_set (self->output_selector, "active-pad", self->outsel_vidpad,
          NULL);
    }



    gst_pad_set_active (self->vfsrc, TRUE);
    gst_pad_set_active (self->imgsrc, TRUE);    /* XXX ??? */
    gst_pad_set_active (self->vidsrc, TRUE);    /* XXX ??? */
  }

  /* Do this even if pipeline is constructed */

  if (self->video_filter) {
    /* check if we need to replace the current one */
    if (self->video_filter != self->app_vid_filter) {
      gst_bin_remove (cbin, self->video_filter);
      gst_object_unref (self->video_filter);
      self->video_filter = NULL;
      filter_csp = gst_bin_get_by_name (cbin, "filter-videoconvert");
      gst_bin_remove (cbin, filter_csp);
      gst_object_unref (filter_csp);
      filter_csp = NULL;
    }
  }

  if (!self->video_filter) {
    if (self->app_vid_filter) {
      self->video_filter = gst_object_ref (self->app_vid_filter);
      filter_csp = gst_element_factory_make ("videoconvert",
          "filter-videoconvert");
      gst_bin_add_many (cbin, self->video_filter, filter_csp, NULL);
      src_csp = gst_bin_get_by_name (cbin, "src-videoconvert");
      capsfilter = gst_bin_get_by_name (cbin, "src-capsfilter");
      if (gst_pad_is_linked (gst_element_get_static_pad (src_csp, "src")))
        gst_element_unlink (src_csp, capsfilter);
      if (!gst_element_link_many (src_csp, self->video_filter, filter_csp,
              capsfilter, NULL))
        goto done;
    }
  }
  ret = TRUE;
  self->elements_created = TRUE;
done:
  return ret;
}
void QGstreamerPlayerSession::finishVideoOutputChange()
{
    if (!m_pendingVideoSink)
        return;

#ifdef DEBUG_PLAYBIN
    qDebug() << "finishVideoOutputChange" << m_pendingVideoSink;
#endif

    GstPad *srcPad = gst_element_get_static_pad(m_videoIdentity, "src");

    if (!gst_pad_is_blocked(srcPad)) {
        //pad is not blocked, it's possible to swap outputs only in the null state
        qWarning() << "Pad is not blocked yet, could not switch video sink";
        GstState identityElementState = GST_STATE_NULL;
        gst_element_get_state(m_videoIdentity, &identityElementState, NULL, GST_CLOCK_TIME_NONE);
        if (identityElementState != GST_STATE_NULL) {
            gst_object_unref(GST_OBJECT(srcPad));
            return; //can't change vo yet, received async call from the previous change
        }

    }

    if (m_pendingVideoSink == m_videoSink) {
        //video output was changed back to the current one,
        //no need to torment the pipeline, just unblock the pad
        if (gst_pad_is_blocked(srcPad))
            gst_pad_set_blocked_async(srcPad, false, &block_pad_cb, 0);

        m_pendingVideoSink = 0;
        gst_object_unref(GST_OBJECT(srcPad));
        return;
    }  

    gst_element_set_state(m_videoSink, GST_STATE_NULL);

    gst_element_unlink(m_videoIdentity, m_videoSink);

    gst_bin_remove(GST_BIN(m_videoOutputBin), m_videoSink);

    m_videoSink = m_pendingVideoSink;
    m_pendingVideoSink = 0;

    gst_bin_add(GST_BIN(m_videoOutputBin), m_videoSink);
    if (!gst_element_link(m_videoIdentity, m_videoSink))
        qWarning() << "Linking video output element failed";

    GstState state;

    switch (m_pendingState) {
    case QMediaPlayer::StoppedState:
        state = GST_STATE_NULL;
        break;
    case QMediaPlayer::PausedState:
        state = GST_STATE_PAUSED;
        break;
    case QMediaPlayer::PlayingState:
        state = GST_STATE_PLAYING;
        break;
    }

    gst_element_set_state(m_videoSink, state);    

    //don't have to wait here, it will unblock eventually
    if (gst_pad_is_blocked(srcPad))
        gst_pad_set_blocked_async(srcPad, false, &block_pad_cb, 0);
    gst_object_unref(GST_OBJECT(srcPad));

#ifdef DEBUG_VO_BIN_DUMP
    dumpNum++;
    _gst_debug_bin_to_dot_file(GST_BIN(m_playbin),
                                  GstDebugGraphDetails(/*GST_DEBUG_GRAPH_SHOW_ALL */ GST_DEBUG_GRAPH_SHOW_MEDIA_TYPE | GST_DEBUG_GRAPH_SHOW_NON_DEFAULT_PARAMS | GST_DEBUG_GRAPH_SHOW_STATES),
                                  QString("playbin_%1_finish").arg(dumpNum).toAscii().constData());
#endif
}
Example #30
/*!
 * \brief CvVideoWriter_GStreamer::open
 * \param filename filename to output to
 * \param fourcc desired codec fourcc
 * \param fps desired framerate
 * \param frameSize the size of the expected frames
 * \param is_color color or grayscale
 * \return success
 *
 * We support 2 modes of operation. Either the user enters a filename and a fourcc
 * code, or enters a manual pipeline description like in CvVideoCapture_Gstreamer.
 * In the latter case, we just push frames on the appsink with appropriate caps.
 * In the former case, we try to deduce the correct container from the filename,
 * and the correct encoder from the fourcc profile.
 *
 * If the file extension is not recognized, an AVI container is used.
 *
 */
bool CvVideoWriter_GStreamer::open( const char * filename, int fourcc,
                                    double fps, CvSize frameSize, bool is_color )
{
    CV_FUNCNAME("CvVideoWriter_GStreamer::open");

    // check arguments
    assert (filename);
    assert (fps > 0);
    assert (frameSize.width > 0  &&  frameSize.height > 0);

    // init gstreamer
    gst_initializer::init();

    // init vars
    bool manualpipeline = true;
    int  bufsize = 0;
    GError *err = NULL;
    const char* mime = NULL;
    GstStateChangeReturn stateret;

    GstCaps* caps = NULL;
    GstCaps* videocaps = NULL;

#if FULL_GST_VERSION >= VERSION_NUM(0,10,32)
    GstCaps* containercaps = NULL;
    GstEncodingContainerProfile* containerprofile = NULL;
    GstEncodingVideoProfile* videoprofile = NULL;
#endif

    GstIterator* it = NULL;
    gboolean done = FALSE;
    GstElement *element = NULL;
    gchar* name = NULL;
    GstElement* splitter = NULL;
    GstElement* combiner = NULL;

    // we first try to construct a pipeline from the given string.
    // if that fails, we assume it is an ordinary filename

    __BEGIN__;

    encodebin = gst_parse_launch(filename, &err);
    manualpipeline = (encodebin != NULL);

    if(manualpipeline)
    {
#if GST_VERSION_MAJOR == 0
        it = gst_bin_iterate_sources(GST_BIN(encodebin));
        if(gst_iterator_next(it, (gpointer *)&source) != GST_ITERATOR_OK) {
            CV_ERROR(CV_StsError, "GStreamer: cannot find appsink in manual pipeline\n");
            return false;
        }
#else
        it = gst_bin_iterate_sources (GST_BIN(encodebin));
        GValue value = G_VALUE_INIT;

        while (!done) {
          switch (gst_iterator_next (it, &value)) {
            case GST_ITERATOR_OK:
              element = GST_ELEMENT (g_value_get_object (&value));
              name = gst_element_get_name(element);
              if (name){
                if(strstr(name, "opencvsrc") != NULL || strstr(name, "appsrc") != NULL) {
                  source = GST_ELEMENT ( gst_object_ref (element) );
                  done = TRUE;
                }
                g_free(name);
              }
              g_value_unset (&value);

              break;
            case GST_ITERATOR_RESYNC:
              gst_iterator_resync (it);
              break;
            case GST_ITERATOR_ERROR:
            case GST_ITERATOR_DONE:
              done = TRUE;
              break;
          }
        }
        gst_iterator_free (it);

        if (!source){
            CV_ERROR(CV_StsError, "GStreamer: cannot find appsrc in manual pipeline\n");
            return false;
        }
#endif
        pipeline = encodebin;
    }
    else
    {
        pipeline = gst_pipeline_new (NULL);

        // we just got a filename and a fourcc code.
        // first, try to guess the container from the filename
        //encodebin = gst_element_factory_make("encodebin", NULL);

        //proxy old non existing fourcc ids. These were used in previous opencv versions,
        //but do not even exist in gstreamer any more
        if (fourcc == CV_FOURCC('M','P','1','V')) fourcc = CV_FOURCC('M', 'P', 'G' ,'1');
        if (fourcc == CV_FOURCC('M','P','2','V')) fourcc = CV_FOURCC('M', 'P', 'G' ,'2');
        if (fourcc == CV_FOURCC('D','R','A','C')) fourcc = CV_FOURCC('d', 'r', 'a' ,'c');


        //create encoder caps from fourcc

        videocaps = gst_riff_create_video_caps(fourcc, NULL, NULL, NULL, NULL, NULL);
        if (!videocaps){
            CV_ERROR( CV_StsUnsupportedFormat, "Gstreamer Opencv backend does not support this codec.");
        }

        //create container caps from file extension
        mime = filenameToMimetype(filename);
        if (!mime) {
            CV_ERROR( CV_StsUnsupportedFormat, "Gstreamer Opencv backend does not support this file type.");
        }

#if FULL_GST_VERSION >= VERSION_NUM(0,10,32)
        containercaps = gst_caps_from_string(mime);

        //create encodebin profile
        containerprofile = gst_encoding_container_profile_new("container", "container", containercaps, NULL);
        videoprofile = gst_encoding_video_profile_new(videocaps, NULL, NULL, 1);
        gst_encoding_container_profile_add_profile(containerprofile, (GstEncodingProfile *) videoprofile);
#endif

        //create pipeline elements
        encodebin = gst_element_factory_make("encodebin", NULL);

#if FULL_GST_VERSION >= VERSION_NUM(0,10,32)
        g_object_set(G_OBJECT(encodebin), "profile", containerprofile, NULL);
#endif
        source = gst_element_factory_make("appsrc", NULL);
        file = gst_element_factory_make("filesink", NULL);
        g_object_set(G_OBJECT(file), "location", filename, NULL);
    }

    if (is_color)
    {
        input_pix_fmt = GST_VIDEO_FORMAT_BGR;
        bufsize = frameSize.width * frameSize.height * 3;

#if GST_VERSION_MAJOR == 0
        caps = gst_video_format_new_caps(GST_VIDEO_FORMAT_BGR,
                                         frameSize.width,
                                         frameSize.height,
                                         int(fps), 1,
                                         1, 1);
#else
        caps = gst_caps_new_simple("video/x-raw",
                                   "format", G_TYPE_STRING, "BGR",
                                   "width", G_TYPE_INT, frameSize.width,
                                   "height", G_TYPE_INT, frameSize.height,
                                   "framerate", GST_TYPE_FRACTION, int(fps), 1,
                                   NULL);
        caps = gst_caps_fixate(caps);

#endif

    }
    else
    {
#if FULL_GST_VERSION >= VERSION_NUM(0,10,29)
        input_pix_fmt = GST_VIDEO_FORMAT_GRAY8;
        bufsize = frameSize.width * frameSize.height;

#if GST_VERSION_MAJOR == 0
        caps = gst_video_format_new_caps(GST_VIDEO_FORMAT_GRAY8,
                                         frameSize.width,
                                         frameSize.height,
                                         int(fps), 1,
                                         1, 1);
#else
        caps = gst_caps_new_simple("video/x-raw",
                                   "format", G_TYPE_STRING, "GRAY8",
                                   "width", G_TYPE_INT, frameSize.width,
                                   "height", G_TYPE_INT, frameSize.height,
                                   "framerate", GST_TYPE_FRACTION, int(fps), 1,
                                   NULL);
        caps = gst_caps_fixate(caps);
#endif
#else
        CV_Assert(!"Gstreamer 0.10.29 or newer is required for grayscale input");
#endif
    }

    gst_app_src_set_caps(GST_APP_SRC(source), caps);
    gst_app_src_set_stream_type(GST_APP_SRC(source), GST_APP_STREAM_TYPE_STREAM);
    gst_app_src_set_size (GST_APP_SRC(source), -1);

    g_object_set(G_OBJECT(source), "format", GST_FORMAT_TIME, NULL);
    g_object_set(G_OBJECT(source), "block", 1, NULL);
    g_object_set(G_OBJECT(source), "is-live", 0, NULL);


    if(!manualpipeline)
    {
        g_object_set(G_OBJECT(file), "buffer-size", bufsize, NULL);
        gst_bin_add_many(GST_BIN(pipeline), source, encodebin, file, NULL);
        if(!gst_element_link_many(source, encodebin, file, NULL)) {
            CV_ERROR(CV_StsError, "GStreamer: cannot link elements\n");
        }
    }

#if GST_VERSION_MAJOR == 0
    // HACK: remove streamsplitter and streamcombiner from
    // encodebin pipeline to prevent early EOF event handling
    // We always fetch BGR or gray-scale frames, so combiner->spliter
    // endge in graph is useless.
    it = gst_bin_iterate_recurse (GST_BIN(encodebin));
    while (!done) {
      switch (gst_iterator_next (it, (void**)&element)) {
        case GST_ITERATOR_OK:
          name = gst_element_get_name(element);
          if (strstr(name, "streamsplitter"))
            splitter = element;
          else if (strstr(name, "streamcombiner"))
            combiner = element;
          break;
        case GST_ITERATOR_RESYNC:
          gst_iterator_resync (it);
          break;
        case GST_ITERATOR_ERROR:
          done = true;
          break;
        case GST_ITERATOR_DONE:
          done = true;
          break;
      }
    }

    gst_iterator_free (it);

    if (splitter && combiner)
    {
        gst_element_unlink(splitter, combiner);

        GstPad* src  = gst_element_get_pad(combiner, "src");
        GstPad* sink = gst_element_get_pad(combiner, "encodingsink");

        GstPad* srcPeer = gst_pad_get_peer(src);
        GstPad* sinkPeer = gst_pad_get_peer(sink);

        gst_pad_unlink(sinkPeer, sink);
        gst_pad_unlink(src, srcPeer);

        gst_pad_link(sinkPeer, srcPeer);

        src = gst_element_get_pad(splitter, "encodingsrc");
        sink = gst_element_get_pad(splitter, "sink");

        srcPeer = gst_pad_get_peer(src);
        sinkPeer = gst_pad_get_peer(sink);

        gst_pad_unlink(sinkPeer, sink);
        gst_pad_unlink(src, srcPeer);

        gst_pad_link(sinkPeer, srcPeer);
    }
#endif

    stateret = gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING);
    if(stateret  == GST_STATE_CHANGE_FAILURE) {
        handleMessage(pipeline);
        CV_ERROR(CV_StsError, "GStreamer: cannot put pipeline to play\n");
    }

    framerate = fps;
    num_frames = 0;

    handleMessage(pipeline);

    __END__;

    return true;
}