static void
gst_app_sink_set_property (GObject * object, guint prop_id,
    const GValue * value, GParamSpec * pspec)
{
  GstAppSink *appsink = GST_APP_SINK_CAST (object);

  switch (prop_id) {
    case PROP_CAPS:
      gst_app_sink_set_caps (appsink, gst_value_get_caps (value));
      break;
    case PROP_EMIT_SIGNALS:
      gst_app_sink_set_emit_signals (appsink, g_value_get_boolean (value));
      break;
    case PROP_MAX_BUFFERS:
      gst_app_sink_set_max_buffers (appsink, g_value_get_uint (value));
      break;
    case PROP_DROP:
      gst_app_sink_set_drop (appsink, g_value_get_boolean (value));
      break;
    case PROP_WAIT_ON_EOS:
      gst_app_sink_set_wait_on_eos (appsink, g_value_get_boolean (value));
      break;
    default:
      G_OBJECT_WARN_INVALID_PROPERTY_ID (object, prop_id, pspec);
      break;
  }
}
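/*
 * A minimal usage sketch (not part of the source above): because the
 * property handler routes "caps", "emit-signals", "max-buffers", "drop"
 * and "wait-on-eos" to the public setters, configuring an appsink through
 * g_object_set() is equivalent to calling the setters directly.
 */
static void
configure_appsink_example (GstElement * sink)
{
  /* via GObject properties (dispatched through gst_app_sink_set_property) */
  g_object_set (G_OBJECT (sink),
      "emit-signals", TRUE,
      "max-buffers", 1,
      "drop", TRUE,
      NULL);

  /* equivalent direct calls */
  gst_app_sink_set_emit_signals (GST_APP_SINK (sink), TRUE);
  gst_app_sink_set_max_buffers (GST_APP_SINK (sink), 1);
  gst_app_sink_set_drop (GST_APP_SINK (sink), TRUE);
}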
CAMLprim value ocaml_gstreamer_appsink_emit_signals(value _as)
{
  CAMLparam1(_as);
  appsink *as = Appsink_val(_as);

  caml_release_runtime_system();
  gst_app_sink_set_emit_signals(as->appsink, TRUE);
  caml_acquire_runtime_system();

  CAMLreturn(Val_unit);
}
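/*
 * A hedged companion sketch: the stub above hardcodes TRUE. A variant that
 * takes the flag from OCaml would register both parameters with the GC and
 * convert the boolean; Appsink_val and the appsink struct are assumed from
 * the binding above.
 */
CAMLprim value ocaml_gstreamer_appsink_set_emit_signals(value _as, value _emit)
{
  CAMLparam2(_as, _emit);
  appsink *as = Appsink_val(_as);
  gboolean emit = Bool_val(_emit) ? TRUE : FALSE;

  caml_release_runtime_system();
  gst_app_sink_set_emit_signals(as->appsink, emit);
  caml_acquire_runtime_system();

  CAMLreturn(Val_unit);
}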
Example #3
Capture::Capture() {

  // prepare the pipeline
  pipeline = gst_pipeline_new("xvoverlay");
  GstElement *src = gst_element_factory_make("videotestsrc", NULL);
  GstElement *conv = gst_element_factory_make("videoconvert", NULL);
  GstElement *sink = gst_element_factory_make("appsink", NULL);
  gst_bin_add_many(GST_BIN(pipeline), src, conv, sink, NULL);
  gst_element_link_many(src, conv, sink, NULL);

  gst_app_sink_set_emit_signals(GST_APP_SINK(sink), TRUE);
  g_signal_connect(sink, "new-sample", G_CALLBACK(newSample), (gpointer)this);

  // run the pipeline
  GstStateChangeReturn sret = gst_element_set_state(pipeline, GST_STATE_PLAYING);
  g_print("Playing...\n");
  if (sret == GST_STATE_CHANGE_FAILURE) {
    gst_element_set_state(pipeline, GST_STATE_NULL);
    gst_object_unref(pipeline);
    pipeline = NULL;
    // Exit application
    QTimer::singleShot(0, QApplication::activeWindow(), SLOT(quit()));
  }
}
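/*
 * A minimal sketch of the "new-sample" handler connected above (assuming
 * newSample is a static member or free function; the frame handling is
 * elided). In GStreamer 1.x the handler pulls a GstSample, maps its
 * buffer, and returns a GstFlowReturn.
 */
GstFlowReturn Capture::newSample(GstAppSink *sink, gpointer user_data)
{
  Capture *self = static_cast<Capture *>(user_data);
  (void)self; // the real handler would hand the frame to `self`

  GstSample *sample = gst_app_sink_pull_sample(sink);
  if (!sample)
    return GST_FLOW_EOS;

  GstBuffer *buffer = gst_sample_get_buffer(sample);
  GstMapInfo map;
  if (gst_buffer_map(buffer, &map, GST_MAP_READ)) {
    // ... process map.data / map.size ...
    gst_buffer_unmap(buffer, &map);
  }
  gst_sample_unref(sample);
  return GST_FLOW_OK;
}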
/*!
 * \brief OpenIMAJCapGStreamer::open Open the given file with gstreamer
 * \param filename Filename to open
 * \return boolean. Specifies if opening was successful.
 *
 * For a v4l2 source, a pipeline is constructed as follows:
 *    v4l2src ! autoconvert ! appsink
 *
 *
 * The 'filename' parameter is not limited to filesystem paths, and may be one of the following:
 *
 *  - a normal filesystem path:
 *        e.g. video.avi or /path/to/video.avi or C:\\video.avi
 *  - a URI:
 *        e.g. file:///path/to/video.avi or rtsp:///path/to/stream.asf
 *  - a gstreamer pipeline description:
 *        e.g. videotestsrc ! videoconvert ! appsink
 *        the appsink name should be either 'appsink0' (the default) or 'opencvsink'
 *
 *  When dealing with a file, OpenIMAJCapGStreamer will not drop frames if the grabbing interval
 *  is larger than the framerate period. (Unlike the URI or manual pipeline description, which
 *  assume a live source.)
 *
 *  The pipeline is only started when the first frame is grabbed, since setting pipeline properties
 *  would be really slow if the pipeline had to be restarted over and over again.
 *
 */
bool OpenIMAJCapGStreamer::open(const char* filename )
{
    if(!isInited) {
        //FIXME: threadsafety
        gst_init (NULL, NULL);
        isInited = true;
    }

    
    bool stream = false;
    bool manualpipeline = false;
    char *uri = NULL;
    uridecodebin = NULL;
    
    // test if we have a valid uri. If so, open it with an uridecodebin
    // else, we might have a file or a manual pipeline.
    // if gstreamer cannot parse the manual pipeline, we assume we were given an
    // ordinary file path.
    if(!gst_uri_is_valid(filename))
    {
        char *path = realpath(filename, NULL);
        stream = false;
        if(path)
        {
            uri = g_filename_to_uri(path, NULL, NULL);
            free(path); // the malloc'd realpath() result was previously leaked
            if(!uri) {
                WARN("GStreamer: Error opening file\n");
                close();
                return false;
            }
        }
        else
        {
            GError *err = NULL;
            uridecodebin = gst_parse_launch(filename, &err);
            if(!uridecodebin) {
                //fprintf(stderr, "GStreamer: Error opening bin: %s\n", err->message);
                //close();
                g_clear_error(&err);
                return false;
            }
            stream = true;
            manualpipeline = true;
        }
    } else {
        stream = true;
        uri = g_strdup(filename);
    }
    
    bool element_from_uri = false;
    if(!uridecodebin)
    {
        // At this writing, the v4l2 element (and maybe others too) does not support caps renegotiation.
        // This means that we cannot use an uridecodebin when dealing with v4l2, since setting
        // capture properties will not work.
        // The solution (probably only until gstreamer 1.2) is to make an element from uri when dealing with v4l2.
        gchar * protocol = gst_uri_get_protocol(uri);
        if (!strcasecmp(protocol , "v4l2"))
        {
            uridecodebin = gst_element_make_from_uri(GST_URI_SRC, uri, "src", NULL);

            element_from_uri = true;
        }else{
            uridecodebin = gst_element_factory_make ("uridecodebin", NULL);
            g_object_set(G_OBJECT(uridecodebin),"uri",uri, NULL);
        }
        g_free(protocol);
        
        if(!uridecodebin) {
            //fprintf(stderr, "GStreamer: Error opening bin: %s\n", err->message);
            close();
            return false;
        }
    }
    
    if(manualpipeline)
    {
        GstIterator *it = NULL;
        it = gst_bin_iterate_sinks (GST_BIN(uridecodebin));
        
        gboolean done = FALSE;
        GstElement *element = NULL;
        gchar* name = NULL;
        GValue value = G_VALUE_INIT;
        
        while (!done) {
            switch (gst_iterator_next (it, &value)) {
                case GST_ITERATOR_OK:
                    element = GST_ELEMENT (g_value_get_object (&value));
                    name = gst_element_get_name(element);
                    if (name){
                        if(strstr(name, "opencvsink") != NULL || strstr(name, "appsink") != NULL) {
                            sink = GST_ELEMENT ( gst_object_ref (element) );
                            done = TRUE;
                        }
                        g_free(name);
                    }
                    g_value_unset (&value);
                    
                    break;
                case GST_ITERATOR_RESYNC:
                    gst_iterator_resync (it);
                    break;
                case GST_ITERATOR_ERROR:
                case GST_ITERATOR_DONE:
                    done = TRUE;
                    break;
            }
        }
        gst_iterator_free (it);
        
        
        if (!sink){
            //ERROR(1, "GStreamer: cannot find appsink in manual pipeline\n");
            return false;
        }
        
        pipeline = uridecodebin;
    }
    else
    {
        pipeline = gst_pipeline_new (NULL);
        // videoconvert (in 0.10: ffmpegcolorspace) automatically selects the correct colorspace
        // conversion based on caps.
        color = gst_element_factory_make(COLOR_ELEM, NULL);
        sink = gst_element_factory_make("appsink", NULL);
        
        gst_bin_add_many(GST_BIN(pipeline), uridecodebin, color, sink, NULL);
        
        if(element_from_uri) {
            if(!gst_element_link(uridecodebin, color)) {
                //ERROR(1, "GStreamer: cannot link color -> sink\n");
                gst_object_unref(pipeline);
                return false;
            }
        }else{
            g_signal_connect(uridecodebin, "pad-added", G_CALLBACK(newPad), color);
        }
        
        if(!gst_element_link(color, sink)) {
            //ERROR(1, "GStreamer: cannot link color -> sink\n");
            gst_object_unref(pipeline);
            return false;
        }
    }
    
    //TODO: is 1 single buffer really high enough?
    gst_app_sink_set_max_buffers (GST_APP_SINK(sink), 1);
    gst_app_sink_set_drop (GST_APP_SINK(sink), stream);
    //do not emit signals: all calls will be synchronous and blocking
    gst_app_sink_set_emit_signals (GST_APP_SINK(sink), 0);
    
    // support 1 and 3 channel 8 bit data, as well as bayer (also 1 channel, 8 bit)
    caps = gst_caps_from_string("video/x-raw, format=(string){BGR, GRAY8}; video/x-bayer,format=(string){rggb,bggr,grbg,gbrg}");
    gst_app_sink_set_caps(GST_APP_SINK(sink), caps);
    gst_caps_unref(caps);
    
    //we do not start recording here just yet.
    // the user probably wants to set capture properties first, so start recording whenever the first frame is requested
    
    return true;
}
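/*
 * A hedged sketch of the grab step that pairs with the setup above: with
 * emit-signals off and max-buffers at 1, frames are pulled synchronously,
 * and the pipeline is started lazily on the first request. grabFrame() and
 * the `buffer` member are assumptions, not part of the source above.
 */
bool OpenIMAJCapGStreamer::grabFrame()
{
    if(!pipeline || !sink)
        return false;

    // start the pipeline on the first grab, as promised in the comment above
    gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING);

    if(gst_app_sink_is_eos(GST_APP_SINK(sink)))
        return false;

    GstSample *sample = gst_app_sink_pull_sample(GST_APP_SINK(sink));
    if(!sample)
        return false;

    // keep a reference to the buffer; a retrieve step would map and convert it
    buffer = gst_sample_get_buffer(sample);
    gst_buffer_ref(buffer);
    gst_sample_unref(sample);
    return true;
}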
void AVMuxEncode::deletePipelines()
{
    m_videoSrcLock.lock();
    m_audioSrcLock.lock();
    m_videoSinkLock.lock();
    m_audioSinkLock.lock();

    if (m_videoPipeline != NULL) {
        gst_element_set_state (m_videoPipeline, GST_STATE_NULL);
        gst_app_sink_set_emit_signals((GstAppSink *)(m_appVideoSink), FALSE);
        g_signal_handlers_disconnect_by_data(m_appVideoSink, this);
        gst_object_unref(m_videoPipeline);
        m_videoPipeline = NULL;
    }

#ifdef GSTBUSMSG
    if (m_videoBusWatch != -1) {
        g_source_remove(m_videoBusWatch);
        m_videoBusWatch = -1;
    }
#endif

    if (m_audioPipeline != NULL) {
        gst_element_set_state (m_audioPipeline, GST_STATE_NULL);
        gst_app_sink_set_emit_signals((GstAppSink *)(m_appAudioSink), FALSE);
        g_signal_handlers_disconnect_by_data(m_appAudioSink, this);
        gst_object_unref(m_audioPipeline);
        m_audioPipeline = NULL;
    }

#ifdef GSTBUSMSG
    if (m_audioBusWatch != -1) {
        g_source_remove(m_audioBusWatch);
        m_audioBusWatch = -1;
    }
#endif

    if (m_videoCaps != NULL)
        g_free(m_videoCaps);
    if (m_audioCaps != NULL)
        g_free(m_audioCaps);
    m_videoCaps = NULL;
    m_audioCaps = NULL;

    m_appAudioSink = NULL;
    m_appAudioSrc = NULL;
    m_appVideoSink = NULL;
    m_appVideoSrc = NULL;

    while (!m_videoSrcQ.empty())
        delete m_videoSrcQ.dequeue();

    while (!m_audioSrcQ.empty())
        delete m_audioSrcQ.dequeue();

    while (!m_videoSinkQ.empty())
        delete m_videoSinkQ.dequeue();

    while (!m_audioSinkQ.empty())
        delete m_audioSinkQ.dequeue();

    m_audioTimestamp = -1;

    m_videoSrcLock.unlock();
    m_audioSrcLock.unlock();
    m_videoSinkLock.unlock();
    m_audioSinkLock.unlock();
}
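/*
 * The teardown above repeats one ordering for both pipelines: NULL state
 * first, then stop signal emission, then disconnect handlers, then unref.
 * A small helper sketch capturing that order (the helper itself is an
 * assumption, not part of the source); usage would look like
 * teardownPipeline(&m_videoPipeline, m_appVideoSink, this);
 */
static void teardownPipeline(GstElement **pipeline, GstElement *appSink, gpointer handlerData)
{
    if (*pipeline == NULL)
        return;
    gst_element_set_state(*pipeline, GST_STATE_NULL);
    if (appSink != NULL) {
        gst_app_sink_set_emit_signals(GST_APP_SINK(appSink), FALSE);
        g_signal_handlers_disconnect_by_data(appSink, handlerData);
    }
    gst_object_unref(*pipeline);
    *pipeline = NULL;
}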
bool AVMuxEncode::newPipelines(SYNTRO_AVPARAMS *avParams)
{
    gchar *videoLaunch;
    gchar *audioLaunch;
    GError *error = NULL;
    GstStateChangeReturn ret;

    m_avParams = *avParams;

    printf("width=%d, height=%d, rate=%d\n", avParams->videoWidth, avParams->videoHeight, avParams->videoFramerate);
    printf("channels=%d, rate=%d, size=%d\n", avParams->audioChannels, avParams->audioSampleRate, avParams->audioSampleSize);

    //  Construct the pipelines

    g_pipelineIndex++;

    if (m_AVMode == AVMUXENCODE_AV_TYPE_RTPMP4) {
        videoLaunch = g_strdup_printf (
              " appsrc name=videoSrc%d ! jpegdec ! queue ! ffenc_mpeg4 bitrate=%d "
              " ! queue ! rtpmp4vpay pt=96 ! queue ! appsink name=videoSink%d"
                , g_pipelineIndex, m_videoCompressionRate, g_pipelineIndex);
    } else {
        videoLaunch = g_strdup_printf (
              " appsrc name=videoSrc%d ! jpegdec ! queue ! x264enc bitrate=%d tune=zerolatency rc-lookahead=0"
              " ! queue ! rtph264pay pt=96 ! queue ! appsink name=videoSink%d"
                , g_pipelineIndex, m_videoCompressionRate / 1000, g_pipelineIndex);
    }

    m_videoPipeline = gst_parse_launch(videoLaunch, &error);
    g_free(videoLaunch);

    if (error != NULL) {
        g_print ("could not construct video pipeline: %s\n", error->message);
        g_error_free (error);
        m_videoPipeline = NULL;
        return false;
    }

    audioLaunch = g_strdup_printf (
#ifdef GST_IMX6
                " appsrc name=audioSrc%d ! faac bitrate=%d ! rtpmp4apay pt=97 ! appsink name=audioSink%d "
#else
                " appsrc name=audioSrc%d ! faac bitrate=%d ! rtpmp4apay pt=97 min-ptime=1000000000 ! appsink name=audioSink%d "
#endif
             , g_pipelineIndex, m_audioCompressionRate, g_pipelineIndex);

    m_audioPipeline = gst_parse_launch(audioLaunch, &error);
    g_free(audioLaunch);

    if (error != NULL) {
        g_print ("could not construct audio pipeline: %s\n", error->message);
        g_error_free (error);
        gst_object_unref(m_videoPipeline);
        m_videoPipeline = NULL;
        m_audioPipeline = NULL;
        return false;
    }

    //  find the appsrcs and appsinks

    gchar *videoSink = g_strdup_printf("videoSink%d", g_pipelineIndex);
    if ((m_appVideoSink = gst_bin_get_by_name (GST_BIN (m_videoPipeline), videoSink)) == NULL) {
        g_printerr("Unable to find video appsink\n");
        g_free(videoSink);
        deletePipelines();
        return false;
    }
    g_free(videoSink);

    gchar *videoSrc = g_strdup_printf("videoSrc%d", g_pipelineIndex);
    if ((m_appVideoSrc = gst_bin_get_by_name (GST_BIN (m_videoPipeline), videoSrc)) == NULL) {
        g_printerr("Unable to find video appsrc\n");
        g_free(videoSrc);
        deletePipelines();
        return false;
    }
    g_free(videoSrc);

    gchar *audioSink = g_strdup_printf("audioSink%d", g_pipelineIndex);
    if ((m_appAudioSink = gst_bin_get_by_name (GST_BIN (m_audioPipeline), audioSink)) == NULL) {
        g_printerr("Unable to find audio appsink\n");
        g_free(audioSink);
        deletePipelines();
        return false;
    }
    g_free(audioSink);

    gchar *audioSrc = g_strdup_printf("audioSrc%d", g_pipelineIndex);
    if ((m_appAudioSrc = gst_bin_get_by_name (GST_BIN (m_audioPipeline), audioSrc)) == NULL) {
        g_printerr("Unable to find audio appsrc\n");
        g_free(audioSrc);
        deletePipelines();
        return false;
    }
    g_free(audioSrc);

    g_signal_connect (m_appVideoSink, "new-buffer", G_CALLBACK (newVideoSinkData), this);
    gst_app_sink_set_emit_signals((GstAppSink *)(m_appVideoSink), TRUE);

    g_signal_connect (m_appAudioSink, "new-buffer", G_CALLBACK (newAudioSinkData), this);
    gst_app_sink_set_emit_signals((GstAppSink *)(m_appAudioSink), TRUE);

    gst_app_src_set_caps((GstAppSrc *) (m_appVideoSrc),
             gst_caps_new_simple ("image/jpeg",
             "width", G_TYPE_INT, m_avParams.videoWidth,
             "height", G_TYPE_INT, m_avParams.videoHeight,
             "framerate", GST_TYPE_FRACTION, m_avParams.videoFramerate, 1,
             NULL));
    gst_app_src_set_stream_type((GstAppSrc *)(m_appVideoSrc), GST_APP_STREAM_TYPE_STREAM);

    gst_app_src_set_caps((GstAppSrc *) (m_appAudioSrc),
            gst_caps_new_simple ("audio/x-raw-int",
                      "width", G_TYPE_INT, (gint)m_avParams.audioSampleSize,
                      "depth", G_TYPE_INT, (gint)m_avParams.audioSampleSize,
                      "channels" ,G_TYPE_INT, (gint)m_avParams.audioChannels,
                      "rate",G_TYPE_INT, m_avParams.audioSampleRate,
                      "endianness",G_TYPE_INT,(gint)1234,
                      "signed", G_TYPE_BOOLEAN, (gboolean)TRUE,
                      NULL));

    gst_app_src_set_stream_type((GstAppSrc *)(m_appAudioSrc), GST_APP_STREAM_TYPE_STREAM);
    g_signal_connect(m_appAudioSrc, "need-data", G_CALLBACK (needAudioSrcData), this);

    ret = gst_element_set_state (m_videoPipeline, GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE) {
        g_printerr ("Unable to set the video pipeline to the play state.\n");
        deletePipelines();
        return false;
    }

#ifdef GSTBUSMSG
    GstBus *bus;

    bus = gst_pipeline_get_bus(GST_PIPELINE (m_videoPipeline));
    m_videoBusWatch = gst_bus_add_watch (bus, videoBusMessage, this);
    gst_object_unref (bus);

    bus = gst_pipeline_get_bus(GST_PIPELINE (m_audioPipeline));
    m_audioBusWatch = gst_bus_add_watch (bus, audioBusMessage, this);
    gst_object_unref (bus);
#endif

    ret = gst_element_set_state (m_audioPipeline, GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE) {
        g_printerr ("Unable to set the audio pipeline to the play state.\n");
        deletePipelines();
        return false;
    }
    m_videoInterval = (1000 * PIPELINE_MS_TO_NS) / m_avParams.videoFramerate;
    m_nextVideoTime = QDateTime::currentMSecsSinceEpoch() * PIPELINE_MS_TO_NS;

    qDebug() << "Pipelines established";
    m_pipelinesActive = true;
    return true;
}
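/*
 * A hedged sketch of the "new-buffer" handlers connected above. The signal
 * name and gst_app_sink_pull_buffer() are GStreamer 0.10-era API (1.x uses
 * "new-sample" and gst_app_sink_pull_sample()); the queueing call on
 * AVMuxEncode is an assumed hook, not part of the source.
 */
static void newVideoSinkData(GstAppSink *sink, gpointer data)
{
    AVMuxEncode *encoder = (AVMuxEncode *)data;
    GstBuffer *buffer = gst_app_sink_pull_buffer(sink);   // 0.10 API
    if (buffer == NULL)
        return;
    encoder->queueVideoSinkData(buffer);                  // assumed hook
    gst_buffer_unref(buffer);
}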
/*!
 * \brief CvCapture_GStreamer::open Open the given file with gstreamer
 * \param type CvCapture type. One of CV_CAP_GSTREAMER_*
 * \param filename Filename to open in case of CV_CAP_GSTREAMER_FILE
 * \return boolean. Specifies if opening was successful.
 *
 * In case of CV_CAP_GSTREAMER_V4L(2), a pipeline is constructed as follows:
 *    v4l2src ! autoconvert ! appsink
 *
 *
 * The 'filename' parameter is not limited to filesystem paths, and may be one of the following:
 *
 *  - a normal filesystem path:
 *        e.g. video.avi or /path/to/video.avi or C:\\video.avi
 *  - a URI:
 *        e.g. file:///path/to/video.avi or rtsp:///path/to/stream.asf
 *  - a gstreamer pipeline description:
 *        e.g. videotestsrc ! videoconvert ! appsink
 *        the appsink name should be either 'appsink0' (the default) or 'opencvsink'
 *
 *  When dealing with a file, CvCapture_GStreamer will not drop frames if the grabbing interval
 *  is larger than the framerate period. (Unlike the URI or manual pipeline description, which
 *  assume a live source.)
 *
 *  The pipeline is only started when the first frame is grabbed, since setting pipeline properties
 *  would be really slow if the pipeline had to be restarted over and over again.
 *
 *  TODO: the 'type' parameter is imo unneeded. for v4l2, filename 'v4l2:///dev/video0' can be used.
 *  I expect this to be the same for CV_CAP_GSTREAMER_1394. Is anyone actually still using v4l (v1)?
 *
 */
bool CvCapture_GStreamer::open( int type, const char* filename )
{
    CV_FUNCNAME("cvCaptureFromCAM_GStreamer");

    __BEGIN__;

    gst_initializer::init();

    bool file = false;
    bool stream = false;
    bool manualpipeline = false;
    char *uri = NULL;
    uridecodebin = NULL;
    GstElementFactory * testfac;
    GstStateChangeReturn status;

    if (type == CV_CAP_GSTREAMER_V4L){
        testfac = gst_element_factory_find("v4lsrc");
        if (!testfac){
            return false;
        }
        g_object_unref(G_OBJECT(testfac));
        filename = "v4lsrc ! "COLOR_ELEM" ! appsink";
    }
    if (type == CV_CAP_GSTREAMER_V4L2){
        testfac = gst_element_factory_find("v4l2src");
        if (!testfac){
            return false;
        }
        g_object_unref(G_OBJECT(testfac));
        filename = "v4l2src ! "COLOR_ELEM" ! appsink";
    }


    // test if we have a valid uri. If so, open it with an uridecodebin
    // else, we might have a file or a manual pipeline.
    // if gstreamer cannot parse the manual pipeline, we assume we were given an
    // ordinary file path.
    if(!gst_uri_is_valid(filename))
    {
        char *path = realpath(filename, NULL);
        stream = false;
        if(path)
        {
            uri = g_filename_to_uri(path, NULL, NULL);
            free(path); // the malloc'd realpath() result was previously leaked
            if(uri)
            {
                file = true;
            }
            else
            {
                CV_WARN("GStreamer: Error opening file\n");
                close();
                return false;
            }
        }
        else
        {
            GError *err = NULL;
            uridecodebin = gst_parse_launch(filename, &err);
            if(!uridecodebin)
            {
                fprintf(stderr, "GStreamer: Error opening bin: %s\n",
                        err ? err->message : "unknown error");
                g_clear_error(&err);
                return false;
            }
            stream = true;
            manualpipeline = true;
        }
    } else {
        stream = true;
        uri = g_strdup(filename);
    }

    bool element_from_uri = false;
    if(!uridecodebin)
    {
        // At this writing, the v4l2 element (and maybe others too) does not support caps renegotiation.
        // This means that we cannot use an uridecodebin when dealing with v4l2, since setting
        // capture properties will not work.
        // The solution (probably only until gstreamer 1.2) is to make an element from uri when dealing with v4l2.
        gchar * protocol = gst_uri_get_protocol(uri);
        if (!strcasecmp(protocol , "v4l2"))
        {
#if GST_VERSION_MAJOR == 0
            uridecodebin = gst_element_make_from_uri(GST_URI_SRC, uri, "src");
#else
            uridecodebin = gst_element_make_from_uri(GST_URI_SRC, uri, "src", NULL);
#endif
            element_from_uri = true;
        }else{
            uridecodebin = gst_element_factory_make("uridecodebin", NULL);
            g_object_set(G_OBJECT(uridecodebin), "uri", uri, NULL);
        }
        g_free(protocol);

        if(!uridecodebin) {
            //fprintf(stderr, "GStreamer: Error opening bin: %s\n", err->message);
            close();
            return false;
        }
    }

    if(manualpipeline)
    {
        GstIterator *it = NULL;
#if GST_VERSION_MAJOR == 0
        it = gst_bin_iterate_sinks(GST_BIN(uridecodebin));
        if(gst_iterator_next(it, (gpointer *)&sink) != GST_ITERATOR_OK) {
            CV_ERROR(CV_StsError, "GStreamer: cannot find appsink in manual pipeline\n");
            return false;
        }
#else
        it = gst_bin_iterate_sinks (GST_BIN(uridecodebin));

        gboolean done = FALSE;
        GstElement *element = NULL;
        gchar* name = NULL;
        GValue value = G_VALUE_INIT;

        while (!done) {
          switch (gst_iterator_next (it, &value)) {
            case GST_ITERATOR_OK:
              element = GST_ELEMENT (g_value_get_object (&value));
              name = gst_element_get_name(element);
              if (name){
                if(strstr(name, "opencvsink") != NULL || strstr(name, "appsink") != NULL) {
                  sink = GST_ELEMENT ( gst_object_ref (element) );
                  done = TRUE;
                }
                g_free(name);
              }
              g_value_unset (&value);

              break;
            case GST_ITERATOR_RESYNC:
              gst_iterator_resync (it);
              break;
            case GST_ITERATOR_ERROR:
            case GST_ITERATOR_DONE:
              done = TRUE;
              break;
          }
        }
        gst_iterator_free (it);


        if (!sink){
            CV_ERROR(CV_StsError, "GStreamer: cannot find appsink in manual pipeline\n");
            return false;
        }
#endif
        pipeline = uridecodebin;
    }
    else
    {
        pipeline = gst_pipeline_new(NULL);
        // videoconvert (in 0.10: ffmpegcolorspace, in 1.x autovideoconvert)
        //automatically selects the correct colorspace conversion based on caps.
        color = gst_element_factory_make(COLOR_ELEM, NULL);
        sink = gst_element_factory_make("appsink", NULL);

        gst_bin_add_many(GST_BIN(pipeline), uridecodebin, color, sink, NULL);

        if(element_from_uri) {
            if(!gst_element_link(uridecodebin, color)) {
                CV_ERROR(CV_StsError, "GStreamer: cannot link color -> sink\n");
                gst_object_unref(pipeline);
                pipeline = NULL;
                return false;
            }
        }else{
            g_signal_connect(uridecodebin, "pad-added", G_CALLBACK(newPad), color);
        }

        if(!gst_element_link(color, sink)) {
            CV_ERROR(CV_StsError, "GStreamer: cannot link color -> sink\n");
            gst_object_unref(pipeline);
            pipeline = NULL;
            return false;
        }
    }

    //TODO: is 1 single buffer really high enough?
    gst_app_sink_set_max_buffers (GST_APP_SINK(sink), 1);
    gst_app_sink_set_drop (GST_APP_SINK(sink), stream);
    //do not emit signals: all calls will be synchronous and blocking
    gst_app_sink_set_emit_signals (GST_APP_SINK(sink), 0);

#if GST_VERSION_MAJOR == 0
    caps = gst_caps_new_simple("video/x-raw-rgb",
                               "bpp",        G_TYPE_INT, 24,
                               "red_mask",   G_TYPE_INT, 0x0000FF,
                               "green_mask", G_TYPE_INT, 0x00FF00,
                               "blue_mask",  G_TYPE_INT, 0xFF0000,
                               NULL);
#else
    // support 1 and 3 channel 8 bit data, as well as bayer (also 1 channel, 8 bit)
    caps = gst_caps_from_string("video/x-raw, format=(string){BGR, GRAY8}; video/x-bayer,format=(string){rggb,bggr,grbg,gbrg}");
#endif
    gst_app_sink_set_caps(GST_APP_SINK(sink), caps);
    gst_caps_unref(caps);

    // For video files only: set pipeline to PAUSED state to get its duration
    if (file)
    {
        status = gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PAUSED);
        if (status == GST_STATE_CHANGE_ASYNC)
        {
            // wait for status update
            GstState st1;
            GstState st2;
            status = gst_element_get_state(pipeline, &st1, &st2, GST_CLOCK_TIME_NONE);
        }
        if (status == GST_STATE_CHANGE_FAILURE)
        {
            handleMessage(pipeline);
            gst_object_unref(pipeline);
            pipeline = NULL;
            CV_ERROR(CV_StsError, "GStreamer: unable to start pipeline\n");
            return false;
        }

        GstFormat format;

        format = GST_FORMAT_DEFAULT;
#if GST_VERSION_MAJOR == 0
        if(!gst_element_query_duration(sink, &format, &duration))
#else
        if(!gst_element_query_duration(sink, format, &duration))
#endif
        {
            handleMessage(pipeline);
            CV_WARN("GStreamer: unable to query duration of stream");
            duration = -1;
            return true;
        }
    }
    else
    {
        duration = -1;
    }

    __END__;

    return true;
}
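/*
 * A hedged sketch of the newPad "pad-added" handler that both open()
 * implementations connect for the uridecodebin case: link the dynamically
 * created source pad to the color element's sink pad.
 */
static void newPad(GstElement *uridecodebin, GstPad *pad, gpointer data)
{
    GstElement *color = (GstElement *)data;
    GstPad *sinkpad = gst_element_get_static_pad(color, "sink");
    if (sinkpad) {
        if (!gst_pad_is_linked(sinkpad))
            gst_pad_link(pad, sinkpad);
        gst_object_unref(sinkpad);
    }
}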
bool GStreamerCameraFrameSourceImpl::InitializeGstPipeLine()
{
    GstStateChangeReturn status;
    end = true;

    pipeline = GST_PIPELINE(gst_pipeline_new(NULL));
    if (pipeline == NULL)
    {
        printf("Cannot create Gstreamer pipeline\n");
        return false;
    }

    bus = gst_pipeline_get_bus(GST_PIPELINE (pipeline));

    // create v4l2src
    GstElement * v4l2src = gst_element_factory_make("v4l2src", NULL);
    if (v4l2src == NULL)
    {
        printf("Cannot create v4l2src\n");
        FinalizeGstPipeLine();

        return false;
    }

    std::ostringstream cameraDev;
    cameraDev << "/dev/video" << cameraIdx;
    g_object_set(G_OBJECT(v4l2src), "device", cameraDev.str().c_str(), NULL);

    gst_bin_add(GST_BIN(pipeline), v4l2src);

    // create color convert element
    GstElement * color = gst_element_factory_make(COLOR_ELEM, NULL);
    if (color == NULL)
    {
        printf("Cannot create %s element\n", COLOR_ELEM);
        FinalizeGstPipeLine();

        return false;
    }

    gst_bin_add(GST_BIN(pipeline), color);

    // create appsink element
    sink = gst_element_factory_make("appsink", NULL);
    if (sink == NULL)
    {
        printf("Cannot create appsink element\n");
        FinalizeGstPipeLine();

        return false;
    }

    gst_bin_add(GST_BIN(pipeline), sink);

    // if initial values for FrameSource::Parameters are not
    // specified, let's set them manually to prevent very huge images
    if (configuration.frameWidth == (vx_uint32)-1)
        configuration.frameWidth = 1920;
    if (configuration.frameHeight == (vx_uint32)-1)
        configuration.frameHeight = 1080;
    if (configuration.fps == (vx_uint32)-1)
        configuration.fps = 30;

#if GST_VERSION_MAJOR == 0
    GstCaps* caps_v42lsrc = gst_caps_new_simple ("video/x-raw-rgb",
                 "width", GST_TYPE_INT_RANGE, 1, (int)configuration.frameWidth,
                 "height", GST_TYPE_INT_RANGE, 1, (int)configuration.frameHeight,
                 "framerate", GST_TYPE_FRACTION, (int)configuration.fps,
                 NULL);
#else
    std::ostringstream stream;
    stream << "video/x-raw, format=(string){RGB, GRAY8}, width=[1," << configuration.frameWidth <<
              "], height=[1," << configuration.frameHeight << "], framerate=" << configuration.fps << "/1;";

    GstCaps* caps_v42lsrc = gst_caps_from_string(stream.str().c_str());
#endif

    if (caps_v42lsrc == NULL)
    {
        printf("Failed to create caps\n");
        FinalizeGstPipeLine();

        return false;
    }

    // link elements
    if (!gst_element_link_filtered(v4l2src, color, caps_v42lsrc))
    {
        printf("GStreamer: cannot link v4l2src -> color using caps\n");
        FinalizeGstPipeLine();
        gst_caps_unref(caps_v42lsrc);

        return false;
    }
    gst_caps_unref(caps_v42lsrc);

    // link elements
    if (!gst_element_link(color, sink))
    {
        printf("GStreamer: cannot link color -> appsink\n");
        FinalizeGstPipeLine();

        return false;
    }

    gst_app_sink_set_max_buffers (GST_APP_SINK(sink), 1);
    gst_app_sink_set_drop (GST_APP_SINK(sink), true);

    // do not emit signals: all calls will be synchronous and blocking
    gst_app_sink_set_emit_signals (GST_APP_SINK(sink), 0);

#if GST_VERSION_MAJOR == 0
    GstCaps* caps_appsink = gst_caps_new_simple("video/x-raw-rgb",
                                                "bpp",        G_TYPE_INT, 24,
                                                "red_mask",   G_TYPE_INT, 0xFF0000,
                                                "green_mask", G_TYPE_INT, 0x00FF00,
                                                "blue_mask",  G_TYPE_INT, 0x0000FF,
                                                NULL);
#else
    // support 1 and 3 channel 8 bit data
    GstCaps* caps_appsink = gst_caps_from_string("video/x-raw, format=(string){RGB, GRAY8};");
#endif
    gst_app_sink_set_caps(GST_APP_SINK(sink), caps_appsink);
    gst_caps_unref(caps_appsink);

    // Force pipeline to play video as fast as possible, ignoring system clock
    gst_pipeline_use_clock(pipeline, NULL);

    status = gst_element_set_state(GST_ELEMENT(pipeline), GST_STATE_PLAYING);
    handleGStreamerMessages();

    if (status == GST_STATE_CHANGE_ASYNC)
    {
        // wait for status update
        status = gst_element_get_state(GST_ELEMENT(pipeline), NULL, NULL, GST_CLOCK_TIME_NONE);
    }
    if (status == GST_STATE_CHANGE_FAILURE)
    {
        printf("GStreamer: unable to start playback\n");
        FinalizeGstPipeLine();

        return false;
    }

    std::unique_ptr<GstPad, GStreamerObjectDeleter> pad(gst_element_get_static_pad(color, "src"));
#if GST_VERSION_MAJOR == 0
    std::unique_ptr<GstCaps, GStreamerObjectDeleter> bufferCaps(gst_pad_get_caps(pad.get()));
#else
    std::unique_ptr<GstCaps, GStreamerObjectDeleter> bufferCaps(gst_pad_get_current_caps(pad.get()));
#endif

    const GstStructure *structure = gst_caps_get_structure(bufferCaps.get(), 0);

    int width = 0, height = 0;
    if (!gst_structure_get_int(structure, "width", &width))
    {
        handleGStreamerMessages();
        printf("Cannot query video width\n");
    }

    if (!gst_structure_get_int(structure, "height", &height))
    {
        handleGStreamerMessages();
        printf("Cannot query video height\n");
    }

    configuration.frameWidth = static_cast<vx_uint32>(width);
    configuration.frameHeight = static_cast<vx_uint32>(height);

    gint num = 0, denom = 1;
    if (!gst_structure_get_fraction(structure, "framerate", &num, &denom))
    {
        handleGStreamerMessages();
        printf("Cannot query video fps\n");
    }

    configuration.fps = static_cast<float>(num) / denom;
    end = false;

    return true;
}
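/*
 * A hedged sketch of the matching fetch step: with emit-signals disabled
 * above, frames are pulled synchronously from the appsink. FetchGstFrame()
 * is an assumption; the version guards follow the file's convention.
 */
bool GStreamerCameraFrameSourceImpl::FetchGstFrame()
{
#if GST_VERSION_MAJOR == 0
    GstBuffer* buffer = gst_app_sink_pull_buffer(GST_APP_SINK(sink));
    if (buffer == NULL)
        return false;          // EOS or error
    // ... map/copy the frame into the application's image here ...
    gst_buffer_unref(buffer);
#else
    GstSample* sample = gst_app_sink_pull_sample(GST_APP_SINK(sink));
    if (sample == NULL)
        return false;          // EOS or error
    GstBuffer* buffer = gst_sample_get_buffer(sample);
    (void)buffer;              // ... map/copy the frame here ...
    gst_sample_unref(sample);
#endif
    return true;
}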
void ofxGstRTPServer::play(){
	// pass the pipeline to the gstUtils so it starts everything
	gst.setPipelineWithSink(pipelineStr,"",true);

	// get the rtp and rtpc elements from the pipeline so we can read their properties
	// during execution
	rtpbin = gst.getGstElementByName("rtpbin");
	vRTPsink = gst.getGstElementByName("vrtpsink");
	vRTPCsink = gst.getGstElementByName("vrtcpsink");
	vRTPCsrc = gst.getGstElementByName("vrtcpsrc");

	aRTPsink = gst.getGstElementByName("artpsink");
	aRTPCsink = gst.getGstElementByName("artcpsink");
	aRTPCsrc = gst.getGstElementByName("artcpsrc");

	dRTPsink = gst.getGstElementByName("drtpsink");
	dRTPCsink = gst.getGstElementByName("drtcpsink");
	dRTPCsrc = gst.getGstElementByName("drtcpsrc");

	oRTPsink = gst.getGstElementByName("ortpsink");
	oRTPCsink = gst.getGstElementByName("ortcpsink");
	oRTPCsrc = gst.getGstElementByName("ortcpsrc");

	vEncoder = gst.getGstElementByName("vencoder");
	dEncoder = gst.getGstElementByName("dencoder");
	aEncoder = gst.getGstElementByName("aencoder");
	appSrcVideoRGB = gst.getGstElementByName("appsrcvideo");
	appSrcDepth = gst.getGstElementByName("appsrcdepth");
	appSrcOsc = gst.getGstElementByName("appsrcosc");

#if ENABLE_ECHO_CANCEL
	if(echoCancel && audioChannelReady){
		appSrcAudio = gst.getGstElementByName("audioechosrc");
		if(appSrcAudio){
			gst_app_src_set_stream_type((GstAppSrc*)appSrcAudio,GST_APP_STREAM_TYPE_STREAM);
		}

		#ifdef TARGET_LINUX
			gstAudioIn.setPipelineWithSink("pulsesrc stream-properties=\"props,media.role=phone\" name=audiocapture ! audio/x-raw,format=S16LE,rate=44100,channels=1 ! audioresample ! audioconvert ! audio/x-raw,format=S16LE,rate=32000,channels=1 ! appsink name=audioechosink");
			volume = gstAudioIn.getGstElementByName("audiocapture");
		#elif defined(TARGET_OSX)
			// for osx we specify the output format since osxaudiosrc doesn't report the formats supported by the hw
			// FIXME: we should detect the format somehow and set it automatically
			gstAudioIn.setPipelineWithSink("osxaudiosrc name=audiocapture ! audio/x-raw,rate=44100,channels=1 ! volume name=volume ! audioresample ! audioconvert ! audio/x-raw,format=S16LE,rate=32000,channels=1 ! appsink name=audioechosink");
			volume = gstAudioIn.getGstElementByName("volume");
		#endif

		appSinkAudio = gstAudioIn.getGstElementByName("audioechosink");
		audiocapture = gstAudioIn.getGstElementByName("audiocapture");

		// set callbacks to receive audio data
		GstAppSinkCallbacks gstCallbacks = {};
		gstCallbacks.eos = &on_eos_from_audio;
		gstCallbacks.new_preroll = &on_new_preroll_from_audio;
		gstCallbacks.new_sample = &on_new_buffer_from_audio;
		gst_app_sink_set_callbacks(GST_APP_SINK(appSinkAudio), &gstCallbacks, this, NULL);
		gst_app_sink_set_emit_signals(GST_APP_SINK(appSinkAudio),0);
	}
#endif

#if ENABLE_NAT_TRANSVERSAL
	if(videoStream){
		g_object_set(G_OBJECT(vRTPsink),"agent",videoStream->getAgent(),"stream",videoStream->getStreamID(),"component",1,NULL);
		g_object_set(G_OBJECT(vRTPCsink),"agent",videoStream->getAgent(),"stream",videoStream->getStreamID(),"component",2,NULL);
		g_object_set(G_OBJECT(vRTPCsrc),"agent",videoStream->getAgent(),"stream",videoStream->getStreamID(),"component",3,NULL);
	}
	if(depthStream){
		g_object_set(G_OBJECT(dRTPsink),"agent",depthStream->getAgent(),"stream",depthStream->getStreamID(),"component",1,NULL);
		g_object_set(G_OBJECT(dRTPCsink),"agent",depthStream->getAgent(),"stream",depthStream->getStreamID(),"component",2,NULL);
		g_object_set(G_OBJECT(dRTPCsrc),"agent",depthStream->getAgent(),"stream",depthStream->getStreamID(),"component",3,NULL);
	}
	if(audioStream){
		g_object_set(G_OBJECT(aRTPsink),"agent",audioStream->getAgent(),"stream",audioStream->getStreamID(),"component",1,NULL);
		g_object_set(G_OBJECT(aRTPCsink),"agent",audioStream->getAgent(),"stream",audioStream->getStreamID(),"component",2,NULL);
		g_object_set(G_OBJECT(aRTPCsrc),"agent",audioStream->getAgent(),"stream",audioStream->getStreamID(),"component",3,NULL);
	}
	if(oscStream){
		g_object_set(G_OBJECT(oRTPsink),"agent",oscStream->getAgent(),"stream",oscStream->getStreamID(),"component",1,NULL);
		g_object_set(G_OBJECT(oRTPCsink),"agent",oscStream->getAgent(),"stream",oscStream->getStreamID(),"component",2,NULL);
		g_object_set(G_OBJECT(oRTPCsrc),"agent",oscStream->getAgent(),"stream",oscStream->getStreamID(),"component",3,NULL);
	}
#endif


	if(appSrcVideoRGB) gst_app_src_set_stream_type((GstAppSrc*)appSrcVideoRGB,GST_APP_STREAM_TYPE_STREAM);
	if(appSrcDepth) gst_app_src_set_stream_type((GstAppSrc*)appSrcDepth,GST_APP_STREAM_TYPE_STREAM);
	if(appSrcOsc) gst_app_src_set_stream_type((GstAppSrc*)appSrcOsc,GST_APP_STREAM_TYPE_STREAM);

	g_signal_connect(rtpbin,"on-new-ssrc",G_CALLBACK(&ofxGstRTPServer::on_new_ssrc_handler),this);

#if ENABLE_ECHO_CANCEL
	if(echoCancel && audioChannelReady){
		gstAudioIn.startPipeline();
		gstAudioIn.play();
	}
#endif

	gst.startPipeline();
	gst.play();

	ofAddListener(ofEvents().update,this,&ofxGstRTPServer::update);
}
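/*
 * Hedged sketches of the audio appsink callbacks registered in the echo
 * cancel branch above. Signatures follow GstAppSinkCallbacks in GStreamer
 * 1.x; what ofxGstRTPServer does with the samples is an assumption.
 */
static void on_eos_from_audio(GstAppSink *sink, gpointer data){
	// capture stream ended; nothing to release here
}

static GstFlowReturn on_new_preroll_from_audio(GstAppSink *sink, gpointer data){
	GstSample *sample = gst_app_sink_pull_preroll(sink);
	if(sample) gst_sample_unref(sample);
	return GST_FLOW_OK;
}

static GstFlowReturn on_new_buffer_from_audio(GstAppSink *sink, gpointer data){
	ofxGstRTPServer *server = (ofxGstRTPServer*)data;
	GstSample *sample = gst_app_sink_pull_sample(sink);
	if(!sample) return GST_FLOW_ERROR;
	// ... feed the raw audio to the echo canceller through `server` ...
	(void)server;
	gst_sample_unref(sample);
	return GST_FLOW_OK;
}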