Example #1
0
    void VoiceSession::CreateFarsightChannel()
    {
        try
        {
            // todo: for linux use "autoaudiosrc" for audio_src_name
            //       CURRENT IMPLEMENTATION WORKS ONLY ON WINDOWS
            farsight_channel_ = new FarsightChannel(tp_channel_, "dshowaudiosrc", "autovideosrc", "autovideosink");
            if ( !farsight_channel_->IsAudioSupported() )
            {
                SAFE_DELETE(farsight_channel_);
                QString message = QString("Cannot initialize audio features.");
                reason_ = message;
                LogError(message.toStdString());
                state_ = STATE_ERROR;
                emit StateChanged(state_);
                return;
            }
        }
        catch(Exception &e) 
        {
            QString message = QString("Cannot create FarsightChannel object - ").append(e.what());
            reason_ = message;
            LogError(message.toStdString());
            state_ = STATE_ERROR;
            emit StateChanged(state_);
            return;
        }

        connect( farsight_channel_, SIGNAL(AudioDataAvailable(int)), SLOT( OnFarsightAudioDataAvailable(int ) ), Qt::QueuedConnection );
        connect( farsight_channel_, SIGNAL(AudioBufferOverflow(int)), SLOT( OnFarsightAudioBufferOverflow(int ) ), Qt::QueuedConnection );

        connect(tp_channel_->becomeReady(Tp::StreamedMediaChannel::FeatureStreams),
             SIGNAL( finished(Tp::PendingOperation*) ),
             SLOT( OnStreamFeatureReady(Tp::PendingOperation*) ));

        connect(farsight_channel_,
            SIGNAL(StatusChanged(TelepathyIM::FarsightChannel::Status)),
            SLOT(OnFarsightChannelStatusChanged(TelepathyIM::FarsightChannel::Status)),  Qt::QueuedConnection);

        connect(farsight_channel_,
            SIGNAL( AudioStreamReceived() ),
            SLOT( OnFarsightChannelAudioStreamReceived() ),  Qt::QueuedConnection);

        connect(farsight_channel_,
            SIGNAL( VideoStreamReceived() ),
            SLOT( OnFarsightChannelVideoStreamReceived() ), Qt::QueuedConnection);
    }
Example #2
0
    void FarsightChannel::LinkIncomingSourcePad(TfStream *stream, GstPad *src_pad, FsCodec *codec)
    {
        // Links a newly appeared Farsight source pad (incoming audio or video)
        // to the matching local playback element and starts it playing.
        // On success emits StatusChanged(StatusConnected) plus
        // AudioStreamReceived() or VideoStreamReceived().
        incoming_video_widget_mutex_.lock();

        // todo: Check if source pad is already linked!
        // NOTE(review): the codec clock rate is stored in the audio member even
        // when this is a video pad — presumably only audio pads matter here;
        // confirm before moving this into the audio branch.
        audio_stream_in_clock_rate_ = codec->clock_rate;

        guint media_type;
        g_object_get(stream, "media-type", &media_type, NULL);

        GstElement *output_element = 0;
        bool sink_already_linked = false;

        switch (media_type)
        {
            case TP_MEDIA_STREAM_TYPE_AUDIO:
            {
                output_element = audio_playback_bin_;
                if (audio_in_src_pad_)
                    sink_already_linked = true;
                LogInfo("Got pad for incoming AUDIO stream.");
                break;
            }
            case TP_MEDIA_STREAM_TYPE_VIDEO:
            {
                if (!video_supported_)
                {
                    LogInfo("Got incoming VIDEO stream but ignore that because lack of video support.");
                    incoming_video_widget_mutex_.unlock();
                    return;
                }

                output_element = received_video_playback_element_;
                if (video_in_src_pad_)
                    sink_already_linked = true;
                LogDebug("Got pad for incoming VIDEO stream.");
                break;
            }
            default:
            {
                // Unknown media type: bail out instead of continuing with a
                // null output element (Q_ASSERT is compiled out in release
                // builds, so the original fall-through would dereference 0).
                Q_ASSERT(false);
                LogError("Got pad with unknown media type.");
                incoming_video_widget_mutex_.unlock();
                return;
            }
        }

        if (sink_already_linked)
        {
            LogInfo("FarsightChannel: another Src pad added with same type.");
        }
        else
        {
            // First pad of this type: the playback element is not yet part of
            // the pipeline.
            if (!gst_bin_add(GST_BIN(pipeline_), output_element))
            {
                // fixed typo: was "Cannot and output element"
                LogWarning("Cannot add output element to GStreamer pipeline!");
            }
        }

        GstPad *output_pad = gst_element_get_static_pad(output_element, "sink");
        if (!output_pad)
        {
            // Without a sink pad there is nothing to link to; continuing would
            // pass a null pad to gst_pad_unlink()/gst_pad_link() and crash.
            LogError("Cannot get sink pad from output element");
            incoming_video_widget_mutex_.unlock();
            return;
        }

        // If a pad of this type was linked earlier, detach it and remember the
        // new source pad instead.
        switch (media_type)
        {
            case TP_MEDIA_STREAM_TYPE_AUDIO:
            {
                if (audio_in_src_pad_)
                    gst_pad_unlink(audio_in_src_pad_, output_pad);
                audio_in_src_pad_ = src_pad;
                break;
            }
            case TP_MEDIA_STREAM_TYPE_VIDEO:
            {
                if (video_in_src_pad_)
                    gst_pad_unlink(video_in_src_pad_, output_pad);
                video_in_src_pad_ = src_pad;
                break;
            }
        }

        if (gst_pad_link(src_pad, output_pad) != GST_PAD_LINK_OK)
        {
            // Message made media-type neutral (was "audio" even for video).
            LogWarning("Cannot link incoming src pad to output element.");
        }

        // gst_element_get_static_pad() returns a new reference; release it now
        // that the link has been made (was leaked before).
        gst_object_unref(output_pad);

        gst_element_set_state(output_element, GST_STATE_PLAYING);

        incoming_video_widget_mutex_.unlock();

        status_ = StatusConnected;
        emit StatusChanged(status_);
        switch (media_type)
        {
            case TP_MEDIA_STREAM_TYPE_AUDIO:
                emit AudioStreamReceived();
                break;
            case TP_MEDIA_STREAM_TYPE_VIDEO:
                emit VideoStreamReceived();
                break;
        }
    }