Example #1
0
/// Wraps a native local media stream for script access: exposes label /
/// videoTracks / audioTracks as JS properties and builds a JSAPI wrapper
/// for every native track the stream already contains.
LocalMediaStream::LocalMediaStream(const talk_base::scoped_refptr<webrtc::LocalMediaStreamInterface>& pStream)
    : FB::JSAPIAuto("MediaStream")
    , m_label(pStream->label())
    , m_videoTracks(FB::variant_list_of())
    , m_audioTracks(FB::variant_list_of())
{
    // Script-visible properties.
    registerProperty("label", make_property(this, &LocalMediaStream::get_label));
    registerProperty("videoTracks", make_property(this, &LocalMediaStream::get_videoTracks));
    registerProperty("audioTracks", make_property(this, &LocalMediaStream::get_audioTracks));

    // Wrap each native video track in a LocalVideoTrack JSAPI object.
    for (int idx = 0; idx < pStream->video_tracks()->count(); ++idx)
    {
        talk_base::scoped_refptr<webrtc::VideoTrackInterface> native(pStream->video_tracks()->at(idx));
        // NOTE(review): unchecked downcast — assumes every track in a local
        // stream really is a Local*TrackInterface; verify against the API.
        talk_base::scoped_refptr<webrtc::LocalVideoTrackInterface> local(
            static_cast<webrtc::LocalVideoTrackInterface*>(native.get()));
        AddTrack(LocalVideoTrack::Create(local));
    }

    // Same treatment for the audio tracks.
    for (int idx = 0; idx < pStream->audio_tracks()->count(); ++idx)
    {
        talk_base::scoped_refptr<webrtc::AudioTrackInterface> native(pStream->audio_tracks()->at(idx));
        talk_base::scoped_refptr<webrtc::LocalAudioTrackInterface> local(
            static_cast<webrtc::LocalAudioTrackInterface*>(native.get()));
        AddTrack(LocalAudioTrack::Create(local));
    }
}
Example #2
0
bool Init(
        const std::string kind, 
        const std::string label,
        talk_base::scoped_refptr<webrtc::PeerConnectionFactoryInterface> pc_factory,
        talk_base::scoped_refptr<webrtc::MediaStreamTrackInterface> track)
{
    m_track = track;
    if (m_track != NULL) {
        if (kind == kAudioKind)
            m_source = ((webrtc::AudioTrackInterface *)m_track.get())->GetSource();
        else
            m_source = ((webrtc::VideoTrackInterface *)m_track.get())->GetSource();
        return true;
    }

    returnv_assert(pc_factory.get(), false);
    if (kind == kAudioKind) {
        if (!m_source.get()) {
            WebrtcMediaConstraints constraints;
            if (m_constraints.mtype == XRTC_AUDIO && m_constraints.ptr) {
                audio_constraints_t *audio = (audio_constraints_t *)m_constraints.ptr;
                if (audio->aec.valid)
                    constraints.AddItem(webrtc::MediaConstraintsInterface::kEchoCancellation, audio->aec.val, audio->aec.optional);
                if (audio->agc.valid)
                    constraints.AddItem(webrtc::MediaConstraintsInterface::kAutoGainControl, audio->agc.val, audio->agc.optional);
                if (audio->ns.valid)
                    constraints.AddItem(webrtc::MediaConstraintsInterface::kNoiseSuppression, audio->ns.val, audio->ns.optional);
                if (audio->highPassFilter.valid)
                    constraints.AddItem(webrtc::MediaConstraintsInterface::kHighpassFilter, audio->highPassFilter.val, audio->highPassFilter.optional);
            }
            m_source = pc_factory->CreateAudioSource(&constraints);
        }
        m_track = pc_factory->CreateAudioTrack(label, (webrtc::AudioSourceInterface *)(m_source.get()));
    }else if (kind == kVideoKind) {
        if (!m_source.get()) {
            std::string vname = "";
            if (m_constraints.mtype == XRTC_VIDEO && m_constraints.ptr) {
                video_constraints_t *video = (video_constraints_t *)m_constraints.ptr;
                if (video && video->device.valid) {
                    vname = video->device.val.did;
                }
            }

            // if vname empty, select default device
            LOGI("vname="<<vname);
            cricket::VideoCapturer* capturer = OpenVideoCaptureDevice(vname);
            if (capturer) {
                m_source = pc_factory->CreateVideoSource(capturer, NULL);
            }
        }

        LOGD("create video track by source");
        if (m_source) {
            m_track = pc_factory->CreateVideoTrack(label, (webrtc::VideoSourceInterface *)(m_source.get()));
        }
    }
    return (m_track != NULL);
}
Example #3
0
/// Map the native WebRTC track state onto our MediaStreamTrackState enum.
/// A missing track is reported as TRACK_ENDED.
MediaStreamTrackState readyState() {
    returnv_assert(m_track.get(), TRACK_ENDED);

    const webrtc::MediaStreamTrackInterface::TrackState native = m_track->state();
    if (native == webrtc::MediaStreamTrackInterface::kInitializing)
        return TRACK_NEW;
    if (native == webrtc::MediaStreamTrackInterface::kLive)
        return TRACK_LIVE;
    // kEnded and kFailed (and anything unexpected) both read as ended.
    return TRACK_ENDED;
}
Example #4
0
/// Thin wrapper over a native remote audio track; all state (kind, label)
/// is forwarded to and held by the MediaStreamTrack base class.
RemoteAudioTrack::RemoteAudioTrack(const talk_base::scoped_refptr<webrtc::AudioTrackInterface>& pTrack)
    : MediaStreamTrack(pTrack->kind(), pTrack->label())
{}
Example #5
0
/// Thin wrapper over a native local audio track; kind, label and the
/// enabled flag are captured by the LocalMediaStreamTrack base class.
LocalAudioTrack::LocalAudioTrack(const talk_base::scoped_refptr<webrtc::LocalAudioTrackInterface>& pTrack)
    : LocalMediaStreamTrack(pTrack->kind(), pTrack->label(), pTrack->enabled())
{}
Example #6
0
/// Wrapper over a native local video track. Besides the base-class state
/// (kind, label, enabled) it exposes a read/write "effect" property to script.
LocalVideoTrack::LocalVideoTrack(const talk_base::scoped_refptr<webrtc::LocalVideoTrackInterface>& pTrack)
    : LocalMediaStreamTrack(pTrack->kind(), pTrack->label(), pTrack->enabled())
{
    registerProperty("effect",
                     make_property(this,
                                   &LocalVideoTrack::get_effect,
                                   &LocalVideoTrack::set_effect));
}
Example #7
0
// Forward the script-set "enabled" flag to the native track.
// return_assert bails out early, so this is a silent no-op when no
// native track is attached.
void Put_enabled(boolean enable) {
    return_assert(m_track.get());
    m_track->set_enabled(enable);
}
Example #8
0
// True iff the underlying source reports the kMuted state.
// Without a source we report not-muted (false) via returnv_assert.
boolean muted() {
    returnv_assert(m_source.get(), false);
    return (m_source->state() == webrtc::MediaSourceInterface::kMuted);
}
Example #9
0
// Whether the native track is currently enabled; false when no track is attached.
boolean enabled() {
    returnv_assert(m_track.get(), false);
    const bool is_enabled = m_track->enabled();
    return is_enabled;
}
Example #10
0
// The track's label; empty string when no native track is attached.
// NOTE(review): the native API exposes id() here, surfaced as the label —
// presumably the label->id rename in the WebRTC API; confirm against headers.
DOMString label() {
    returnv_assert(m_track.get(), "");
    const DOMString track_id = m_track->id();
    return track_id;
}
Example #11
0
// The native track's kind string; empty string when no track is attached.
DOMString kind() {
    returnv_assert(m_track.get(), "");
    const DOMString track_kind = m_track->kind();
    return track_kind;
}
Example #12
0
// Raw, non-owning pointer to the wrapped native track (may be NULL).
// Callers must not release it; ownership stays with m_track.
void * getptr()
{
    return static_cast<void *>(m_track.get());
}
Example #13
0
/// Destructor. Drops the references to the native track and its source.
///
/// Bug fix: the previous code called scoped_refptr::release(), which
/// DETACHES the raw pointer without decrementing the reference count —
/// leaking both the source and the track on every destruction. Assigning
/// NULL goes through the smart pointer's normal path and actually releases
/// the reference (letting the member destructors run would do the same).
virtual ~CMediaStreamTrack()
{
    m_source = NULL;
    m_track = NULL;
}