void NavigatorUserMedia::getUserMedia (const MediaStreamConstraints & constraints, NavigatorUserMediaCallback *sink) { return_assert(sink); return_assert(_pc_factory.get()); NavigatorUserMediaError error; std::string slabel = kLocalStreamLabel; ubase::zeroptr<MediaStream> stream = CreateMediaStream(slabel, _pc_factory, NULL); return_assert(stream.get()); // for audio track if (constraints.has_audio) { LOGD("get audio track and add it to stream"); std::string alabel = kAudioLabel; MediaTrackConstraints audio_constraints(constraints.audio); ubase::zeroptr<MediaStreamTrack> audio_track = CreateMediaStreamTrack(XRTC_AUDIO, alabel, &audio_constraints, _pc_factory, NULL); if (audio_track->getptr() == NULL) { LOGW("fail to get audio track") error.errstr = "no audio track"; sink->ErrorCallback(error); }else { stream->addTrack(audio_track); } } // for video track if (constraints.has_video) { LOGD("get video track and add it to stream"); std::string vlabel = kVideoLabel; MediaTrackConstraints video_constraints(constraints.video); ubase::zeroptr<MediaStreamTrack> video_track = CreateMediaStreamTrack(XRTC_VIDEO, vlabel, &video_constraints, _pc_factory, NULL); if (video_track->getptr() == NULL) { LOGW("fail to get video track") error.errstr = "no video track"; sink->ErrorCallback(error); }else { stream->addTrack(video_track); } } LOGD("return a/v stream") sink->SuccessCallback(stream); }
void CGstPlayback::AnalyzeStreams() { return_assert(m_playbin); g_print("%s, begin", __func__); g_object_get (m_playbin, "n-video", &m_numVideo, NULL); g_object_get (m_playbin, "n-audio", &m_numAudio, NULL); g_object_get (m_playbin, "n-text", &m_numText, NULL); for (gint i = 0; i < m_numVideo; i++) { GstTagList *tags = NULL; gchar *str = NULL; /* Retrieve the stream's video tags */ g_signal_emit_by_name (m_playbin, "get-video-tags", i, &tags); if (tags) { g_print ("video stream %d:\n", i); gst_tag_list_get_string (tags, GST_TAG_VIDEO_CODEC, &str); g_print (" codec: %s\n", str ? str : "unknown"); g_free (str); gst_tag_list_free (tags); } } for (gint i = 0; i < m_numAudio; i++) { GstTagList *tags = NULL; gchar *str = NULL; guint rate = 0; /* Retrieve the stream's audio tags */ g_signal_emit_by_name (m_playbin, "get-audio-tags", i, &tags); if (tags) { g_print ("audio stream %d:\n", i); if (gst_tag_list_get_string (tags, GST_TAG_AUDIO_CODEC, &str)) { g_print (" codec: %s\n", str); g_free (str); } if (gst_tag_list_get_string (tags, GST_TAG_LANGUAGE_CODE, &str)) { g_print (" language: %s\n", str); g_free (str); } if (gst_tag_list_get_uint (tags, GST_TAG_BITRATE, &rate)) { g_print (" bitrate: %d\n", rate); } gst_tag_list_free (tags); } } for (gint i = 0; i < m_numText; i++) { GstTagList *tags = NULL; gchar *str = NULL; /* Retrieve the stream's subtitle tags */ g_signal_emit_by_name (m_playbin, "get-text-tags", i, &tags); if (tags) { g_print ("subtitle stream %d:\n", i); if (gst_tag_list_get_string (tags, GST_TAG_LANGUAGE_CODE, &str)) { g_print (" language: %s\n", str); g_free (str); } gst_tag_list_free (tags); } } g_object_get (GST_OBJECT(m_playbin), "current-video", &m_curVideo, NULL); g_object_get (GST_OBJECT(m_playbin), "current-audio", &m_curAudio, NULL); g_object_get (GST_OBJECT(m_playbin), "current-text", &m_curText, NULL); }
// Enables or disables the underlying media track.
// Silently returns (via return_assert) when no track is attached yet.
void Put_enabled(boolean enable)
{
    return_assert(m_track.get());
    m_track->set_enabled(enable);
}