/** @brief Wait until all the kinect handler updated */ void Kinect::WaitAndUpdateAll() { std::vector< HANDLE > handle; if ( VideoStream().hStream_ != NULL ) { handle.push_back( VideoStream().event_.get() ); } if ( DepthStream().hStream_ != NULL ) { handle.push_back( DepthStream().event_.get() ); } if( AudioStream().hStream_ != NULL ) { handle.push_back( AudioStream().event_.get() ); } if ( Skeleton().IsEnabled() ) { handle.push_back( Skeleton().event_.get() ); } if ( handle.size() == 0 ) { return; } DWORD ret = ::WaitForMultipleObjects( handle.size(), &handle[0], TRUE, INFINITE ); if ( ret == WAIT_FAILED ) { return; } }
/**
 * @brief Block until the selected Kinect streams have signalled their update events.
 *
 * Like WaitAndUpdateAll(), but only streams whose bit is set in @a flag
 * (UPDATE_FLAG_VIDEO / _DEPTH / _AUDIO / _SKELETON) AND which are actually
 * enabled take part in the wait. Returns immediately when the resulting set
 * is empty, or when the wait fails.
 *
 * @param flag  Bitwise OR of the UPDATE_FLAG_* values to wait on.
 */
void Kinect::WaitAndUpdate(UINT flag)
{
    std::vector< HANDLE > handle;
    if ( VideoStream().hStream_ != NULL && (flag & UPDATE_FLAG_VIDEO) ) {
        handle.push_back( VideoStream().event_.get() );
    }
    if ( DepthStream().hStream_ != NULL && (flag & UPDATE_FLAG_DEPTH) ) {
        handle.push_back( DepthStream().event_.get() );
    }
    if ( AudioStream().hStream_ != NULL && (flag & UPDATE_FLAG_AUDIO) ) {
        handle.push_back( AudioStream().event_.get() );
    }
    if ( Skeleton().IsEnabled() && (flag & UPDATE_FLAG_SKELETON) ) {
        handle.push_back( Skeleton().event_.get() );
    }

    // Nothing selected/enabled: nothing to wait for.
    if ( handle.empty() ) {
        return;
    }

    // WaitForMultipleObjects takes a DWORD count; the explicit cast avoids a
    // size_t -> DWORD narrowing warning on 64-bit builds (the handle count is
    // at most 4, so the cast is safe). bWaitAll = TRUE: wait for all events.
    DWORD ret = ::WaitForMultipleObjects( static_cast<DWORD>( handle.size() ),
                                          handle.data(), TRUE, INFINITE );
    if ( ret == WAIT_FAILED ) {
        return;
    }
}
/** * @brief VoiceEndpoint::handleFrame * @param data * Handles raw unframed packet for audio endpoint * It handles both - bitstream and stop packet * When bitstream stops it fires a timer to detect timeout and cleanup session * Important: response should be made within 2000ms window after stop */ void VoiceEndpoint::handleFrame(const QByteArray &data) { WatchDataReader reader(data); quint8 cmd = reader.read<quint8>(); quint16 sid = reader.readLE<quint16>(); if(cmd == FrmDataTransfer) { if(sid == m_sessId) { AudioStream str; str.count = reader.read<quint8>(); for(int i=0;i<str.count;i++) { Frame frm; frm.length = reader.read<quint8>(); frm.data = reader.readBytes(frm.length); str.frames.append(frm); } emit audioFrame(sid,str); m_sesPhase = PhAudioStarted; } else { stopAudioStream(sid); } } else if(cmd == FrmStopTransfer) { if(sid != m_sessId) return; if(m_sesTimer) killTimer(m_sesTimer); qDebug() << "Pebble finished sending audio at session" << m_sessId; emit audioFrame(m_sessId,AudioStream()); m_sesPhase = PhAudioStopped; m_sesTimer = startTimer(1500); } else { qWarning() << "Unknown audio frame type" << data.toHex(); } }
// When a map is selected in the song wheel void OnMapSelected(MapIndex* map) { if(map == m_currentPreviewAudio) return; // Set current preview audio DifficultyIndex* previewDiff = map->difficulties[0]; String audioPath = map->path + Path::sep + previewDiff->settings.audioNoFX; AudioStream previewAudio = g_audio->CreateStream(audioPath); if(previewAudio) { previewAudio->SetPosition(previewDiff->settings.previewOffset); m_previewPlayer.FadeTo(previewAudio); } else { Logf("Failed to load preview audio from [%s]", Logger::Warning, audioPath); m_previewPlayer.FadeTo(AudioStream()); } m_currentPreviewAudio = map; }