/// <summary> /// Create the first connected Kinect found /// </summary> /// <returns>S_OK on success, otherwise failure code</returns> HRESULT CBackgroundRemovalBasics::CreateFirstConnected() { // Get the Kinect and specify that we'll be using depth HRESULT hr = m_pSensorChooser->GetSensor(NUI_INITIALIZE_FLAG_USES_DEPTH_AND_PLAYER_INDEX | NUI_INITIALIZE_FLAG_USES_COLOR, &m_pNuiSensor); if (SUCCEEDED(hr) && NULL != m_pNuiSensor) { // Open a depth image stream to receive depth frames hr = m_pNuiSensor->NuiImageStreamOpen( NUI_IMAGE_TYPE_DEPTH_AND_PLAYER_INDEX, cDepthResolution, 0, 2, m_hNextDepthFrameEvent, &m_pDepthStreamHandle); if (SUCCEEDED(hr)) { // Open a color image stream to receive color frames hr = m_pNuiSensor->NuiImageStreamOpen( NUI_IMAGE_TYPE_COLOR, cColorResolution, 0, 2, m_hNextColorFrameEvent, &m_pColorStreamHandle); if (SUCCEEDED(hr)) { hr = m_pNuiSensor->NuiSkeletonTrackingEnable(m_hNextSkeletonFrameEvent, NUI_SKELETON_TRACKING_FLAG_ENABLE_IN_NEAR_RANGE); } } } if (NULL == m_pNuiSensor || FAILED(hr)) { SafeRelease(m_pNuiSensor); // Reset all the event to nonsignaled state ResetEvent(m_hNextDepthFrameEvent); ResetEvent(m_hNextColorFrameEvent); ResetEvent(m_hNextSkeletonFrameEvent); SetStatusMessage(L"No ready Kinect found!"); return E_FAIL; } else { SetStatusMessage(L"Kinect found!"); } return hr; }
void MainFrame::SaveAsCurrentConfig(){ wxString defaultDir = _appPrefs.GetLastConfigFileDirectory(); wxString defaultFile = ""; wxFileDialog fileDialog(this, "Save As Configuration", defaultDir, defaultFile, CONFIG_FILE_FILTER,wxFD_SAVE); int result = fileDialog.ShowModal(); if (wxID_OK == result){ RaceCaptureConfigFileWriter writer; const wxString fileName = fileDialog.GetPath(); if (!wxFile::Exists(fileName) || (wxFile::Exists(fileName) && QueryFileOverwrite())){ writer.SetFileName(fileName); try{ writer.WriteConfigData(m_currentConfig); m_currentConfigFileName = new wxString(fileName); m_configModified = false; SetStatusMessage("RaceCapture Configuration Saved"); _appPrefs.SetLastConfigFileDirectory(fileDialog.GetDirectory()); } catch (FileAccessException &e){ wxMessageDialog dlg(this, wxString::Format("Failed to save RaceCapture Configuration:\n\n%s", e.GetMessage().ToAscii()), "Error saving", wxOK | wxICON_HAND); dlg.ShowModal(); return; } } } UpdateConfigFileStatus(); }
/// <summary> /// Handle a completed frame from the Kinect Fusion processor. /// </summary> /// <returns>S_OK on success, otherwise failure code</returns> void CKinectFusionExplorer::HandleCompletedFrame() { KinectFusionProcessorFrame const* pFrame = nullptr; // Flush any extra WM_FRAMEREADY messages from the queue MSG msg; while (PeekMessage(&msg, m_hWnd, WM_FRAMEREADY, WM_FRAMEREADY, PM_REMOVE)) {} m_processor.LockFrame(&pFrame); if (!m_bSavingMesh) // don't render while a mesh is being saved { if (m_processor.IsVolumeInitialized()) { m_pDrawDepth->Draw(pFrame->m_pDepthRGBX, pFrame->m_cbImageSize); m_pDrawReconstruction->Draw(pFrame->m_pReconstructionRGBX, pFrame->m_cbImageSize); m_pDrawTrackingDataAssociation->Draw(pFrame->m_pTrackingDataRGBX, pFrame->m_cbImageSize); } SetStatusMessage(pFrame->m_statusMessage); SetFramesPerSecond(pFrame->m_fFramesPerSecond); } if (pFrame->m_bIntegrationResumed) { m_params.m_bPauseIntegration = false; CheckDlgButton(m_hWnd, IDC_CHECK_PAUSE_INTEGRATION, BST_UNCHECKED); m_processor.SetParams(m_params); } m_processor.UnlockFrame(); }
/// <summary> /// Renders the color and face streams /// </summary> /// <param name="nTime">timestamp of frame</param> /// <param name="pBuffer">pointer to frame data</param> /// <param name="nWidth">width (in pixels) of input image data</param> /// <param name="nHeight">height (in pixels) of input image data</param> void CFaceBasics::DrawStreams(INT64 nTime, RGBQUAD* pBuffer, int nWidth, int nHeight) { if (m_hWnd) { HRESULT hr; hr = m_pDrawDataStreams->BeginDrawing(); if (SUCCEEDED(hr)) { // Make sure we've received valid color data if (pBuffer && (nWidth == cColorWidth) && (nHeight == cColorHeight)) { // Draw the data with Direct2D hr = m_pDrawDataStreams->DrawBackground(reinterpret_cast<BYTE*>(pBuffer), cColorWidth * cColorHeight * sizeof(RGBQUAD)); } else { // Recieved invalid data, stop drawing hr = E_INVALIDARG; } if (SUCCEEDED(hr)) { // begin processing the face frames ProcessFaces(); } m_pDrawDataStreams->EndDrawing(); } if (!m_nStartTime) { m_nStartTime = nTime; } double fps = 0.0; LARGE_INTEGER qpcNow = {0}; if (m_fFreq) { if (QueryPerformanceCounter(&qpcNow)) { if (m_nLastCounter) { m_nFramesSinceUpdate++; fps = m_fFreq * m_nFramesSinceUpdate / double(qpcNow.QuadPart - m_nLastCounter); } } } WCHAR szStatusMessage[64]; StringCchPrintf(szStatusMessage, _countof(szStatusMessage), L" FPS = %0.2f Time = %I64d", fps, (nTime - m_nStartTime)); if (SetStatusMessage(szStatusMessage, 1000, false)) { m_nLastCounter = qpcNow.QuadPart; m_nFramesSinceUpdate = 0; } } }
// Advance to the next URI in the print queue and kick off its load.
// Recurses (via tail calls) to skip entries that fail to start; closes the
// window and reports completion once every URI has been printed.
NS_IMETHODIMP
nsMsgPrintEngine::StartNextPrintOperation()
{
  nsresult rv;

  // Only do this the first time through...
  if (mCurrentlyPrintingURI == -1)
    InitializeDisplayCharset();

  mCurrentlyPrintingURI++;

  // First, check if we are at the end of this stuff!
  if (mCurrentlyPrintingURI >= mURIArray.Length())
  {
    // This is the end...dum, dum, dum....my only friend...the end
    mWindow->Close();

    // Tell the user we are done...
    nsString msg;
    GetString(NS_LITERAL_STRING("PrintingComplete").get(), msg);
    SetStatusMessage(msg);
    return NS_OK;
  }

  // No docshell yet — skip ahead to the next entry
  if (!mDocShell)
    return StartNextPrintOperation();
  const nsString &uri = mURIArray[mCurrentlyPrintingURI];
  rv = FireThatLoadOperationStartup(uri);
  // On failure, move on to the next URI rather than aborting the whole batch
  if (NS_FAILED(rv))
    return StartNextPrintOperation();
  else
    return rv;
}
void MainFrame::InitComms(){ try{ m_raceAnalyzerComm.SetSerialPort(m_appOptions.GetSerialPort()); } catch(CommException &e){ SetStatusMessage(e.GetErrorMessage()); } }
/// <summary> /// Handle windows messages for the class instance /// </summary> /// <param name="hWnd">window message is for</param> /// <param name="uMsg">message</param> /// <param name="wParam">message data</param> /// <param name="lParam">additional message data</param> /// <returns>result of message processing</returns> LRESULT CALLBACK Direct2DWindow::DlgProc(HWND hWnd, UINT message, WPARAM wParam, LPARAM lParam) { switch (message) { case WM_INITDIALOG: { // Bind application window handle m_hWnd = hWnd; // Init Direct2D D2D1CreateFactory(D2D1_FACTORY_TYPE_SINGLE_THREADED, &m_pD2DFactory); // Create and initialize a new Direct2D image renderer (take a look at ImageRenderer.h) // We'll use this to draw the data we receive from the Kinect to the screen m_pDrawDepth = new ImageRenderer(); HRESULT hr = m_pDrawDepth->Initialize(GetDlgItem(m_hWnd, IDC_VIDEOVIEW), m_pD2DFactory, m_cdmap->GetFieldWidth(), m_cdmap->GetFieldHeight(), m_cdmap->GetFieldWidth() * sizeof(long)); if (FAILED(hr)) { SetStatusMessage(L"Failed to initialize the Direct2D draw device."); } } break; // If the titlebar X is clicked, destroy app case WM_CLOSE: DestroyWindow(hWnd); break; case WM_DESTROY: // Quit the main message pump PostQuitMessage(0); break; // Handle button press case WM_COMMAND: // If it was for the near mode control and a clicked event, change near mode if (IDC_CHECK_NEARMODE == LOWORD(wParam) && BN_CLICKED == HIWORD(wParam)) { // Toggle out internal state for near mode //m_bNearMode = !m_bNearMode; //if (NULL != m_pNuiSensor) //{ // // Set near mode based on our internal state // m_pNuiSensor->NuiImageStreamSetImageFrameFlags(m_pDepthStreamHandle, m_bNearMode ? NUI_IMAGE_STREAM_FLAG_ENABLE_NEAR_MODE : 0); //} } break; } return FALSE; }
// Refresh every view and control after a race event has been loaded.
// The call order matters: views/sessions first, then comm controls and
// config-change notifications, finally control sync and status display.
void MainFrame::RaceEventUpdated(){
	UpdateAnalyzerView();
	m_datalogPlayer.DatalogSessionsUpdated();
	m_datalogPlayer.RequeryAll();
	UpdateCommControls();
	NotifyConfigChanged();
	SyncControls();
	UpdateConfigFileStatus();
	SetStatusMessage("Race Event Loaded");
}
// // StatusDock::Update Status void StatusDock::UpdateStatus(float delta, const char *message) { LogDebug("STATUSDOCK - Update status."); if (message) SetStatusMessage(message); if (delta > 0) statusBar->Update(delta); Invalidate(); }
/// <summary> /// Initializes the default Kinect sensor /// </summary> /// <returns>indicates success or failure</returns> HRESULT CColorBasics::InitializeDefaultSensor() { HRESULT hr; hr = GetDefaultKinectSensor(&m_pKinectSensor); if (FAILED(hr)) { return hr; } if (m_pKinectSensor) { // Initialize the Kinect and get the color reader IColorFrameSource* pColorFrameSource = NULL; IBodyFrameSource* pBodyFrameSource = NULL; hr = m_pKinectSensor->Open(); if (SUCCEEDED(hr)) { hr = m_pKinectSensor->get_ColorFrameSource(&pColorFrameSource); } if (SUCCEEDED(hr)) { hr = pColorFrameSource->OpenReader(&m_pColorFrameReader); } // Body if (SUCCEEDED(hr)) { hr = m_pKinectSensor->get_CoordinateMapper(&m_pCoordinateMapper); } if (SUCCEEDED(hr)) { hr = m_pKinectSensor->get_BodyFrameSource(&pBodyFrameSource); } if (SUCCEEDED(hr)) { hr = pBodyFrameSource->OpenReader(&m_pBodyFrameReader); } SafeRelease(pBodyFrameSource); SafeRelease(pColorFrameSource); } if (!m_pKinectSensor || FAILED(hr)) { SetStatusMessage(L"No ready Kinect found!", 10000, true); return E_FAIL; } return hr; }
/// <summary> /// Create the first connected Kinect found /// </summary> /// <returns>indicates success or failure</returns> HRESULT CSkeletonBasics::CreateFirstConnected() { INuiSensor * pNuiSensor; int iSensorCount = 0; HRESULT hr = NuiGetSensorCount(&iSensorCount); if (FAILED(hr)) { return hr; } // Look at each Kinect sensor for (int i = 0; i < iSensorCount; ++i) { // Create the sensor so we can check status, if we can't create it, move on to the next hr = NuiCreateSensorByIndex(i, &pNuiSensor); if (FAILED(hr)) { continue; } // Get the status of the sensor, and if connected, then we can initialize it hr = pNuiSensor->NuiStatus(); if (S_OK == hr) { m_pNuiSensor = pNuiSensor; break; } // This sensor wasn't OK, so release it since we're not using it pNuiSensor->Release(); } if (NULL != m_pNuiSensor) { // Initialize the Kinect and specify that we'll be using skeleton hr = m_pNuiSensor->NuiInitialize(NUI_INITIALIZE_FLAG_USES_SKELETON); if (SUCCEEDED(hr)) { // Create an event that will be signaled when skeleton data is available m_hNextSkeletonEvent = CreateEventW(NULL, TRUE, FALSE, NULL); // Open a skeleton stream to receive skeleton data hr = m_pNuiSensor->NuiSkeletonTrackingEnable(m_hNextSkeletonEvent, 0); } } if (NULL == m_pNuiSensor || FAILED(hr)) { SetStatusMessage(L"No ready Kinect found!"); return E_FAIL; } return hr; }
// Returns true only when the window may close immediately. While a
// conversion is in flight we instead request cancellation (once) and
// keep the window alive until the conversion winds down.
bool
MediaConverterWindow::QuitRequested()
{
	if (fConverting) {
		if (!fCancelling) {
			fCancelling = true;
			SetStatusMessage(B_TRANSLATE("Cancelling"));
			BMessenger(be_app).SendMessage(CANCEL_CONVERSION_MESSAGE);
		}
		return false;
	}

	// Idle: tell the application to quit and allow the window to close.
	BMessenger(be_app).SendMessage(B_QUIT_REQUESTED);
	return true;
}
/// <summary> /// Handle windows messages for the class instance /// </summary> /// <param name="hWnd">window message is for</param> /// <param name="uMsg">message</param> /// <param name="wParam">message data</param> /// <param name="lParam">additional message data</param> /// <returns>result of message processing</returns> LRESULT CALLBACK CCoordinateMappingBasics::DlgProc(HWND hWnd, UINT message, WPARAM wParam, LPARAM lParam) { UNREFERENCED_PARAMETER(wParam); UNREFERENCED_PARAMETER(lParam); switch (message) { case WM_INITDIALOG: { // Bind application window handle m_hWnd = hWnd; // Init Direct2D D2D1CreateFactory(D2D1_FACTORY_TYPE_SINGLE_THREADED, &m_pD2DFactory); // Create and initialize a new Direct2D image renderer (take a look at ImageRenderer.h) // We'll use this to draw the data we receive from the Kinect to the screen m_pDrawCoordinateMapping = new ImageRenderer(); HRESULT hr = m_pDrawCoordinateMapping->Initialize(GetDlgItem(m_hWnd, IDC_VIDEOVIEW), m_pD2DFactory, cDepthWidth, cDepthHeight, cDepthWidth * sizeof(RGBQUAD)); if (FAILED(hr)) { SetStatusMessage(L"Failed to initialize the Direct2D draw device.", 10000, true); } // Get and initialize the default Kinect sensor InitializeDefaultSensor(); } break; // If the titlebar X is clicked, destroy app case WM_CLOSE: DestroyWindow(hWnd); break; case WM_DESTROY: // Quit the main message pump PostQuitMessage(0); break; // Handle button press case WM_COMMAND: // If it was for the screenshot control and a button clicked event, save a screenshot next frame if (IDC_BUTTON_SCREENSHOT == LOWORD(wParam) && BN_CLICKED == HIWORD(wParam)) { m_bSaveScreenshot = true; } break; } return FALSE; }
// Load the startup configuration: either read it from the device/file
// (when auto-load is enabled in the app options) or keep the built-in
// defaults, then refresh all dependent views and controls.
void MainFrame::LoadInitialConfig(){
	if (m_appOptions.GetAutoLoadConfig()){
		ReadRaceCaptureConfig();
	}
	else{
		SetStatusMessage("Default Config Loaded");
	}
	NotifyConfigChanged();
	// Freshly-loaded config starts out unmodified
	m_configModified = false;
	UpdateCommControls();
	SyncControls();
	UpdateConfigFileStatus();
}
/// <summary> /// Ensure necessary Direct2d resources are created /// </summary> /// <returns>S_OK if successful, otherwise an error code</returns> HRESULT CSkeletonBasics::EnsureDirect2DResources() { HRESULT hr = S_OK; // If there isn't currently a render target, we need to create one if (NULL == m_pRenderTarget) { RECT rc; GetWindowRect( GetDlgItem( m_hWnd, IDC_VIDEOVIEW ), &rc ); int width = rc.right - rc.left; int height = rc.bottom - rc.top; D2D1_SIZE_U size = D2D1::SizeU( width, height ); D2D1_RENDER_TARGET_PROPERTIES rtProps = D2D1::RenderTargetProperties(); rtProps.pixelFormat = D2D1::PixelFormat( DXGI_FORMAT_B8G8R8A8_UNORM, D2D1_ALPHA_MODE_IGNORE); rtProps.usage = D2D1_RENDER_TARGET_USAGE_GDI_COMPATIBLE; // Create a Hwnd render target, in order to render to the window set in initialize hr = m_pD2DFactory->CreateHwndRenderTarget( rtProps, D2D1::HwndRenderTargetProperties(GetDlgItem( m_hWnd, IDC_VIDEOVIEW), size), &m_pRenderTarget ); if ( FAILED(hr) ) { SetStatusMessage(L"Couldn't create Direct2D render target!"); return hr; } //light green m_pRenderTarget->CreateSolidColorBrush(D2D1::ColorF(68, 192, 68), &m_pBrushJointTracked); //yellow m_pRenderTarget->CreateSolidColorBrush(D2D1::ColorF(255, 255, 0), &m_pBrushJointInferred); //green m_pRenderTarget->CreateSolidColorBrush(D2D1::ColorF(0, 128, 0), &m_pBrushBoneTracked); //gray m_pRenderTarget->CreateSolidColorBrush(D2D1::ColorF(128, 128, 128), &m_pBrushBoneInferred); } return hr; }
/// <summary> /// Ensure necessary Direct2d resources are created /// </summary> /// <returns>S_OK if successful, otherwise an error code</returns> HRESULT CBodyBasics::EnsureDirect2DResources() { HRESULT hr = S_OK; if (m_pD2DFactory && !m_pRenderTarget) { RECT rc; GetWindowRect(GetDlgItem(m_hWnd, IDC_VIDEOVIEW), &rc); int width = rc.right - rc.left; int height = rc.bottom - rc.top; D2D1_SIZE_U size = D2D1::SizeU(width, height); D2D1_RENDER_TARGET_PROPERTIES rtProps = D2D1::RenderTargetProperties(); rtProps.pixelFormat = D2D1::PixelFormat(DXGI_FORMAT_B8G8R8A8_UNORM, D2D1_ALPHA_MODE_IGNORE); rtProps.usage = D2D1_RENDER_TARGET_USAGE_GDI_COMPATIBLE; // Create a Hwnd render target, in order to render to the window set in initialize hr = m_pD2DFactory->CreateHwndRenderTarget( rtProps, D2D1::HwndRenderTargetProperties(GetDlgItem(m_hWnd, IDC_VIDEOVIEW), size), &m_pRenderTarget ); if (FAILED(hr)) { SetStatusMessage(L"Couldn't create Direct2D render target!", 10000, true); return hr; } // light green m_pRenderTarget->CreateSolidColorBrush(D2D1::ColorF(0.27f, 0.75f, 0.27f), &m_pBrushJointTracked); m_pRenderTarget->CreateSolidColorBrush(D2D1::ColorF(D2D1::ColorF::Yellow, 1.0f), &m_pBrushJointInferred); m_pRenderTarget->CreateSolidColorBrush(D2D1::ColorF(D2D1::ColorF::Green, 1.0f), &m_pBrushBoneTracked); m_pRenderTarget->CreateSolidColorBrush(D2D1::ColorF(D2D1::ColorF::Gray, 1.0f), &m_pBrushBoneInferred); m_pRenderTarget->CreateSolidColorBrush(D2D1::ColorF(D2D1::ColorF::Red, 0.5f), &m_pBrushHandClosed); m_pRenderTarget->CreateSolidColorBrush(D2D1::ColorF(D2D1::ColorF::Green, 0.5f), &m_pBrushHandOpen); m_pRenderTarget->CreateSolidColorBrush(D2D1::ColorF(D2D1::ColorF::Blue, 0.5f), &m_pBrushHandLasso); } return hr; }
void FriendListItem::UpdateFriendsStatus(const QString &new_status, const QString &new_message) { if (new_status != GetStatus()) { SetStatus(new_status); setIcon(UiDefines::PresenceStatus::GetIconForStatusCode(GetStatus())); } if (new_message != GetStatusMessage()) { QString status_string; if (new_message.isEmpty() || new_message.isNull()) status_string = name_; else status_string = name_ + "\n" + new_message; SetStatusMessage(new_message); setText(status_string); } }
// Save the current configuration to the file it was loaded from; if no
// file is associated yet, fall back to the Save-As dialog. Write failures
// are reported in a modal error dialog and leave the modified flag set.
void MainFrame::SaveCurrentConfig(){
	if ( m_currentConfigFileName ){
		RaceCaptureConfigFileWriter writer;
		// Copy the name so the writer does not depend on the member's lifetime
		const wxString fileName(*m_currentConfigFileName);
		writer.SetFileName(fileName);
		try{
			writer.WriteConfigData(m_currentConfig);
			m_configModified = false;
			SetStatusMessage("Configuration Saved");
		}
		catch (FileAccessException &e){
			wxMessageDialog dlg(this, wxString::Format("Failed to save Configuration:\n\n%s", e.GetMessage().ToAscii()), "Error saving", wxOK | wxICON_HAND);
			dlg.ShowModal();
			return;
		}
	}
	else{
		SaveAsCurrentConfig();
	}
}
/// <summary> /// Initializes the default Kinect sensor /// </summary> /// <returns>indicates success or failure</returns> HRESULT CDepthBasics::InitializeDefaultSensor() { HRESULT hr; hr = GetDefaultKinectSensor(&m_pKinectSensor); if (FAILED(hr)) { return hr; } if (m_pKinectSensor) { // Initialize the Kinect and get the depth reader IDepthFrameSource* pDepthFrameSource = NULL; hr = m_pKinectSensor->Open(); if (SUCCEEDED(hr)) { hr = m_pKinectSensor->get_DepthFrameSource(&pDepthFrameSource); } if (SUCCEEDED(hr)) { hr = pDepthFrameSource->OpenReader(&m_pDepthFrameReader); } SafeRelease(pDepthFrameSource); } if (!m_pKinectSensor || FAILED(hr)) { SetStatusMessage(L"No ready Kinect found!", 10000, true); return E_FAIL; } return hr; }
// // StatusDock::MessageRecieved void StatusDock::MessageReceived(BMessage* message) { switch(message->what) { case STATDOCK_SHOW: { int32 modus = message->FindInt32("modus"); const char* statusmsg = message->FindString("statusmessage"); float total = message->GetFloat("total", -1); SetMaxStatusBar(total); SetStatusMessage(statusmsg); Show(modus); break; } case UPDATE_STAT: { const char *statusmsg = message->FindString("statusmessage"); float count = message->FindFloat("count"); UpdateStatus(count, statusmsg); break; } default: BView::MessageReceived(message); } }
/// <summary> /// Initializes the default Kinect sensor /// </summary> /// <returns>indicates success or failure</returns> HRESULT CCoordinateMappingBasics::InitializeDefaultSensor() { HRESULT hr; hr = GetDefaultKinectSensor(&m_pKinectSensor); if (FAILED(hr)) { return hr; } if (m_pKinectSensor) { // Initialize the Kinect and get coordinate mapper and the frame reader if (SUCCEEDED(hr)) { hr = m_pKinectSensor->get_CoordinateMapper(&m_pCoordinateMapper); } hr = m_pKinectSensor->Open(); if (SUCCEEDED(hr)) { hr = m_pKinectSensor->OpenMultiSourceFrameReader( FrameSourceTypes::FrameSourceTypes_Depth | FrameSourceTypes::FrameSourceTypes_Color | FrameSourceTypes::FrameSourceTypes_BodyIndex | FrameSourceTypes::FrameSourceTypes_Body, &m_pMultiSourceFrameReader); } } if (!m_pKinectSensor || FAILED(hr)) { SetStatusMessage(L"No ready Kinect found!", 10000, true); return E_FAIL; } return hr; }
void MainFrame::LoadConfigurationFile(const wxString fileName){ RaceCaptureConfigFileReader reader; reader.SetFileName(fileName); try{ reader.ReadConfiguration(m_currentConfig); if (m_currentConfigFileName) delete m_currentConfigFileName; m_currentConfigFileName = new wxString(fileName); m_configModified = false; UpdateCommControls(); NotifyConfigChanged(); SyncControls(); UpdateConfigFileStatus(); SetStatusMessage("RaceCapture Configuration Loaded"); } catch( FileAccessException &e ){ wxMessageDialog dlg(this, wxString::Format("Failed to load RaceCapture Configuration:\n\n%s", e.GetMessage().ToAscii()), "Error loading", wxOK | wxICON_HAND); dlg.ShowModal(); return; } UpdateConfigFileStatus(); }
///////////////////////////////////////////////////////////////////
// DoFileEvent
//
//
// TE_FILETERMINAL is a file terminal event. pEvent is an ITFileTerminalEvent
//
// When the playback terminal goes idle (finished or errored), this handler
// unselects it from the call, creates and selects a file *record* terminal,
// starts recording, and arms a timer that stops recording after MAX_REC_TIME.
// Any failure disconnects the call.
//
///////////////////////////////////////////////////////////////////
HRESULT DoFileEvent(
    IN IDispatch * pEvent)
{
    //
    // check if we still have a call
    //
    if(NULL == g_pCall)
    {
        return E_UNEXPECTED;
    }

    ITFileTerminalEvent* pITFTEvent = NULL;
    HRESULT hr = pEvent->QueryInterface(
        IID_ITFileTerminalEvent,
        reinterpret_cast<void **>(&pITFTEvent)
        );

    if (FAILED(hr))
    {
        //
        // fatal error - can not continue - disconnect the call
        //
        DoMessage( _T("ITFileTerminalEvent, but failed to get the interface"));
        g_pCall->Disconnect(DC_NORMAL);
        return hr;
    }

    //
    // get the state - we'll make some decision based on this
    //
    TERMINAL_MEDIA_STATE ftState=TMS_ACTIVE;
    hr = pITFTEvent->get_State(&ftState);
    if(FAILED(hr))
    {
        //
        // fatal error - can not continue - disconnect the call
        //
        pITFTEvent->Release();
        DoMessage( _T("ITFileTerminalEvent, but failed to get_State"));
        g_pCall->Disconnect(DC_NORMAL);
        return hr;
    }

    //
    // we are interesred in TMS_IDLE because we will unselect playback and
    // select recording
    //
    if(ftState != TMS_IDLE)
    {
        pITFTEvent->Release();
        return hr;
    }

    //
    // get the terminal
    //
    ITTerminal *pTerminal = NULL;
    hr = pITFTEvent->get_Terminal(&pTerminal);

    //
    // do not need this anymore
    //
    pITFTEvent->Release();

    if(FAILED(hr))
    {
        //
        // fatal error - can not continue - disconnect the call
        //
        DoMessage( _T("ITFileTerminalEvent, but failed to get_Terminal"));
        g_pCall->Disconnect(DC_NORMAL);
        return hr;
    }

    TERMINAL_DIRECTION td;
    hr = pTerminal->get_Direction( &td);
    pTerminal->Release();

    if(FAILED(hr))
    {
        //
        // fatal error - can not continue - disconnect the call
        //
        DoMessage( _T("ITFileTerminalEvent, but failed to get_Direction"));
        g_pCall->Disconnect(DC_NORMAL);
        return hr;
    }

    // TD_CAPTURE here means the *playback* terminal (capturing from file into
    // the call) has gone idle — only then do we switch over to recording
    if((td == TD_CAPTURE) && (NULL != g_pPlayFileTerm))
    {
        //
        // unselect playback - it is done - we reached the end or an error
        //
        ITBasicCallControl2 *pITBCC2 = NULL;
        hr = g_pCall->QueryInterface(IID_ITBasicCallControl2, (void**)&pITBCC2);
        if(FAILED(hr))
        {
            //
            // fatal error - can not continue - disconnect the call
            //
            DoMessage( _T("ITFileTerminalEvent, but failed to QI for ITBasicCallControl2"));
            g_pCall->Disconnect(DC_NORMAL);
            return hr;
        }

        //
        // use ITBasicCallControl2 methods - much easier than
        // enumerate stream, terminals, etc
        //
        hr = pITBCC2->UnselectTerminalOnCall(g_pPlayFileTerm);
        g_pPlayFileTerm->Release();
        g_pPlayFileTerm = NULL;
        pITBCC2->Release();

        if(FAILED(hr))
        {
            //
            // fatal error - can not continue - disconnect the call
            //
            DoMessage( _T("ITFileTerminalEvent, but failed to ITBasicCallControl2::UnselectTerminalOnCall"));
            g_pCall->Disconnect(DC_NORMAL);
            return hr;
        }

        //
        // select record - do not use automatic selection
        // we'll see how to use ITMultiTrackTerminal methods
        //
        hr = CreateAndSelectFileRecordTerminal();
        if(FAILED(hr))
        {
            //
            // fatal error - can not continue - disconnect the call
            //
            g_pCall->Disconnect(DC_NORMAL);
            SetStatusMessage(_T("CreateAndSelectFileRecordTerminal failed"));
            return hr;
        }

        //
        // get ITMediaControl interface - we need to call Start
        //
        ITMediaControl* pITMC = NULL;
        hr = g_pRecordFileTerm->QueryInterface(IID_ITMediaControl, (void**)&pITMC);
        if(FAILED(hr))
        {
            //
            // fatal error - can not continue - disconnect the call
            //
            DoMessage( _T("ITFileTerminalEvent, but failed to QI ITMediaControl"));
            g_pCall->Disconnect(DC_NORMAL);
            return hr;
        }

        hr = pITMC->Start();
        pITMC->Release();
        if(FAILED(hr))
        {
            //
            // fatal error - can not continue - disconnect the call
            //
            DoMessage( _T("ITFileTerminalEvent, but ITMediaControl::Start"));
            g_pCall->Disconnect(DC_NORMAL);
            return hr;
        }

        SetStatusMessage(_T("File Record Terminal started "));

        //
        // will stop recording after one minute
        //
        SetTimer(g_hDlg, TIMER_ID, MAX_REC_TIME, NULL);
        g_dwMessages++;
    }

    return hr;
}
/////////////////////////////////////////////////////////////////// // DoCallNotification // // TE_CALLNOTIFICATION means that the application is being notified // of a new call. // // Note that we don't answer to call at this point. The application // should wait for a CS_OFFERING CallState message before answering // the call. // /////////////////////////////////////////////////////////////////// HRESULT DoCallNotification( IN IDispatch * pEvent) { ITCallNotificationEvent * pNotify = NULL; HRESULT hr = pEvent->QueryInterface( IID_ITCallNotificationEvent, (void **)&pNotify ); if (FAILED(hr)) { DoMessage( _T("Incoming call, but failed to get the interface")); return hr; } CALL_PRIVILEGE cp = CP_MONITOR; ITCallInfo * pCall = NULL; // // get the call // hr = pNotify->get_Call( &pCall ); // // release the event object // pNotify->Release(); if(FAILED(hr)) { DoMessage( _T("Incoming call, but failed to get the call")); return hr; } // // check to see if we own the call // hr = pCall->get_Privilege( &cp ); if(FAILED(hr)) { pCall->Release(); DoMessage( _T("Incoming call, but failed to get_Privilege")); return hr; } if ( CP_OWNER != cp ) { // // just ignore it if we don't own it // pCall->Release(); return hr; } // //check if we already have a call - if yes reject it // if(NULL != g_pCall) { ITBasicCallControl* pITBCC = NULL; hr = pCall->QueryInterface( IID_ITBasicCallControl, (void**)&pITBCC ); pCall->Release(); // //sanity check // if(SUCCEEDED(hr)) { // //disconnect - we'll handle the other events from this call later // pITBCC->Disconnect(DC_REJECTED); pITBCC->Release(); } return hr; } // //get the call if do not have already one // hr = pCall->QueryInterface( IID_ITBasicCallControl, (void**)&g_pCall ); pCall->Release(); if(FAILED(hr)) { DoMessage( _T("Incoming call, but failed to QI ITBasicCallControl")); } else { // // update UI // SetStatusMessage(_T("Incoming Owner Call")); } // //clean up // return hr; }
/////////////////////////////////////////////////////////////////// // DoCallMedia // // // TE_CALLMEDIA is a media event. pEvent is an ITCallMediaEvent // // /////////////////////////////////////////////////////////////////// HRESULT DoCallMedia( IN IDispatch * pEvent) { CALL_MEDIA_EVENT cme = CME_STREAM_INACTIVE; ITCallMediaEvent * pCallMediaEvent = NULL; // // check if we still have a call // if(NULL == g_pCall) { return E_UNEXPECTED; } // // Get the interface // HRESULT hr = pEvent->QueryInterface( IID_ITCallMediaEvent, (void **)&pCallMediaEvent ); if(FAILED(hr)) { DoMessage( _T("ITCallMediaEvent, but failed to get the interface")); return hr; } // // get the CALL_MEDIA_EVENT that we are being notified of. // hr = pCallMediaEvent->get_Event( &cme ); if(FAILED(hr)) { pCallMediaEvent->Release(); DoMessage( _T("ITCallMediaEvent, but failed to get_Event")); return hr; } switch ( cme ) { // // the only event we process // case CME_STREAM_ACTIVE: { // // Get the terminal that's now active. // ITTerminal * pTerminal = NULL; hr = GetTerminalFromStreamEvent(pCallMediaEvent, &pTerminal); if ( FAILED(hr) ) { DoMessage(_T("ITCallMediaEvent: GetTerminalFromStreamEvent failed")); g_pCall->Disconnect(DC_NORMAL); break; } // // Process this terminal based on the direction. 
// TERMINAL_DIRECTION td; hr = pTerminal->get_Direction( &td); // //clean up // pTerminal->Release(); if ( FAILED(hr) ) { DoMessage(_T("ITCallMediaEvent: get_Direction failed")); g_pCall->Disconnect(DC_NORMAL); break; } // // if TD_CAPTURE and we have playback terminal start streaming // if ( TD_CAPTURE == td && NULL != g_pPlayFileTerm) { ITMediaControl* pITMC = NULL; hr = g_pPlayFileTerm->QueryInterface(IID_ITMediaControl, (void**)&pITMC); // // get ITMediaControl so we can start streaming // if(FAILED(hr)) { DoMessage(_T("ITCallMediaEvent: g_pPlayFileTerm QI for ITMediaControl failed")); g_pCall->Disconnect(DC_NORMAL); break; } // // Start streaming // hr = pITMC->Start(); pITMC->Release(); if(SUCCEEDED(hr)) { SetStatusMessage(_T("File Playback Terminal started ")); } else { DoMessage(_T("ITCallMediaEvent: ITMediaControl::Start() failed")); g_pCall->Disconnect(DC_NORMAL); } } break; } default: break; } // // clean up // pCallMediaEvent->Release(); return hr; }
/// <summary> /// Ensure necessary Direct2d resources are created /// </summary> /// <returns>S_OK if successful, otherwise an error code</returns> HRESULT CBodyBasics::EnsureDirect2DResources() { HRESULT hr = S_OK; if (m_pD2DFactory && !m_pRenderTarget) { RECT rc; GetWindowRect(GetDlgItem(m_hWnd, IDC_VIDEOVIEW), &rc); int width = rc.right - rc.left; int height = rc.bottom - rc.top; D2D1_SIZE_U size = D2D1::SizeU(width, height); D2D1_RENDER_TARGET_PROPERTIES rtProps = D2D1::RenderTargetProperties(); rtProps.pixelFormat = D2D1::PixelFormat(DXGI_FORMAT_B8G8R8A8_UNORM, D2D1_ALPHA_MODE_IGNORE); rtProps.usage = D2D1_RENDER_TARGET_USAGE_GDI_COMPATIBLE; // Create a Hwnd render target, in order to render to the window set in initialize hr = m_pD2DFactory->CreateHwndRenderTarget( rtProps, D2D1::HwndRenderTargetProperties(GetDlgItem(m_hWnd, IDC_VIDEOVIEW), size), &m_pRenderTarget ); if (FAILED(hr)) { SetStatusMessage(L"Couldn't create Direct2D render target!", 10000, true); return hr; } hr = DWriteCreateFactory( DWRITE_FACTORY_TYPE_SHARED, __uuidof(IDWriteFactory), reinterpret_cast<IUnknown**>(&m_pDWriteFactory) ); static const WCHAR msc_fontName[] = L"Arial"; static const FLOAT msc_fontSize = 30; hr = m_pDWriteFactory->CreateTextFormat( msc_fontName, NULL, DWRITE_FONT_WEIGHT_NORMAL, DWRITE_FONT_STYLE_NORMAL, DWRITE_FONT_STRETCH_NORMAL, msc_fontSize, L"", //locale &m_pTextFormat ); if (SUCCEEDED(hr)) { hr = m_pTextFormat->SetTextAlignment(DWRITE_TEXT_ALIGNMENT_LEADING); } if (SUCCEEDED(hr)) { hr = m_pTextFormat->SetParagraphAlignment(DWRITE_PARAGRAPH_ALIGNMENT_CENTER); } // light green m_pRenderTarget->CreateSolidColorBrush(D2D1::ColorF(0.27f, 0.75f, 0.27f), &m_pBrushJointTracked); m_pRenderTarget->CreateSolidColorBrush(D2D1::ColorF(D2D1::ColorF::Yellow, 1.0f), &m_pBrushJointInferred); m_pRenderTarget->CreateSolidColorBrush(D2D1::ColorF(D2D1::ColorF::Green, 1.0f), &m_pBrushBoneTracked); m_pRenderTarget->CreateSolidColorBrush(D2D1::ColorF(D2D1::ColorF::Gray, 1.0f), 
&m_pBrushBoneInferred); m_pRenderTarget->CreateSolidColorBrush(D2D1::ColorF(D2D1::ColorF::Red, 0.5f), &m_pBrushHandClosed); m_pRenderTarget->CreateSolidColorBrush(D2D1::ColorF(D2D1::ColorF::Green, 0.5f), &m_pBrushHandOpen); m_pRenderTarget->CreateSolidColorBrush(D2D1::ColorF(D2D1::ColorF::Blue, 0.5f), &m_pBrushHandLasso); } return hr; }
/// <summary> /// Handle new body data /// <param name="nTime">timestamp of frame</param> /// <param name="nBodyCount">body data count</param> /// <param name="ppBodies">body data in frame</param> /// </summary> void CBodyBasics::ProcessBody(INT64 nTime, int nBodyCount, IBody** ppBodies) { if (m_hWnd) { HRESULT hr = EnsureDirect2DResources(); DetectionResult nEngaged[6] = { DetectionResult_Unknown }; int width = 0; int height = 0; if (SUCCEEDED(hr) && m_pRenderTarget && m_pCoordinateMapper) { m_pRenderTarget->BeginDraw(); m_pRenderTarget->Clear(); RECT rct; GetClientRect(GetDlgItem(m_hWnd, IDC_VIDEOVIEW), &rct); width = rct.right; height = rct.bottom; for (int i = 0; i < nBodyCount; ++i) { nEngaged[i] = DetectionResult_Maybe; IBody* pBody = ppBodies[i]; if (pBody) { BOOLEAN bTracked = false; hr = pBody->get_IsTracked(&bTracked); // Engaged()は使えるみたい。これは、視野に入ってきた人を認識するものだろう。 hr = pBody->get_Engaged( &nEngaged[i] ); // 以下はまだ使えないようだ //hr = pBody->GetAppearanceDetectionResults((UINT)i, &nEngaged[i]); if (SUCCEEDED(hr) && bTracked) { Joint joints[JointType_Count]; D2D1_POINT_2F jointPoints[JointType_Count]; HandState leftHandState = HandState_Unknown; HandState rightHandState = HandState_Unknown; pBody->get_HandLeftState(&leftHandState); pBody->get_HandRightState(&rightHandState); hr = pBody->GetJoints(_countof(joints), joints); if (SUCCEEDED(hr)) { for (int j = 0; j < _countof(joints); ++j) { jointPoints[j] = BodyToScreen(joints[j].Position, width, height); } DrawBody(joints, jointPoints); // ここに頭部に丸を描いて、ボディ番号を表示 DrawHead(jointPoints[JointType_Head], i, nEngaged[i]); DrawHand(leftHandState, jointPoints[JointType_HandLeft]); DrawHand(rightHandState, jointPoints[JointType_HandRight]); } } } } hr = m_pRenderTarget->EndDraw(); // Device lost, need to recreate the render target // We'll dispose it now and retry drawing if (D2DERR_RECREATE_TARGET == hr) { hr = S_OK; DiscardDirect2DResources(); } } if (!m_nStartTime) { m_nStartTime = nTime; } double fps = 0.0; LARGE_INTEGER 
qpcNow = {0}; if (m_fFreq) { if (QueryPerformanceCounter(&qpcNow)) { if (m_nLastCounter) { m_nFramesSinceUpdate++; fps = m_fFreq * m_nFramesSinceUpdate / double(qpcNow.QuadPart - m_nLastCounter); } } } WCHAR szStatusMessage[128] ; StringCchPrintf( szStatusMessage, _countof(szStatusMessage), L" FPS = %0.2f Time = %I64d width:%d height:%d", fps, (nTime - m_nStartTime), width, height); if (SetStatusMessage(szStatusMessage, 1000, false)) { m_nLastCounter = qpcNow.QuadPart; m_nFramesSinceUpdate = 0; } } }
// Build the session menu bar: File, Status and Actions menus plus the
// "Show Friend List" toggle, and wire up the related signal/slot
// connections. Returns the newly created (and owned-by-parent) menu bar.
QMenuBar *SessionManager::ConstructMenuBar()
{
    menu_bar_ = new QMenuBar(main_parent_);

    // Small helper: every presence action is checkable and carries the
    // icon matching its presence status code.
    auto init_presence_action = [](QAction *action, const char *status_code)
    {
        action->setCheckable(true);
        action->setIcon(UiDefines::PresenceStatus::GetIconForStatusCode(status_code));
    };

    // ----- File menu -----
    QMenu *menu_file = new QMenu("File", main_parent_);
    menu_file->addAction("Hide", this, SLOT( Hide() )); // Can't support hide on external mode

    // ----- Status menu -----
    QMenu *menu_status = new QMenu("Status", main_parent_);
    set_status_message = menu_status->addAction("Set Status Message", session_helper_, SLOT( SetStatusMessage() ));
    set_status_message->setIcon(QIcon(":images/iconRename.png"));
    menu_status->addSeparator();

    available_status = menu_status->addAction("Available", this, SLOT( StatusAvailable() ));
    init_presence_action(available_status, "available");

    chatty_status = menu_status->addAction("Chatty", this, SLOT( StatusChatty() ));
    init_presence_action(chatty_status, "chat");

    away_status = menu_status->addAction("Away", this, SLOT( StatusAway() ));
    init_presence_action(away_status, "away");

    extended_away_status = menu_status->addAction("Extended Away", this, SLOT( StatusExtendedAway() ));
    init_presence_action(extended_away_status, "xa");

    busy_status = menu_status->addAction("Busy", this, SLOT( StatusBusy() ));
    init_presence_action(busy_status, "dnd");

    hidden_status = menu_status->addAction("Hidden", this, SLOT( StatusHidden() ));
    init_presence_action(hidden_status, "hidden");

    // Presence choices are mutually exclusive; default to "Available".
    QActionGroup *presence_group = new QActionGroup(main_parent_);
    presence_group->addAction(available_status);
    presence_group->addAction(chatty_status);
    presence_group->addAction(away_status);
    presence_group->addAction(extended_away_status);
    presence_group->addAction(busy_status);
    presence_group->addAction(hidden_status);
    available_status->setChecked(true);

    menu_status->addSeparator();
    signout = menu_status->addAction("Sign out", this, SLOT( SignOut() ));
    signout->setIcon(QIcon(":images/iconSignout.png"));

    // ----- Actions menu -----
    QMenu *menu_actions = new QMenu("Actions", main_parent_);

    add_new_friend = menu_actions->addAction("Add New Friend");
    add_new_friend->setIcon(QIcon(":images/iconAdd.png"));

    join_chat_room = menu_actions->addAction("Join Chat Room", this, SLOT( JoinChatRoom() ));
    join_chat_room->setIcon(QIcon(":/images/iconConference.png"));

    manage_spatial_voice = menu_actions->addAction("Manage 3D Voice", this, SLOT( Show3DSoundManager() ));
    manage_spatial_voice->setIcon(QIcon(":images/iconProperties.png"));

    // ----- Assemble the bar -----
    menu_bar_->addMenu(menu_file);
    menu_bar_->addMenu(menu_status);
    menu_bar_->addMenu(menu_actions);
    menu_bar_->addAction("Show Friend List", this, SLOT( ToggleShowFriendList() ));

    // ----- Signal wiring -----
    connect(add_new_friend, SIGNAL( triggered() ), session_helper_, SLOT( SendFriendRequest() ));
    connect(this, SIGNAL( StatusChange(const QString&) ), session_helper_, SLOT( SetMyStatus(const QString&) ));
    connect(session_helper_, SIGNAL( ChangeMenuBarStatus(const QString &) ), this, SLOT( StatusChangedOutSideMenuBar(const QString &) ));

    return menu_bar_;
}
static void PlayGame() { Uint8 *keystate; int quit = 0; int turn; int prev_ticks = 0, cur_ticks = 0; /* for keeping track of timing */ int awaiting_respawn = 0; /* framerate counter variables */ int start_time, end_time; int frames_drawn = 0; /* respawn timer */ int respawn_timer = -1; prev_ticks = SDL_GetTicks(); start_time = time(NULL); /* Reset the score counters. */ player.score = 0; opponent.score = 0; /* Start sound playback. */ StartAudio(); StartMusic(); /* Start the music update thread. */ music_update_thread = SDL_CreateThread(UpdateMusicThread, NULL); if (music_update_thread == NULL) { printf("Unable to start music update thread.\n"); } /* Start the game! */ while ((quit == 0) && network_ok) { /* Determine how many milliseconds have passed since the last frame, and update our motion scaling. */ prev_ticks = cur_ticks; cur_ticks = SDL_GetTicks(); time_scale = (double)(cur_ticks-prev_ticks)/30.0; /* Update SDL's internal input state information. */ SDL_PumpEvents(); /* Grab a snapshot of the keyboard. */ keystate = SDL_GetKeyState(NULL); /* Lock the mutex so we can access the player's data. */ SDL_LockMutex(player_mutex); /* If this is a network game, take note of variables set by the network thread. These are handled differently for a scripted opponent. */ if (opponent_type == OPP_NETWORK) { /* Has the opponent respawned? */ if (network_opponent_respawn) { printf("Remote player has respawned.\n"); opponent.shields = 100; network_opponent_respawn = 0; awaiting_respawn = 0; } /* Has the local player been hit? */ if (local_player_hit) { local_player_hit--; player.shields -= PHASER_DAMAGE; ShowPhaserHit(&player); /* No need to check for death, the other computer will tell us. */ } } /* Update phasers. */ player.firing -= time_scale; if (player.firing < 0) player.firing = 0; opponent.firing -= time_scale; if (opponent.firing < 0) opponent.firing = 0; ChargePhasers(&player); /* If the local player is destroyed, the respawn timer will start counting. 
During this time the controls are disabled and explosion sequence occurs. */ if (respawn_timer >= 0) { respawn_timer++; if (respawn_timer >= ((double)RESPAWN_TIME / time_scale)) { respawn_timer = -1; InitPlayer(&player); /* Set the local_player_respawn flag so the network thread will notify the opponent of the respawn. */ local_player_respawn = 1; SetStatusMessage("GOOD LUCK, WARRIOR!"); } } /* Respond to input and network events, but not if we're in a respawn. */ if (respawn_timer == -1) { if (keystate[SDLK_q] || keystate[SDLK_ESCAPE]) quit = 1; /* Left and right arrow keys control turning. */ turn = 0; if (keystate[SDLK_LEFT]) turn += 10; if (keystate[SDLK_RIGHT]) turn -= 10; /* Forward and back arrow keys activate thrusters. */ player.accel = 0; if (keystate[SDLK_UP]) player.accel = PLAYER_FORWARD_THRUST; if (keystate[SDLK_DOWN]) player.accel = PLAYER_REVERSE_THRUST; /* Spacebar fires phasers. */ if (keystate[SDLK_SPACE]) { if (CanPlayerFire(&player)) { FirePhasers(&player); /* If it's a hit, either notify the opponent or exact the damage. Create a satisfying particle burst. */ if (!awaiting_respawn && CheckPhaserHit(&player,&opponent)) { ShowPhaserHit(&opponent); DamageOpponent(); /* If that killed the opponent, set the "awaiting respawn" state, to prevent multiple kills. */ if (opponent.shields <= 0 && opponent_type == OPP_NETWORK) awaiting_respawn = 1; } } } /* Turn. */ player.angle += turn * time_scale; if (player.angle < 0) player.angle += 360; if (player.angle >= 360) player.angle -= 360; /* If this is a network game, the remote player will tell us if we've died. Otherwise we have to check for failed shields. */ if (((opponent_type == OPP_NETWORK) && local_player_dead) || (player.shields <= 0)) { printf("Local player has been destroyed.\n"); local_player_dead = 0; /* Kaboom! */ KillPlayer(); /* Respawn. */ respawn_timer = 0; } } /* If this is a player vs. computer game, give the computer a chance. 
*/ if (opponent_type == OPP_COMPUTER) { if (RunGameScript() != 0) { fprintf(stderr, "Ending game due to script error.\n"); quit = 1; } /* Check for phaser hits against the player. */ if (opponent.firing) { if (CheckPhaserHit(&opponent,&player)) { ShowPhaserHit(&player); player.shields -= PHASER_DAMAGE; /* Did that destroy the player? */ if (respawn_timer < 0 && player.shields <= 0) { KillPlayer(); respawn_timer = 0; } } } ChargePhasers(&opponent); UpdatePlayer(&opponent); } /* Update the player's position. */ UpdatePlayer(&player); /* Update the status information. */ SetPlayerStatusInfo(player.score, player.shields, player.charge); SetOpponentStatusInfo(opponent.score, opponent.shields); /* Make the camera follow the player (but impose limits). */ camera_x = player.world_x - SCREEN_WIDTH/2; camera_y = player.world_y - SCREEN_HEIGHT/2; if (camera_x < 0) camera_x = 0; if (camera_x >= WORLD_WIDTH-SCREEN_WIDTH) camera_x = WORLD_WIDTH-SCREEN_WIDTH-1; if (camera_y < 0) camera_y = 0; if (camera_y >= WORLD_HEIGHT-SCREEN_HEIGHT) camera_y = WORLD_HEIGHT-SCREEN_HEIGHT-1; /* Update the particle system. */ UpdateParticles(); /* Keep OpenAL happy. */ UpdateAudio(&player, &opponent); /* Redraw everything. */ DrawBackground(screen, camera_x, camera_y); DrawParallax(screen, camera_x, camera_y); DrawParticles(screen, camera_x, camera_y); if (opponent.firing) DrawPhaserBeam(&opponent, screen, camera_x, camera_y); if (player.firing) DrawPhaserBeam(&player, screen, camera_x, camera_y); if (respawn_timer < 0) DrawPlayer(&player); if (!awaiting_respawn) DrawPlayer(&opponent); UpdateStatusDisplay(screen); /* Release the mutex so the networking system can get it. It doesn't stay unlocked for very long, but the networking system should still have plenty of time. */ SDL_UnlockMutex(player_mutex); /* Flip the page. */ SDL_Flip(screen); frames_drawn++; } end_time = time(NULL); if (start_time == end_time) end_time++; /* Display the average framerate. 
*/ printf("Drew %i frames in %i seconds, for a framerate of %.2f fps.\n", frames_drawn, end_time-start_time, (float)frames_drawn/(float)(end_time-start_time)); /* Terminate the music update thread. */ if (music_update_thread != NULL) { SDL_KillThread(music_update_thread); music_update_thread = NULL; } /* Stop audio playback. */ StopAudio(); StopMusic(); }
/// <summary>
/// Handle new depth and color data for one frame: updates the FPS status
/// line, composites a background-removed image (color camera pixels where a
/// body is tracked, background/green elsewhere), optionally saves a
/// screenshot, then overlays tracked bodies and a hand-tip "button".
/// </summary>
/// <param name="nTime">timestamp of frame</param>
/// <param name="pDepthBuffer">pointer to depth frame data</param>
/// <param name="nDepthWidth">width (in pixels) of input depth image data</param>
/// <param name="nDepthHeight">height (in pixels) of input depth image data</param>
/// <param name="pColorBuffer">pointer to color frame data</param>
/// <param name="nColorWidth">width (in pixels) of input color image data</param>
/// <param name="nColorHeight">height (in pixels) of input color image data</param>
/// <param name="pBodyIndexBuffer">pointer to body index frame data</param>
/// <param name="nBodyIndexWidth">width (in pixels) of input body index data</param>
/// <param name="nBodyIndexHeight">height (in pixels) of input body index data</param>
/// <param name="nBodyCount">number of entries in ppBodies</param>
/// <param name="ppBodies">body data in frame</param>
void CCoordinateMappingBasics::ProcessFrame(INT64 nTime, const UINT16* pDepthBuffer, int nDepthWidth, int nDepthHeight,
    const RGBQUAD* pColorBuffer, int nColorWidth, int nColorHeight,
    const BYTE* pBodyIndexBuffer, int nBodyIndexWidth, int nBodyIndexHeight, int nBodyCount, IBody** ppBodies)
{
    if (m_hWnd)
    {
        // --- FPS estimate for the status bar ---
        if (!m_nStartTime)
        {
            m_nStartTime = nTime;
        }

        double fps = 0.0;

        LARGE_INTEGER qpcNow = {0};
        if (m_fFreq)
        {
            if (QueryPerformanceCounter(&qpcNow))
            {
                if (m_nLastCounter)
                {
                    m_nFramesSinceUpdate++;
                    fps = m_fFreq * m_nFramesSinceUpdate / double(qpcNow.QuadPart - m_nLastCounter);
                }
            }
        }

        WCHAR szStatusMessage[64];
        StringCchPrintf(szStatusMessage, _countof(szStatusMessage), L" FPS = %0.2f Time = %I64d", fps, (nTime - m_nStartTime));

        // SetStatusMessage returns true when the text was actually shown
        // (rate limited to 1000ms); restart the FPS window then.
        if (SetStatusMessage(szStatusMessage, 1000, false))
        {
            m_nLastCounter = qpcNow.QuadPart;
            m_nFramesSinceUpdate = 0;
        }
    }

    // Make sure we've received valid data (all three frames present and at
    // the expected class-level resolutions).
    if (m_pCoordinateMapper && m_pColorCoordinates && m_pOutputRGBX &&
        pDepthBuffer && (nDepthWidth == cDepthWidth) && (nDepthHeight == cDepthHeight) &&
        pColorBuffer && (nColorWidth == cColorWidth) && (nColorHeight == cColorHeight) &&
        pBodyIndexBuffer && (nBodyIndexWidth == cDepthWidth) && (nBodyIndexHeight == cDepthHeight))
    {
        // Map every depth pixel to its color-space coordinate. The C-style
        // cast drops const because the API takes a non-const pointer.
        HRESULT hr = m_pCoordinateMapper->MapDepthFrameToColorSpace(nDepthWidth * nDepthHeight, (UINT16*)pDepthBuffer, nDepthWidth * nDepthHeight, m_pColorCoordinates);
        if (SUCCEEDED(hr))
        {
            // Fallback pixel when no background image is loaded.
            // RGBQUAD field order is {blue, green, red, reserved}, so this
            // is pure green.
            RGBQUAD c_green = {0, 255, 0};

            // loop over each pixel of the output
            for (int depthIndex = 0; depthIndex < (nDepthWidth * nDepthHeight); ++depthIndex)
            {
                // default setting source to copy from the background pixel
                const RGBQUAD* pSrc = (m_pBackgroundRGBX) ? (m_pBackgroundRGBX + depthIndex) : &c_green;

                // 0xff in the body index frame means "no player here"
                BYTE player = pBodyIndexBuffer[depthIndex];

                // if we're tracking a player for the current pixel, draw from the color camera
                if (player != 0xff)
                {
                    // retrieve the depth to color mapping for the current depth pixel
                    ColorSpacePoint colorPoint = m_pColorCoordinates[depthIndex];

                    // make sure the depth pixel maps to a valid point in
                    // color space (round to nearest via +0.5 then floor)
                    int colorX = (int)(floor(colorPoint.X + 0.5));
                    int colorY = (int)(floor(colorPoint.Y + 0.5));
                    if ((colorX >= 0) && (colorX < nColorWidth) && (colorY >= 0) && (colorY < nColorHeight))
                    {
                        // calculate index into color array
                        int colorIndex = colorX + (colorY * nColorWidth);

                        // set source for copy to the color pixel
                        pSrc = m_pColorRGBX + colorIndex;
                    }
                }

                // write output
                m_pOutputRGBX[depthIndex] = *pSrc;
            }

            // Draw the data with Direct2D
            m_pDrawCoordinateMapping->Draw(reinterpret_cast<BYTE*>(m_pOutputRGBX), cDepthWidth * cDepthHeight * sizeof(RGBQUAD));

            if (m_bSaveScreenshot)
            {
                WCHAR szScreenshotPath[MAX_PATH];

                // Retrieve the path to My Photos
                GetScreenshotFileName(szScreenshotPath, _countof(szScreenshotPath));

                // Write out the bitmap to disk
                HRESULT hr = SaveBitmapToFile(reinterpret_cast<BYTE*>(m_pOutputRGBX), nDepthWidth, nDepthHeight, sizeof(RGBQUAD) * 8, szScreenshotPath);

                WCHAR szStatusMessage[64 + MAX_PATH];
                if (SUCCEEDED(hr))
                {
                    // Set the status bar to show where the screenshot was saved
                    StringCchPrintf(szStatusMessage, _countof(szStatusMessage), L"Screenshot saved to %s", szScreenshotPath);
                }
                else
                {
                    StringCchPrintf(szStatusMessage, _countof(szStatusMessage), L"Failed to write screenshot to %s", szScreenshotPath);
                }

                SetStatusMessage(szStatusMessage, 5000, true);

                // toggle off so we don't save a screenshot again next frame
                m_bSaveScreenshot = false;
            }
        }
    }

    // Hit-test target for the hand-tip "button" drawn below.
    // NOTE(review): (400, 100) appears to be a hand-picked position in view
    // coordinates — confirm before reuse.
    D2D1_POINT_2F center;
    center.x = 400.0;
    center.y = 100.0;

    int width = 0;
    int height = 0;
    if (m_pCoordinateMapper)
    {
        RECT rct;
        GetClientRect(GetDlgItem(m_hWnd, IDC_VIDEOVIEW), &rct);
        width = rct.right;
        height = rct.bottom;

        DWORD clipedge = 0;
        for (int i = 0; i < nBodyCount; ++i)
        {
            IBody* pBody = ppBodies[i];
            if (pBody)
            {
                BOOLEAN bTracked = false;
                HRESULT hr = pBody->get_IsTracked(&bTracked);

                // Engaged() seems usable; it appears to detect a person who
                // has entered the field of view.
                //hr = pBody->get_Engaged(&nEngaged[i]);
                // The following does not seem to work yet:
                //hr = pBody->GetAppearanceDetectionResults((UINT)i, &nEngaged[i]);

                // NOTE(review): return value ignored — clipedge keeps its
                // previous value if this call fails.
                pBody->get_ClippedEdges(&clipedge);

                if (SUCCEEDED(hr) && bTracked)
                {
                    Joint joints[JointType_Count];
                    D2D1_POINT_2F jointPoints[JointType_Count];
                    HandState leftHandState = HandState_Unknown;
                    HandState rightHandState = HandState_Unknown;

                    pBody->get_HandLeftState(&leftHandState);
                    pBody->get_HandRightState(&rightHandState);

                    hr = pBody->GetJoints(_countof(joints), joints);
                    if (SUCCEEDED(hr))
                    {
                        // Project each joint into view coordinates.
                        for (int j = 0; j < _countof(joints); ++j)
                        {
                            jointPoints[j] = BodyToScreen(joints[j].Position, width, height);
                        }

                        m_pDrawCoordinateMapping->DrawBody(joints, jointPoints);

                        // Draw a circle at the head and show the body number.
                        m_pDrawCoordinateMapping->DrawHead(jointPoints[JointType_Head], i, clipedge/*, nEngaged[i]*/);

                        m_pDrawCoordinateMapping->DrawHand(leftHandState, jointPoints[JointType_HandLeft]);
                        m_pDrawCoordinateMapping->DrawHand(rightHandState, jointPoints[JointType_HandRight]);

                        // Trigger when the right hand tip enters the target
                        // region — acts like a button.
                        // NOTE: currently this runs for every recognized
                        // person; it really should be limited to the first
                        // person recognized.
                        float xy[2] = { 0.0 };
                        xy[0] = jointPoints[JointType_HandTipRight].x - center.x;
                        xy[1] = jointPoints[JointType_HandTipRight].y - center.y;

                        m_nButton = 0;
                        if (sqrt(xy[0] * xy[0] + xy[1] * xy[1]) < 50.0)
                        {
                            m_nButton = 1;
                        }
                        m_pDrawCoordinateMapping->DrawButton(center, m_nButton);
                    }
                }
            }
        }
        m_pDrawCoordinateMapping->EndDraw();
    }
}