void MemoryCapture::Destroy()
{
    bInitialized = false;

    if(hMemoryMutex)
        OSEnterMutex(hMemoryMutex);

    copyData = NULL;
    textureBuffers[0] = NULL;
    textureBuffers[1] = NULL;

    delete texture;
    texture = NULL;

    if(sharedMemory)
        UnmapViewOfFile(sharedMemory);

    if(hFileMap)
        CloseHandle(hFileMap);

    if(hMemoryMutex)
    {
        OSLeaveMutex(hMemoryMutex);
        OSCloseMutex(hMemoryMutex);
    }
}
NVENCEncoder::~NVENCEncoder()
{
    if (alive)
    {
        for (int i = 0; i < maxSurfaceCount; ++i)
        {
            if (inputSurfaces[i].locked)
                pNvEnc->nvEncUnlockInputBuffer(encoder, inputSurfaces[i].inputSurface);

            pNvEnc->nvEncDestroyInputBuffer(encoder, inputSurfaces[i].inputSurface);
            pNvEnc->nvEncDestroyBitstreamBuffer(encoder, outputSurfaces[i].outputSurface);
        }

        pNvEnc->nvEncDestroyEncoder(encoder);
        cuCtxDestroy(cuContext);

        NvLog(TEXT("Encoder closed"));
    }

    outputSurfaceQueueReady = std::queue<NVENCEncoderOutputSurface*>();
    outputSurfaceQueue = std::queue<NVENCEncoderOutputSurface*>();

    delete[] inputSurfaces;
    delete[] outputSurfaces;

    if (alive)
        encoderRefDec();

    OSCloseMutex(frameMutex);

    delete[] pstart;
}
FastAlloc::~FastAlloc()
{
    OSCloseMutex(hAllocationMutex);

    Pool *pool;
    BOOL bHasLeaks = 0;

    for(int i=0; i<256; i++)
    {
        if(PoolList[i])
        {
            for(int j=0; j<256; j++)
            {
                pool = &PoolList[i][j];

                if(pool->lpMem)
                {
                    if(!bHasLeaks)
                    {
                        Log(TEXT("Memory Leaks Were Detected.\r\n"));
                        bHasLeaks = 1;
                    }

                    OSVirtualFree(pool->lpMem);
                }
            }

            OSVirtualFree(PoolList[i]);
            PoolList[i] = NULL;
        }
    }
}
DebugAlloc::~DebugAlloc()
{
    OSCloseMutex(hDebugMutex);

    if(numAllocations)
    {
        Log(TEXT("%d Memory leaks detected on exit!\r\n"), numAllocations);
        Log(TEXT("Allocation Tracking Results: Memory Leaks:\r\n=========================================================\r\n"));

        for(DWORD i=0; i<numAllocations; i++)
        {
            if(AllocationList[i].allocationID != INVALID)
                Log(TEXT("\tID: %d\r\n\tAddress: 0x%lX\r\n\tDeclared in file %s on line %d\r\n"), AllocationList[i].allocationID, AllocationList[i].Address, AllocationList[i].lpFile, AllocationList[i].dwLine);
            else
                Log(TEXT("\tID: Track point was not enabled when allocation was made\r\n\tAddress: 0x%lX\r\n\tDeclared in file %s on line %d\r\n"), AllocationList[i].Address, AllocationList[i].lpFile, AllocationList[i].dwLine);
        }

        Log(TEXT("=========================================================\r\n"));

        /*tsprintf_s(temp, 4095, TEXT("%d Memory leaks detected on exit!\r\n"), numAllocations);
        LogFile.WriteStr(temp);
        LogFile.WriteStr(TEXT("Allocation Tracking Results: Memory Leaks:\r\n=========================================================\r\n"));

        for(DWORD i=0; i<numAllocations; i++)
        {
            if(AllocationList[i].allocationID != INVALID)
                tsprintf_s(temp, 4095, TEXT("\tID: %d\r\n\tAddress: 0x%lX\r\n\tDeclared in file %s on line %d\r\n"), AllocationList[i].allocationID, AllocationList[i].Address, AllocationList[i].lpFile, AllocationList[i].dwLine);
            else
                tsprintf_s(temp, 4095, TEXT("\tID: Track point was not enabled when allocation was made\r\n\tAddress: 0x%lX\r\n\tDeclared in file %s on line %d\r\n"), AllocationList[i].Address, AllocationList[i].lpFile, AllocationList[i].dwLine);

            LogFile.WriteStr(temp);
        }

        LogFile.WriteStr(TEXT("=========================================================\r\n"));*/
    }
}
DeviceAudioSource::~DeviceAudioSource()
{
    if (m_pAudioWaveOut)
        delete m_pAudioWaveOut;

    if(hAudioMutex)
        OSCloseMutex(hAudioMutex);

    // fclose(audiosource);

    if (NULL != m_pSecWaveOut)
    {
        delete m_pSecWaveOut;
        m_pSecWaveOut = NULL;
    }
}
PipeAudioSource::~PipeAudioSource()
{
    Log::writeMessage(LOG_RTSPSERV, 1, "LINE:%d,FUNC:%s delete PipeAudioSource", __LINE__, __FUNCTION__);

    if(hAudioMutex)
        OSCloseMutex(hAudioMutex);

    if (m_pAudioWaveOut)
    {
        delete m_pAudioWaveOut;
        m_pAudioWaveOut = NULL;
    }

    if (NULL != m_pSecWaveOut)
    {
        delete m_pSecWaveOut;
        m_pSecWaveOut = NULL;
    }
}
DeviceSource::~DeviceSource()
{
    Stop();
    UnloadFilters();

    SafeRelease(capture);
    SafeRelease(graph);

    if(hConvertThreads)
        Free(hConvertThreads);

    if(convertData)
        Free(convertData);

    if(hSampleMutex)
        OSCloseMutex(hSampleMutex);
}
void OBS::Stop()
{
    if(!bRunning) return;

    OSEnterMutex(hStartupShutdownMutex);

    //we only want the capture thread to stop first, so we can ensure all packets are flushed
    bShutdownEncodeThread = true;
    ShowWindow(hwndProjector, SW_HIDE);

    if(hEncodeThread)
    {
        OSTerminateThread(hEncodeThread, 30000);
        hEncodeThread = NULL;
    }

    bShutdownVideoThread = true;
    SetEvent(hVideoEvent);

    if(hVideoThread)
    {
        OSTerminateThread(hVideoThread, 30000);
        hVideoThread = NULL;
    }

    bRunning = false;

    ReportStopStreamTrigger();

    for(UINT i=0; i<globalSources.Num(); i++)
        globalSources[i].source->EndScene();

    if(scene)
        scene->EndScene();

    //-------------------------------------------------------------

    if(hSoundThread)
    {
        //ReleaseSemaphore(hRequestAudioEvent, 1, NULL);
        OSTerminateThread(hSoundThread, 20000);
    }

    //if(hRequestAudioEvent)
    //    CloseHandle(hRequestAudioEvent);
    if(hSoundDataMutex)
        OSCloseMutex(hSoundDataMutex);

    hSoundThread = NULL;
    //hRequestAudioEvent = NULL;
    hSoundDataMutex = NULL;

    //-------------------------------------------------------------

    StopBlankSoundPlayback();

    //-------------------------------------------------------------

    delete network;
    network = NULL;

    delete fileStream;
    fileStream = NULL;

    delete micAudio;
    micAudio = NULL;

    delete desktopAudio;
    desktopAudio = NULL;

    delete audioEncoder;
    audioEncoder = NULL;

    delete videoEncoder;
    videoEncoder = NULL;

    //-------------------------------------------------------------

    for(UINT i=0; i<pendingAudioFrames.Num(); i++)
        pendingAudioFrames[i].audioData.Clear();
    pendingAudioFrames.Clear();

    //-------------------------------------------------------------

    if(GS)
        GS->UnloadAllData();

    //-------------------------------------------------------------

    delete scene;
    scene = NULL;

    for(UINT i=0; i<globalSources.Num(); i++)
        globalSources[i].FreeData();
    globalSources.Clear();

    //-------------------------------------------------------------

    for(UINT i=0; i<auxAudioSources.Num(); i++)
        delete auxAudioSources[i];
    auxAudioSources.Clear();

    //-------------------------------------------------------------

    for(UINT i=0; i<NUM_RENDER_BUFFERS; i++)
    {
        delete mainRenderTextures[i];
        delete yuvRenderTextures[i];

        mainRenderTextures[i] = NULL;
        yuvRenderTextures[i] = NULL;
    }

    for(UINT i=0; i<NUM_RENDER_BUFFERS; i++)
    {
        SafeRelease(copyTextures[i]);
    }

    delete transitionTexture;
    transitionTexture = NULL;

    //-------------------------------------------------------------

    delete mainVertexShader;
    delete mainPixelShader;
    delete yuvScalePixelShader;
    delete solidVertexShader;
    delete solidPixelShader;

    mainVertexShader = NULL;
    mainPixelShader = NULL;
    yuvScalePixelShader = NULL;
    solidVertexShader = NULL;
    solidPixelShader = NULL;

    //-------------------------------------------------------------

    delete GS;
    GS = NULL;

    //-------------------------------------------------------------

    ResizeRenderFrame(false);
    RedrawWindow(hwndRenderFrame, NULL, NULL, RDW_INVALIDATE|RDW_UPDATENOW);

    //-------------------------------------------------------------

    AudioDeviceList audioDevices;
    GetAudioDevices(audioDevices, ADT_RECORDING);

    String strDevice = AppConfig->GetString(TEXT("Audio"), TEXT("Device"), NULL);
    if(strDevice.IsEmpty() || !audioDevices.HasID(strDevice))
    {
        AppConfig->SetString(TEXT("Audio"), TEXT("Device"), TEXT("Disable"));
        strDevice = TEXT("Disable");
    }

    audioDevices.FreeData();

    EnableWindow(GetDlgItem(hwndMain, ID_MICVOLUME), !strDevice.CompareI(TEXT("Disable")));

    //-------------------------------------------------------------

    ClearStreamInfo();

    DumpProfileData();
    FreeProfileData();
    Log(TEXT("=====Stream End: %s================================================="), CurrentDateTimeString().Array());

    //update notification icon to reflect current status
    UpdateNotificationAreaIcon();

    SetWindowText(GetDlgItem(hwndMain, ID_TESTSTREAM), Str("MainWindow.TestStream"));

    EnableWindow(GetDlgItem(hwndMain, ID_STARTSTOP), TRUE);
    SetWindowText(GetDlgItem(hwndMain, ID_STARTSTOP), Str("MainWindow.StartStream"));

    EnableWindow(GetDlgItem(hwndMain, ID_TESTSTREAM), TRUE);

    bEditMode = false;
    SendMessage(GetDlgItem(hwndMain, ID_SCENEEDITOR), BM_SETCHECK, BST_UNCHECKED, 0);
    EnableWindow(GetDlgItem(hwndMain, ID_SCENEEDITOR), FALSE);
    ClearStatusBar();

    InvalidateRect(hwndRenderFrame, NULL, TRUE);

    SystemParametersInfo(SPI_SETSCREENSAVEACTIVE, 1, 0, 0);
    SetThreadExecutionState(ES_CONTINUOUS);

    String processPriority = AppConfig->GetString(TEXT("General"), TEXT("Priority"), TEXT("Normal"));
    if (scmp(processPriority, TEXT("Normal")))
        SetPriorityClass(GetCurrentProcess(), NORMAL_PRIORITY_CLASS);

    bTestStream = false;

    UpdateRenderViewMessage();

    OSLeaveMutex(hStartupShutdownMutex);
}
void OBS::Stop(bool overrideKeepRecording)
{
    if((!bStreaming && !bRecording && !bRunning) && (!bTestStream)) return;

    //ugly hack to prevent hotkeys from being processed while we're stopping, otherwise we end up
    //with callbacks from the ProcessEvents call in DelayedPublisher which causes havoc.
    OSEnterMutex(hHotkeyMutex);

    int networkMode = AppConfig->GetInt(TEXT("Publish"), TEXT("Mode"), 2);

    if(!overrideKeepRecording && bRecording && bKeepRecording && networkMode == 0)
    {
        NetworkStream *tempStream = NULL;

        videoEncoder->RequestKeyframe();
        tempStream = network;
        network = NULL;

        Log(TEXT("=====Stream End (recording continues): %s========================="), CurrentDateTimeString().Array());

        delete tempStream;

        bStreaming = false;
        bSentHeaders = false;

        ReportStopStreamingTrigger();
        ConfigureStreamButtons();

        OSLeaveMutex(hHotkeyMutex);
        return;
    }

    OSEnterMutex(hStartupShutdownMutex);

    //we only want the capture thread to stop first, so we can ensure all packets are flushed
    bShutdownEncodeThread = true;
    ShowWindow(hwndProjector, SW_HIDE);

    if(hEncodeThread)
    {
        OSTerminateThread(hEncodeThread, 30000);
        hEncodeThread = NULL;
    }

    bShutdownVideoThread = true;
    SetEvent(hVideoEvent);

    if(hVideoThread)
    {
        OSTerminateThread(hVideoThread, 30000);
        hVideoThread = NULL;
    }

    bRunning = false;

    ReportStopStreamTrigger();

    for(UINT i=0; i<globalSources.Num(); i++)
        globalSources[i].source->EndScene();

    if(scene)
        scene->EndScene();

    //-------------------------------------------------------------

    if(hSoundThread)
    {
        //ReleaseSemaphore(hRequestAudioEvent, 1, NULL);
        OSTerminateThread(hSoundThread, 20000);
    }

    //if(hRequestAudioEvent)
    //    CloseHandle(hRequestAudioEvent);
    if(hSoundDataMutex)
        OSCloseMutex(hSoundDataMutex);

    hSoundThread = NULL;
    //hRequestAudioEvent = NULL;
    hSoundDataMutex = NULL;

    //-------------------------------------------------------------

    StopBlankSoundPlayback();

    //-------------------------------------------------------------

    delete network;
    network = NULL;

    if (bStreaming)
        ReportStopStreamingTrigger();
    bStreaming = false;

    if(bRecording)
        StopRecording();

    delete micAudio;
    micAudio = NULL;

    delete desktopAudio;
    desktopAudio = NULL;

    delete audioEncoder;
    audioEncoder = NULL;

    delete videoEncoder;
    videoEncoder = NULL;

    //-------------------------------------------------------------

    for(UINT i=0; i<pendingAudioFrames.Num(); i++)
        pendingAudioFrames[i].audioData.Clear();
    pendingAudioFrames.Clear();

    //-------------------------------------------------------------

    if(GS)
        GS->UnloadAllData();

    //-------------------------------------------------------------

    delete scene;
    scene = NULL;

    for(UINT i=0; i<globalSources.Num(); i++)
        globalSources[i].FreeData();
    globalSources.Clear();

    //-------------------------------------------------------------

    for(UINT i=0; i<auxAudioSources.Num(); i++)
        delete auxAudioSources[i];
    auxAudioSources.Clear();

    //-------------------------------------------------------------

    for(UINT i=0; i<NUM_RENDER_BUFFERS; i++)
    {
        delete mainRenderTextures[i];
        delete yuvRenderTextures[i];

        mainRenderTextures[i] = NULL;
        yuvRenderTextures[i] = NULL;
    }

    for(UINT i=0; i<NUM_RENDER_BUFFERS; i++)
    {
        SafeRelease(copyTextures[i]);
    }

    delete transitionTexture;
    transitionTexture = NULL;

    //-------------------------------------------------------------

    delete mainVertexShader;
    delete mainPixelShader;
    delete yuvScalePixelShader;
    delete solidVertexShader;
    delete solidPixelShader;

    mainVertexShader = NULL;
    mainPixelShader = NULL;
    yuvScalePixelShader = NULL;
    solidVertexShader = NULL;
    solidPixelShader = NULL;

    //-------------------------------------------------------------

    delete GS;
    GS = NULL;

    //-------------------------------------------------------------

    ResizeRenderFrame(false);
    RedrawWindow(hwndRenderFrame, NULL, NULL, RDW_INVALIDATE|RDW_UPDATENOW);

    //-------------------------------------------------------------

    AudioDeviceList audioDevices;
    GetAudioDevices(audioDevices, ADT_RECORDING, false, true);

    String strDevice = AppConfig->GetString(TEXT("Audio"), TEXT("Device"), NULL);
    if(strDevice.IsEmpty() || !audioDevices.HasID(strDevice))
    {
        AppConfig->SetString(TEXT("Audio"), TEXT("Device"), TEXT("Disable"));
        strDevice = TEXT("Disable");
    }

    audioDevices.FreeData();

    EnableWindow(GetDlgItem(hwndMain, ID_MICVOLUME), !strDevice.CompareI(TEXT("Disable")));

    //-------------------------------------------------------------

    ClearStreamInfo();

    DumpProfileData();
    FreeProfileData();
    Log(TEXT("=====Stream End: %s================================================="), CurrentDateTimeString().Array());

    //update notification icon to reflect current status
    UpdateNotificationAreaIcon();

    bEditMode = false;
    SendMessage(GetDlgItem(hwndMain, ID_SCENEEDITOR), BM_SETCHECK, BST_UNCHECKED, 0);
    EnableWindow(GetDlgItem(hwndMain, ID_SCENEEDITOR), FALSE);
    ClearStatusBar();

    InvalidateRect(hwndRenderFrame, NULL, TRUE);

    SystemParametersInfo(SPI_SETSCREENSAVEACTIVE, 1, 0, 0);
    SetThreadExecutionState(ES_CONTINUOUS);

    String processPriority = AppConfig->GetString(TEXT("General"), TEXT("Priority"), TEXT("Normal"));
    if (scmp(processPriority, TEXT("Normal")))
        SetPriorityClass(GetCurrentProcess(), NORMAL_PRIORITY_CLASS);

    bTestStream = false;

    ConfigureStreamButtons();
    UpdateRenderViewMessage();
    DisableMenusWhileStreaming(false);

    OSLeaveMutex(hStartupShutdownMutex);
    OSLeaveMutex(hHotkeyMutex);
}
RTMPPublisher::~RTMPPublisher()
{
    //OSDebugOut (TEXT("*** ~RTMPPublisher (%d queued, %d buffered, %d data)\n"), queuedPackets.Num(), bufferedPackets.Num(), curDataBufferLen);

    bStopping = true;

    //we're in the middle of connecting! wait for that to happen to avoid all manner of race conditions
    if (hConnectionThread)
    {
        WaitForSingleObject(hConnectionThread, INFINITE);
        OSCloseThread(hConnectionThread);
    }

    //send all remaining buffered packets, this may block since it respects timestamps
    FlushBufferedPackets ();

    //OSDebugOut (TEXT("%d queued after flush\n"), queuedPackets.Num());

    if(hSendThread)
    {
        //this marks the thread to exit after current work is done
        SetEvent(hSendLoopExit);

        //these wake up the thread
        ReleaseSemaphore(hSendSempahore, 1, NULL);
        SetEvent(hBufferSpaceAvailableEvent);

        //wait 50 sec for all data to finish sending
        if (WaitForSingleObject(hSendThread, 50000) == WAIT_TIMEOUT)
        {
            Log(TEXT("~RTMPPublisher: Network appears stalled with %d / %d buffered, dropping connection!"), curDataBufferLen, dataBufferSize);
            FatalSocketShutdown();

            //this will wake up and flush the sendloop if it's still trying to send out stuff
            ReleaseSemaphore(hSendSempahore, 1, NULL);
            SetEvent(hBufferSpaceAvailableEvent);
        }

        OSTerminateThread(hSendThread, 10000);
    }

    if(hSendSempahore)
        CloseHandle(hSendSempahore);

    //OSDebugOut (TEXT("*** ~RTMPPublisher hSendThread terminated (%d queued, %d buffered, %d data)\n"), queuedPackets.Num(), bufferedPackets.Num(), curDataBufferLen);

    if (hSocketThread)
    {
        //mark the socket loop to shut down after the buffer is empty
        SetEvent(hSocketLoopExit);

        //wake it up in case it already is empty
        SetEvent(hBufferEvent);

        //wait 60 sec for it to exit
        OSTerminateThread(hSocketThread, 60000);
    }

    //OSDebugOut (TEXT("*** ~RTMPPublisher hSocketThread terminated (%d queued, %d buffered, %d data)\n"), queuedPackets.Num(), bufferedPackets.Num(), curDataBufferLen);

    if(rtmp)
    {
        if (RTMP_IsConnected(rtmp))
        {
            //at this point nothing should be in the buffer, flush out what remains to the net and make it blocking
            FlushDataBuffer();

            //disable the buffered send, so RTMP_* functions write directly to the net (and thus block)
            rtmp->m_bCustomSend = 0;

            //manually shut down the stream and issue a graceful socket shutdown
            RTMP_DeleteStream(rtmp);

            shutdown(rtmp->m_sb.sb_socket, SD_SEND);

            //this waits for the socket shutdown to complete gracefully
            for (;;)
            {
                char buff[1024];
                int ret;

                ret = recv(rtmp->m_sb.sb_socket, buff, sizeof(buff), 0);
                if (!ret)
                    break;
                else if (ret == -1)
                {
                    Log(TEXT("~RTMPPublisher: Received error %d while waiting for graceful shutdown."), WSAGetLastError());
                    break;
                }
            }

            //OSDebugOut(TEXT("Graceful shutdown complete.\n"));
        }

        //this closes the socket if not already done
        RTMP_Close(rtmp);
    }

    if(hDataMutex)
        OSCloseMutex(hDataMutex);

    while (bufferedPackets.Num())
    {
        //this should not happen any more...
        bufferedPackets[0].data.Clear();
        bufferedPackets.Remove(0);
    }

    if (dataBuffer)
        Free(dataBuffer);

    if (hDataBufferMutex)
        OSCloseMutex(hDataBufferMutex);

    if (hBufferEvent)
        CloseHandle(hBufferEvent);

    if (hSendLoopExit)
        CloseHandle(hSendLoopExit);

    if (hSocketLoopExit)
        CloseHandle(hSocketLoopExit);

    if (hSendBacklogEvent)
        CloseHandle(hSendBacklogEvent);

    if (hBufferSpaceAvailableEvent)
        CloseHandle(hBufferSpaceAvailableEvent);

    if (hWriteEvent)
        CloseHandle(hWriteEvent);

    if(rtmp)
    {
        if (rtmp->Link.pubUser.av_val)
            Free(rtmp->Link.pubUser.av_val);

        if (rtmp->Link.pubPasswd.av_val)
            Free(rtmp->Link.pubPasswd.av_val);

        RTMP_Free(rtmp);
    }

    //--------------------------

    for(UINT i=0; i<queuedPackets.Num(); i++)
        queuedPackets[i].data.Clear();
    queuedPackets.Clear();

    double dBFrameDropPercentage = double(numBFramesDumped)/NumTotalVideoFrames()*100.0;
    double dPFrameDropPercentage = double(numPFramesDumped)/NumTotalVideoFrames()*100.0;

    if (totalSendCount)
        Log(TEXT("Average send payload: %d bytes, average send interval: %d ms"), (DWORD)(totalSendBytes / totalSendCount), totalSendPeriod / totalSendCount);

    Log(TEXT("Number of times waited to send: %d, Waited for a total of %d bytes"), totalTimesWaited, totalBytesWaited);

    Log(TEXT("Number of b-frames dropped: %u (%0.2g%%), Number of p-frames dropped: %u (%0.2g%%), Total %u (%0.2g%%)"),
        numBFramesDumped, dBFrameDropPercentage,
        numPFramesDumped, dPFrameDropPercentage,
        numBFramesDumped+numPFramesDumped, dBFrameDropPercentage+dPFrameDropPercentage);

    /*if(totalCalls)
        Log(TEXT("average send time: %u"), totalTime/totalCalls);*/

    strRTMPErrors.Clear();

    //--------------------------
}
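// Illustrative note (not part of the original source): the destructor above performs a
// "graceful" TCP teardown -- it half-closes the socket with shutdown(SD_SEND) and then
// drains recv() until the peer closes its own side -- so the server has a chance to read
// every remaining byte of the stream before the connection goes away. The helper below is
// a minimal, self-contained sketch of that same pattern for a plain Winsock socket; the
// name GracefulShutdown is hypothetical and is not an API used elsewhere in this code.
#include <winsock2.h>

static void GracefulShutdown(SOCKET sock)
{
    //tell the peer we are done sending; our receive side stays open
    shutdown(sock, SD_SEND);

    //drain whatever the peer still sends until it closes its side (recv returns 0)
    //or an error occurs (recv returns SOCKET_ERROR)
    char buff[1024];
    for (;;)
    {
        int ret = recv(sock, buff, sizeof(buff), 0);
        if (ret == 0 || ret == SOCKET_ERROR)
            break;
    }

    //both directions are now finished, so closing the socket cannot discard unsent data
    closesocket(sock);
}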
void STDCALL OSExit()
{
    timeEndPeriod(1);
    OSCloseMutex(hProfilerMutex);
}
OBS::~OBS()
{
    Stop();

    bShuttingDown = true;

    OSTerminateThread(hHotkeyThread, 250);

    for(UINT i=0; i<plugins.Num(); i++)
    {
        PluginInfo &pluginInfo = plugins[i];

        UNLOADPLUGINPROC unloadPlugin = (UNLOADPLUGINPROC)GetProcAddress(pluginInfo.hModule, "UnloadPlugin");
        if(unloadPlugin)
            unloadPlugin();

        FreeLibrary(pluginInfo.hModule);
        pluginInfo.strFile.Clear();
    }

    //DestroyWindow(hwndMain);

    RECT rcWindow;
    GetWindowRect(hwndMain, &rcWindow);

    GlobalConfig->SetInt(TEXT("General"), TEXT("PosX"), rcWindow.left);
    GlobalConfig->SetInt(TEXT("General"), TEXT("PosY"), rcWindow.top);
    GlobalConfig->SetInt(TEXT("General"), TEXT("Width"), clientWidth);
    GlobalConfig->SetInt(TEXT("General"), TEXT("Height"), clientHeight);

    scenesConfig.Close(true);

    for(UINT i=0; i<Icons.Num(); i++)
        DeleteObject(Icons[i].hIcon);
    Icons.Clear();

    for(UINT i=0; i<Fonts.Num(); i++)
    {
        DeleteObject(Fonts[i].hFont);
        Fonts[i].strFontFace.Clear();
    }
    Fonts.Clear();

    for(UINT i=0; i<sceneClasses.Num(); i++)
        sceneClasses[i].FreeData();
    for(UINT i=0; i<imageSourceClasses.Num(); i++)
        imageSourceClasses[i].FreeData();

    if(hSceneMutex)
        OSCloseMutex(hSceneMutex);

    if(hAuxAudioMutex)
        OSCloseMutex(hAuxAudioMutex);

    delete API;
    API = NULL;

    if(hInfoMutex)
        OSCloseMutex(hInfoMutex);
    if(hHotkeyMutex)
        OSCloseMutex(hHotkeyMutex);
}
RTMPPublisher::~RTMPPublisher()
{
    //OSDebugOut (TEXT("*** ~RTMPPublisher (%d queued, %d buffered)\n"), queuedPackets.Num(), bufferedPackets.Num());

    bStopping = true;

    //we're in the middle of connecting! wait for that to happen to avoid all manner of race conditions
    if (hConnectionThread)
    {
        WaitForSingleObject(hConnectionThread, INFINITE);
        OSCloseThread(hConnectionThread);
    }

    FlushBufferedPackets ();

    //OSDebugOut (TEXT("%d queued after flush\n"), queuedPackets.Num());

    if(hSendThread)
    {
        //this marks the thread to exit after current work is done
        SetEvent(hSendLoopExit);

        //this wakes up the thread
        ReleaseSemaphore(hSendSempahore, 1, NULL);

        //wait 60 sec for it to exit
        OSTerminateThread(hSendThread, 60000);
    }

    if(hSendSempahore)
        CloseHandle(hSendSempahore);

    //OSDebugOut (TEXT("*** ~RTMPPublisher hSendThread terminated (%d queued, %d buffered, %d data)\n"), queuedPackets.Num(), bufferedPackets.Num(), curDataBufferLen);

    if (hSocketThread)
    {
        //mark the socket loop to shut down after the buffer is empty
        SetEvent(hSocketLoopExit);

        //wait 60 sec for it to exit
        OSTerminateThread(hSocketThread, 60000);
    }

    //OSDebugOut (TEXT("*** ~RTMPPublisher hSocketThread terminated (%d queued, %d buffered, %d data)\n"), queuedPackets.Num(), bufferedPackets.Num(), curDataBufferLen);

    if(rtmp)
    {
        //at this point nothing should be in the buffer, flush out what remains and make it blocking
        FlushDataBuffer();

        //disable the buffered send, so RTMP_Close writes directly to the net
        rtmp->m_bCustomSend = 0;

        //ideally we need some kind of delay here, since we just dumped several seconds worth of timestamps to the network
        //at once, and Twitch at least shows the offline screen as soon as the connection is severed even if there are
        //pending video frames.
        if (RTMP_IsConnected(rtmp))
            Sleep (500); //for now

        RTMP_Close(rtmp);
    }

    if(hDataMutex)
        OSCloseMutex(hDataMutex);

    while (bufferedPackets.Num())
    {
        //this should not happen any more...
        bufferedPackets[0].data.Clear();
        bufferedPackets.Remove(0);
    }

    if (dataBuffer)
        Free(dataBuffer);

    if (hDataBufferMutex)
        OSCloseMutex(hDataBufferMutex);

    if (hBufferEvent)
        CloseHandle(hBufferEvent);

    if (hSendLoopExit)
        CloseHandle(hSendLoopExit);

    if (hSocketLoopExit)
        CloseHandle(hSocketLoopExit);

    if (hSendBacklogEvent)
        CloseHandle(hSendBacklogEvent);

    if (hBufferSpaceAvailableEvent)
        CloseHandle(hBufferSpaceAvailableEvent);

    if (hWriteEvent)
        CloseHandle(hWriteEvent);

    if(rtmp)
        RTMP_Free(rtmp);

    //--------------------------

    for(UINT i=0; i<queuedPackets.Num(); i++)
        queuedPackets[i].data.Clear();
    queuedPackets.Clear();

    double dBFrameDropPercentage = double(numBFramesDumped)/NumTotalVideoFrames()*100.0;
    double dPFrameDropPercentage = double(numPFramesDumped)/NumTotalVideoFrames()*100.0;

    Log(TEXT("Number of times waited to send: %d, Waited for a total of %d bytes"), totalTimesWaited, totalBytesWaited);

    Log(TEXT("Number of b-frames dropped: %u (%0.2g%%), Number of p-frames dropped: %u (%0.2g%%), Total %u (%0.2g%%)"),
        numBFramesDumped, dBFrameDropPercentage,
        numPFramesDumped, dPFrameDropPercentage,
        numBFramesDumped+numPFramesDumped, dBFrameDropPercentage+dPFrameDropPercentage);

    /*if(totalCalls)
        Log(TEXT("average send time: %u"), totalTime/totalCalls);*/

    strRTMPErrors.Clear();

    //--------------------------
}
RTMPPublisher::~RTMPPublisher()
{
    bStopping = true;

    //we're in the middle of connecting! wait for that to happen to avoid all manner of race conditions
    if (hConnectionThread)
    {
        WaitForSingleObject(hConnectionThread, INFINITE);
        OSCloseThread(hConnectionThread);
    }

    if(hSendThread)
    {
        ReleaseSemaphore(hSendSempahore, 1, NULL);

        //wake it up in case it's waiting for buffer space
        SetEvent(hBufferSpaceAvailableEvent);
        OSTerminateThread(hSendThread, 20000);
    }

    if(hSendSempahore)
        CloseHandle(hSendSempahore);

    if(hDataMutex)
        OSCloseMutex(hDataMutex);

    while (bufferedPackets.Num())
    {
        bufferedPackets[0].data.Clear();
        bufferedPackets.Remove(0);
    }

    //wake up and shut down the buffered sender
    SetEvent(hWriteEvent);
    SetEvent(hBufferEvent);

    if (hSocketThread)
    {
        OSTerminateThread(hSocketThread, 20000);

        //at this point nothing new should be coming in to the buffer, flush out what remains
        FlushDataBuffer();
    }

    if(rtmp)
    {
        //disable the buffered send, so RTMP_Close writes directly to the net
        rtmp->m_bCustomSend = 0;
        RTMP_Close(rtmp);
    }

    if (dataBuffer)
        Free(dataBuffer);

    if (hDataBufferMutex)
        OSCloseMutex(hDataBufferMutex);

    if (hBufferEvent)
        CloseHandle(hBufferEvent);

    if (hBufferSpaceAvailableEvent)
        CloseHandle(hBufferSpaceAvailableEvent);

    if (hWriteEvent)
        CloseHandle(hWriteEvent);

    if(rtmp)
        RTMP_Free(rtmp);

    //--------------------------

    for(UINT i=0; i<queuedPackets.Num(); i++)
        queuedPackets[i].data.Clear();
    queuedPackets.Clear();

    double dBFrameDropPercentage = double(numBFramesDumped)/NumTotalVideoFrames()*100.0;
    double dPFrameDropPercentage = double(numPFramesDumped)/NumTotalVideoFrames()*100.0;

    Log(TEXT("Number of times waited to send: %d, Waited for a total of %d bytes"), totalTimesWaited, totalBytesWaited);

    Log(TEXT("Number of b-frames dropped: %u (%0.2g%%), Number of p-frames dropped: %u (%0.2g%%), Total %u (%0.2g%%)"),
        numBFramesDumped, dBFrameDropPercentage,
        numPFramesDumped, dPFrameDropPercentage,
        numBFramesDumped+numPFramesDumped, dBFrameDropPercentage+dPFrameDropPercentage);

    /*if(totalCalls)
        Log(TEXT("average send time: %u"), totalTime/totalCalls);*/

    strRTMPErrors.Clear();

    //--------------------------
}
void ClearGLData()
{
    if(copyData)
        copyData->lastRendered = -1;

    if(hCopyThread)
    {
        bKillThread = true;
        SetEvent(hCopyEvent);
        if(WaitForSingleObject(hCopyThread, 500) != WAIT_OBJECT_0)
            TerminateThread(hCopyThread, -1);

        CloseHandle(hCopyThread);
        CloseHandle(hCopyEvent);

        hCopyThread = NULL;
        hCopyEvent = NULL;
    }

    for(int i=0; i<NUM_BUFFERS; i++)
    {
        if(glLockedTextures[i])
        {
            OSEnterMutex(glDataMutexes[i]);

            glBindBuffer(GL_PIXEL_PACK_BUFFER, gltextures[i]);
            glUnmapBuffer(GL_PIXEL_PACK_BUFFER);
            glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
            glLockedTextures[i] = false;

            OSLeaveMutex(glDataMutexes[i]);
        }
    }

    if(bHasTextures)
    {
        glDeleteBuffers(NUM_BUFFERS, gltextures);
        bHasTextures = false;
        ZeroMemory(gltextures, sizeof(gltextures));
    }

    for(int i=0; i<NUM_BUFFERS; i++)
    {
        if(glDataMutexes[i])
        {
            OSCloseMutex(glDataMutexes[i]);
            glDataMutexes[i] = NULL;
        }
    }

    DestroySharedMemory();

    copyData = NULL;
    copyWait = 0;
    lastTime = 0;
    curCapture = 0;
    curCPUTexture = 0;
    keepAliveTime = 0;
    resetCount++;
    pCopyData = NULL;

    logOutput << CurrentTimeString() << "---------------------- Cleared OpenGL Capture ----------------------" << endl;
}
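// Illustrative note (not part of the original source): ClearGLData() above stops its copy
// thread cooperatively (set a flag, signal the wake-up event, wait briefly) and only falls
// back to TerminateThread if the thread does not exit in time. Below is a minimal sketch
// of that shutdown pattern with hypothetical names (g_bKillThread, StopWorker); it assumes
// the worker loop checks the flag each time it wakes on the event.
#include <windows.h>

static volatile bool g_bKillThread = false;

static void StopWorker(HANDLE &hThread, HANDLE &hWakeEvent, DWORD timeoutMs)
{
    if (!hThread)
        return;

    g_bKillThread = true;      //ask the worker loop to exit
    SetEvent(hWakeEvent);      //wake it up if it is blocked on the event

    //give it a bounded amount of time to finish cleanly, then force-kill as a last resort
    if (WaitForSingleObject(hThread, timeoutMs) != WAIT_OBJECT_0)
        TerminateThread(hThread, (DWORD)-1);

    CloseHandle(hThread);
    CloseHandle(hWakeEvent);
    hThread = NULL;
    hWakeEvent = NULL;
}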
WebSocketOBSTriggerHandler::~WebSocketOBSTriggerHandler()
{
    OSCloseMutex(updateQueueMutex);
}