// documented in recorderbase.cpp
bool FirewireRecorder::PauseAndWait(int timeout)
{
    QMutexLocker locker(&pauseLock);
    if (request_pause)
    {
        LOG(VB_RECORD, LOG_INFO, LOC +
            QString("PauseAndWait(%1) -- pause").arg(timeout));
        if (!IsPaused(true))
        {
            StopStreaming();
            paused = true;
            pauseWait.wakeAll();
            if (tvrec)
                tvrec->RecorderPaused();
        }
        unpauseWait.wait(&pauseLock, timeout);
    }

    if (!request_pause && IsPaused(true))
    {
        LOG(VB_RECORD, LOG_INFO, LOC +
            QString("PauseAndWait(%1) -- unpause").arg(timeout));
        paused = false;
        StartStreaming();
        unpauseWait.wakeAll();
    }

    return IsPaused(true);
}
// documented in recorderbase.cpp
bool FirewireRecorder::PauseAndWait(int timeout)
{
    QMutexLocker locker(&pauseLock);
    if (request_pause)
    {
        VERBOSE(VB_RECORD, LOC + "PauseAndWait("<<timeout<<") -- pause");
        if (!IsPaused(true))
        {
            StopStreaming();
            paused = true;
            pauseWait.wakeAll();
            if (tvrec)
                tvrec->RecorderPaused();
        }
        unpauseWait.wait(&pauseLock, timeout);
    }

    if (!request_pause && IsPaused(true))
    {
        paused = false;
        VERBOSE(VB_RECORD, LOC + "PauseAndWait("<<timeout<<") -- unpause");
        StartStreaming();
        unpauseWait.wakeAll();
    }

    return IsPaused(true);
}
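// The two PauseAndWait() variants above implement the same handshake: the
// controller sets a pause request, the worker acknowledges by setting
// "paused" and waking pauseWait, and resuming reverses the exchange.
// Below is a minimal, self-contained sketch of that pattern using the
// standard library instead of Qt; the class and member names here are
// illustrative assumptions, not taken from the MythTV sources.
#include <atomic>
#include <chrono>
#include <condition_variable>
#include <iostream>
#include <mutex>
#include <thread>

class PausableWorker
{
public:
    // Called from the worker loop; blocks for up to `timeout` while a pause
    // is requested and reports whether the worker is currently paused.
    bool PauseAndWait(std::chrono::milliseconds timeout)
    {
        std::unique_lock<std::mutex> lock(m_pauseLock);
        if (m_requestPause)
        {
            if (!m_paused)
            {
                m_paused = true;           // acknowledge the pause request
                m_pauseWait.notify_all();  // wake anyone in WaitForPause()
            }
            m_unpauseWait.wait_for(lock, timeout);
        }
        if (!m_requestPause && m_paused)
        {
            m_paused = false;              // resume work
            m_unpauseWait.notify_all();
        }
        return m_paused;
    }

    void RequestPause(bool pause)
    {
        std::lock_guard<std::mutex> lock(m_pauseLock);
        m_requestPause = pause;
        m_unpauseWait.notify_all();        // interrupt any timed wait promptly
    }

    void WaitForPause()
    {
        std::unique_lock<std::mutex> lock(m_pauseLock);
        m_pauseWait.wait(lock, [this] { return m_paused; });
    }

private:
    std::mutex m_pauseLock;
    std::condition_variable m_pauseWait;
    std::condition_variable m_unpauseWait;
    bool m_requestPause = false;
    bool m_paused = false;
};

int main()
{
    PausableWorker worker;
    std::atomic<bool> running(true);

    std::thread recorder([&] {
        while (running)
        {
            if (worker.PauseAndWait(std::chrono::milliseconds(100)))
                continue;                  // paused: skip the capture work
            std::this_thread::sleep_for(std::chrono::milliseconds(10));
        }
    });

    worker.RequestPause(true);
    worker.WaitForPause();                 // returns once the worker acknowledges
    std::cout << "worker paused\n";
    worker.RequestPause(false);
    running = false;
    recorder.join();
    return 0;
}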
PvResult Source::Close()
{
    // Stop image reception
    PvResult lResult = StopStreaming();
    if ( !lResult.IsOK() )
    {
        return lResult;
    }

    // Close stream
    if ( mStream->IsOpen() )
    {
        lResult = mStream->Close();
        if ( !lResult.IsOK() )
        {
            return lResult;
        }
    }

    if ( ( mDevice != NULL ) && mDevice->IsConnected() )
    {
        // Unregister acquisition mode update notifications
        PvGenEnum *lMode = mDevice->GetGenParameters()->GetEnum( "AcquisitionMode" );
        if ( lMode != NULL )
        {
            lMode->UnregisterEventSink( this );
        }
    }

    // We don't own the device, let's just forget about it
    mDevice = NULL;

    return PvResult::Code::OK;
}
void OBSBasic::on_stopStreamBtn_clicked()
{
    tbliveLog.Log(lss_info, L"on_stopStreamBtn_clicked");

    if (outputHandler->StreamingActive()) {
        QMessageBox::StandardButton button =
            QMessageBox::question(this, QTStr("ConfirmStop.Title"),
                                  QTStr("ConfirmStop.Text"));

        if (button == QMessageBox::No)
            return;

        StopStreaming();
    }
}
void FirewireRecorder::run(void)
{
    LOG(VB_RECORD, LOG_INFO, LOC + "run");

    if (!Open())
    {
        _error = "Failed to open firewire device";
        LOG(VB_GENERAL, LOG_ERR, LOC + _error);
        return;
    }

    _continuity_error_count = 0;

    {
        QMutexLocker locker(&pauseLock);
        request_recording = true;
        recording = true;
        recordingWait.wakeAll();
    }

    StartStreaming();

    while (IsRecordingRequested() && !IsErrored())
    {
        if (PauseAndWait())
            continue;

        if (!IsRecordingRequested())
            break;

        {   // sleep 1 second unless StopRecording() or Unpause() is called,
            // just to avoid running this too often.
            QMutexLocker locker(&pauseLock);
            if (!request_recording || request_pause)
                continue;
            unpauseWait.wait(&pauseLock, 1000);
        }
    }

    StopStreaming();
    FinishRecording();

    QMutexLocker locker(&pauseLock);
    recording = false;
    recordingWait.wakeAll();
}
void OBSBasic::on_pauseStreamBtn_clicked()
{
    tbliveLog.Log(lss_info, L"on_pauseStreamBtn_clicked");

    if (outputHandler->StreamingActive()) {
        // Already streaming
        QMessageBox::StandardButton button =
            QMessageBox::question(this, QTStr("ConfirmPause.Title"),
                                  QTStr("ConfirmPause.Text"));

        if (button == QMessageBox::No)
            return;

        m_bPauseClicked = true;
        ui->pauseStreamBtn->setEnabled(false);

        StopStreaming();
    }
}
STDMETHODIMP CTransformFilter::Stop()
{
    CAutoLock lck1(&m_csFilter);
    if (m_State == State_Stopped) {
        return NOERROR;
    }

    // Succeed the Stop if we are not completely connected
    ASSERT(m_pInput == NULL || m_pOutput != NULL);
    if (m_pInput == NULL || m_pInput->IsConnected() == FALSE ||
        m_pOutput->IsConnected() == FALSE) {
        m_State = State_Stopped;
        m_bEOSDelivered = FALSE;
        return NOERROR;
    }

    ASSERT(m_pInput);
    ASSERT(m_pOutput);

    // decommit the input pin before locking or we can deadlock
    m_pInput->Inactive();

    // synchronize with Receive calls
    CAutoLock lck2(&m_csReceive);
    m_pOutput->Inactive();

    // allow a class derived from CTransformFilter
    // to know about starting and stopping streaming
    HRESULT hr = StopStreaming();
    if (SUCCEEDED(hr)) {
        // complete the state transition
        m_State = State_Stopped;
        m_bEOSDelivered = FALSE;
    }
    return hr;
}
STDMETHODIMP CudaDecodeFilter::Stop()
{
    CAutoLock lck1(&m_cStateLock);
    if (m_State == State_Stopped) {
        return NOERROR;
    }

    // Succeed the Stop if we are not completely connected
    ASSERT(m_CudaDecodeInputPin == NULL || m_paStreams[0] != NULL);
    if (m_CudaDecodeInputPin == NULL ||
        m_CudaDecodeInputPin->IsConnected() == FALSE ||
        m_paStreams[0]->IsConnected() == FALSE) {
        m_State = State_Stopped;
        m_EOSDelivered = FALSE;
        return NOERROR;
    }

    // Important!!! Refuse to receive any more samples
    m_MediaController->FlushAllPending();

    // decommit the input pin before locking or we can deadlock
    m_CudaDecodeInputPin->Inactive();

    // synchronize with Receive calls
    CAutoLock lck2(&m_csReceive);
    OutputPin()->BeginFlush();
    OutputPin()->Inactive();
    OutputPin()->EndFlush();

    // allow a class derived from CTransformFilter
    // to know about starting and stopping streaming
    HRESULT hr = StopStreaming();
    if (SUCCEEDED(hr)) {
        // complete the state transition
        m_State = State_Stopped;
        m_EOSDelivered = FALSE;
    }
    return hr;
}
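// Both Stop() implementations above call the virtual StartStreaming()/
// StopStreaming() hooks so a derived filter can manage per-run resources
// (CVideoTransformFilter::Receive() further below does the same on dynamic
// format changes). The following is a minimal sketch of such a derived
// filter; it is not taken from either project above, and the class name,
// placeholder CLSID and m_bStreamingStarted member are assumptions made
// purely for illustration.
#include <streams.h>

// Placeholder CLSID used only by this sketch
static const GUID CLSID_SketchTransform =
    { 0x12345678, 0x1234, 0x1234, { 0x12, 0x34, 0x56, 0x78, 0x9a, 0xbc, 0xde, 0xf0 } };

class CSketchTransform : public CTransformFilter
{
public:
    CSketchTransform(LPUNKNOWN pUnk, HRESULT *phr)
        : CTransformFilter(NAME("Sketch Transform"), pUnk, CLSID_SketchTransform)
        , m_bStreamingStarted(FALSE)
    {
        UNREFERENCED_PARAMETER(phr);
    }

    // Hooks invoked by the base class around streaming state changes,
    // e.g. from CTransformFilter::Stop() shown above.
    HRESULT StartStreaming()
    {
        // allocate decoder/session state needed only while streaming
        m_bStreamingStarted = TRUE;
        return NOERROR;
    }

    HRESULT StopStreaming()
    {
        // release anything acquired in StartStreaming()
        m_bStreamingStarted = FALSE;
        return NOERROR;
    }

    // Remaining mandatory overrides kept as permissive stubs for brevity
    HRESULT Transform(IMediaSample *pIn, IMediaSample *pOut)
    {
        UNREFERENCED_PARAMETER(pIn);
        UNREFERENCED_PARAMETER(pOut);
        return m_bStreamingStarted ? NOERROR : E_UNEXPECTED;
    }

    HRESULT CheckInputType(const CMediaType *mtIn)
    {
        UNREFERENCED_PARAMETER(mtIn);
        return S_OK;
    }

    HRESULT CheckTransform(const CMediaType *mtIn, const CMediaType *mtOut)
    {
        UNREFERENCED_PARAMETER(mtIn);
        UNREFERENCED_PARAMETER(mtOut);
        return S_OK;
    }

    HRESULT DecideBufferSize(IMemAllocator *pAlloc, ALLOCATOR_PROPERTIES *pProp)
    {
        pProp->cBuffers = 1;
        pProp->cbBuffer = 4096;
        ALLOCATOR_PROPERTIES actual;
        return pAlloc->SetProperties(pProp, &actual);
    }

    HRESULT GetMediaType(int iPosition, CMediaType *pMediaType)
    {
        UNREFERENCED_PARAMETER(pMediaType);
        return iPosition < 0 ? E_INVALIDARG : VFW_S_NO_MORE_ITEMS;
    }

private:
    BOOL m_bStreamingStarted;
};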
BOOL C4Record::Stop(StdStrBuf *pRecordName, BYTE *pRecordSHA1)
{
    // safety
    if (!fRecording) return FALSE;
    if (!DirectoryExists(sFilename.getData())) return FALSE;

    // streaming finished
    StopStreaming();

    // save desc into record group
    C4GameSaveRecord saveRec(false, Index, Game.Parameters.isLeague());
    saveRec.SaveDesc(RecordGrp);

    // save end player infos into record group
    Game.PlayerInfos.Save(RecordGrp, C4CFN_RecPlayerInfos);
    RecordGrp.Close();

    // write last entry and close
    C4RecordChunkHead Head;
    Head.iFrm = Game.FrameCounter + 37;
    Head.Type = RCT_End;
    CtrlRec.Write(&Head, sizeof(Head));
    CtrlRec.Close();

    // pack group
#ifndef DEBUGREC
    if (!C4Group_PackDirectory(sFilename.getData())) return FALSE;
#endif

    // return record data
    if (pRecordName) pRecordName->Copy(sFilename);
    if (pRecordSHA1)
        if (!C4Group_GetFileSHA1(sFilename.getData(), pRecordSHA1))
            return false;

    // ok
    fRecording = false;
    return true;
}
HRESULT CVideoTransformFilter::Receive(IMediaSample *pSample)
{
    // If the next filter downstream is the video renderer, then it may
    // be able to operate in DirectDraw mode which saves copying the data
    // and gives higher performance. In that case the buffer which we
    // get from GetDeliveryBuffer will be a DirectDraw buffer, and
    // drawing into this buffer draws directly onto the display surface.
    // This means that any waiting for the correct time to draw occurs
    // during GetDeliveryBuffer, and that once the buffer is given to us
    // the video renderer will count it in its statistics as a frame drawn.
    // This means that any decision to drop the frame must be taken before
    // calling GetDeliveryBuffer.

    ASSERT(CritCheckIn(&m_csReceive));
    AM_MEDIA_TYPE *pmtOut, *pmt;
#ifdef _DEBUG
    FOURCCMap fccOut;
#endif
    HRESULT hr;
    ASSERT(pSample);
    IMediaSample * pOutSample;

    // If no output pin to deliver to then no point sending us data
    ASSERT(m_pOutput != NULL);

    // The source filter may dynamically ask us to start transforming from a
    // different media type than the one we're using now. If we don't, we'll
    // draw garbage. (typically, this is a palette change in the movie,
    // but could be something more sinister like the compression type changing,
    // or even the video size changing)

#define rcS1 ((VIDEOINFOHEADER *)(pmt->pbFormat))->rcSource
#define rcT1 ((VIDEOINFOHEADER *)(pmt->pbFormat))->rcTarget

    pSample->GetMediaType(&pmt);
    if (pmt != NULL && pmt->pbFormat != NULL) {

        // spew some debug output
        ASSERT(!IsEqualGUID(pmt->majortype, GUID_NULL));
#ifdef _DEBUG
        fccOut.SetFOURCC(&pmt->subtype);
        LONG lCompression = HEADER(pmt->pbFormat)->biCompression;
        LONG lBitCount = HEADER(pmt->pbFormat)->biBitCount;
        LONG lStride = (HEADER(pmt->pbFormat)->biWidth * lBitCount + 7) / 8;
        lStride = (lStride + 3) & ~3;
        DbgLog((LOG_TRACE,3,TEXT("*Changing input type on the fly to")));
        DbgLog((LOG_TRACE,3,TEXT("FourCC: %lx Compression: %lx BitCount: %ld"),
                fccOut.GetFOURCC(), lCompression, lBitCount));
        DbgLog((LOG_TRACE,3,TEXT("biHeight: %ld rcDst: (%ld, %ld, %ld, %ld)"),
                HEADER(pmt->pbFormat)->biHeight,
                rcT1.left, rcT1.top, rcT1.right, rcT1.bottom));
        DbgLog((LOG_TRACE,3,TEXT("rcSrc: (%ld, %ld, %ld, %ld) Stride: %ld"),
                rcS1.left, rcS1.top, rcS1.right, rcS1.bottom,
                lStride));
#endif

        // now switch to using the new format. I am assuming that the
        // derived filter will do the right thing when its media type is
        // switched and streaming is restarted.
        StopStreaming();
        m_pInput->CurrentMediaType() = *pmt;
        DeleteMediaType(pmt);

        // if this fails, playback will stop, so signal an error
        hr = StartStreaming();
        if (FAILED(hr)) {
            return AbortPlayback(hr);
        }
    }

    // Now that we have noticed any format changes on the input sample, it's
    // OK to discard it.
    if (ShouldSkipFrame(pSample)) {
        MSR_NOTE(m_idSkip);
        m_bSampleSkipped = TRUE;
        return NOERROR;
    }

    // Set up the output sample
    hr = InitializeOutputSample(pSample, &pOutSample);
    if (FAILED(hr)) {
        return hr;
    }

    m_bSampleSkipped = FALSE;

    // The renderer may ask us, on the fly, to start transforming to a
    // different format. If we don't obey it, we'll draw garbage.

#define rcS ((VIDEOINFOHEADER *)(pmtOut->pbFormat))->rcSource
#define rcT ((VIDEOINFOHEADER *)(pmtOut->pbFormat))->rcTarget

    pOutSample->GetMediaType(&pmtOut);
    if (pmtOut != NULL && pmtOut->pbFormat != NULL) {

        // spew some debug output
        ASSERT(!IsEqualGUID(pmtOut->majortype, GUID_NULL));
#ifdef _DEBUG
        fccOut.SetFOURCC(&pmtOut->subtype);
        LONG lCompression = HEADER(pmtOut->pbFormat)->biCompression;
        LONG lBitCount = HEADER(pmtOut->pbFormat)->biBitCount;
        LONG lStride = (HEADER(pmtOut->pbFormat)->biWidth * lBitCount + 7) / 8;
        lStride = (lStride + 3) & ~3;
        DbgLog((LOG_TRACE,3,TEXT("*Changing output type on the fly to")));
        DbgLog((LOG_TRACE,3,TEXT("FourCC: %lx Compression: %lx BitCount: %ld"),
                fccOut.GetFOURCC(), lCompression, lBitCount));
        DbgLog((LOG_TRACE,3,TEXT("biHeight: %ld rcDst: (%ld, %ld, %ld, %ld)"),
                HEADER(pmtOut->pbFormat)->biHeight,
                rcT.left, rcT.top, rcT.right, rcT.bottom));
        DbgLog((LOG_TRACE,3,TEXT("rcSrc: (%ld, %ld, %ld, %ld) Stride: %ld"),
                rcS.left, rcS.top, rcS.right, rcS.bottom,
                lStride));
#endif

        // now switch to using the new format. I am assuming that the
        // derived filter will do the right thing when its media type is
        // switched and streaming is restarted.
        StopStreaming();
        m_pOutput->CurrentMediaType() = *pmtOut;
        DeleteMediaType(pmtOut);
        hr = StartStreaming();

        if (SUCCEEDED(hr)) {
            // a new format, means a new empty buffer, so wait for a keyframe
            // before passing anything on to the renderer.
            // !!! a keyframe may never come, so give up after 30 frames
            DbgLog((LOG_TRACE,3,TEXT("Output format change means we must wait for a keyframe")));
            m_nWaitForKey = 30;

        // if this fails, playback will stop, so signal an error
        } else {
            // Must release the sample before calling AbortPlayback
            // because we might be holding the win16 lock or
            // ddraw lock
            pOutSample->Release();
            AbortPlayback(hr);
            return hr;
        }
    }

    // After a discontinuity, we need to wait for the next key frame
    if (pSample->IsDiscontinuity() == S_OK) {
        DbgLog((LOG_TRACE,3,TEXT("Non-key discontinuity - wait for keyframe")));
        m_nWaitForKey = 30;
    }

    // Start timing the transform (and log it if PERF is defined)
    if (SUCCEEDED(hr)) {
        m_tDecodeStart = timeGetTime();
        MSR_START(m_idTransform);

        // have the derived class transform the data
        hr = Transform(pSample, pOutSample);

        // Stop the clock (and log it if PERF is defined)
        MSR_STOP(m_idTransform);
        m_tDecodeStart = timeGetTime() - m_tDecodeStart;
        m_itrAvgDecode = m_tDecodeStart * (10000 / 16) + 15 * (m_itrAvgDecode / 16);

        // Maybe we're waiting for a keyframe still?
        if (m_nWaitForKey)
            m_nWaitForKey--;
        if (m_nWaitForKey && pSample->IsSyncPoint() == S_OK)
            m_nWaitForKey = FALSE;

        // if so, then we don't want to pass this on to the renderer
        if (m_nWaitForKey && hr == NOERROR) {
            DbgLog((LOG_TRACE,3,TEXT("still waiting for a keyframe")));
            hr = S_FALSE;
        }
    }

    if (FAILED(hr)) {
        DbgLog((LOG_TRACE,1,TEXT("Error from video transform")));
    } else {
        // the Transform() function can return S_FALSE to indicate that the
        // sample should not be delivered; we only deliver the sample if it's
        // really S_OK (same as NOERROR, of course.)
        // Try not to return S_FALSE to a direct draw buffer (it's wasteful)
        // Try to take the decision earlier - before you get it.
        if (hr == NOERROR) {
            hr = m_pOutput->Deliver(pOutSample);
        } else {
            // S_FALSE returned from Transform is a PRIVATE agreement
            // We should return NOERROR from Receive() in this case because
            // returning S_FALSE from Receive() means that this is the end
            // of the stream and no more data should be sent.
            if (S_FALSE == hr) {
                // We must Release() the sample before doing anything
                // like calling the filter graph because having the
                // sample means we may have the DirectDraw lock
                // (== win16 lock on some versions)
                pOutSample->Release();
                m_bSampleSkipped = TRUE;
                if (!m_bQualityChanged) {
                    m_bQualityChanged = TRUE;
                    NotifyEvent(EC_QUALITY_CHANGE,0,0);
                }
                return NOERROR;
            }
        }
    }

    // release the output buffer. If the connected pin still needs it,
    // it will have addrefed it itself.
    pOutSample->Release();
    ASSERT(CritCheckIn(&m_csReceive));

    return hr;
}
QTSS_Error EasyCameraSource::StartStreaming(Easy_StartStream_Params* inParams)
{
    QTSS_Error theErr = QTSS_NoErr;

    do
    {
        if (NULL == fPusherHandle)
        {
            if (!cameraLogin())
            {
                theErr = QTSS_RequestFailed;
                break;
            }

            std::map<HI_U32, Easy_U32> mapSDK2This;
            mapSDK2This[HI_NET_DEV_AUDIO_TYPE_G711] = EASY_SDK_AUDIO_CODEC_G711A;
            mapSDK2This[HI_NET_DEV_AUDIO_TYPE_G726] = EASY_SDK_AUDIO_CODEC_G726;

            EASY_MEDIA_INFO_T mediainfo;
            memset(&mediainfo, 0x00, sizeof(EASY_MEDIA_INFO_T));
            mediainfo.u32VideoCodec = EASY_SDK_VIDEO_CODEC_H264;

            HI_S32 s32Ret = HI_FAILURE;

            HI_S_Video sVideo;
            sVideo.u32Channel = HI_NET_DEV_CHANNEL_1;
            sVideo.blFlag = sStreamType ? HI_TRUE : HI_FALSE;
            s32Ret = HI_NET_DEV_GetConfig(m_u32Handle, HI_NET_DEV_CMD_VIDEO_PARAM, &sVideo, sizeof(HI_S_Video));
            if (s32Ret == HI_SUCCESS)
            {
                mediainfo.u32VideoFps = sVideo.u32Frame;
            }
            else
            {
                mediainfo.u32VideoFps = 25;
            }

            HI_S_Audio sAudio;
            sAudio.u32Channel = HI_NET_DEV_CHANNEL_1;
            sAudio.blFlag = sStreamType ? HI_TRUE : HI_FALSE;
            s32Ret = HI_NET_DEV_GetConfig(m_u32Handle, HI_NET_DEV_CMD_AUDIO_PARAM, &sAudio, sizeof(HI_S_Audio));
            if (s32Ret == HI_SUCCESS)
            {
                mediainfo.u32AudioCodec = mapSDK2This[sAudio.u32Type];
                mediainfo.u32AudioChannel = sAudio.u32Channel;
            }
            else
            {
                mediainfo.u32AudioCodec = EASY_SDK_AUDIO_CODEC_G711A;
                mediainfo.u32AudioChannel = 1;
            }
            mediainfo.u32AudioSamplerate = 8000;

            fPusherHandle = EasyPusher_Create();
            if (fPusherHandle == NULL)
            {
                // EasyPusher creation failed; the EasyPusher SDK may not be licensed
                theErr = QTSS_Unimplemented;
                break;
            }

            // Register the stream-push event callback
            EasyPusher_SetEventCallback(fPusherHandle, __EasyPusher_Callback, 0, NULL);

            // Build the stream name from the received command parameters
            char sdpName[128] = { 0 };
            sprintf(sdpName, "%s/%s/%s.sdp", inParams->inStreamID, inParams->inSerial, inParams->inChannel);

            // Start pushing the media stream
            EasyPusher_StartStream(fPusherHandle, (char*)inParams->inIP, inParams->inPort, sdpName, "", "", &mediainfo, 1024/* 1M Buffer*/, 0);

            saveStartStreamParams(inParams);
        }

        theErr = netDevStartStream();
    } while (0);

    if (theErr != QTSS_NoErr)
    {
        // If the push failed, release the resources that have already been opened
        StopStreaming(NULL);
    }
    else
    {
        // Push succeeded; report back the parameters of the stream currently being pushed
        inParams->inChannel = fStartStreamInfo.channel;
        inParams->inIP = fStartStreamInfo.ip;
        inParams->inPort = fStartStreamInfo.port;
        inParams->inProtocol = fStartStreamInfo.protocol;
        inParams->inSerial = fStartStreamInfo.serial;
        inParams->inStreamID = fStartStreamInfo.streamId;
    }

    return theErr;
}
/**
 * The window procedure which handles the application events.
 */
LRESULT CALLBACK WndProc(HWND hWnd, UINT message, WPARAM wParam, LPARAM lParam)
{
    switch (message)
    {
        case WM_SETFOCUS:
        {
            GetCursorPos(&gLastMousePos);
            gFocused = true;
            break;
        }
        case WM_KILLFOCUS:
        {
            gFocused = false;
            break;
        }
        // An explicit paint message
        case WM_PAINT:
        {
            if (!gReinitializeRequired)
            {
                // This is only really called when something is dragged over top of the window
                RenderScene();
            }
            ValidateRect(hWnd, nullptr);
            break;
        }
        // Handle window size changes and pause streaming when minimized since the back buffer might not be available
        case WM_SIZE:
        {
            // Update the pause state
            int wmEvent = LOWORD(wParam);
            if (wmEvent == SIZE_MINIMIZED)
            {
                gPaused = true;
                Pause();
            }
            else if (wmEvent == SIZE_RESTORED)
            {
                gPaused = false;
            }
            break;
        }
        // Handle key presses
        case WM_KEYDOWN:
        {
            switch (wParam)
            {
                // Toggle streaming
                case VK_F5:
                {
                    if (IsStreaming())
                    {
                        gStreamingDesired = false;
                        StopStreaming();
                    }
                    else
                    {
                        gStreamingDesired = true;
                        StartStreaming(gBroadcastWidth, gBroadcastHeight, gBroadcastFramesPerSecond);
                    }
                    break;
                }
                // Toggle fullscreen
                case VK_F12:
                {
                    gFullscreen = !gFullscreen;
                    gReinitializeRequired = true;
                    break;
                }
                // Toggle broadcast resolution
                case VK_F1:
                {
                    bool streaming = IsStreaming();
                    if (streaming)
                    {
                        StopStreaming();
                    }
                    if (gBroadcastWidth == 640)
                    {
                        gBroadcastWidth = 1024;
                        gBroadcastHeight = 768;
                    }
                    else
                    {
                        gBroadcastWidth = 640;
                        gBroadcastHeight = 368;
                    }
                    if (streaming)
                    {
                        StartStreaming(gBroadcastWidth, gBroadcastHeight, gBroadcastFramesPerSecond);
                    }
                    break;
                }
            }
            break;
        }
        // Close the application
        case WM_DESTROY:
        {
            PostQuitMessage(0);
            break;
        }
        default:
        {
            return DefWindowProc(hWnd, message, wParam, lParam);
        }
    }

    return 0;
}
/**
 * The main entry point for the application.
 */
int APIENTRY _tWinMain(HINSTANCE hInstance, HINSTANCE hPrevInstance, LPTSTR lpCmdLine, int nCmdShow)
{
    UNREFERENCED_PARAMETER(hPrevInstance);
    UNREFERENCED_PARAMETER(lpCmdLine);

    // Initialize global strings
    LoadString(hInstance, IDS_APP_TITLE, gWindowTitle, sizeof(gWindowTitle));
    LoadString(hInstance, IDC_STREAMING, gWindowClass, sizeof(gWindowClass));

    // Register the window class
    RegisterWindowClass(hInstance);

    // Perform application initialization:
    if ( !InitInstance(hInstance, nCmdShow) )
    {
        return FALSE;
    }

    // Set the view to the default position
    ResetView();

    // Cache the last mouse position
    GetCursorPos(&gLastMousePos);

    // Initialize the Twitch SDK
    InitializeStreaming("<username>", "<password>", "<clientId>", "<clientSecret>", GetIntelDllPath());

    // Main message loop
    MSG msg;
    while (true)
    {
        // Check to see if any messages are waiting in the queue
        while (PeekMessage(&msg, nullptr, 0, 0, PM_REMOVE))
        {
            // Process window messages
            TranslateMessage(&msg);
            DispatchMessage(&msg);

            // Received a quit message
            if (msg.message == WM_QUIT)
            {
                break;
            }
        }

        // Received a quit message so exit the app
        if (msg.message == WM_QUIT)
        {
            break;
        }

        if (gReinitializeRequired)
        {
            gReinitializeRequired = false;
            InitializeRendering();
        }

        // Draw the scene
        RenderScene();

        UpdateWaveMesh();

        // Process user input independent of the event queue
        if (gFocused)
        {
            HandleInput();
        }

        // Record the frame time
        unsigned __int64 curTime = GetSystemTimeMs();

        // Begin streaming when ready
        if (gStreamingDesired && !IsStreaming() && IsReadyToStream())
        {
            StartStreaming(gBroadcastWidth, gBroadcastHeight, gBroadcastFramesPerSecond);
            gLastCaptureTime = 0;
        }

        // If you send frames too quickly to the SDK (based on the broadcast FPS you configured) it will not be able
        // to make use of them all. In that case, it will simply release buffers without using them which means the
        // game wasted time doing the capture. To mitigate this, the app should pace the captures to the broadcast FPS.
        unsigned __int64 captureDelta = curTime - gLastCaptureTime;
        bool isTimeForNextCapture = (captureDelta / 1000.0) >= (1.0 / gBroadcastFramesPerSecond);

        // streaming is in progress so try and capture a frame
        if (IsStreaming() && !gPaused && isTimeForNextCapture)
        {
            // capture a snapshot of the back buffer
            unsigned char* pBgraFrame = nullptr;
            int width = 0;
            int height = 0;
            bool gotFrame = false;

            switch (gCaptureMethod)
            {
            case CaptureMethod::Slow:
                gotFrame = CaptureFrame_Slow(gBroadcastWidth, gBroadcastHeight, pBgraFrame);
                break;
            case CaptureMethod::Fast:
                gotFrame = CaptureFrame_Fast(gBroadcastWidth, gBroadcastHeight, pBgraFrame, width, height);
                break;
            }

            // send a frame to the stream
            if (gotFrame)
            {
                SubmitFrame(pBgraFrame);
            }
        }

        // The SDK may generate events that need to be handled by the main thread so we should handle them
        FlushStreamingEvents();

        unsigned __int64 timePerFrame = curTime - gLastFrameTime;
        unsigned int fps = 0;
        if (timePerFrame > 0)
        {
            fps = static_cast<int>(1000 / timePerFrame);
        }
        gLastFrameTime = curTime;

        // Update the window title to show the state
        #undef STREAM_STATE
        #define STREAM_STATE(__state__) #__state__,
        char buffer[128];
        const char* streamStates[] = { STREAM_STATE_LIST };
        #undef STREAM_STATE
        sprintf_s(buffer, sizeof(buffer), "Twitch Direct3D Streaming Sample - %s - %s FPS=%d",
                  GetUsername().c_str(), streamStates[GetStreamState()], fps);
        SetWindowTextA(gWindowHandle, buffer);
    }

    // Shutdown the Twitch SDK
    StopStreaming();
    ShutdownStreaming();

    // Cleanup the rendering method
    switch (gCaptureMethod)
    {
    case CaptureMethod::Slow:
        DeinitRendering_Slow();
        break;
    case CaptureMethod::Fast:
        DeinitRendering_Fast();
        break;
    }

    // Shutdown the app
    gGraphicsDevice->Release();
    gDirect3D->Release();

    // Cleanup the mesh
    DestroyWaveMesh();

    return (int)msg.wParam;
}
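// The capture-pacing comment in the main loop above boils down to a single
// comparison. Below is a self-contained sketch of that check with a tiny
// usage example; the helper name and its millisecond-based arguments are
// assumptions made for illustration, not part of the Twitch SDK sample.
#include <cstdint>
#include <cstdio>

static bool IsTimeForNextCapture(uint64_t nowMs, uint64_t lastCaptureMs,
                                 unsigned broadcastFps)
{
    if (broadcastFps == 0)
        return false;
    const double elapsedSeconds = (nowMs - lastCaptureMs) / 1000.0;
    return elapsedSeconds >= 1.0 / broadcastFps;
}

int main()
{
    // At 30 FPS a new capture is due roughly every 33 ms.
    std::printf("%d\n", IsTimeForNextCapture(1040, 1000, 30)); // prints 1
    std::printf("%d\n", IsTimeForNextCapture(1020, 1000, 30)); // prints 0
    return 0;
}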