int main(int argc, char* argv[])
{
    char deviceName[GOIO_MAX_SIZE_DEVICE_NAME];
    gtype_int32 vendorId;  //USB vendor id
    gtype_int32 productId; //USB product id
    char tmpstring[100];
    gtype_uint16 MajorVersion;
    gtype_uint16 MinorVersion;
    char units[20];
    char equationType = 0;
    gtype_int32 rawMeasurements[MAX_NUM_MEASUREMENTS];
    gtype_real64 volts[MAX_NUM_MEASUREMENTS];
    gtype_real64 calbMeasurements[MAX_NUM_MEASUREMENTS];
    gtype_int32 numMeasurements, i;
    gtype_real64 averageCalbMeasurement;

    printf("GoIO_DeviceCheck version 1.0\n");

    GoIO_Init();
    GoIO_GetDLLVersion(&MajorVersion, &MinorVersion);
    printf("This app is linked to GoIO lib version %d.%d .\n", MajorVersion, MinorVersion);

    bool bFoundDevice = GetAvailableDeviceName(deviceName, GOIO_MAX_SIZE_DEVICE_NAME, &vendorId, &productId);
    if (!bFoundDevice)
        printf("No Go devices found.\n");
    else
    {
        GOIO_SENSOR_HANDLE hDevice = GoIO_Sensor_Open(deviceName, vendorId, productId, 0);
        if (hDevice != NULL)
        {
            printf("Successfully opened %s device %s .\n", deviceDesc[productId], deviceName);

            unsigned char charId;
            GoIO_Sensor_DDSMem_GetSensorNumber(hDevice, &charId, 0, 0);
            printf("Sensor id = %d", charId);

            GoIO_Sensor_DDSMem_GetLongName(hDevice, tmpstring, sizeof(tmpstring));
            if (strlen(tmpstring) != 0)
                printf("(%s)", tmpstring);
            printf("\n");

            GoIO_Sensor_SetMeasurementPeriod(hDevice, 0.040, SKIP_TIMEOUT_MS_DEFAULT); //40 milliseconds measurement period
            GoIO_Sensor_SendCmdAndGetResponse(hDevice, SKIP_CMD_ID_START_MEASUREMENTS, NULL, 0, NULL, NULL, SKIP_TIMEOUT_MS_DEFAULT);
            OSSleep(1000); //Wait 1 second.

            numMeasurements = GoIO_Sensor_ReadRawMeasurements(hDevice, rawMeasurements, MAX_NUM_MEASUREMENTS);
            printf("%d measurements received after about 1 second.\n", numMeasurements);

            averageCalbMeasurement = 0.0;
            for (i = 0; i < numMeasurements; i++)
            {
                volts[i] = GoIO_Sensor_ConvertToVoltage(hDevice, rawMeasurements[i]);
                calbMeasurements[i] = GoIO_Sensor_CalibrateData(hDevice, volts[i]);
                averageCalbMeasurement += calbMeasurements[i];
            }
            if (numMeasurements > 1)
                averageCalbMeasurement = averageCalbMeasurement/numMeasurements;

            GoIO_Sensor_DDSMem_GetCalibrationEquation(hDevice, &equationType);
            if (equationType != kEquationType_Linear)
                strcpy(units, "volts");
            else
            {
                gtype_real32 a, b, c;
                unsigned char activeCalPage = 0;
                GoIO_Sensor_DDSMem_GetActiveCalPage(hDevice, &activeCalPage);
                GoIO_Sensor_DDSMem_GetCalPage(hDevice, activeCalPage, &a, &b, &c, units, sizeof(units));
            }
            printf("Average measurement = %8.3f %s .\n", averageCalbMeasurement, units);

#ifdef TARGET_OS_WIN
            getchar();
#endif

            GoIO_Sensor_Close(hDevice);
        }
    }

    GoIO_Uninit();
    return 0;
}
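// A minimal sketch of the GetAvailableDeviceName() helper that main() above
// relies on but which is not shown here. It is assumed to probe the known Go!
// device product ids with GoIO_UpdateListOfAvailableDevices() and
// GoIO_GetNthAvailableDeviceName() and return the first match; the exact set
// of product-id constants probed is an assumption, not taken from this file.
static bool GetAvailableDeviceName(char *deviceName, gtype_int32 nameLength,
                                   gtype_int32 *pVendorId, gtype_int32 *pProductId)
{
    //assumed list of Vernier Go! product ids to probe, in preference order
    gtype_int32 productIds[] =
    {
        SKIP_DEFAULT_PRODUCT_ID,            //Go! Link
        USB_DIRECT_TEMP_DEFAULT_PRODUCT_ID, //Go! Temp
        CYCLOPS_DEFAULT_PRODUCT_ID          //Go! Motion
    };

    for (size_t n = 0; n < sizeof(productIds)/sizeof(productIds[0]); n++)
    {
        //refresh the device list for this product id, then grab device 0 if any
        if (GoIO_UpdateListOfAvailableDevices(VERNIER_DEFAULT_VENDOR_ID, productIds[n]) > 0 &&
            GoIO_GetNthAvailableDeviceName(deviceName, nameLength,
                                           VERNIER_DEFAULT_VENDOR_ID, productIds[n], 0) == 0)
        {
            *pVendorId = VERNIER_DEFAULT_VENDOR_ID;
            *pProductId = productIds[n];
            return true;
        }
    }
    return false;
}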
void OBS::EncodeLoop()
{
    QWORD streamTimeStart = GetQPCTimeNS();
    QWORD frameTimeNS = 1000000000/fps;
    bool bufferedFrames = true; //to avoid constantly polling number of frames
    int numTotalDuplicatedFrames = 0, numTotalFrames = 0, numFramesSkipped = 0;

    bufferedTimes.Clear();

    bool bUsingQSV = videoEncoder->isQSV();//GlobalConfig->GetInt(TEXT("Video Encoding"), TEXT("UseQSV")) != 0;

    QWORD sleepTargetTime = streamTimeStart+frameTimeNS;
    latestVideoTime = firstSceneTimestamp = streamTimeStart/1000000;
    latestVideoTimeNS = streamTimeStart;

    firstFrameTimestamp = 0;

    UINT encoderInfo = 0;
    QWORD messageTime = 0;

    EncoderPicture *lastPic = NULL;

    UINT skipThreshold = encoderSkipThreshold*2; //counted in half-frame sleeps (two per frame)
    UINT no_sleep_counter = 0;

    CircularList<QWORD> bufferedTimes;

    while(!bShutdownEncodeThread || (bufferedFrames && !bTestStream))
    {
        if (!SleepToNS(sleepTargetTime += (frameTimeNS/2)))
            no_sleep_counter++;
        else
            no_sleep_counter = 0;

        latestVideoTime = sleepTargetTime/1000000;
        latestVideoTimeNS = sleepTargetTime;

        if (no_sleep_counter < skipThreshold)
        {
            SetEvent(hVideoEvent);
            if (encoderInfo)
            {
                if (messageTime == 0)
                {
                    messageTime = latestVideoTime+3000;
                }
                else if (latestVideoTime >= messageTime)
                {
                    RemoveStreamInfo(encoderInfo);
                    encoderInfo = 0;
                    messageTime = 0;
                }
            }
        }
        else
        {
            numFramesSkipped++;
            if (!encoderInfo)
                encoderInfo = AddStreamInfo(Str("EncoderLag"), StreamInfoPriority_Critical);
            messageTime = 0;
        }

        if (!SleepToNS(sleepTargetTime += (frameTimeNS/2)))
            no_sleep_counter++;
        else
            no_sleep_counter = 0;

        bufferedTimes << latestVideoTime;

        if (curFramePic && firstFrameTimestamp)
        {
            while (bufferedTimes[0] < firstFrameTimestamp)
                bufferedTimes.Remove(0);

            DWORD curFrameTimestamp = DWORD(bufferedTimes[0] - firstFrameTimestamp);
            bufferedTimes.Remove(0);

            profileIn("encoder thread frame");

            FrameProcessInfo frameInfo;
            frameInfo.firstFrameTime = firstFrameTimestamp;
            frameInfo.frameTimestamp = curFrameTimestamp;
            frameInfo.pic = curFramePic;

            if (lastPic == frameInfo.pic)
                numTotalDuplicatedFrames++;

            if(bUsingQSV)
                curFramePic->mfxOut->Data.TimeStamp = curFrameTimestamp;
            else
                curFramePic->picOut->i_pts = curFrameTimestamp;

            ProcessFrame(frameInfo);

            if (bShutdownEncodeThread)
                bufferedFrames = videoEncoder->HasBufferedFrames();

            lastPic = frameInfo.pic;

            profileOut;

            numTotalFrames++;
        }
    }

    //flush all video frames in the "scene buffering time" buffer
    if (firstFrameTimestamp && bufferedVideo.Num())
    {
        QWORD startTime = GetQPCTimeMS();
        DWORD baseTimestamp = bufferedVideo[0].timestamp;

        for(UINT i=0; i<bufferedVideo.Num(); i++)
        {
            //we measure our own time rather than sleep between frames due to potential sleep drift
            QWORD curTime;
            do
            {
                curTime = GetQPCTimeMS();
                OSSleep(1);
            } while (curTime - startTime < bufferedVideo[i].timestamp - baseTimestamp);

            SendFrame(bufferedVideo[i], firstFrameTimestamp);
            bufferedVideo[i].Clear();

            numTotalFrames++;
        }

        bufferedVideo.Clear();
    }

    Log(TEXT("Total frames encoded: %d, total frames duplicated: %d (%0.2f%%)"), numTotalFrames, numTotalDuplicatedFrames, (numTotalFrames > 0) ? (double(numTotalDuplicatedFrames)/double(numTotalFrames))*100.0 : 0.0);
    if (numFramesSkipped)
        Log(TEXT("Number of frames skipped due to encoder lag: %d (%0.2f%%)"), numFramesSkipped, (numTotalFrames > 0) ? (double(numFramesSkipped)/double(numTotalFrames))*100.0 : 0.0);

    SetEvent(hVideoEvent);
    bShutdownVideoThread = true;
}
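// A standalone sketch (not OBS code) of the timing pattern in EncodeLoop()
// above, rewritten with std::chrono. Each frame is split into two absolute
// half-frame deadlines; SleepToNS() is assumed to return false when its target
// already lies in the past, which here maps to a plain deadline check. Once
// enough consecutive deadlines are missed, the loop stops waking the capture
// thread so the encoder can catch up.
#include <chrono>
#include <thread>

void EncodeTimingSketch(unsigned fps, unsigned skipThreshold, volatile bool &quit)
{
    using clock = std::chrono::steady_clock;
    const std::chrono::nanoseconds halfFrame(1000000000ull / fps / 2);
    auto target = clock::now() + halfFrame;
    unsigned missed = 0; //consecutive half-frame deadlines missed

    while (!quit)
    {
        if (clock::now() <= target)
        {
            std::this_thread::sleep_until(target);
            missed = 0;
        }
        else
            ++missed; //deadline already passed: the encoder is lagging

        if (missed < skipThreshold)
        {
            //on time: signal the capture thread, as SetEvent(hVideoEvent) does above
        }
        else
        {
            //lagging badly: skip signalling for this frame and raise an
            //"EncoderLag"-style warning, as the loop above does
        }

        target += halfFrame;
    }
}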
//todo: this function is an abomination, this is just disgusting.  fix it.
//...seriously, this is really, really horrible.  I mean this is amazingly bad.
void OBS::MainCaptureLoop()
{
    int curRenderTarget = 0, curYUVTexture = 0, curCopyTexture = 0;
    int copyWait = NUM_RENDER_BUFFERS-1;

    bSentHeaders = false;
    bFirstAudioPacket = true;

    bool bLogLongFramesProfile = GlobalConfig->GetInt(TEXT("General"), TEXT("LogLongFramesProfile"), LOGLONGFRAMESDEFAULT) != 0;
    float logLongFramesProfilePercentage = GlobalConfig->GetFloat(TEXT("General"), TEXT("LogLongFramesProfilePercentage"), 10.f);

    Vect2 baseSize   = Vect2(float(baseCX), float(baseCY));
    Vect2 outputSize = Vect2(float(outputCX), float(outputCY));
    Vect2 scaleSize  = Vect2(float(scaleCX), float(scaleCY));

    HANDLE hScaleVal = yuvScalePixelShader->GetParameterByName(TEXT("baseDimensionI"));

    //----------------------------------------
    // x264 input buffers

    int curOutBuffer = 0;

    bool bUsingQSV = videoEncoder->isQSV();//GlobalConfig->GetInt(TEXT("Video Encoding"), TEXT("UseQSV")) != 0;
    if(bUsingQSV)
        bUsing444 = false;

    EncoderPicture lastPic;
    EncoderPicture outPics[NUM_OUT_BUFFERS];
    DWORD outTimes[NUM_OUT_BUFFERS] = {0, 0, 0};

    for(int i=0; i<NUM_OUT_BUFFERS; i++)
    {
        if(bUsingQSV)
        {
            outPics[i].mfxOut = new mfxFrameSurface1;
            memset(outPics[i].mfxOut, 0, sizeof(mfxFrameSurface1));
            mfxFrameData& data = outPics[i].mfxOut->Data;
            videoEncoder->RequestBuffers(&data);
        }
        else
        {
            outPics[i].picOut = new x264_picture_t;
            x264_picture_init(outPics[i].picOut);
        }
    }

    if(bUsing444)
    {
        for(int i=0; i<NUM_OUT_BUFFERS; i++)
        {
            outPics[i].picOut->img.i_csp   = X264_CSP_BGRA; //although the x264 input says BGR, x264 actually will expect packed UYV
            outPics[i].picOut->img.i_plane = 1;
        }
    }
    else
    {
        if(!bUsingQSV)
            for(int i=0; i<NUM_OUT_BUFFERS; i++)
                x264_picture_alloc(outPics[i].picOut, X264_CSP_NV12, outputCX, outputCY);
    }

    int bCongestionControl = AppConfig->GetInt(TEXT("Video Encoding"), TEXT("CongestionControl"), 0);
    bool bDynamicBitrateSupported = App->GetVideoEncoder()->DynamicBitrateSupported();
    int defaultBitRate = AppConfig->GetInt(TEXT("Video Encoding"), TEXT("MaxBitrate"), 1000);
    int currentBitRate = defaultBitRate;
    QWORD lastAdjustmentTime = 0;
    UINT adjustmentStreamId = 0;

    //----------------------------------------
    // time/timestamp stuff

    bufferedTimes.Clear();
    ctsOffsets.Clear();
    int bufferedFrames = 1; //to avoid constantly polling number of frames

#ifdef USE_100NS_TIME
    QWORD streamTimeStart = GetQPCTime100NS();
    QWORD frameTime100ns = 10000000/fps;

    QWORD sleepTargetTime = 0;
    bool bWasLaggedFrame = false;
#else
    DWORD streamTimeStart = OSGetTime();
    DWORD fpsTimeAdjust = 0;
#endif
    totalStreamTime = 0;

    lastAudioTimestamp = 0;

    latestVideoTime = firstSceneTimestamp = GetQPCTimeMS();

    DWORD fpsTimeNumerator = 1000-(frameTime*fps);
    DWORD fpsTimeDenominator = fps;

    DWORD cfrTime = 0;
    DWORD cfrTimeAdjust = 0;

    //----------------------------------------
    // start audio capture streams

    desktopAudio->StartCapture();
    if(micAudio) micAudio->StartCapture();

    //----------------------------------------
    // status bar/statistics stuff

    DWORD fpsCounter = 0;
    int numLongFrames = 0;
    int numTotalFrames = 0;

    int numTotalDuplicatedFrames = 0;

    bytesPerSec = 0;
    captureFPS = 0;
    curFramesDropped = 0;
    curStrain = 0.0;
    PostMessage(hwndMain, OBS_UPDATESTATUSBAR, 0, 0);

    QWORD lastBytesSent[3] = {0, 0, 0};
    DWORD lastFramesDropped = 0;
#ifdef USE_100NS_TIME
    double bpsTime = 0.0;
#else
    float bpsTime = 0.0f;
#endif
    double lastStrain = 0.0;
    DWORD numSecondsWaited = 0;

    //----------------------------------------
    // 444->420 thread data

    int numThreads = MAX(OSGetTotalCores()-2, 1);
    HANDLE *h420Threads = (HANDLE*)Allocate(sizeof(HANDLE)*numThreads);
    Convert444Data *convertInfo = (Convert444Data*)Allocate(sizeof(Convert444Data)*numThreads);

    zero(h420Threads, sizeof(HANDLE)*numThreads);
    zero(convertInfo, sizeof(Convert444Data)*numThreads);

    for(int i=0; i<numThreads; i++)
    {
        convertInfo[i].width  = outputCX;
        convertInfo[i].height = outputCY;
        convertInfo[i].hSignalConvert  = CreateEvent(NULL, FALSE, FALSE, NULL);
        convertInfo[i].hSignalComplete = CreateEvent(NULL, FALSE, FALSE, NULL);
        convertInfo[i].bNV12 = bUsingQSV;

        if(i == 0)
            convertInfo[i].startY = 0;
        else
            convertInfo[i].startY = convertInfo[i-1].endY;

        if(i == (numThreads-1))
            convertInfo[i].endY = outputCY;
        else
            convertInfo[i].endY = ((outputCY/numThreads)*(i+1)) & 0xFFFFFFFE;
    }

    bool bFirstFrame = true;
    bool bFirstImage = true;
    bool bFirst420Encode = true;
    bool bUseThreaded420 = bUseMultithreadedOptimizations && (OSGetTotalCores() > 1) && !bUsing444;

    List<HANDLE> completeEvents;

    if(bUseThreaded420)
    {
        for(int i=0; i<numThreads; i++)
        {
            h420Threads[i] = OSCreateThread((XTHREAD)Convert444Thread, convertInfo+i);
            completeEvents << convertInfo[i].hSignalComplete;
        }
    }

    //----------------------------------------

    QWORD curStreamTime = 0, lastStreamTime, firstFrameTime = GetQPCTimeMS();

#ifdef USE_100NS_TIME
    lastStreamTime = GetQPCTime100NS()-frameTime100ns;
#else
    lastStreamTime = firstFrameTime-frameTime;
#endif

    //bool bFirstAudioPacket = true;

    List<ProfilerNode> threadedProfilers;
    bool bUsingThreadedProfilers = false;

    while(bRunning || bufferedFrames)
    {
#ifdef USE_100NS_TIME
        QWORD renderStartTime = GetQPCTime100NS();
        totalStreamTime = DWORD((renderStartTime-streamTimeStart)/10000);

        if(sleepTargetTime == 0 || bWasLaggedFrame)
            sleepTargetTime = renderStartTime;
#else
        DWORD renderStartTime = OSGetTime();
        totalStreamTime = renderStartTime-streamTimeStart;

        DWORD frameTimeAdjust = frameTime;
        fpsTimeAdjust += fpsTimeNumerator;
        if(fpsTimeAdjust > fpsTimeDenominator)
        {
            fpsTimeAdjust -= fpsTimeDenominator;
            ++frameTimeAdjust;
        }
#endif

        bool bRenderView = !IsIconic(hwndMain) && bRenderViewEnabled;

        profileIn("frame");

#ifdef USE_100NS_TIME
        QWORD qwTime = renderStartTime/10000;
        latestVideoTime = qwTime;

        QWORD frameDelta = renderStartTime-lastStreamTime;
        double fSeconds = double(frameDelta)*0.0000001;

        //Log(TEXT("frameDelta: %f"), fSeconds);

        lastStreamTime = renderStartTime;
#else
        QWORD qwTime = GetQPCTimeMS();
        latestVideoTime = qwTime;

        QWORD frameDelta = qwTime-lastStreamTime;
        float fSeconds = float(frameDelta)*0.001f;

        //Log(TEXT("frameDelta: %llu"), frameDelta);

        lastStreamTime = qwTime;
#endif

        bool bUpdateBPS = false;

        profileIn("frame preprocessing and rendering");

        //------------------------------------

        if(bRequestKeyframe && keyframeWait > 0)
        {
            keyframeWait -= int(frameDelta);

            if(keyframeWait <= 0)
            {
                GetVideoEncoder()->RequestKeyframe();
                bRequestKeyframe = false;
            }
        }

        if(!bPushToTalkDown && pushToTalkTimeLeft > 0)
        {
            pushToTalkTimeLeft -= int(frameDelta);
            OSDebugOut(TEXT("time left: %d\r\n"), pushToTalkTimeLeft);
            if(pushToTalkTimeLeft <= 0)
            {
                pushToTalkTimeLeft = 0;
                bPushToTalkOn = false;
            }
        }

        //------------------------------------

        OSEnterMutex(hSceneMutex);

        if(bResizeRenderView)
        {
            GS->ResizeView();
            bResizeRenderView = false;
        }

        //------------------------------------

        if(scene)
        {
            profileIn("scene->Preprocess");
            scene->Preprocess();

            for(UINT i=0; i<globalSources.Num(); i++)
                globalSources[i].source->Preprocess();

            profileOut;

            scene->Tick(float(fSeconds));

            for(UINT i=0; i<globalSources.Num(); i++)
                globalSources[i].source->Tick(float(fSeconds));
        }

        //------------------------------------

        QWORD curBytesSent = network->GetCurrentSentBytes();
        curFramesDropped = network->NumDroppedFrames();

        bpsTime += fSeconds;
        if(bpsTime > 1.0f)
        {
            if(numSecondsWaited < 3)
                ++numSecondsWaited;

            //bytesPerSec = DWORD(curBytesSent - lastBytesSent);
            bytesPerSec = DWORD(curBytesSent - lastBytesSent[0]) / numSecondsWaited;

            if(bpsTime > 2.0)
                bpsTime = 0.0f;
            else
                bpsTime -= 1.0;

            if(numSecondsWaited == 3)
            {
                lastBytesSent[0] = lastBytesSent[1];
                lastBytesSent[1] = lastBytesSent[2];
                lastBytesSent[2] = curBytesSent;
            }
            else
                lastBytesSent[numSecondsWaited] = curBytesSent;

            captureFPS = fpsCounter;
            fpsCounter = 0;

            bUpdateBPS = true;
        }

        fpsCounter++;

        curStrain = network->GetPacketStrain();

        EnableBlending(TRUE);
        BlendFunction(GS_BLEND_SRCALPHA, GS_BLEND_INVSRCALPHA);

        //------------------------------------
        // render the mini render texture

        LoadVertexShader(mainVertexShader);
        LoadPixelShader(mainPixelShader);

        SetRenderTarget(mainRenderTextures[curRenderTarget]);

        Ortho(0.0f, baseSize.x, baseSize.y, 0.0f, -100.0f, 100.0f);
        SetViewport(0, 0, baseSize.x, baseSize.y);

        if(scene)
            scene->Render();

        //------------------------------------

        if(bTransitioning)
        {
            if(!transitionTexture)
            {
                transitionTexture = CreateTexture(baseCX, baseCY, GS_BGRA, NULL, FALSE, TRUE);
                if(transitionTexture)
                {
                    D3D10Texture *d3dTransitionTex = static_cast<D3D10Texture*>(transitionTexture);
                    D3D10Texture *d3dSceneTex = static_cast<D3D10Texture*>(mainRenderTextures[lastRenderTarget]);
                    GetD3D()->CopyResource(d3dTransitionTex->texture, d3dSceneTex->texture);
                }
                else
                    bTransitioning = false;
            }
            else if(transitionAlpha >= 1.0f)
            {
                delete transitionTexture;
                transitionTexture = NULL;
                bTransitioning = false;
            }
        }

        if(bTransitioning)
        {
            EnableBlending(TRUE);
            transitionAlpha += float(fSeconds)*5.0f;
            if(transitionAlpha > 1.0f)
                transitionAlpha = 1.0f;
        }
        else
            EnableBlending(FALSE);

        //------------------------------------
        // render the mini view thingy

        if(bRenderView)
        {
            // Cache
            const Vect2 renderFrameSize     = GetRenderFrameSize();
            const Vect2 renderFrameOffset   = GetRenderFrameOffset();
            const Vect2 renderFrameCtrlSize = GetRenderFrameControlSize();

            SetRenderTarget(NULL);

            LoadVertexShader(mainVertexShader);
            LoadPixelShader(mainPixelShader);

            Ortho(0.0f, renderFrameCtrlSize.x, renderFrameCtrlSize.y, 0.0f, -100.0f, 100.0f);
            if(renderFrameCtrlSize.x != oldRenderFrameCtrlWidth || renderFrameCtrlSize.y != oldRenderFrameCtrlHeight)
            {
                // User is drag resizing the window. We don't recreate the swap chains so our coordinates are wrong
                SetViewport(0.0f, 0.0f, (float)oldRenderFrameCtrlWidth, (float)oldRenderFrameCtrlHeight);
            }
            else
                SetViewport(0.0f, 0.0f, renderFrameCtrlSize.x, renderFrameCtrlSize.y);

            // Draw background (Black if fullscreen, window colour otherwise)
            if(bFullscreenMode)
                ClearColorBuffer(0x000000);
            else
                ClearColorBuffer(GetSysColor(COLOR_BTNFACE));

            if(bTransitioning)
            {
                BlendFunction(GS_BLEND_ONE, GS_BLEND_ZERO);
                DrawSprite(transitionTexture, 0xFFFFFFFF,
                        renderFrameOffset.x, renderFrameOffset.y,
                        renderFrameOffset.x + renderFrameSize.x, renderFrameOffset.y + renderFrameSize.y);
                BlendFunction(GS_BLEND_FACTOR, GS_BLEND_INVFACTOR, transitionAlpha);
            }

            DrawSprite(mainRenderTextures[curRenderTarget], 0xFFFFFFFF,
                    renderFrameOffset.x, renderFrameOffset.y,
                    renderFrameOffset.x + renderFrameSize.x, renderFrameOffset.y + renderFrameSize.y);

            //draw selections if in edit mode
            if(bEditMode && !bSizeChanging)
            {
                if(scene)
                {
                    LoadVertexShader(solidVertexShader);
                    LoadPixelShader(solidPixelShader);
                    solidPixelShader->SetColor(solidPixelShader->GetParameter(0), 0xFF0000);
                    scene->RenderSelections(solidPixelShader);
                }
            }
        }
        else if(bForceRenderViewErase)
        {
            InvalidateRect(hwndRenderFrame, NULL, TRUE);
            UpdateWindow(hwndRenderFrame);
            bForceRenderViewErase = false;
        }

        //------------------------------------
        // actual stream output

        LoadVertexShader(mainVertexShader);
        LoadPixelShader(yuvScalePixelShader);

        Texture *yuvRenderTexture = yuvRenderTextures[curRenderTarget];
        SetRenderTarget(yuvRenderTexture);

        if(downscale < 2.01)
            yuvScalePixelShader->SetVector2(hScaleVal, 1.0f/baseSize);
        else if(downscale < 3.01)
            yuvScalePixelShader->SetVector2(hScaleVal, 1.0f/(outputSize*3.0f));

        Ortho(0.0f, outputSize.x, outputSize.y, 0.0f, -100.0f, 100.0f);
        SetViewport(0.0f, 0.0f, outputSize.x, outputSize.y);

        //why am I using scaleSize instead of outputSize for the texture?
        //because outputSize can be trimmed by up to three pixels due to 128-bit alignment.
        //using the scale function with outputSize can cause slightly inaccurate scaled images

        if(bTransitioning)
        {
            BlendFunction(GS_BLEND_ONE, GS_BLEND_ZERO);
            DrawSpriteEx(transitionTexture, 0xFFFFFFFF, 0.0f, 0.0f, scaleSize.x, scaleSize.y, 0.0f, 0.0f, 1.0f, 1.0f);
            BlendFunction(GS_BLEND_FACTOR, GS_BLEND_INVFACTOR, transitionAlpha);
        }

        DrawSpriteEx(mainRenderTextures[curRenderTarget], 0xFFFFFFFF, 0.0f, 0.0f, outputSize.x, outputSize.y, 0.0f, 0.0f, 1.0f, 1.0f);

        //------------------------------------

        if(bRenderView && !copyWait)
            static_cast<D3D10System*>(GS)->swap->Present(0, 0);

        OSLeaveMutex(hSceneMutex);

        profileOut;

        //------------------------------------
        // present/upload

        profileIn("video encoding and uploading");

        bool bEncode = true;

        if(copyWait)
        {
            copyWait--;
            bEncode = false;
        }
        else
        {
            //audio sometimes takes a bit to start -- do not start processing frames until audio has started capturing
            if(!bRecievedFirstAudioFrame)
            {
                static bool bWarnedAboutNoAudio = false;
                if (qwTime-firstFrameTime > 10000 && !bWarnedAboutNoAudio)
                {
                    bWarnedAboutNoAudio = true;
                    //AddStreamInfo (TEXT ("WARNING: OBS is not receiving audio frames. Please check your audio devices."), StreamInfoPriority_Critical);
                }
                bEncode = false;
            }
            else if(bFirstFrame)
            {
                firstFrameTime = qwTime;
                bFirstFrame = false;
            }

            if(!bEncode)
            {
                if(curYUVTexture == (NUM_RENDER_BUFFERS-1))
                    curYUVTexture = 0;
                else
                    curYUVTexture++;
            }
        }

        if(bEncode)
        {
            curStreamTime = qwTime-firstFrameTime;

            UINT prevCopyTexture = (curCopyTexture == 0) ? NUM_RENDER_BUFFERS-1 : curCopyTexture-1;

            ID3D10Texture2D *copyTexture = copyTextures[curCopyTexture];
            profileIn("CopyResource");

            if(!bFirst420Encode && bUseThreaded420)
            {
                WaitForMultipleObjects(completeEvents.Num(), completeEvents.Array(), TRUE, INFINITE);
                copyTexture->Unmap(0);
            }

            D3D10Texture *d3dYUV = static_cast<D3D10Texture*>(yuvRenderTextures[curYUVTexture]);
            GetD3D()->CopyResource(copyTexture, d3dYUV->texture);
            profileOut;

            ID3D10Texture2D *prevTexture = copyTextures[prevCopyTexture];

            if(bFirstImage) //ignore the first frame
                bFirstImage = false;
            else
            {
                HRESULT result;
                D3D10_MAPPED_TEXTURE2D map;
                if(SUCCEEDED(result = prevTexture->Map(0, D3D10_MAP_READ, 0, &map)))
                {
                    int prevOutBuffer = (curOutBuffer == 0) ? NUM_OUT_BUFFERS-1 : curOutBuffer-1;
                    int nextOutBuffer = (curOutBuffer == NUM_OUT_BUFFERS-1) ? 0 : curOutBuffer+1;

                    EncoderPicture &prevPicOut = outPics[prevOutBuffer];
                    EncoderPicture &picOut = outPics[curOutBuffer];
                    EncoderPicture &nextPicOut = outPics[nextOutBuffer];

                    if(!bUsing444)
                    {
                        profileIn("conversion to 4:2:0");

                        if(bUseThreaded420)
                        {
                            outTimes[nextOutBuffer] = (DWORD)curStreamTime;

                            bool firstRun = threadedProfilers.Num() == 0;
                            if(firstRun)
                                threadedProfilers.SetSize(numThreads);

                            for(int i=0; i<numThreads; i++)
                            {
                                convertInfo[i].input = (LPBYTE)map.pData;
                                convertInfo[i].inPitch = map.RowPitch;
                                if(bUsingQSV)
                                {
                                    mfxFrameData& data = nextPicOut.mfxOut->Data;
                                    videoEncoder->RequestBuffers(&data);
                                    convertInfo[i].outPitch = data.Pitch;
                                    convertInfo[i].output[0] = data.Y;
                                    convertInfo[i].output[1] = data.UV;
                                }
                                else
                                {
                                    convertInfo[i].output[0] = nextPicOut.picOut->img.plane[0];
                                    convertInfo[i].output[1] = nextPicOut.picOut->img.plane[1];
                                    convertInfo[i].output[2] = nextPicOut.picOut->img.plane[2];
                                }

                                if(!firstRun)
                                    threadedProfilers[i].~ProfilerNode();
                                ::new (&threadedProfilers[i]) ProfilerNode(TEXT("Convert444Threads"), true);
                                threadedProfilers[i].MonitorThread(h420Threads[i]);
                                bUsingThreadedProfilers = true;

                                SetEvent(convertInfo[i].hSignalConvert);
                            }

                            if(bFirst420Encode)
                                bFirst420Encode = bEncode = false;
                        }
                        else
                        {
                            outTimes[curOutBuffer] = (DWORD)curStreamTime;
                            if(bUsingQSV)
                            {
                                mfxFrameData& data = picOut.mfxOut->Data;
                                videoEncoder->RequestBuffers(&data);
                                LPBYTE output[] = {data.Y, data.UV};
                                Convert444toNV12((LPBYTE)map.pData, outputCX, map.RowPitch, data.Pitch, outputCY, 0, outputCY, output);
                            }
                            else
                                Convert444toNV12((LPBYTE)map.pData, outputCX, map.RowPitch, outputCX, outputCY, 0, outputCY, picOut.picOut->img.plane);
                            prevTexture->Unmap(0);
                        }

                        profileOut;
                    }
                    else
                    {
                        outTimes[curOutBuffer] = (DWORD)curStreamTime;

                        picOut.picOut->img.i_stride[0] = map.RowPitch;
                        picOut.picOut->img.plane[0] = (uint8_t*)map.pData;
                    }

                    if(bEncode)
                    {
                        DWORD curFrameTimestamp = outTimes[prevOutBuffer];
                        //Log(TEXT("curFrameTimestamp: %u"), curFrameTimestamp);

                        //------------------------------------

                        FrameProcessInfo frameInfo;
                        frameInfo.firstFrameTime = firstFrameTime;
                        frameInfo.prevTexture = prevTexture;

                        if(bDupeFrames)
                        {
                            while(cfrTime < curFrameTimestamp)
                            {
                                DWORD frameTimeAdjust = frameTime;
                                cfrTimeAdjust += fpsTimeNumerator;
                                if(cfrTimeAdjust > fpsTimeDenominator)
                                {
                                    cfrTimeAdjust -= fpsTimeDenominator;
                                    ++frameTimeAdjust;
                                }

                                DWORD halfTime = (frameTimeAdjust+1)/2;

                                EncoderPicture &nextPic = (curFrameTimestamp-cfrTime <= halfTime) ? picOut : prevPicOut;
                                //Log(TEXT("cfrTime: %u, time: %u"), cfrTime, curFrameTimestamp);

                                //these lines are just for counting duped frames
                                if(nextPic == lastPic)
                                    ++numTotalDuplicatedFrames;
                                else
                                    lastPic = nextPic;

                                frameInfo.pic = &nextPic;
                                if(bUsingQSV)
                                    frameInfo.pic->mfxOut->Data.TimeStamp = cfrTime;
                                else
                                    frameInfo.pic->picOut->i_pts = cfrTime;
                                frameInfo.frameTimestamp = cfrTime;

                                ProcessFrame(frameInfo);

                                cfrTime += frameTimeAdjust;

                                //Log(TEXT("cfrTime: %u, chi frame: %u"), cfrTime, (curFrameTimestamp-cfrTime <= halfTime));
                            }
                        }
                        else
                        {
                            if(bUsingQSV)
                                picOut.mfxOut->Data.TimeStamp = curFrameTimestamp;
                            else
                                picOut.picOut->i_pts = curFrameTimestamp;

                            frameInfo.pic = &picOut;
                            frameInfo.frameTimestamp = curFrameTimestamp;

                            ProcessFrame(frameInfo);
                        }

                        if (!bRunning)
                            bufferedFrames = videoEncoder->GetBufferedFrames();
                    }

                    if(bUsing444)
                    {
                        prevTexture->Unmap(0);
                    }

                    curOutBuffer = nextOutBuffer;
                }
                else
                {
                    //We have to crash, or we end up deadlocking the thread when the convert threads are never signalled
                    if (result == DXGI_ERROR_DEVICE_REMOVED)
                    {
                        String message;

                        HRESULT reason = GetD3D()->GetDeviceRemovedReason();

                        switch (reason)
                        {
                        case DXGI_ERROR_DEVICE_RESET:
                        case DXGI_ERROR_DEVICE_HUNG:
                            message = TEXT("Your video card or driver froze and was reset. Please check for possible hardware / driver issues.");
                            break;

                        case DXGI_ERROR_DEVICE_REMOVED:
                            message = TEXT("Your video card disappeared from the system. Please check for possible hardware / driver issues.");
                            break;

                        case DXGI_ERROR_DRIVER_INTERNAL_ERROR:
                            message = TEXT("Your video driver reported an internal error. Please check for possible hardware / driver issues.");
                            break;

                        case DXGI_ERROR_INVALID_CALL:
                            message = TEXT("Your video driver reported an invalid call. Please check for possible driver issues.");
                            break;

                        default:
                            message = TEXT("DXGI_ERROR_DEVICE_REMOVED");
                            break;
                        }

                        CrashError (TEXT("Texture->Map failed: 0x%08x 0x%08x\r\n\r\n%s"), result, reason, message.Array());
                    }
                    else
                        CrashError (TEXT("Texture->Map failed: 0x%08x"), result);
                }
            }

            if(curCopyTexture == (NUM_RENDER_BUFFERS-1))
                curCopyTexture = 0;
            else
                curCopyTexture++;

            if(curYUVTexture == (NUM_RENDER_BUFFERS-1))
                curYUVTexture = 0;
            else
                curYUVTexture++;

            if (bCongestionControl && bDynamicBitrateSupported && !bTestStream)
            {
                if (curStrain > 25)
                {
                    if (qwTime - lastAdjustmentTime > 1500)
                    {
                        if (currentBitRate > 100)
                        {
                            currentBitRate = (int)(currentBitRate * (1.0 - (curStrain / 400)));
                            App->GetVideoEncoder()->SetBitRate(currentBitRate, -1);
                            if (!adjustmentStreamId)
                                adjustmentStreamId = App->AddStreamInfo(FormattedString(TEXT("Congestion detected, dropping bitrate to %d kbps"), currentBitRate).Array(), StreamInfoPriority_Low);
                            else
                                App->SetStreamInfo(adjustmentStreamId, FormattedString(TEXT("Congestion detected, dropping bitrate to %d kbps"), currentBitRate).Array());

                            bUpdateBPS = true;
                        }

                        lastAdjustmentTime = qwTime;
                    }
                }
                else if (currentBitRate < defaultBitRate && curStrain < 5 && lastStrain < 5)
                {
                    if (qwTime - lastAdjustmentTime > 5000)
                    {
                        if (currentBitRate < defaultBitRate)
                        {
                            currentBitRate += (int)(defaultBitRate * 0.05);
                            if (currentBitRate > defaultBitRate)
                                currentBitRate = defaultBitRate;
                        }

                        App->GetVideoEncoder()->SetBitRate(currentBitRate, -1);
                        /*if (!adjustmentStreamId)
                            App->AddStreamInfo (FormattedString(TEXT("Congestion clearing, raising bitrate to %d kbps"), currentBitRate).Array(), StreamInfoPriority_Low);
                        else
                            App->SetStreamInfo(adjustmentStreamId, FormattedString(TEXT("Congestion clearing, raising bitrate to %d kbps"), currentBitRate).Array());*/

                        bUpdateBPS = true;
                        lastAdjustmentTime = qwTime;
                    }
                }
            }
        }

        lastRenderTarget = curRenderTarget;

        if(curRenderTarget == (NUM_RENDER_BUFFERS-1))
            curRenderTarget = 0;
        else
            curRenderTarget++;

        if(bUpdateBPS || !CloseDouble(curStrain, lastStrain) || curFramesDropped != lastFramesDropped)
        {
            PostMessage(hwndMain, OBS_UPDATESTATUSBAR, 0, 0);
            lastStrain = curStrain;
            lastFramesDropped = curFramesDropped;
        }

        //------------------------------------
        // we're about to sleep so we should flush the d3d command queue
        profileIn("flush");
        GetD3D()->Flush();
        profileOut;

        profileOut; //video encoding and uploading
        profileOut; //frame

        //------------------------------------
        // frame sync

#ifdef USE_100NS_TIME
        QWORD renderStopTime = GetQPCTime100NS();
        sleepTargetTime += frameTime100ns;

        if(bWasLaggedFrame = (sleepTargetTime <= renderStopTime))
        {
            numLongFrames++;
            if(bLogLongFramesProfile && (numLongFrames/float(max(1, numTotalFrames)) * 100.) > logLongFramesProfilePercentage)
                DumpLastProfileData();
        }
        else
            SleepTo(sleepTargetTime);
#else
        DWORD renderStopTime = OSGetTime();
        DWORD totalTime = renderStopTime-renderStartTime;

        if(totalTime > frameTimeAdjust)
        {
            numLongFrames++;
            if(bLogLongFramesProfile && (numLongFrames/float(max(1, numTotalFrames)) * 100.) > logLongFramesProfilePercentage)
                DumpLastProfileData();
        }
        else if(totalTime < frameTimeAdjust)
            OSSleep(frameTimeAdjust-totalTime);
#endif

        //OSDebugOut(TEXT("Frame adjust time: %d, "), frameTimeAdjust-totalTime);

        numTotalFrames++;
    }

    if(!bUsing444)
    {
        if(bUseThreaded420)
        {
            for(int i=0; i<numThreads; i++)
            {
                if(h420Threads[i])
                {
                    convertInfo[i].bKillThread = true;
                    SetEvent(convertInfo[i].hSignalConvert);
                    if(bUsingThreadedProfilers)
                        threadedProfilers[i].~ProfilerNode();

                    OSTerminateThread(h420Threads[i], 10000);
                    h420Threads[i] = NULL;
                }

                if(convertInfo[i].hSignalConvert)
                {
                    CloseHandle(convertInfo[i].hSignalConvert);
                    convertInfo[i].hSignalConvert = NULL;
                }

                if(convertInfo[i].hSignalComplete)
                {
                    CloseHandle(convertInfo[i].hSignalComplete);
                    convertInfo[i].hSignalComplete = NULL;
                }
            }

            if(!bFirst420Encode)
            {
                ID3D10Texture2D *copyTexture = copyTextures[curCopyTexture];
                copyTexture->Unmap(0);
            }
        }

        if(bUsingQSV)
            for(int i = 0; i < NUM_OUT_BUFFERS; i++)
                delete outPics[i].mfxOut;
        else
            for(int i=0; i<NUM_OUT_BUFFERS; i++)
            {
                x264_picture_clean(outPics[i].picOut);
                delete outPics[i].picOut;
            }
    }

    Free(h420Threads);
    Free(convertInfo);

    Log(TEXT("Total frames rendered: %d, number of frames that lagged: %d (%0.2f%%) (it's okay for some frames to lag)"), numTotalFrames, numLongFrames, (double(numLongFrames)/double(numTotalFrames))*100.0);
    if(bDupeFrames)
        Log(TEXT("Total duplicated frames: %d (%0.2f%%)"), numTotalDuplicatedFrames, (double(numTotalDuplicatedFrames)/double(numTotalFrames))*100.0);
}
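// An illustrative sketch (not OBS code) of the congestion-control policy in
// MainCaptureLoop() above: additive increase, multiplicative decrease. On
// sustained packet strain the bitrate is cut proportionally to the strain; once
// the network is calm again it creeps back up in 5% steps of the configured
// maximum. The real loop additionally rate-limits adjustments (no more than one
// decrease per 1.5 s and one increase per 5 s) and posts a stream-info message.
static int AdjustBitrate(int current, int defaultMax, double strain)
{
    if (strain > 25)                            //congested: scale down by strain/400
        return (current > 100) ? (int)(current * (1.0 - strain / 400.0)) : current;

    if (strain < 5 && current < defaultMax)     //calm: raise by 5% of the maximum
    {
        current += (int)(defaultMax * 0.05);
        return (current > defaultMax) ? defaultMax : current;
    }

    return current;                             //in between: leave the bitrate alone
}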
void OBS::MainAudioLoop()
{
    DWORD taskID = 0;
    HANDLE hTask = AvSetMmThreadCharacteristics(TEXT("Pro Audio"), &taskID);

    bPushToTalkOn = false;

    micMax = desktopMax = VOL_MIN;
    micPeak = desktopPeak = VOL_MIN;

    UINT audioFramesSinceMeterUpdate = 0;
    UINT audioFramesSinceMicMaxUpdate = 0;
    UINT audioFramesSinceDesktopMaxUpdate = 0;

    List<float> mixedLatestDesktopSamples;

    List<float> blank10msSample;
    blank10msSample.SetSize(882);

    QWORD lastAudioTime = 0;

    while(TRUE)
    {
        OSSleep(5); //screw it, just run it every 5ms

        if(!bRunning)
            break;

        //-----------------------------------------------

        float *desktopBuffer, *micBuffer;
        UINT desktopAudioFrames = 0, micAudioFrames = 0;
        UINT latestDesktopAudioFrames = 0, latestMicAudioFrames = 0;

        curDesktopVol = desktopVol * desktopBoost;

        if(bUsingPushToTalk)
            curMicVol = bPushToTalkOn ? micVol : 0.0f;
        else
            curMicVol = micVol;

        curMicVol *= micBoost;

        bool bDesktopMuted = (curDesktopVol < EPSILON);
        bool bMicEnabled   = (micAudio != NULL);

        QWORD timestamp;
        while(QueryNewAudio(timestamp))
        {
            if (!lastAudioTime)
                lastAudioTime = App->GetSceneTimestamp();

            if (lastAudioTime < timestamp)
            {
                while ((lastAudioTime+=10) < timestamp)
                    EncodeAudioSegment(blank10msSample.Array(), 441, lastAudioTime);
            }

            //----------------------------------------------------------------------------
            // get latest sample for calculating the volume levels

            float *latestDesktopBuffer = NULL, *latestMicBuffer = NULL;

            desktopAudio->GetBuffer(&desktopBuffer, &desktopAudioFrames, timestamp-10);
            desktopAudio->GetNewestFrame(&latestDesktopBuffer, &latestDesktopAudioFrames);

            UINT totalFloats = desktopAudioFrames*2;
            if(bDesktopMuted)
            {
                // Clearing the desktop audio buffer before mixing in the auxiliary audio sources.
                zero(desktopBuffer, sizeof(*desktopBuffer)*totalFloats);
            }

            if(micAudio != NULL)
            {
                micAudio->GetBuffer(&micBuffer, &micAudioFrames, timestamp-10);
                micAudio->GetNewestFrame(&latestMicBuffer, &latestMicAudioFrames);
            }

            //----------------------------------------------------------------------------
            // get latest aux volume level samples and mix

            OSEnterMutex(hAuxAudioMutex);

            mixedLatestDesktopSamples.CopyArray(latestDesktopBuffer, latestDesktopAudioFrames*2);
            for(UINT i=0; i<auxAudioSources.Num(); i++)
            {
                float *latestAuxBuffer;
                if(auxAudioSources[i]->GetNewestFrame(&latestAuxBuffer, &latestDesktopAudioFrames))
                    MixAudio(mixedLatestDesktopSamples.Array(), latestAuxBuffer, latestDesktopAudioFrames*2, false);
            }

            //----------------------------------------------------------------------------
            // mix output aux sound samples with the desktop

            for(UINT i=0; i<auxAudioSources.Num(); i++)
            {
                float *auxBuffer;
                if(auxAudioSources[i]->GetBuffer(&auxBuffer, &desktopAudioFrames, timestamp-10))
                    MixAudio(desktopBuffer, auxBuffer, desktopAudioFrames*2, false);
            }

            OSLeaveMutex(hAuxAudioMutex);

            //----------------------------------------------------------------------------
            // multiply samples by volume and compute RMS and max of samples

            float desktopRMS = 0, micRMS = 0, desktopMx = 0, micMx = 0;

            // Use 1.0f instead of curDesktopVol, since aux audio sources already have their volume set, and shouldn't be boosted anyway.
            if(latestDesktopBuffer)
                CalculateVolumeLevels(mixedLatestDesktopSamples.Array(), latestDesktopAudioFrames*2, 1.0f, desktopRMS, desktopMx);

            if(bMicEnabled && latestMicBuffer)
                CalculateVolumeLevels(latestMicBuffer, latestMicAudioFrames*2, curMicVol, micRMS, micMx);

            //----------------------------------------------------------------------------
            // convert RMS and Max of samples to dB

            desktopRMS = toDB(desktopRMS);
            micRMS = toDB(micRMS);
            desktopMx = toDB(desktopMx);
            micMx = toDB(micMx);

            //----------------------------------------------------------------------------
            // update max if sample max is greater or after 1 second

            float maxAlpha = 0.15f;
            UINT peakMeterDelayFrames = 44100 * 3;

            if(micMx > micMax)
                micMax = micMx;
            else
                micMax = maxAlpha * micMx + (1.0f - maxAlpha) * micMax;

            if(desktopMx > desktopMax)
                desktopMax = desktopMx;
            else
                desktopMax = maxAlpha * desktopMx + (1.0f - maxAlpha) * desktopMax;

            //----------------------------------------------------------------------------
            // update delayed peak meter

            if(micMax > micPeak || audioFramesSinceMicMaxUpdate > peakMeterDelayFrames)
            {
                micPeak = micMax;
                audioFramesSinceMicMaxUpdate = 0;
            }
            else
                audioFramesSinceMicMaxUpdate += desktopAudioFrames;

            if(desktopMax > desktopPeak || audioFramesSinceDesktopMaxUpdate > peakMeterDelayFrames)
            {
                desktopPeak = desktopMax;
                audioFramesSinceDesktopMaxUpdate = 0;
            }
            else
                audioFramesSinceDesktopMaxUpdate += desktopAudioFrames;

            //----------------------------------------------------------------------------
            // low pass the level sampling

            float rmsAlpha = 0.15f;
            desktopMag = rmsAlpha * desktopRMS + desktopMag * (1.0f - rmsAlpha);
            micMag = rmsAlpha * micRMS + micMag * (1.0f - rmsAlpha);

            //----------------------------------------------------------------------------
            // update the meter about every 50ms

            audioFramesSinceMeterUpdate += desktopAudioFrames;
            if(audioFramesSinceMeterUpdate >= 2205)
            {
                PostMessage(hwndMain, WM_COMMAND, MAKEWPARAM(ID_MICVOLUMEMETER, VOLN_METERED), 0);
                audioFramesSinceMeterUpdate = 0;
            }

            //----------------------------------------------------------------------------
            // mix mic and desktop sound, using SSE2 if available
            // also, it's perfectly fine to just mix into the returned buffer

            if(bMicEnabled)
                MixAudio(desktopBuffer, micBuffer, totalFloats, bForceMicMono);

            EncodeAudioSegment(desktopBuffer, totalFloats>>1, lastAudioTime);
        }

        //-----------------------------------------------

        if(!bRecievedFirstAudioFrame && pendingAudioFrames.Num())
            bRecievedFirstAudioFrame = true;
    }

    desktopMag = desktopMax = desktopPeak = VOL_MIN;
    micMag = micMax = micPeak = VOL_MIN;

    PostMessage(hwndMain, WM_COMMAND, MAKEWPARAM(ID_MICVOLUMEMETER, VOLN_METERED), 0);

    for(UINT i=0; i<pendingAudioFrames.Num(); i++)
        pendingAudioFrames[i].audioData.Clear();

    AvRevertMmThreadCharacteristics(hTask);
}
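// An illustrative sketch (not OBS code) of the meter smoothing used in
// MainAudioLoop() above. Both the peak and RMS meters are one-pole exponential
// moving averages with instant attack: the displayed level jumps up as soon as
// a louder sample arrives, but decays toward quieter samples with weight alpha.
static inline float SmoothLevelDB(float current, float sampleDB, float alpha = 0.15f)
{
    if (sampleDB > current)
        return sampleDB;                                //attack: follow new peaks instantly

    return alpha * sampleDB + (1.0f - alpha) * current; //release: low-pass decay toward the sample
}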
void OBS::EncodeLoop()
{
    QWORD streamTimeStart = GetQPCTimeNS();
    QWORD frameTimeNS = 1000000000/fps;
    bool bufferedFrames = true; //to avoid constantly polling number of frames
    int numTotalDuplicatedFrames = 0, numTotalFrames = 0;

    bufferedTimes.Clear();

    bool bUsingQSV = videoEncoder->isQSV();//GlobalConfig->GetInt(TEXT("Video Encoding"), TEXT("UseQSV")) != 0;

    QWORD sleepTargetTime = streamTimeStart+frameTimeNS;
    latestVideoTime = firstSceneTimestamp = streamTimeStart/1000000;
    latestVideoTimeNS = streamTimeStart;

    firstFrameTimestamp = 0;

    EncoderPicture *lastPic = NULL;

    CircularList<QWORD> bufferedTimes;

    while(!bShutdownEncodeThread || (bufferedFrames && !bTestStream))
    {
        SetEvent(hVideoEvent);
        SleepToNS(sleepTargetTime);
        latestVideoTime = sleepTargetTime/1000000;
        latestVideoTimeNS = sleepTargetTime;

        bufferedTimes << latestVideoTime;

        if (curFramePic && firstFrameTimestamp)
        {
            while (bufferedTimes[0] < firstFrameTimestamp)
                bufferedTimes.Remove(0);

            DWORD curFrameTimestamp = DWORD(bufferedTimes[0] - firstFrameTimestamp);
            bufferedTimes.Remove(0);

            profileIn("encoder thread frame");

            FrameProcessInfo frameInfo;
            frameInfo.firstFrameTime = firstFrameTimestamp;
            frameInfo.frameTimestamp = curFrameTimestamp;
            frameInfo.pic = curFramePic;

            if (lastPic == frameInfo.pic)
                numTotalDuplicatedFrames++;

            if(bUsingQSV)
                curFramePic->mfxOut->Data.TimeStamp = curFrameTimestamp;
            else
                curFramePic->picOut->i_pts = curFrameTimestamp;

            ProcessFrame(frameInfo);

            if (bShutdownEncodeThread)
                bufferedFrames = videoEncoder->HasBufferedFrames();

            lastPic = frameInfo.pic;

            profileOut;

            numTotalFrames++;
        }

        sleepTargetTime += frameTimeNS;
    }

    //flush all video frames in the "scene buffering time" buffer
    if (firstFrameTimestamp && bufferedVideo.Num())
    {
        QWORD startTime = GetQPCTimeMS();
        DWORD baseTimestamp = bufferedVideo[0].timestamp;

        for(UINT i=0; i<bufferedVideo.Num(); i++)
        {
            //we measure our own time rather than sleep between frames due to potential sleep drift
            QWORD curTime;
            do
            {
                curTime = GetQPCTimeMS();
                OSSleep(1);
            } while (curTime - startTime < bufferedVideo[i].timestamp - baseTimestamp);

            SendFrame(bufferedVideo[i], firstFrameTimestamp);
            bufferedVideo[i].Clear();

            numTotalFrames++;
        }

        bufferedVideo.Clear();
    }

    //guard against division by zero when no frames were encoded
    Log(TEXT("Total frames encoded: %d, total frames duplicated: %d (%0.2f%%)"), numTotalFrames, numTotalDuplicatedFrames, (numTotalFrames > 0) ? (double(numTotalDuplicatedFrames)/double(numTotalFrames))*100.0 : 0.0);

    SetEvent(hVideoEvent);
    bShutdownVideoThread = true;
}
void OBS::MainAudioLoop()
{
    DWORD taskID = 0;
    HANDLE hTask = AvSetMmThreadCharacteristics(TEXT("Pro Audio"), &taskID);

    bufferedAudioTimes.Clear();

    bPushToTalkOn = false;

    micMax = desktopMax = VOL_MIN;
    micPeak = desktopPeak = VOL_MIN;

    UINT audioFramesSinceMeterUpdate = 0;
    UINT audioFramesSinceMicMaxUpdate = 0;
    UINT audioFramesSinceDesktopMaxUpdate = 0;

    List<float> mixBuffer, levelsBuffer;
    mixBuffer.SetSize(audioSampleSize*2);
    levelsBuffer.SetSize(audioSampleSize*2);

    latestAudioTime = 0;

    //---------------------------------------------
    // the audio loop of doom

    while (true)
    {
        OSSleep(5); //screw it, just run it every 5ms

        if (!bRunning)
            break;

        //-----------------------------------------------

        float *desktopBuffer, *micBuffer;

        curDesktopVol = desktopVol * desktopBoost;

        if (bUsingPushToTalk)
            curMicVol = bPushToTalkOn ? micVol : 0.0f;
        else
            curMicVol = micVol;

        curMicVol *= micBoost;

        bool bDesktopMuted = (curDesktopVol < EPSILON);
        bool bMicEnabled   = (micAudio != NULL);

        while (QueryNewAudio())
        {
            QWORD timestamp = bufferedAudioTimes[0];
            bufferedAudioTimes.Remove(0);

            zero(mixBuffer.Array(),    audioSampleSize*2*sizeof(float));
            zero(levelsBuffer.Array(), audioSampleSize*2*sizeof(float));

            //----------------------------------------------------------------------------
            // get latest sample for calculating the volume levels

            float *latestDesktopBuffer = NULL, *latestMicBuffer = NULL;

            desktopAudio->GetBuffer(&desktopBuffer, timestamp);
            desktopAudio->GetNewestFrame(&latestDesktopBuffer);

            if (micAudio != NULL)
            {
                micAudio->GetBuffer(&micBuffer, timestamp);
                micAudio->GetNewestFrame(&latestMicBuffer);
            }

            //----------------------------------------------------------------------------
            // mix desktop samples

            if (desktopBuffer)
                MixAudio(mixBuffer.Array(), desktopBuffer, audioSampleSize*2, false);

            if (latestDesktopBuffer)
                MixAudio(levelsBuffer.Array(), latestDesktopBuffer, audioSampleSize*2, false);

            //----------------------------------------------------------------------------
            // get latest aux volume level samples and mix

            OSEnterMutex(hAuxAudioMutex);

            for (UINT i=0; i<auxAudioSources.Num(); i++)
            {
                float *latestAuxBuffer;
                if(auxAudioSources[i]->GetNewestFrame(&latestAuxBuffer))
                    MixAudio(levelsBuffer.Array(), latestAuxBuffer, audioSampleSize*2, false);
            }

            //----------------------------------------------------------------------------
            // mix output aux sound samples with the desktop

            for (UINT i=0; i<auxAudioSources.Num(); i++)
            {
                float *auxBuffer;
                if(auxAudioSources[i]->GetBuffer(&auxBuffer, timestamp))
                    MixAudio(mixBuffer.Array(), auxBuffer, audioSampleSize*2, false);
            }

            OSLeaveMutex(hAuxAudioMutex);

            //----------------------------------------------------------------------------
            // multiply samples by volume and compute RMS and max of samples

            float desktopRMS = 0, micRMS = 0, desktopMx = 0, micMx = 0;

            // Use 1.0f instead of curDesktopVol, since aux audio sources already have their volume set, and shouldn't be boosted anyway.
            if (latestDesktopBuffer)
                CalculateVolumeLevels(levelsBuffer.Array(), audioSampleSize*2, 1.0f, desktopRMS, desktopMx);

            if (bMicEnabled && latestMicBuffer)
                CalculateVolumeLevels(latestMicBuffer, audioSampleSize*2, curMicVol, micRMS, micMx);

            //----------------------------------------------------------------------------
            // convert RMS and Max of samples to dB

            desktopRMS = toDB(desktopRMS);
            micRMS = toDB(micRMS);
            desktopMx = toDB(desktopMx);
            micMx = toDB(micMx);

            //----------------------------------------------------------------------------
            // update max if sample max is greater or after 1 second

            float maxAlpha = 0.15f;
            UINT peakMeterDelayFrames = audioSamplesPerSec * 3;

            if (micMx > micMax)
                micMax = micMx;
            else
                micMax = maxAlpha * micMx + (1.0f - maxAlpha) * micMax;

            if(desktopMx > desktopMax)
                desktopMax = desktopMx;
            else
                desktopMax = maxAlpha * desktopMx + (1.0f - maxAlpha) * desktopMax;

            //----------------------------------------------------------------------------
            // update delayed peak meter

            if (micMax > micPeak || audioFramesSinceMicMaxUpdate > peakMeterDelayFrames)
            {
                micPeak = micMax;
                audioFramesSinceMicMaxUpdate = 0;
            }
            else
                audioFramesSinceMicMaxUpdate += audioSampleSize;

            if (desktopMax > desktopPeak || audioFramesSinceDesktopMaxUpdate > peakMeterDelayFrames)
            {
                desktopPeak = desktopMax;
                audioFramesSinceDesktopMaxUpdate = 0;
            }
            else
                audioFramesSinceDesktopMaxUpdate += audioSampleSize;

            //----------------------------------------------------------------------------
            // low pass the level sampling

            float rmsAlpha = 0.15f;
            desktopMag = rmsAlpha * desktopRMS + desktopMag * (1.0f - rmsAlpha);
            micMag = rmsAlpha * micRMS + micMag * (1.0f - rmsAlpha);

            //----------------------------------------------------------------------------
            // update the meter about every 50ms

            audioFramesSinceMeterUpdate += audioSampleSize;
            if (audioFramesSinceMeterUpdate >= (audioSampleSize*5))
            {
                PostMessage(hwndMain, WM_COMMAND, MAKEWPARAM(ID_MICVOLUMEMETER, VOLN_METERED), 0);
                audioFramesSinceMeterUpdate = 0;
            }

            //----------------------------------------------------------------------------
            // mix mic and desktop sound
            // also, it's perfectly fine to just mix into the returned buffer

            if (bMicEnabled && micBuffer)
                MixAudio(mixBuffer.Array(), micBuffer, audioSampleSize*2, bForceMicMono);

            EncodeAudioSegment(mixBuffer.Array(), audioSampleSize, timestamp);
        }

        //-----------------------------------------------

        if (!bRecievedFirstAudioFrame && pendingAudioFrames.Num())
            bRecievedFirstAudioFrame = true;
    }

    desktopMag = desktopMax = desktopPeak = VOL_MIN;
    micMag = micMax = micPeak = VOL_MIN;

    PostMessage(hwndMain, WM_COMMAND, MAKEWPARAM(ID_MICVOLUMEMETER, VOLN_METERED), 0);

    for (UINT i=0; i<pendingAudioFrames.Num(); i++)
        pendingAudioFrames[i].audioData.Clear();

    AvRevertMmThreadCharacteristics(hTask);
}
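// An illustrative sketch of mixing interleaved stereo float buffers, in the
// spirit of the MixAudio() calls above. This is not OBS's MixAudio: the
// saturating clamp to [-1, 1] and the mono-collapse behaviour for the
// bForceMicMono flag are assumptions made for the sake of the example.
#include <algorithm>

static void MixFloatBuffers(float *dest, const float *src, unsigned numFloats, bool forceMono)
{
    for (unsigned i = 0; i+1 < numFloats; i += 2)
    {
        float l = src[i], r = src[i+1];
        if (forceMono)
            l = r = (l + r) * 0.5f; //collapse the source to mono before mixing

        //mix into the destination with a saturating add per sample
        dest[i]   = std::min(std::max(dest[i]   + l, -1.0f), 1.0f);
        dest[i+1] = std::min(std::max(dest[i+1] + r, -1.0f), 1.0f);
    }
}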
//todo: this function is an abomination, this is just disgusting.  fix it.
//...seriously, this is really, really horrible.  I mean this is amazingly bad.
void OBS::MainCaptureLoop()
{
    int curRenderTarget = 0, curYUVTexture = 0, curCopyTexture = 0;
    int copyWait = NUM_RENDER_BUFFERS-1;

    bSentHeaders = false;
    bFirstAudioPacket = true;

    Vect2 baseSize   = Vect2(float(baseCX), float(baseCY));
    Vect2 outputSize = Vect2(float(outputCX), float(outputCY));
    Vect2 scaleSize  = Vect2(float(scaleCX), float(scaleCY));

    HANDLE hScaleVal = yuvScalePixelShader->GetParameterByName(TEXT("baseDimensionI"));

    LPVOID nullBuff = NULL;

    //----------------------------------------
    // x264 input buffers

    int curOutBuffer = 0;

    x264_picture_t *lastPic = NULL;
    x264_picture_t outPics[NUM_OUT_BUFFERS];
    DWORD outTimes[NUM_OUT_BUFFERS] = {0, 0, 0};

    for(int i=0; i<NUM_OUT_BUFFERS; i++)
        x264_picture_init(&outPics[i]);

    if(bUsing444)
    {
        for(int i=0; i<NUM_OUT_BUFFERS; i++)
        {
            outPics[i].img.i_csp   = X264_CSP_BGRA; //although the x264 input says BGR, x264 actually will expect packed UYV
            outPics[i].img.i_plane = 1;
        }
    }
    else
    {
        for(int i=0; i<NUM_OUT_BUFFERS; i++)
            x264_picture_alloc(&outPics[i], X264_CSP_I420, outputCX, outputCY);
    }

    //----------------------------------------
    // time/timestamp stuff

    LARGE_INTEGER clockFreq;
    QueryPerformanceFrequency(&clockFreq);

    bufferedTimes.Clear();
    ctsOffsets.Clear();

#ifdef USE_100NS_TIME
    QWORD streamTimeStart = GetQPCTime100NS(clockFreq.QuadPart);
    QWORD frameTime100ns = 10000000/fps;

    QWORD sleepTargetTime = 0;
    bool bWasLaggedFrame = false;
#else
    DWORD streamTimeStart = OSGetTime();
#endif
    totalStreamTime = 0;

    lastAudioTimestamp = 0;

    latestVideoTime = firstSceneTimestamp = GetQPCTimeMS(clockFreq.QuadPart);

    DWORD fpsTimeNumerator = 1000-(frameTime*fps);
    DWORD fpsTimeDenominator = fps;
    DWORD fpsTimeAdjust = 0;

    DWORD cfrTime = 0;
    DWORD cfrTimeAdjust = 0;

    //----------------------------------------
    // start audio capture streams

    desktopAudio->StartCapture();
    if(micAudio) micAudio->StartCapture();

    //----------------------------------------
    // status bar/statistics stuff

    DWORD fpsCounter = 0;
    int numLongFrames = 0;
    int numTotalFrames = 0;

    int numTotalDuplicatedFrames = 0;

    bytesPerSec = 0;
    captureFPS = 0;
    curFramesDropped = 0;
    curStrain = 0.0;
    PostMessage(hwndMain, OBS_UPDATESTATUSBAR, 0, 0);

    QWORD lastBytesSent[3] = {0, 0, 0};
    DWORD lastFramesDropped = 0;
#ifdef USE_100NS_TIME
    double bpsTime = 0.0;
#else
    float bpsTime = 0.0f;
#endif
    double lastStrain = 0.0;
    DWORD numSecondsWaited = 0;

    //----------------------------------------
    // 444->420 thread data

    int numThreads = MAX(OSGetTotalCores()-2, 1);
    HANDLE *h420Threads = (HANDLE*)Allocate(sizeof(HANDLE)*numThreads);
    Convert444Data *convertInfo = (Convert444Data*)Allocate(sizeof(Convert444Data)*numThreads);

    zero(h420Threads, sizeof(HANDLE)*numThreads);
    zero(convertInfo, sizeof(Convert444Data)*numThreads);

    for(int i=0; i<numThreads; i++)
    {
        convertInfo[i].width  = outputCX;
        convertInfo[i].height = outputCY;
        convertInfo[i].hSignalConvert  = CreateEvent(NULL, FALSE, FALSE, NULL);
        convertInfo[i].hSignalComplete = CreateEvent(NULL, FALSE, FALSE, NULL);

        if(i == 0)
            convertInfo[i].startY = 0;
        else
            convertInfo[i].startY = convertInfo[i-1].endY;

        if(i == (numThreads-1))
            convertInfo[i].endY = outputCY;
        else
            convertInfo[i].endY = ((outputCY/numThreads)*(i+1)) & 0xFFFFFFFE;
    }

    bool bFirstFrame = true;
    bool bFirstImage = true;
    bool bFirst420Encode = true;
    bool bUseThreaded420 = bUseMultithreadedOptimizations && (OSGetTotalCores() > 1) && !bUsing444;

    List<HANDLE> completeEvents;

    if(bUseThreaded420)
    {
        for(int i=0; i<numThreads; i++)
        {
            h420Threads[i] = OSCreateThread((XTHREAD)Convert444Thread, convertInfo+i);
            completeEvents << convertInfo[i].hSignalComplete;
        }
    }

    //----------------------------------------

    QWORD curStreamTime = 0, lastStreamTime, firstFrameTime = GetQPCTimeMS(clockFreq.QuadPart);
    lastStreamTime = firstFrameTime-frameTime;

    bool bFirstAudioPacket = true;

    while(bRunning)
    {
#ifdef USE_100NS_TIME
        QWORD renderStartTime = GetQPCTime100NS(clockFreq.QuadPart);

        if(sleepTargetTime == 0 || bWasLaggedFrame)
            sleepTargetTime = renderStartTime;
#else
        DWORD renderStartTime = OSGetTime();
        totalStreamTime = renderStartTime-streamTimeStart;

        DWORD frameTimeAdjust = frameTime;
        fpsTimeAdjust += fpsTimeNumerator;
        if(fpsTimeAdjust > fpsTimeDenominator)
        {
            fpsTimeAdjust -= fpsTimeDenominator;
            ++frameTimeAdjust;
        }
#endif

        bool bRenderView = !IsIconic(hwndMain) && bRenderViewEnabled;

        profileIn("frame");

#ifdef USE_100NS_TIME
        QWORD qwTime = renderStartTime/10000;
        latestVideoTime = qwTime;

        QWORD frameDelta = renderStartTime-lastStreamTime;
        double fSeconds = double(frameDelta)*0.0000001;

        //Log(TEXT("frameDelta: %f"), fSeconds);

        lastStreamTime = renderStartTime;
#else
        QWORD qwTime = GetQPCTimeMS(clockFreq.QuadPart);
        latestVideoTime = qwTime;

        QWORD frameDelta = qwTime-lastStreamTime;
        float fSeconds = float(frameDelta)*0.001f;

        //Log(TEXT("frameDelta: %llu"), frameDelta);

        lastStreamTime = qwTime;
#endif

        //------------------------------------

        if(bRequestKeyframe && keyframeWait > 0)
        {
            keyframeWait -= int(frameDelta);

            if(keyframeWait <= 0)
            {
                GetVideoEncoder()->RequestKeyframe();
                bRequestKeyframe = false;
            }
        }

        if(!bPushToTalkDown && pushToTalkTimeLeft > 0)
        {
            pushToTalkTimeLeft -= int(frameDelta);
            OSDebugOut(TEXT("time left: %d\r\n"), pushToTalkTimeLeft);
            if(pushToTalkTimeLeft <= 0)
            {
                pushToTalkTimeLeft = 0;
                bPushToTalkOn = false;
            }
        }

        //------------------------------------

        OSEnterMutex(hSceneMutex);

        if(bResizeRenderView)
        {
            GS->ResizeView();
            bResizeRenderView = false;
        }

        //------------------------------------

        if(scene)
        {
            profileIn("scene->Preprocess");
            scene->Preprocess();

            for(UINT i=0; i<globalSources.Num(); i++)
                globalSources[i].source->Preprocess();

            profileOut;

            scene->Tick(float(fSeconds));

            for(UINT i=0; i<globalSources.Num(); i++)
                globalSources[i].source->Tick(float(fSeconds));
        }

        //------------------------------------

        QWORD curBytesSent = network->GetCurrentSentBytes();
        curFramesDropped = network->NumDroppedFrames();
        bool bUpdateBPS = false;

        bpsTime += fSeconds;
        if(bpsTime > 1.0f)
        {
            if(numSecondsWaited < 3)
                ++numSecondsWaited;

            //bytesPerSec = DWORD(curBytesSent - lastBytesSent);
            bytesPerSec = DWORD(curBytesSent - lastBytesSent[0]) / numSecondsWaited;

            if(bpsTime > 2.0)
                bpsTime = 0.0f;
            else
                bpsTime -= 1.0;

            if(numSecondsWaited == 3)
            {
                lastBytesSent[0] = lastBytesSent[1];
                lastBytesSent[1] = lastBytesSent[2];
                lastBytesSent[2] = curBytesSent;
            }
            else
                lastBytesSent[numSecondsWaited] = curBytesSent;

            captureFPS = fpsCounter;
            fpsCounter = 0;

            bUpdateBPS = true;
        }

        fpsCounter++;

        curStrain = network->GetPacketStrain();

        EnableBlending(TRUE);
        BlendFunction(GS_BLEND_SRCALPHA, GS_BLEND_INVSRCALPHA);

        //------------------------------------
        // render the mini render texture

        LoadVertexShader(mainVertexShader);
        LoadPixelShader(mainPixelShader);

        SetRenderTarget(mainRenderTextures[curRenderTarget]);

        Ortho(0.0f, baseSize.x, baseSize.y, 0.0f, -100.0f, 100.0f);
        SetViewport(0, 0, baseSize.x, baseSize.y);

        if(scene)
            scene->Render();

        //------------------------------------

        if(bTransitioning)
        {
            if(!transitionTexture)
            {
                transitionTexture = CreateTexture(baseCX, baseCY, GS_BGRA, NULL, FALSE, TRUE);
                if(transitionTexture)
                {
                    D3D10Texture *d3dTransitionTex = static_cast<D3D10Texture*>(transitionTexture);
                    D3D10Texture *d3dSceneTex = static_cast<D3D10Texture*>(mainRenderTextures[lastRenderTarget]);
                    GetD3D()->CopyResource(d3dTransitionTex->texture, d3dSceneTex->texture);
                }
                else
                    bTransitioning = false;
            }
            else if(transitionAlpha >= 1.0f)
            {
                delete transitionTexture;
                transitionTexture = NULL;
                bTransitioning = false;
            }
        }

        if(bTransitioning)
        {
            EnableBlending(TRUE);
            transitionAlpha += float(fSeconds)*5.0f;
            if(transitionAlpha > 1.0f)
                transitionAlpha = 1.0f;
        }
        else
            EnableBlending(FALSE);

        //------------------------------------
        // render the mini view thingy

        if(bRenderView)
        {
            Vect2 renderFrameSize = Vect2(float(renderFrameWidth), float(renderFrameHeight));

            SetRenderTarget(NULL);

            LoadVertexShader(mainVertexShader);
            LoadPixelShader(mainPixelShader);

            Ortho(0.0f, renderFrameSize.x, renderFrameSize.y, 0.0f, -100.0f, 100.0f);
            SetViewport(0.0f, 0.0f, renderFrameSize.x, renderFrameSize.y);

            if(bTransitioning)
            {
                BlendFunction(GS_BLEND_ONE, GS_BLEND_ZERO);
                if(renderFrameIn1To1Mode)
                    DrawSprite(transitionTexture, 0xFFFFFFFF, 0.0f, 0.0f, outputSize.x, outputSize.y);
                else
                    DrawSprite(transitionTexture, 0xFFFFFFFF, 0.0f, 0.0f, renderFrameSize.x, renderFrameSize.y);
                BlendFunction(GS_BLEND_FACTOR, GS_BLEND_INVFACTOR, transitionAlpha);
            }

            if(renderFrameIn1To1Mode)
                DrawSprite(mainRenderTextures[curRenderTarget], 0xFFFFFFFF, 0.0f, 0.0f, outputSize.x, outputSize.y);
            else
                DrawSprite(mainRenderTextures[curRenderTarget], 0xFFFFFFFF, 0.0f, 0.0f, renderFrameSize.x, renderFrameSize.y);

            Ortho(0.0f, renderFrameSize.x, renderFrameSize.y, 0.0f, -100.0f, 100.0f);

            //draw selections if in edit mode
            if(bEditMode && !bSizeChanging)
            {
                LoadVertexShader(solidVertexShader);
                LoadPixelShader(solidPixelShader);
                solidPixelShader->SetColor(solidPixelShader->GetParameter(0), 0xFFFF0000);

                if(renderFrameIn1To1Mode)
                    Ortho(0.0f, renderFrameSize.x * downscale, renderFrameSize.y * downscale, 0.0f, -100.0f, 100.0f);
                else
                    Ortho(0.0f, baseSize.x, baseSize.y, 0.0f, -100.0f, 100.0f);

                if(scene)
                    scene->RenderSelections();
            }
        }
        else if(bForceRenderViewErase)
        {
            InvalidateRect(hwndRenderFrame, NULL, TRUE);
            UpdateWindow(hwndRenderFrame);
            bForceRenderViewErase = false;
        }

        //------------------------------------
        // actual stream output

        LoadVertexShader(mainVertexShader);
        LoadPixelShader(yuvScalePixelShader);

        Texture *yuvRenderTexture = yuvRenderTextures[curRenderTarget];
        SetRenderTarget(yuvRenderTexture);

        yuvScalePixelShader->SetVector2(hScaleVal, 1.0f/baseSize);

        Ortho(0.0f, outputSize.x, outputSize.y, 0.0f, -100.0f, 100.0f);
        SetViewport(0.0f, 0.0f, outputSize.x, outputSize.y);

        //why am I using scaleSize instead of outputSize for the texture?
        //because outputSize can be trimmed by up to three pixels due to 128-bit alignment.
        //using the scale function with outputSize can cause slightly inaccurate scaled images

        if(bTransitioning)
        {
            BlendFunction(GS_BLEND_ONE, GS_BLEND_ZERO);
            DrawSpriteEx(transitionTexture, 0xFFFFFFFF, 0.0f, 0.0f, scaleSize.x, scaleSize.y, 0.0f, 0.0f, scaleSize.x, scaleSize.y);
            BlendFunction(GS_BLEND_FACTOR, GS_BLEND_INVFACTOR, transitionAlpha);
        }

        DrawSpriteEx(mainRenderTextures[curRenderTarget], 0xFFFFFFFF, 0.0f, 0.0f, outputSize.x, outputSize.y, 0.0f, 0.0f, outputSize.x, outputSize.y);

        //------------------------------------

        if(bRenderView && !copyWait)
            static_cast<D3D10System*>(GS)->swap->Present(0, 0);

        OSLeaveMutex(hSceneMutex);

        //------------------------------------
        // present/upload

        profileIn("video encoding and uploading");

        bool bEncode = true;

        if(copyWait)
        {
            copyWait--;
            bEncode = false;
        }
        else
        {
            //audio sometimes takes a bit to start -- do not start processing frames until audio has started capturing
            if(!bRecievedFirstAudioFrame)
                bEncode = false;
            else if(bFirstFrame)
            {
                firstFrameTime = qwTime;
                bFirstFrame = false;
            }

            if(!bEncode)
            {
                if(curYUVTexture == (NUM_RENDER_BUFFERS-1))
                    curYUVTexture = 0;
                else
                    curYUVTexture++;
            }
        }

        if(bEncode)
        {
            curStreamTime = qwTime-firstFrameTime;

            UINT prevCopyTexture = (curCopyTexture == 0) ? NUM_RENDER_BUFFERS-1 : curCopyTexture-1;

            ID3D10Texture2D *copyTexture = copyTextures[curCopyTexture];
            profileIn("CopyResource");

            if(!bFirst420Encode && bUseThreaded420)
            {
                WaitForMultipleObjects(completeEvents.Num(), completeEvents.Array(), TRUE, INFINITE);
                copyTexture->Unmap(0);
            }

            D3D10Texture *d3dYUV = static_cast<D3D10Texture*>(yuvRenderTextures[curYUVTexture]);
            GetD3D()->CopyResource(copyTexture, d3dYUV->texture);
            profileOut;

            ID3D10Texture2D *prevTexture = copyTextures[prevCopyTexture];

            if(bFirstImage) //ignore the first frame
                bFirstImage = false;
            else
            {
                HRESULT result;
                D3D10_MAPPED_TEXTURE2D map;
                if(SUCCEEDED(result = prevTexture->Map(0, D3D10_MAP_READ, 0, &map)))
                {
                    int prevOutBuffer = (curOutBuffer == 0) ? NUM_OUT_BUFFERS-1 : curOutBuffer-1;
                    int nextOutBuffer = (curOutBuffer == NUM_OUT_BUFFERS-1) ? 0 : curOutBuffer+1;

                    x264_picture_t &prevPicOut = outPics[prevOutBuffer];
                    x264_picture_t &picOut = outPics[curOutBuffer];
                    x264_picture_t &nextPicOut = outPics[nextOutBuffer];

                    if(!bUsing444)
                    {
                        profileIn("conversion to 4:2:0");

                        if(bUseThreaded420)
                        {
                            outTimes[nextOutBuffer] = (DWORD)curStreamTime;

                            for(int i=0; i<numThreads; i++)
                            {
                                convertInfo[i].input = (LPBYTE)map.pData;
                                convertInfo[i].pitch = map.RowPitch;
                                convertInfo[i].output[0] = nextPicOut.img.plane[0];
                                convertInfo[i].output[1] = nextPicOut.img.plane[1];
                                convertInfo[i].output[2] = nextPicOut.img.plane[2];
                                SetEvent(convertInfo[i].hSignalConvert);
                            }

                            if(bFirst420Encode)
                                bFirst420Encode = bEncode = false;
                        }
                        else
                        {
                            outTimes[curOutBuffer] = (DWORD)curStreamTime;
                            Convert444to420((LPBYTE)map.pData, outputCX, map.RowPitch, outputCY, 0, outputCY, picOut.img.plane, SSE2Available());
                            prevTexture->Unmap(0);
                        }

                        profileOut;
                    }
                    else
                    {
                        outTimes[curOutBuffer] = (DWORD)curStreamTime;

                        picOut.img.i_stride[0] = map.RowPitch;
                        picOut.img.plane[0] = (uint8_t*)map.pData;
                    }

                    if(bEncode)
                    {
                        DWORD curFrameTimestamp = outTimes[prevOutBuffer];
                        //Log(TEXT("curFrameTimestamp: %u"), curFrameTimestamp);

                        //------------------------------------

                        FrameProcessInfo frameInfo;
                        frameInfo.firstFrameTime = firstFrameTime;
                        frameInfo.prevTexture = prevTexture;

                        if(bUseCFR)
                        {
                            while(cfrTime < curFrameTimestamp)
                            {
                                DWORD frameTimeAdjust = frameTime;
                                cfrTimeAdjust += fpsTimeNumerator;
                                if(cfrTimeAdjust > fpsTimeDenominator)
                                {
                                    cfrTimeAdjust -= fpsTimeDenominator;
                                    ++frameTimeAdjust;
                                }

                                DWORD halfTime = (frameTimeAdjust+1)/2;

                                x264_picture_t *nextPic = (curFrameTimestamp-cfrTime <= halfTime) ? &picOut : &prevPicOut;
                                //Log(TEXT("cfrTime: %u, time: %u"), cfrTime, curFrameTimestamp);

                                //these lines are just for counting duped frames
                                if(nextPic == lastPic)
                                    ++numTotalDuplicatedFrames;
                                else
                                    lastPic = nextPic;

                                frameInfo.picOut = nextPic;
                                frameInfo.picOut->i_pts = cfrTime;
                                frameInfo.frameTimestamp = cfrTime;

                                ProcessFrame(frameInfo);

                                cfrTime += frameTimeAdjust;

                                //Log(TEXT("cfrTime: %u, chi frame: %u"), cfrTime, (curFrameTimestamp-cfrTime <= halfTime));
                            }
                        }
                        else
                        {
                            picOut.i_pts = curFrameTimestamp;

                            frameInfo.picOut = &picOut;
                            frameInfo.frameTimestamp = curFrameTimestamp;

                            ProcessFrame(frameInfo);
                        }
                    }

                    curOutBuffer = nextOutBuffer;
                }
                else
                {
                    //We have to crash, or we end up deadlocking the thread when the convert threads are never signalled
                    CrashError (TEXT("Texture->Map failed: 0x%08x"), result);
                }
            }

            if(curCopyTexture == (NUM_RENDER_BUFFERS-1))
                curCopyTexture = 0;
            else
                curCopyTexture++;

            if(curYUVTexture == (NUM_RENDER_BUFFERS-1))
                curYUVTexture = 0;
            else
                curYUVTexture++;
        }

        lastRenderTarget = curRenderTarget;

        if(curRenderTarget == (NUM_RENDER_BUFFERS-1))
            curRenderTarget = 0;
        else
            curRenderTarget++;

        if(bUpdateBPS || !CloseDouble(curStrain, lastStrain) || curFramesDropped != lastFramesDropped)
        {
            PostMessage(hwndMain, OBS_UPDATESTATUSBAR, 0, 0);
            lastStrain = curStrain;
            lastFramesDropped = curFramesDropped;
        }

        profileOut; //video encoding and uploading
        profileOut; //frame

        //------------------------------------
        // get audio while sleeping or capturing
        //ReleaseSemaphore(hRequestAudioEvent, 1, NULL);

        //------------------------------------
        // frame sync

#ifdef USE_100NS_TIME
        QWORD renderStopTime = GetQPCTime100NS(clockFreq.QuadPart);
        sleepTargetTime += frameTime100ns;

        if(bWasLaggedFrame = (sleepTargetTime <= renderStopTime))
            numLongFrames++;
        else
            SleepTo(clockFreq.QuadPart, sleepTargetTime);
#else
        DWORD renderStopTime = OSGetTime();
        DWORD totalTime = renderStopTime-renderStartTime;

        if(totalTime > frameTimeAdjust)
            numLongFrames++;
        else if(totalTime < frameTimeAdjust)
            OSSleep(frameTimeAdjust-totalTime);
#endif

        //OSDebugOut(TEXT("Frame adjust time: %d, "), frameTimeAdjust-totalTime);

        numTotalFrames++;
    }

    if(!bUsing444)
    {
        if(bUseThreaded420)
        {
            for(int i=0; i<numThreads; i++)
            {
                if(h420Threads[i])
                {
                    convertInfo[i].bKillThread = true;
                    SetEvent(convertInfo[i].hSignalConvert);

                    OSTerminateThread(h420Threads[i], 10000);
                    h420Threads[i] = NULL;
                }

                if(convertInfo[i].hSignalConvert)
                {
                    CloseHandle(convertInfo[i].hSignalConvert);
                    convertInfo[i].hSignalConvert = NULL;
                }

                if(convertInfo[i].hSignalComplete)
                {
                    CloseHandle(convertInfo[i].hSignalComplete);
                    convertInfo[i].hSignalComplete = NULL;
                }
            }

            if(!bFirst420Encode)
            {
                ID3D10Texture2D *copyTexture = copyTextures[curCopyTexture];
                copyTexture->Unmap(0);
            }
        }

        for(int i=0; i<NUM_OUT_BUFFERS; i++)
            x264_picture_clean(&outPics[i]);
    }

    Free(h420Threads);
    Free(convertInfo);

    Log(TEXT("Total frames rendered: %d, number of frames that lagged: %d (%0.2f%%) (it's okay for some frames to lag)"), numTotalFrames, numLongFrames, (double(numLongFrames)/double(numTotalFrames))*100.0);
    if(bUseCFR)
        Log(TEXT("Total duplicated CFR frames: %d"), numTotalDuplicatedFrames);
}
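// An illustrative sketch (not OBS code) of the fpsTimeNumerator /
// fpsTimeDenominator bookkeeping that both MainCaptureLoop() versions above
// use for frame pacing. The fractional part of 1000/fps is distributed across
// frames Bresenham-style, so e.g. 30 fps alternates 33 ms and 34 ms frames
// instead of drifting by 10 ms every second. The `>` comparison (rather than
// `>=`) mirrors the original code exactly.
struct FrameTimerSketch
{
    unsigned frameTime, numerator, denominator, accum;

    explicit FrameTimerSketch(unsigned fps)
        : frameTime(1000/fps),
          numerator(1000 - (1000/fps)*fps), //fractional milliseconds per second
          denominator(fps),
          accum(0)
    {
    }

    unsigned NextFrameMS() //returns frameTime, or frameTime+1 on carry frames
    {
        unsigned t = frameTime;
        accum += numerator;
        if (accum > denominator)
        {
            accum -= denominator;
            ++t;
        }
        return t;
    }
};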