Example 1
//todo: this function is an abomination, this is just disgusting.  fix it.
//...seriously, this is really, really horrible.  I mean this is amazingly bad.
void OBS::MainCaptureLoop()
{
    int curRenderTarget = 0, curYUVTexture = 0, curCopyTexture = 0;
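
    // the GPU->CPU readback is pipelined through NUM_RENDER_BUFFERS staging
    // textures; skip encoding for the first NUM_RENDER_BUFFERS-1 frames so the
    // first Map() reads a copy that has actually completed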
    int copyWait = NUM_RENDER_BUFFERS-1;

    bSentHeaders = false;
    bFirstAudioPacket = true;

    Vect2 baseSize    = Vect2(float(baseCX), float(baseCY));
    Vect2 outputSize  = Vect2(float(outputCX), float(outputCY));
    Vect2 scaleSize   = Vect2(float(scaleCX), float(scaleCY));

    HANDLE hScaleVal = yuvScalePixelShader->GetParameterByName(TEXT("baseDimensionI"));

    //----------------------------------------
    // x264 input buffers

    int curOutBuffer = 0;

    x264_picture_t *lastPic = NULL;
    x264_picture_t outPics[NUM_OUT_BUFFERS];
    DWORD outTimes[NUM_OUT_BUFFERS] = {0, 0, 0};

    for(int i=0; i<NUM_OUT_BUFFERS; i++)
        x264_picture_init(&outPics[i]);

    if(bUsing444)
    {
        for(int i=0; i<NUM_OUT_BUFFERS; i++)
        {
            outPics[i].img.i_csp   = X264_CSP_BGRA; //although the x264 input says BGR, x264 will actually expect packed UYV
            outPics[i].img.i_plane = 1;
        }
    }
    else
    {
        for(int i=0; i<NUM_OUT_BUFFERS; i++)
            x264_picture_alloc(&outPics[i], X264_CSP_I420, outputCX, outputCY);
    }

    //----------------------------------------
    // time/timestamp stuff

    bufferedTimes.Clear();
    ctsOffsets.Clear();

#ifdef USE_100NS_TIME
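    // 100 ns QPC timing path: frameTime100ns is the frame interval in 100 ns
    // units (e.g. 30 fps -> 10000000/30 = 333333)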
    QWORD streamTimeStart = GetQPCTime100NS();
    QWORD frameTime100ns = 10000000/fps;

    QWORD sleepTargetTime = 0;
    bool bWasLaggedFrame = false;
#else
    DWORD streamTimeStart = OSGetTime();
    DWORD fpsTimeAdjust = 0;
#endif
    totalStreamTime = 0;

    lastAudioTimestamp = 0;

    latestVideoTime = firstSceneTimestamp = GetQPCTimeMS();

    DWORD fpsTimeNumerator = 1000-(frameTime*fps);
    DWORD fpsTimeDenominator = fps;
    DWORD cfrTime = 0;
    DWORD cfrTimeAdjust = 0;
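
    // frameTime*fps can fall a little short of 1000 due to integer division
    // (e.g. 30 fps -> frameTime 33 -> 990); the leftover milliseconds are spread
    // Bresenham-style: each frame adds fpsTimeNumerator to an accumulator, and
    // whenever it exceeds fpsTimeDenominator the frame interval is stretched by 1 ms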

    //----------------------------------------
    // start audio capture streams

    desktopAudio->StartCapture();
    if(micAudio) micAudio->StartCapture();

    //----------------------------------------
    // status bar/statistics stuff

    DWORD fpsCounter = 0;

    int numLongFrames = 0;
    int numTotalFrames = 0;

    int numTotalDuplicatedFrames = 0;

    bytesPerSec = 0;
    captureFPS = 0;
    curFramesDropped = 0;
    curStrain = 0.0;
    PostMessage(hwndMain, OBS_UPDATESTATUSBAR, 0, 0);

    QWORD lastBytesSent[3] = {0, 0, 0};
    DWORD lastFramesDropped = 0;
#ifdef USE_100NS_TIME
    double bpsTime = 0.0;
#else
    float bpsTime = 0.0f;
#endif
    double lastStrain = 0.0;
    DWORD numSecondsWaited = 0;

    //----------------------------------------
    // 444->420 thread data

    int numThreads = MAX(OSGetTotalCores()-2, 1);
    HANDLE *h420Threads = (HANDLE*)Allocate(sizeof(HANDLE)*numThreads);
    Convert444Data *convertInfo = (Convert444Data*)Allocate(sizeof(Convert444Data)*numThreads);

    zero(h420Threads, sizeof(HANDLE)*numThreads);
    zero(convertInfo, sizeof(Convert444Data)*numThreads);

    for(int i=0; i<numThreads; i++)
    {
        convertInfo[i].width  = outputCX;
        convertInfo[i].height = outputCY;
        convertInfo[i].hSignalConvert  = CreateEvent(NULL, FALSE, FALSE, NULL);
        convertInfo[i].hSignalComplete = CreateEvent(NULL, FALSE, FALSE, NULL);

        if(i == 0)
            convertInfo[i].startY = 0;
        else
            convertInfo[i].startY = convertInfo[i-1].endY;

        if(i == (numThreads-1))
            convertInfo[i].endY = outputCY;
        else
            convertInfo[i].endY = ((outputCY/numThreads)*(i+1)) & 0xFFFFFFFE;
    }

    bool bFirstFrame = true;
    bool bFirstImage = true;
    bool bFirst420Encode = true;
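    //threaded 4:4:4 -> 4:2:0 conversion only pays off with spare cores; the 4:4:4
    //path needs no conversion pass at all since x264 is fed packed BGRA directly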
    bool bUseThreaded420 = bUseMultithreadedOptimizations && (OSGetTotalCores() > 1) && !bUsing444;

    List<HANDLE> completeEvents;

    if(bUseThreaded420)
    {
        for(int i=0; i<numThreads; i++)
        {
            h420Threads[i] = OSCreateThread((XTHREAD)Convert444Thread, convertInfo+i);
            completeEvents << convertInfo[i].hSignalComplete;
        }
    }

    //----------------------------------------

    QWORD curStreamTime = 0, lastStreamTime, firstFrameTime = GetQPCTimeMS();

#ifdef USE_100NS_TIME
    lastStreamTime = GetQPCTime100NS()-frameTime100ns;
#else
    lastStreamTime = firstFrameTime-frameTime;
#endif

    //bool bFirstAudioPacket = true;

    while(bRunning)
    {
#ifdef USE_100NS_TIME
        QWORD renderStartTime = GetQPCTime100NS();
        totalStreamTime = DWORD((renderStartTime-streamTimeStart)/10000);

        if(sleepTargetTime == 0 || bWasLaggedFrame)
            sleepTargetTime = renderStartTime;
#else
        DWORD renderStartTime = OSGetTime();
        totalStreamTime = renderStartTime-streamTimeStart;

        DWORD frameTimeAdjust = frameTime;
        fpsTimeAdjust += fpsTimeNumerator;
        if(fpsTimeAdjust > fpsTimeDenominator)
        {
            fpsTimeAdjust -= fpsTimeDenominator;
            ++frameTimeAdjust;
        }
#endif

        bool bRenderView = !IsIconic(hwndMain) && bRenderViewEnabled;

        profileIn("frame");

#ifdef USE_100NS_TIME
        QWORD qwTime = renderStartTime/10000;
        latestVideoTime = qwTime;

        QWORD frameDelta = renderStartTime-lastStreamTime;
        double fSeconds = double(frameDelta)*0.0000001;

        //Log(TEXT("frameDelta: %f"), fSeconds);

        lastStreamTime = renderStartTime;
#else
        QWORD qwTime = GetQPCTimeMS();
        latestVideoTime = qwTime;

        QWORD frameDelta = qwTime-lastStreamTime;
        float fSeconds = float(frameDelta)*0.001f;

        //Log(TEXT("frameDelta: %llu"), frameDelta);

        lastStreamTime = qwTime;
#endif

        //------------------------------------

        if(bRequestKeyframe && keyframeWait > 0)
        {
            keyframeWait -= int(frameDelta);

            if(keyframeWait <= 0)
            {
                GetVideoEncoder()->RequestKeyframe();
                bRequestKeyframe = false;
            }
        }

        if(!bPushToTalkDown && pushToTalkTimeLeft > 0)
        {
            pushToTalkTimeLeft -= int(frameDelta);
            OSDebugOut(TEXT("time left: %d\r\n"), pushToTalkTimeLeft);
            if(pushToTalkTimeLeft <= 0)
            {
                pushToTalkTimeLeft = 0;
                bPushToTalkOn = false;
            }
        }

        //------------------------------------

        OSEnterMutex(hSceneMutex);

        if(bResizeRenderView)
        {
            GS->ResizeView();
            bResizeRenderView = false;
        }

        //------------------------------------

        if(scene)
        {
            profileIn("scene->Preprocess");
            scene->Preprocess();

            for(UINT i=0; i<globalSources.Num(); i++)
                globalSources[i].source->Preprocess();

            profileOut;

            scene->Tick(float(fSeconds));

            for(UINT i=0; i<globalSources.Num(); i++)
                globalSources[i].source->Tick(float(fSeconds));
        }

        //------------------------------------

        QWORD curBytesSent = network->GetCurrentSentBytes();
        curFramesDropped = network->NumDroppedFrames();
        bool bUpdateBPS = false;

        bpsTime += fSeconds;
        if(bpsTime > 1.0f)
        {
            if(numSecondsWaited < 3)
                ++numSecondsWaited;

            //bytesPerSec = DWORD(curBytesSent - lastBytesSent);
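            //lastBytesSent is a 3-entry sliding window, so once primed this reports
            //a 3-second average rather than an instantaneous rate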
            bytesPerSec = DWORD(curBytesSent - lastBytesSent[0]) / numSecondsWaited;

            if(bpsTime > 2.0)
                bpsTime = 0.0f;
            else
                bpsTime -= 1.0;

            if(numSecondsWaited == 3)
            {
                lastBytesSent[0] = lastBytesSent[1];
                lastBytesSent[1] = lastBytesSent[2];
                lastBytesSent[2] = curBytesSent;
            }
            else
                lastBytesSent[numSecondsWaited] = curBytesSent;

            captureFPS = fpsCounter;
            fpsCounter = 0;

            bUpdateBPS = true;
        }

        fpsCounter++;

        curStrain = network->GetPacketStrain();

        EnableBlending(TRUE);
        BlendFunction(GS_BLEND_SRCALPHA, GS_BLEND_INVSRCALPHA);

        //------------------------------------
        // render the mini render texture

        LoadVertexShader(mainVertexShader);
        LoadPixelShader(mainPixelShader);

        SetRenderTarget(mainRenderTextures[curRenderTarget]);

        Ortho(0.0f, baseSize.x, baseSize.y, 0.0f, -100.0f, 100.0f);
        SetViewport(0, 0, baseSize.x, baseSize.y);

        if(scene)
            scene->Render();

        //------------------------------------

        if(bTransitioning)
        {
            if(!transitionTexture)
            {
                transitionTexture = CreateTexture(baseCX, baseCY, GS_BGRA, NULL, FALSE, TRUE);
                if(transitionTexture)
                {
                    D3D10Texture *d3dTransitionTex = static_cast<D3D10Texture*>(transitionTexture);
                    D3D10Texture *d3dSceneTex = static_cast<D3D10Texture*>(mainRenderTextures[lastRenderTarget]);
                    GetD3D()->CopyResource(d3dTransitionTex->texture, d3dSceneTex->texture);
                }
                else
                    bTransitioning = false;
            }
            else if(transitionAlpha >= 1.0f)
            {
                delete transitionTexture;
                transitionTexture = NULL;

                bTransitioning = false;
            }
        }

        if(bTransitioning)
        {
            EnableBlending(TRUE);
            transitionAlpha += float(fSeconds)*5.0f;
            if(transitionAlpha > 1.0f)
                transitionAlpha = 1.0f;
        }
        else
            EnableBlending(FALSE);

        //------------------------------------
        // render the mini view thingy

        if(bRenderView)
        {
            // Cache
            const Vect2 renderFrameSize = GetRenderFrameSize();
            const Vect2 renderFrameOffset = GetRenderFrameOffset();
            const Vect2 renderFrameCtrlSize = GetRenderFrameControlSize();

            SetRenderTarget(NULL);

            LoadVertexShader(mainVertexShader);
            LoadPixelShader(mainPixelShader);

            Ortho(0.0f, renderFrameCtrlSize.x, renderFrameCtrlSize.y, 0.0f, -100.0f, 100.0f);
            if(renderFrameCtrlSize.x != oldRenderFrameCtrlWidth || renderFrameCtrlSize.y != oldRenderFrameCtrlHeight)
            {
                // User is drag resizing the window. We don't recreate the swap chains so our coordinates are wrong
                SetViewport(0.0f, 0.0f, (float)oldRenderFrameCtrlWidth, (float)oldRenderFrameCtrlHeight);
            }
            else
                SetViewport(0.0f, 0.0f, renderFrameCtrlSize.x, renderFrameCtrlSize.y);

            // Draw background (Black if fullscreen, window colour otherwise)
            if(bFullscreenMode)
                ClearColorBuffer(0x000000);
            else
                ClearColorBuffer(GetSysColor(COLOR_BTNFACE));

            if(bTransitioning)
            {
                BlendFunction(GS_BLEND_ONE, GS_BLEND_ZERO);
                DrawSprite(transitionTexture, 0xFFFFFFFF,
                        renderFrameOffset.x, renderFrameOffset.y,
                        renderFrameOffset.x + renderFrameSize.x, renderFrameOffset.y + renderFrameSize.y);
                BlendFunction(GS_BLEND_FACTOR, GS_BLEND_INVFACTOR, transitionAlpha);
            }

            DrawSprite(mainRenderTextures[curRenderTarget], 0xFFFFFFFF,
                    renderFrameOffset.x, renderFrameOffset.y,
                    renderFrameOffset.x + renderFrameSize.x, renderFrameOffset.y + renderFrameSize.y);

            //draw selections if in edit mode
            if(bEditMode && !bSizeChanging)
            {
                LoadVertexShader(solidVertexShader);
                LoadPixelShader(solidPixelShader);
                solidPixelShader->SetColor(solidPixelShader->GetParameter(0), 0xFFFF0000);

                if(scene)
                    scene->RenderSelections();
            }
        }
        else if(bForceRenderViewErase)
        {
            InvalidateRect(hwndRenderFrame, NULL, TRUE);
            UpdateWindow(hwndRenderFrame);
            bForceRenderViewErase = false;
        }

        //------------------------------------
        // actual stream output

        LoadVertexShader(mainVertexShader);
        LoadPixelShader(yuvScalePixelShader);

        Texture *yuvRenderTexture = yuvRenderTextures[curRenderTarget];
        SetRenderTarget(yuvRenderTexture);

        if(downscale < 2.01)
            yuvScalePixelShader->SetVector2(hScaleVal, 1.0f/baseSize);
        else if(downscale < 3.01)
            yuvScalePixelShader->SetVector2(hScaleVal, 1.0f/(outputSize*3.0f));

        Ortho(0.0f, outputSize.x, outputSize.y, 0.0f, -100.0f, 100.0f);
        SetViewport(0.0f, 0.0f, outputSize.x, outputSize.y);

        //why am I using scaleSize instead of outputSize for the texture?
        //because outputSize can be trimmed by up to three pixels due to 128-bit alignment.
        //using the scale function with outputSize can cause slightly inaccurate scaled images
        if(bTransitioning)
        {
            BlendFunction(GS_BLEND_ONE, GS_BLEND_ZERO);
            DrawSpriteEx(transitionTexture, 0xFFFFFFFF, 0.0f, 0.0f, scaleSize.x, scaleSize.y, 0.0f, 0.0f, 1.0f, 1.0f);
            BlendFunction(GS_BLEND_FACTOR, GS_BLEND_INVFACTOR, transitionAlpha);
        }

        DrawSpriteEx(mainRenderTextures[curRenderTarget], 0xFFFFFFFF, 0.0f, 0.0f, outputSize.x, outputSize.y, 0.0f, 0.0f, 1.0f, 1.0f);

        //------------------------------------

        if(bRenderView && !copyWait)
            static_cast<D3D10System*>(GS)->swap->Present(0, 0);

        OSLeaveMutex(hSceneMutex);

        //------------------------------------
        // present/upload

        profileIn("video encoding and uploading");

        bool bEncode = true;

        if(copyWait)
        {
            copyWait--;
            bEncode = false;
        }
        else
        {
            //audio sometimes takes a bit to start -- do not start processing frames until audio has started capturing
            if(!bRecievedFirstAudioFrame)
                bEncode = false;
            else if(bFirstFrame)
            {
                firstFrameTime = qwTime;
                bFirstFrame = false;
            }

            if(!bEncode)
            {
                if(curYUVTexture == (NUM_RENDER_BUFFERS-1))
                    curYUVTexture = 0;
                else
                    curYUVTexture++;
            }
        }

        if(bEncode)
        {
            curStreamTime = qwTime-firstFrameTime;

            //read back the texture copied on an earlier frame; mapping an older copy
            //keeps Map() from stalling on a copy that is still in flight
            UINT prevCopyTexture = (curCopyTexture == 0) ? NUM_RENDER_BUFFERS-1 : curCopyTexture-1;

            ID3D10Texture2D *copyTexture = copyTextures[curCopyTexture];
            profileIn("CopyResource");

            if(!bFirst420Encode && bUseThreaded420)
            {
                WaitForMultipleObjects(completeEvents.Num(), completeEvents.Array(), TRUE, INFINITE);
                copyTexture->Unmap(0);
            }

            D3D10Texture *d3dYUV = static_cast<D3D10Texture*>(yuvRenderTextures[curYUVTexture]);
            GetD3D()->CopyResource(copyTexture, d3dYUV->texture);
            profileOut;

            ID3D10Texture2D *prevTexture = copyTextures[prevCopyTexture];

            if(bFirstImage) //ignore the first frame
                bFirstImage = false;
            else
            {
                HRESULT result;
                D3D10_MAPPED_TEXTURE2D map;
                if(SUCCEEDED(result = prevTexture->Map(0, D3D10_MAP_READ, 0, &map)))
                {
                    int prevOutBuffer = (curOutBuffer == 0) ? NUM_OUT_BUFFERS-1 : curOutBuffer-1;
                    int nextOutBuffer = (curOutBuffer == NUM_OUT_BUFFERS-1) ? 0 : curOutBuffer+1;

                    x264_picture_t &prevPicOut = outPics[prevOutBuffer];
                    x264_picture_t &picOut = outPics[curOutBuffer];
                    x264_picture_t &nextPicOut = outPics[nextOutBuffer];

                    if(!bUsing444)
                    {
                        profileIn("conversion to 4:2:0");

                        if(bUseThreaded420)
                        {
                            outTimes[nextOutBuffer] = (DWORD)curStreamTime;

                            for(int i=0; i<numThreads; i++)
                            {
                                convertInfo[i].input     = (LPBYTE)map.pData;
                                convertInfo[i].pitch     = map.RowPitch;
                                convertInfo[i].output[0] = nextPicOut.img.plane[0];
                                convertInfo[i].output[1] = nextPicOut.img.plane[1];
                                convertInfo[i].output[2] = nextPicOut.img.plane[2];
                                SetEvent(convertInfo[i].hSignalConvert);
                            }

                            if(bFirst420Encode)
                                bFirst420Encode = bEncode = false;
                        }
                        else
                        {
                            outTimes[curOutBuffer] = (DWORD)curStreamTime;
                            Convert444to420((LPBYTE)map.pData, outputCX, map.RowPitch, outputCY, 0, outputCY, picOut.img.plane, SSE2Available());
                            prevTexture->Unmap(0);
                        }

                        profileOut;
                    }
                    else
                    {
                        outTimes[curOutBuffer] = (DWORD)curStreamTime;

                        picOut.img.i_stride[0] = map.RowPitch;
                        picOut.img.plane[0]    = (uint8_t*)map.pData;
                    }

                    if(bEncode)
                    {
                        DWORD curFrameTimestamp = outTimes[prevOutBuffer];
                        //Log(TEXT("curFrameTimestamp: %u"), curFrameTimestamp);

                        //------------------------------------

                        FrameProcessInfo frameInfo;
                        frameInfo.firstFrameTime = firstFrameTime;
                        frameInfo.prevTexture = prevTexture;

                        if(bDupeFrames)
                        {
                            while(cfrTime < curFrameTimestamp)
                            {
                                DWORD frameTimeAdjust = frameTime;
                                cfrTimeAdjust += fpsTimeNumerator;
                                if(cfrTimeAdjust > fpsTimeDenominator)
                                {
                                    cfrTimeAdjust -= fpsTimeDenominator;
                                    ++frameTimeAdjust;
                                }

                                DWORD halfTime = (frameTimeAdjust+1)/2;

                                //pick the source frame closest in time to this CFR tick:
                                //the current frame if within half a frame interval,
                                //otherwise duplicate the previous frame
                                x264_picture_t *nextPic = (curFrameTimestamp-cfrTime <= halfTime) ? &picOut : &prevPicOut;
                                //Log(TEXT("cfrTime: %u, time: %u"), cfrTime, curFrameTimestamp);

                                //these lines are just for counting duped frames
                                if(nextPic == lastPic)
                                    ++numTotalDuplicatedFrames;
                                else
                                    lastPic = nextPic;

                                frameInfo.picOut = nextPic;
                                frameInfo.picOut->i_pts = cfrTime;
                                frameInfo.frameTimestamp = cfrTime;
                                ProcessFrame(frameInfo);

                                cfrTime += frameTimeAdjust;

                                //Log(TEXT("cfrTime: %u, chi frame: %u"), cfrTime, (curFrameTimestamp-cfrTime <= halfTime));
                            }
                        }
                        else
                        {
                            picOut.i_pts = curFrameTimestamp;

                            frameInfo.picOut = &picOut;
                            frameInfo.frameTimestamp = curFrameTimestamp;

                            ProcessFrame(frameInfo);
                        }
                    }

                    if(bUsing444)
                    {
                        prevTexture->Unmap(0);
                    }

                    curOutBuffer = nextOutBuffer;
                }
                else
                {
                    //We have to crash, or we end up deadlocking the thread when the convert threads are never signalled
                    if (result == DXGI_ERROR_DEVICE_REMOVED)
                    {
                        String message;

                        HRESULT reason = GetD3D()->GetDeviceRemovedReason();

                        switch (reason)
                        {
                        case DXGI_ERROR_DEVICE_RESET:
                        case DXGI_ERROR_DEVICE_HUNG:
                            message = TEXT("Your video card or driver froze and was reset. Please check for possible hardware / driver issues.");
                            break;
                        case DXGI_ERROR_DEVICE_REMOVED:
                            message = TEXT("Your video card disappeared from the system. Please check for possible hardware / driver issues.");
                            break;
                        case DXGI_ERROR_DRIVER_INTERNAL_ERROR:
                            message = TEXT("Your video driver reported an internal error. Please check for possible hardware / driver issues.");
                            break;
                        case DXGI_ERROR_INVALID_CALL:
                            message = TEXT("Your video driver reported an invalid call. Please check for possible driver issues.");
                            break;
                        default:
                            message = TEXT("DXGI_ERROR_DEVICE_REMOVED");
                            break;
                        }

                        CrashError (TEXT("Texture->Map failed: 0x%08x 0x%08x\r\n\r\n%s"), result, reason, message.Array());
                    }
                    else
                        CrashError (TEXT("Texture->Map failed: 0x%08x"), result);
                }
            }

            if(curCopyTexture == (NUM_RENDER_BUFFERS-1))
                curCopyTexture = 0;
            else
                curCopyTexture++;

            if(curYUVTexture == (NUM_RENDER_BUFFERS-1))
                curYUVTexture = 0;
            else
                curYUVTexture++;
        }

        lastRenderTarget = curRenderTarget;

        if(curRenderTarget == (NUM_RENDER_BUFFERS-1))
            curRenderTarget = 0;
        else
            curRenderTarget++;

        if(bUpdateBPS || !CloseDouble(curStrain, lastStrain) || curFramesDropped != lastFramesDropped)
        {
            PostMessage(hwndMain, OBS_UPDATESTATUSBAR, 0, 0);
            lastStrain = curStrain;

            lastFramesDropped = curFramesDropped;
        }

        profileOut;
        profileOut;

        //------------------------------------
        // we're about to sleep so we should flush the d3d command queue
        GetD3D()->Flush();

        //------------------------------------
        // frame sync

#ifdef USE_100NS_TIME
        QWORD renderStopTime = GetQPCTime100NS();
        sleepTargetTime += frameTime100ns;

        //assignment inside the condition is intentional: a lagged frame makes the
        //next iteration re-anchor its sleep target instead of sleeping
        if(bWasLaggedFrame = (sleepTargetTime <= renderStopTime))
            numLongFrames++;
        else
            SleepTo(sleepTargetTime);
#else
        DWORD renderStopTime = OSGetTime();
        DWORD totalTime = renderStopTime-renderStartTime;

        if(totalTime > frameTimeAdjust)
            numLongFrames++;
        else if(totalTime < frameTimeAdjust)
            OSSleep(frameTimeAdjust-totalTime);
#endif

        //OSDebugOut(TEXT("Frame adjust time: %d, "), frameTimeAdjust-totalTime);

        numTotalFrames++;
    }

    if(!bUsing444)
    {
        if(bUseThreaded420)
        {
            for(int i=0; i<numThreads; i++)
            {
                if(h420Threads[i])
                {
                    convertInfo[i].bKillThread = true;
                    SetEvent(convertInfo[i].hSignalConvert);

                    OSTerminateThread(h420Threads[i], 10000);
                    h420Threads[i] = NULL;
                }

                if(convertInfo[i].hSignalConvert)
                {
                    CloseHandle(convertInfo[i].hSignalConvert);
                    convertInfo[i].hSignalConvert = NULL;
                }

                if(convertInfo[i].hSignalComplete)
                {
                    CloseHandle(convertInfo[i].hSignalComplete);
                    convertInfo[i].hSignalComplete = NULL;
                }
            }

            if(!bFirst420Encode)
            {
                ID3D10Texture2D *copyTexture = copyTextures[curCopyTexture];
                copyTexture->Unmap(0);
            }
        }

        for(int i=0; i<NUM_OUT_BUFFERS; i++)
            x264_picture_clean(&outPics[i]);
    }

    Free(h420Threads);
    Free(convertInfo);

    Log(TEXT("Total frames rendered: %d, number of frames that lagged: %d (%0.2f%%) (it's okay for some frames to lag)"), numTotalFrames, numLongFrames, (double(numLongFrames)/double(numTotalFrames))*100.0);
    if(bDupeFrames)
        Log(TEXT("Total duplicated frames: %d (%0.2f%%)"), numTotalDuplicatedFrames, (double(numTotalDuplicatedFrames)/double(numTotalFrames))*100.0);
}
Example 2
bool OBS::ProcessFrame(FrameProcessInfo &frameInfo)
{
    List<DataPacket> videoPackets;
    List<PacketType> videoPacketTypes;

    //------------------------------------
    // encode

    bufferedTimes << frameInfo.frameTimestamp;

    VideoSegment curSegment;
    bool bProcessedFrame, bSendFrame = false;
    int curCTSOffset = 0;

    profileIn("call to encoder");

    videoEncoder->Encode(frameInfo.picOut, videoPackets, videoPacketTypes, bufferedTimes[0], ctsOffset);
    if(bUsing444) frameInfo.prevTexture->Unmap(0);

    ctsOffsets << ctsOffset;
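
    //the encoder may hold frames internally (lookahead/B-frames), so timestamps and
    //CTS offsets are queued here and only dequeued once Encode() emits packets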

    bProcessedFrame = (videoPackets.Num() != 0);

    //buffer video data before sending out
    if(bProcessedFrame)
    {
        bSendFrame = BufferVideoData(videoPackets, videoPacketTypes, bufferedTimes[0], curSegment);
        bufferedTimes.Remove(0);

        curCTSOffset = ctsOffsets[0];
        ctsOffsets.Remove(0);
    }

    profileOut;

    //------------------------------------
    // upload

    profileIn("sending stuff out");

    //send headers before the first frame if not yet sent
    if(bSendFrame)
    {
        if(!bSentHeaders)
        {
            network->BeginPublishing();
            bSentHeaders = true;
        }

        OSEnterMutex(hSoundDataMutex);

        if(pendingAudioFrames.Num())
        {
            while(pendingAudioFrames.Num())
            {
                if(frameInfo.firstFrameTime < pendingAudioFrames[0].timestamp)
                {
                    UINT audioTimestamp = UINT(pendingAudioFrames[0].timestamp-frameInfo.firstFrameTime);

                    /*if(bFirstAudioPacket)
                    {
                        audioTimestamp = 0;
                        bFirstAudioPacket = false;
                    }
                    else*/
                        audioTimestamp += curCTSOffset;

                    //stop sending audio packets when we reach an audio timestamp greater than the video timestamp
                    if(audioTimestamp > curSegment.timestamp)
                        break;

                    if(audioTimestamp == 0 || audioTimestamp > lastAudioTimestamp)
                    {
                        List<BYTE> &audioData = pendingAudioFrames[0].audioData;
                        if(audioData.Num())
                        {
                            //Log(TEXT("a:%u, %llu, cts: %d"), audioTimestamp, frameInfo.firstFrameTime+audioTimestamp-curCTSOffset, curCTSOffset);

                            network->SendPacket(audioData.Array(), audioData.Num(), audioTimestamp, PacketType_Audio);
                            if(fileStream)
                                fileStream->AddPacket(audioData.Array(), audioData.Num(), audioTimestamp, PacketType_Audio);

                            audioData.Clear();

                            lastAudioTimestamp = audioTimestamp;
                        }
                    }
                }
                else
                    nop(); //audio captured before the first video frame is simply dropped

                pendingAudioFrames[0].audioData.Clear();
                pendingAudioFrames.Remove(0);
            }
        }

        OSLeaveMutex(hSoundDataMutex);

        for(UINT i=0; i<curSegment.packets.Num(); i++)
        {
            VideoPacketData &packet = curSegment.packets[i];

            if(packet.type == PacketType_VideoHighest)
                bRequestKeyframe = false;

            //Log(TEXT("v:%u, %llu"), curSegment.timestamp, frameInfo.firstFrameTime+curSegment.timestamp);

            network->SendPacket(packet.data.Array(), packet.data.Num(), curSegment.timestamp, packet.type);
            if(fileStream)
                fileStream->AddPacket(packet.data.Array(), packet.data.Num(), curSegment.timestamp, packet.type);
        }
    }

    profileOut;

    return bProcessedFrame;
}
Example 3
//todo: this function is an abomination, this is just disgusting.  fix it.
//...seriously, this is really, really horrible.  I mean this is amazingly bad.
void OBS::MainCaptureLoop()
{
    int curRenderTarget = 0, curYUVTexture = 0, curCopyTexture = 0;
    int copyWait = NUM_RENDER_BUFFERS-1;

    bSentHeaders = false;
    bFirstAudioPacket = true;

    bool bLogLongFramesProfile = GlobalConfig->GetInt(TEXT("General"), TEXT("LogLongFramesProfile"), LOGLONGFRAMESDEFAULT) != 0;
    float logLongFramesProfilePercentage = GlobalConfig->GetFloat(TEXT("General"), TEXT("LogLongFramesProfilePercentage"), 10.f);

    Vect2 baseSize    = Vect2(float(baseCX), float(baseCY));
    Vect2 outputSize  = Vect2(float(outputCX), float(outputCY));
    Vect2 scaleSize   = Vect2(float(scaleCX), float(scaleCY));

    HANDLE hMatrix   = yuvScalePixelShader->GetParameterByName(TEXT("yuvMat"));
    HANDLE hScaleVal = yuvScalePixelShader->GetParameterByName(TEXT("baseDimensionI"));

    //----------------------------------------
    // x264 input buffers

    int curOutBuffer = 0;

    bool bUsingQSV = videoEncoder->isQSV(); //GlobalConfig->GetInt(TEXT("Video Encoding"), TEXT("UseQSV")) != 0;
    bUsing444 = false;
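
    //each output slot wraps either a Media SDK frame surface (QSV) or an x264
    //picture, depending on which encoder is active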

    EncoderPicture lastPic;
    EncoderPicture outPics[NUM_OUT_BUFFERS];

    for(int i=0; i<NUM_OUT_BUFFERS; i++)
    {
        if(bUsingQSV)
        {
            outPics[i].mfxOut = new mfxFrameSurface1;
            memset(outPics[i].mfxOut, 0, sizeof(mfxFrameSurface1));
            mfxFrameData& data = outPics[i].mfxOut->Data;
            videoEncoder->RequestBuffers(&data);
        }
        else
        {
            outPics[i].picOut = new x264_picture_t;
            x264_picture_init(outPics[i].picOut);
        }
    }

    if(bUsing444)
    {
        for(int i=0; i<NUM_OUT_BUFFERS; i++)
        {
            outPics[i].picOut->img.i_csp   = X264_CSP_BGRA; //although the x264 input says BGR, x264 will actually expect packed UYV
            outPics[i].picOut->img.i_plane = 1;
        }
    }
    else
    {
        if(!bUsingQSV)
            for(int i=0; i<NUM_OUT_BUFFERS; i++)
                x264_picture_alloc(outPics[i].picOut, X264_CSP_NV12, outputCX, outputCY);
    }

    int bCongestionControl = AppConfig->GetInt (TEXT("Video Encoding"), TEXT("CongestionControl"), 0);
    bool bDynamicBitrateSupported = App->GetVideoEncoder()->DynamicBitrateSupported();
    int defaultBitRate = AppConfig->GetInt(TEXT("Video Encoding"), TEXT("MaxBitrate"), 1000);
    int currentBitRate = defaultBitRate;
    QWORD lastAdjustmentTime = 0;
    UINT adjustmentStreamId = 0;

    //std::unique_ptr<ProfilerNode> encodeThreadProfiler;

    //----------------------------------------
    // time/timestamp stuff

    bool bWasLaggedFrame = false;

    totalStreamTime = 0;
    lastAudioTimestamp = 0;

    //----------------------------------------
    // start audio capture streams

    desktopAudio->StartCapture();
    if(micAudio) micAudio->StartCapture();

    //----------------------------------------
    // status bar/statistics stuff

    DWORD fpsCounter = 0;

    int numLongFrames = 0;
    int numTotalFrames = 0;

    bytesPerSec = 0;
    captureFPS = 0;
    curFramesDropped = 0;
    curStrain = 0.0;
    PostMessage(hwndMain, OBS_UPDATESTATUSBAR, 0, 0);

    QWORD lastBytesSent[3] = {0, 0, 0};
    DWORD lastFramesDropped = 0;
    double bpsTime = 0.0;

    double lastStrain = 0.0;
    DWORD numSecondsWaited = 0;

    //----------------------------------------
    // 444->420 thread data

    int numThreads = MAX(OSGetTotalCores()-2, 1);
    HANDLE *h420Threads = (HANDLE*)Allocate(sizeof(HANDLE)*numThreads);
    Convert444Data *convertInfo = (Convert444Data*)Allocate(sizeof(Convert444Data)*numThreads);

    zero(h420Threads, sizeof(HANDLE)*numThreads);
    zero(convertInfo, sizeof(Convert444Data)*numThreads);

    for(int i=0; i<numThreads; i++)
    {
        convertInfo[i].width  = outputCX;
        convertInfo[i].height = outputCY;
        convertInfo[i].hSignalConvert  = CreateEvent(NULL, FALSE, FALSE, NULL);
        convertInfo[i].hSignalComplete = CreateEvent(NULL, FALSE, FALSE, NULL);
        convertInfo[i].bNV12 = bUsingQSV;
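        //when QSV is active the threads write into encoder-provided NV12 surfaces,
        //whose pitch comes from the encoder rather than from the output width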
        convertInfo[i].numThreads = numThreads;

        if(i == 0)
            convertInfo[i].startY = 0;
        else
            convertInfo[i].startY = convertInfo[i-1].endY;

        if(i == (numThreads-1))
            convertInfo[i].endY = outputCY;
        else
            convertInfo[i].endY = ((outputCY/numThreads)*(i+1)) & 0xFFFFFFFE;
    }

    bool bEncode;
    bool bFirstFrame = true;
    bool bFirstImage = true;
    bool bFirstEncode = true;
    bool bUseThreaded420 = bUseMultithreadedOptimizations && (OSGetTotalCores() > 1) && !bUsing444;

    List<HANDLE> completeEvents;

    if(bUseThreaded420)
    {
        for(int i=0; i<numThreads; i++)
        {
            h420Threads[i] = OSCreateThread((XTHREAD)Convert444Thread, convertInfo+i);
            completeEvents << convertInfo[i].hSignalComplete;
        }
    }

    //----------------------------------------

    QWORD streamTimeStart  = GetQPCTimeNS();
    QWORD lastStreamTime   = 0;
    QWORD firstFrameTimeMS = streamTimeStart/1000000;
    QWORD frameLengthNS    = 1000000000/fps;
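
    // unlike Example 1, this loop does not sleep to pace itself: each iteration
    // blocks on hVideoEvent, which is signalled once per frame elsewhere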

    while(WaitForSingleObject(hVideoEvent, INFINITE) == WAIT_OBJECT_0)
    {
        if (bShutdownVideoThread)
            break;

        QWORD renderStartTime = GetQPCTimeNS();
        totalStreamTime = DWORD((renderStartTime-streamTimeStart)/1000000);

        bool bRenderView = !IsIconic(hwndMain) && bRenderViewEnabled;

        QWORD renderStartTimeMS = renderStartTime/1000000;

        QWORD curStreamTime = latestVideoTimeNS;
        if (!lastStreamTime)
            lastStreamTime = curStreamTime-frameLengthNS;
        QWORD frameDelta = curStreamTime-lastStreamTime;
        //if (!lastStreamTime)
        //    lastStreamTime = renderStartTime-frameLengthNS;
        //QWORD frameDelta = renderStartTime-lastStreamTime;
        double fSeconds = double(frameDelta)*0.000000001;
        //lastStreamTime = renderStartTime;

        profileIn("video thread frame");

        //Log(TEXT("Stream Time: %llu"), curStreamTime);
        //Log(TEXT("frameDelta: %lf"), fSeconds);

        bool bUpdateBPS = false;
        profileIn("frame preprocessing and rendering");

        //------------------------------------

        if(bRequestKeyframe && keyframeWait > 0)
        {
            keyframeWait -= int(frameDelta);

            if(keyframeWait <= 0)
            {
                GetVideoEncoder()->RequestKeyframe();
                bRequestKeyframe = false;
            }
        }

        if(!bPushToTalkDown && pushToTalkTimeLeft > 0)
        {
            pushToTalkTimeLeft -= int(frameDelta);
            OSDebugOut(TEXT("time left: %d\r\n"), pushToTalkTimeLeft);
            if(pushToTalkTimeLeft <= 0)
            {
                pushToTalkTimeLeft = 0;
                bPushToTalkOn = false;
            }
        }

        //------------------------------------

        OSEnterMutex(hSceneMutex);

        if (bPleaseEnableProjector)
            ActuallyEnableProjector();
        else if(bPleaseDisableProjector)
            DisableProjector();

        if(bResizeRenderView)
        {
            GS->ResizeView();
            bResizeRenderView = false;
        }

        //------------------------------------

        if(scene)
        {
            profileIn("scene->Preprocess");
            scene->Preprocess();

            for(UINT i=0; i<globalSources.Num(); i++)
                globalSources[i].source->Preprocess();

            profileOut;

            scene->Tick(float(fSeconds));

            for(UINT i=0; i<globalSources.Num(); i++)
                globalSources[i].source->Tick(float(fSeconds));
        }

        //------------------------------------

        QWORD curBytesSent = network->GetCurrentSentBytes();
        curFramesDropped = network->NumDroppedFrames();

        bpsTime += fSeconds;
        if(bpsTime > 1.0f)
        {
            if(numSecondsWaited < 3)
                ++numSecondsWaited;

            //bytesPerSec = DWORD(curBytesSent - lastBytesSent);
            bytesPerSec = DWORD(curBytesSent - lastBytesSent[0]) / numSecondsWaited;

            if(bpsTime > 2.0)
                bpsTime = 0.0f;
            else
                bpsTime -= 1.0;

            if(numSecondsWaited == 3)
            {
                lastBytesSent[0] = lastBytesSent[1];
                lastBytesSent[1] = lastBytesSent[2];
                lastBytesSent[2] = curBytesSent;
            }
            else
                lastBytesSent[numSecondsWaited] = curBytesSent;

            captureFPS = fpsCounter;
            fpsCounter = 0;

            bUpdateBPS = true;
        }

        fpsCounter++;

        curStrain = network->GetPacketStrain();

        EnableBlending(TRUE);
        BlendFunction(GS_BLEND_SRCALPHA, GS_BLEND_INVSRCALPHA);

        //------------------------------------
        // render the mini render texture

        LoadVertexShader(mainVertexShader);
        LoadPixelShader(mainPixelShader);

        SetRenderTarget(mainRenderTextures[curRenderTarget]);

        Ortho(0.0f, baseSize.x, baseSize.y, 0.0f, -100.0f, 100.0f);
        SetViewport(0, 0, baseSize.x, baseSize.y);

        if(scene)
            scene->Render();

        //------------------------------------

        if(bTransitioning)
        {
            if(!transitionTexture)
            {
                transitionTexture = CreateTexture(baseCX, baseCY, GS_BGRA, NULL, FALSE, TRUE);
                if(transitionTexture)
                {
                    D3D10Texture *d3dTransitionTex = static_cast<D3D10Texture*>(transitionTexture);
                    D3D10Texture *d3dSceneTex = static_cast<D3D10Texture*>(mainRenderTextures[lastRenderTarget]);
                    GetD3D()->CopyResource(d3dTransitionTex->texture, d3dSceneTex->texture);
                }
                else
                    bTransitioning = false;
            }
            else if(transitionAlpha >= 1.0f)
            {
                delete transitionTexture;
                transitionTexture = NULL;

                bTransitioning = false;
            }
        }

        if(bTransitioning)
        {
            EnableBlending(TRUE);
            transitionAlpha += float(fSeconds)*5.0f;
            if(transitionAlpha > 1.0f)
                transitionAlpha = 1.0f;
        }
        else
            EnableBlending(FALSE);

        //------------------------------------
        // render the mini view thingy

        if (bProjector) {
            SetRenderTarget(projectorTexture);

            Vect2 renderFrameSize, renderFrameOffset;
            Vect2 projectorSize = Vect2(float(projectorWidth), float(projectorHeight));

            float projectorAspect = (projectorSize.x / projectorSize.y);
            float baseAspect = (baseSize.x / baseSize.y);

            if (projectorAspect < baseAspect) {
                float fProjectorWidth = float(projectorWidth);

                renderFrameSize   = Vect2(fProjectorWidth, fProjectorWidth / baseAspect);
                renderFrameOffset = Vect2(0.0f, (projectorSize.y-renderFrameSize.y) * 0.5f);
            } else {
                float fProjectorHeight = float(projectorHeight);

                renderFrameSize   = Vect2(fProjectorHeight * baseAspect, fProjectorHeight);
                renderFrameOffset = Vect2((projectorSize.x-renderFrameSize.x) * 0.5f, 0.0f);
            }

            DrawPreview(renderFrameSize, renderFrameOffset, projectorSize, curRenderTarget, Preview_Projector);

            SetRenderTarget(NULL);
        }

        if(bRenderView)
        {
            // Cache
            const Vect2 renderFrameSize = GetRenderFrameSize();
            const Vect2 renderFrameOffset = GetRenderFrameOffset();
            const Vect2 renderFrameCtrlSize = GetRenderFrameControlSize();

            SetRenderTarget(NULL);
            DrawPreview(renderFrameSize, renderFrameOffset, renderFrameCtrlSize, curRenderTarget,
                    bFullscreenMode ? Preview_Fullscreen : Preview_Standard);

            //draw selections if in edit mode
            if(bEditMode && !bSizeChanging)
            {
                if(scene) {
                    LoadVertexShader(solidVertexShader);
                    LoadPixelShader(solidPixelShader);
                    solidPixelShader->SetColor(solidPixelShader->GetParameter(0), 0xFF0000);
                    scene->RenderSelections(solidPixelShader);
                }
            }
        }
        else if(bForceRenderViewErase)
        {
            InvalidateRect(hwndRenderFrame, NULL, TRUE);
            UpdateWindow(hwndRenderFrame);
            bForceRenderViewErase = false;
        }

        //------------------------------------
        // actual stream output

        LoadVertexShader(mainVertexShader);
        LoadPixelShader(yuvScalePixelShader);

        Texture *yuvRenderTexture = yuvRenderTextures[curRenderTarget];
        SetRenderTarget(yuvRenderTexture);
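
        //select the RGB->YUV conversion matrix that matches the source color space
        //and range (full vs limited)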

        switch(colorDesc.matrix)
        {
        case ColorMatrix_GBR:
            yuvScalePixelShader->SetMatrix(hMatrix, colorDesc.fullRange ? (float*)yuvFullMat[0] : (float*)yuvMat[0]);
            break;
        case ColorMatrix_YCgCo:
            yuvScalePixelShader->SetMatrix(hMatrix, colorDesc.fullRange ? (float*)yuvFullMat[1] : (float*)yuvMat[1]);
            break;
        case ColorMatrix_BT2020NCL:
            yuvScalePixelShader->SetMatrix(hMatrix, colorDesc.fullRange ? (float*)yuvFullMat[2] : (float*)yuvMat[2]);
            break;
        case ColorMatrix_BT709:
            yuvScalePixelShader->SetMatrix(hMatrix, colorDesc.fullRange ? (float*)yuvFullMat[3] : (float*)yuvMat[3]);
            break;
        case ColorMatrix_SMPTE240M:
            yuvScalePixelShader->SetMatrix(hMatrix, colorDesc.fullRange ? (float*)yuvFullMat[4] : (float*)yuvMat[4]);
            break;
        default:
            yuvScalePixelShader->SetMatrix(hMatrix, colorDesc.fullRange ? (float*)yuvFullMat[5] : (float*)yuvMat[5]);
        }

        if(downscale < 2.01)
            yuvScalePixelShader->SetVector2(hScaleVal, 1.0f/baseSize);
        else if(downscale < 3.01)
            yuvScalePixelShader->SetVector2(hScaleVal, 1.0f/(outputSize*3.0f));

        Ortho(0.0f, outputSize.x, outputSize.y, 0.0f, -100.0f, 100.0f);
        SetViewport(0.0f, 0.0f, outputSize.x, outputSize.y);

        //why am I using scaleSize instead of outputSize for the texture?
        //because outputSize can be trimmed by up to three pixels due to 128-bit alignment.
        //using the scale function with outputSize can cause slightly inaccurate scaled images
        if(bTransitioning)
        {
            BlendFunction(GS_BLEND_ONE, GS_BLEND_ZERO);
            DrawSpriteEx(transitionTexture, 0xFFFFFFFF, 0.0f, 0.0f, scaleSize.x, scaleSize.y, 0.0f, 0.0f, 1.0f, 1.0f);
            BlendFunction(GS_BLEND_FACTOR, GS_BLEND_INVFACTOR, transitionAlpha);
        }

        DrawSpriteEx(mainRenderTextures[curRenderTarget], 0xFFFFFFFF, 0.0f, 0.0f, outputSize.x, outputSize.y, 0.0f, 0.0f, 1.0f, 1.0f);

        //------------------------------------

        if (bProjector && !copyWait)
            projectorSwap->Present(0, 0);

        if(bRenderView && !copyWait)
            static_cast<D3D10System*>(GS)->swap->Present(0, 0);

        OSLeaveMutex(hSceneMutex);

        profileOut;

        //------------------------------------
        // present/upload

        profileIn("GPU download and color conversion");

        bEncode = true;

        if(copyWait)
        {
            copyWait--;
            bEncode = false;
        }
        else
        {
            //audio sometimes takes a bit to start -- do not start processing frames until audio has started capturing
            if(!bRecievedFirstAudioFrame)
            {
                static bool bWarnedAboutNoAudio = false;
                if (renderStartTimeMS-firstFrameTimeMS > 10000 && !bWarnedAboutNoAudio)
                {
                    bWarnedAboutNoAudio = true;
                    //AddStreamInfo (TEXT ("WARNING: OBS is not receiving audio frames. Please check your audio devices."), StreamInfoPriority_Critical); 
                }
                bEncode = false;
            }
            else if(bFirstFrame)
            {
                firstFrameTimestamp = lastStreamTime/1000000;
                bFirstFrame = false;
            }

            if(!bEncode)
            {
                if(curYUVTexture == (NUM_RENDER_BUFFERS-1))
                    curYUVTexture = 0;
                else
                    curYUVTexture++;
            }
        }

        lastStreamTime = curStreamTime;

        if(bEncode)
        {
            UINT prevCopyTexture = (curCopyTexture == 0) ? NUM_RENDER_BUFFERS-1 : curCopyTexture-1;

            ID3D10Texture2D *copyTexture = copyTextures[curCopyTexture];
            profileIn("CopyResource");

            if(!bFirstEncode && bUseThreaded420)
            {
                WaitForMultipleObjects(completeEvents.Num(), completeEvents.Array(), TRUE, INFINITE);
                copyTexture->Unmap(0);
            }

            D3D10Texture *d3dYUV = static_cast<D3D10Texture*>(yuvRenderTextures[curYUVTexture]);
            GetD3D()->CopyResource(copyTexture, d3dYUV->texture);
            profileOut;

            ID3D10Texture2D *prevTexture = copyTextures[prevCopyTexture];

            if(bFirstImage) //ignore the first frame
                bFirstImage = false;
            else
            {
                HRESULT result;
                D3D10_MAPPED_TEXTURE2D map;
                if(SUCCEEDED(result = prevTexture->Map(0, D3D10_MAP_READ, 0, &map)))
                {
                    int prevOutBuffer = (curOutBuffer == 0) ? NUM_OUT_BUFFERS-1 : curOutBuffer-1;
                    int nextOutBuffer = (curOutBuffer == NUM_OUT_BUFFERS-1) ? 0 : curOutBuffer+1;

                    EncoderPicture &prevPicOut = outPics[prevOutBuffer];
                    EncoderPicture &picOut = outPics[curOutBuffer];
                    EncoderPicture &nextPicOut = outPics[nextOutBuffer];

                    if(!bUsing444)
                    {
                        profileIn("conversion to 4:2:0");

                        if(bUseThreaded420)
                        {
                            for(int i=0; i<numThreads; i++)
                            {
                                convertInfo[i].input     = (LPBYTE)map.pData;
                                convertInfo[i].inPitch   = map.RowPitch;
                                if(bUsingQSV)
                                {
                                    mfxFrameData& data = nextPicOut.mfxOut->Data;
                                    videoEncoder->RequestBuffers(&data);
                                    convertInfo[i].outPitch  = data.Pitch;
                                    convertInfo[i].output[0] = data.Y;
                                    convertInfo[i].output[1] = data.UV;
                                }
                                else
                                {
                                    convertInfo[i].output[0] = nextPicOut.picOut->img.plane[0];
                                    convertInfo[i].output[1] = nextPicOut.picOut->img.plane[1];
                                    convertInfo[i].output[2] = nextPicOut.picOut->img.plane[2];
                                }
                                SetEvent(convertInfo[i].hSignalConvert);
                            }

                            if(bFirstEncode)
                                bFirstEncode = bEncode = false;
                        }
                        else
                        {
                            if(bUsingQSV)
                            {
                                mfxFrameData& data = picOut.mfxOut->Data;
                                videoEncoder->RequestBuffers(&data);
                                LPBYTE output[] = {data.Y, data.UV};
                                Convert444toNV12((LPBYTE)map.pData, outputCX, map.RowPitch, data.Pitch, outputCY, 0, outputCY, output);
                            }
                            else
                                Convert444toNV12((LPBYTE)map.pData, outputCX, map.RowPitch, outputCX, outputCY, 0, outputCY, picOut.picOut->img.plane);
                            prevTexture->Unmap(0);
                        }

                        profileOut;
                    }

                    if(bEncode)
                    {
                        //encodeThreadProfiler.reset(::new ProfilerNode(TEXT("EncodeThread"), true));
                        //encodeThreadProfiler->MonitorThread(hEncodeThread);
                        curFramePic = &picOut;
                    }

                    curOutBuffer = nextOutBuffer;
                }
                else
                {
                    //We have to crash, or we end up deadlocking the thread when the convert threads are never signalled
                    if (result == DXGI_ERROR_DEVICE_REMOVED)
                    {
                        String message;

                        HRESULT reason = GetD3D()->GetDeviceRemovedReason();

                        switch (reason)
                        {
                        case DXGI_ERROR_DEVICE_RESET:
                        case DXGI_ERROR_DEVICE_HUNG:
                            message = TEXT("Your video card or driver froze and was reset. Please check for possible hardware / driver issues.");
                            break;
                        case DXGI_ERROR_DEVICE_REMOVED:
                            message = TEXT("Your video card disappeared from the system. Please check for possible hardware / driver issues.");
                            break;
                        case DXGI_ERROR_DRIVER_INTERNAL_ERROR:
                            message = TEXT("Your video driver reported an internal error. Please check for possible hardware / driver issues.");
                            break;
                        case DXGI_ERROR_INVALID_CALL:
                            message = TEXT("Your video driver reported an invalid call. Please check for possible driver issues.");
                            break;
                        default:
                            message = TEXT("DXGI_ERROR_DEVICE_REMOVED");
                            break;
                        }

                        message << TEXT(" This error can also occur if you have enabled opencl in x264 custom settings.");

                        CrashError (TEXT("Texture->Map failed: 0x%08x 0x%08x\r\n\r\n%s"), result, reason, message.Array());
                    }
                    else
                        CrashError (TEXT("Texture->Map failed: 0x%08x"), result);
                }
            }

            if(curCopyTexture == (NUM_RENDER_BUFFERS-1))
                curCopyTexture = 0;
            else
                curCopyTexture++;

            if(curYUVTexture == (NUM_RENDER_BUFFERS-1))
                curYUVTexture = 0;
            else
                curYUVTexture++;

            if (bCongestionControl && bDynamicBitrateSupported && !bTestStream)
            {
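                // rudimentary congestion control: on sustained strain, cut the bitrate
                // in proportion to the strain (e.g. strain 50 -> x(1 - 50/400) = x0.875)
                // at most once per 1.5 s; once strain clears, raise it by 5% of the
                // default bitrate at most once per 5 s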
                if (curStrain > 25)
                {
                    if (renderStartTimeMS - lastAdjustmentTime > 1500)
                    {
                        if (currentBitRate > 100)
                        {
                            currentBitRate = (int)(currentBitRate * (1.0 - (curStrain / 400)));
                            App->GetVideoEncoder()->SetBitRate(currentBitRate, -1);
                            if (!adjustmentStreamId)
                                adjustmentStreamId = App->AddStreamInfo (FormattedString(TEXT("Congestion detected, dropping bitrate to %d kbps"), currentBitRate).Array(), StreamInfoPriority_Low);
                            else
                                App->SetStreamInfo(adjustmentStreamId, FormattedString(TEXT("Congestion detected, dropping bitrate to %d kbps"), currentBitRate).Array());

                            bUpdateBPS = true;
                        }

                        lastAdjustmentTime = renderStartTimeMS;
                    }
                }
                else if (currentBitRate < defaultBitRate && curStrain < 5 && lastStrain < 5)
                {
                    if (renderStartTimeMS - lastAdjustmentTime > 5000)
                    {
                        if (currentBitRate < defaultBitRate)
                        {
                            currentBitRate += (int)(defaultBitRate * 0.05);
                            if (currentBitRate > defaultBitRate)
                                currentBitRate = defaultBitRate;
                        }

                        App->GetVideoEncoder()->SetBitRate(currentBitRate, -1);
                        /*if (!adjustmentStreamId)
                            App->AddStreamInfo (FormattedString(TEXT("Congestion clearing, raising bitrate to %d kbps"), currentBitRate).Array(), StreamInfoPriority_Low);
                        else
                            App->SetStreamInfo(adjustmentStreamId, FormattedString(TEXT("Congestion clearing, raising bitrate to %d kbps"), currentBitRate).Array());*/

                        bUpdateBPS = true;

                        lastAdjustmentTime = renderStartTimeMS;
                    }
                }
            }
        }

        lastRenderTarget = curRenderTarget;

        if(curRenderTarget == (NUM_RENDER_BUFFERS-1))
            curRenderTarget = 0;
        else
            curRenderTarget++;

        if(bUpdateBPS || !CloseDouble(curStrain, lastStrain) || curFramesDropped != lastFramesDropped)
        {
            PostMessage(hwndMain, OBS_UPDATESTATUSBAR, 0, 0);
            lastStrain = curStrain;

            lastFramesDropped = curFramesDropped;
        }

        //------------------------------------
        // we're about to sleep so we should flush the d3d command queue
        profileIn("flush");
        GetD3D()->Flush();
        profileOut;

        profileOut; //video encoding and uploading
        profileOut; //frame

        //------------------------------------
        // frame sync

        QWORD renderStopTime = GetQPCTimeNS();
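
        //a frame counts as late when more than one frame interval elapsed since the
        //previous one; optionally dump profiler data when the late-frame ratio
        //exceeds the configured percentage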

        if(bWasLaggedFrame = (frameDelta > frameLengthNS))
        {
            numLongFrames++;
            if(bLogLongFramesProfile && (numLongFrames/float(max(1, numTotalFrames)) * 100.) > logLongFramesProfilePercentage)
                DumpLastProfileData();
        }

        //OSDebugOut(TEXT("Frame adjust time: %d, "), frameTimeAdjust-totalTime);

        numTotalFrames++;
    }

    DisableProjector();

    //encodeThreadProfiler.reset();

    if(!bUsing444)
    {
        if(bUseThreaded420)
        {
            for(int i=0; i<numThreads; i++)
            {
                if(h420Threads[i])
                {
                    convertInfo[i].bKillThread = true;
                    SetEvent(convertInfo[i].hSignalConvert);

                    OSTerminateThread(h420Threads[i], 10000);
                    h420Threads[i] = NULL;
                }

                if(convertInfo[i].hSignalConvert)
                {
                    CloseHandle(convertInfo[i].hSignalConvert);
                    convertInfo[i].hSignalConvert = NULL;
                }

                if(convertInfo[i].hSignalComplete)
                {
                    CloseHandle(convertInfo[i].hSignalComplete);
                    convertInfo[i].hSignalComplete = NULL;
                }
            }

            if(!bFirstEncode)
            {
                ID3D10Texture2D *copyTexture = copyTextures[curCopyTexture];
                copyTexture->Unmap(0);
            }
        }

        if(bUsingQSV)
            for(int i = 0; i < NUM_OUT_BUFFERS; i++)
                delete outPics[i].mfxOut;
        else
            for(int i=0; i<NUM_OUT_BUFFERS; i++)
            {
                x264_picture_clean(outPics[i].picOut);
                delete outPics[i].picOut;
            }
    }

    Free(h420Threads);
    Free(convertInfo);

    Log(TEXT("Total frames rendered: %d, number of late frames: %d (%0.2f%%) (it's okay for some frames to be late)"), numTotalFrames, numLongFrames, (double(numLongFrames)/double(numTotalFrames))*100.0);
}
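
The recovery branch near the top of this excerpt is worth isolating: once strain has stayed low, the bitrate creeps back toward the configured default in 5% steps, at most once every five seconds, and never past the default. Below is a minimal standalone sketch of just that policy (hypothetical names; the congestion drop branch above it is the mirror image):

// Minimal sketch of the bitrate recovery policy above (hypothetical names;
// the congestion drop branch works the same way in reverse).
struct BitrateGovernor {
    int defaultKbps = 1000;      // the configured encoder bitrate
    int currentKbps = 1000;      // the rate currently applied to the encoder
    unsigned lastAdjustMS = 0;

    // strain values are the congestion measure used above (low == healthy)
    int Update(double curStrain, double lastStrain, unsigned nowMS) {
        if (curStrain < 5 && lastStrain < 5 && currentKbps < defaultKbps &&
            nowMS - lastAdjustMS > 5000) {
            currentKbps += defaultKbps / 20;   // +5% of the configured default
            if (currentKbps > defaultKbps)
                currentKbps = defaultKbps;     // never overshoot the default
            lastAdjustMS = nowMS;
        }
        return currentKbps;
    }
};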
Example No. 4
void HandleGLSceneUpdate(HDC hDC)
{
    if(!bTargetAcquired && hdcAcquiredDC == NULL)
    {
        logOutput << CurrentTimeString() << "setting up gl data" << endl;
        PIXELFORMATDESCRIPTOR pfd;

        hwndTarget = WindowFromDC(hDC);

        int pixFormat = GetPixelFormat(hDC);
        DescribePixelFormat(hDC, pixFormat, sizeof(pfd), &pfd);

        if(pfd.cColorBits == 32 && hwndTarget)
        {
            bTargetAcquired = true;
            hdcAcquiredDC = hDC;
            glcaptureInfo.format = GS_BGR;
        }

        OSInitializeTimer();
    }

    if(hDC == hdcAcquiredDC)
    {
        if(bCapturing && WaitForSingleObject(hSignalEnd, 0) == WAIT_OBJECT_0)
            bStopRequested = true;

        if(bCapturing && !IsWindow(hwndOBS))
        {
            hwndOBS = NULL;
            bStopRequested = true;
        }

        if(bCapturing && bStopRequested)
        {
            RUNEVERYRESET logOutput << CurrentTimeString() << "stop requested, terminating gl capture" << endl;

            ClearGLData();
            bCapturing = false;
            bStopRequested = false;
            bReacquiring = false;
        }

        if(!bCapturing && WaitForSingleObject(hSignalRestart, 0) == WAIT_OBJECT_0)
        {
            hwndOBS = FindWindow(OBS_WINDOW_CLASS, NULL);
            if(hwndOBS)
                bCapturing = true;
        }

        RECT rc;
        GetClientRect(hwndTarget, &rc);

        if(bCapturing && bReacquiring)
        {
            if(lastCX != rc.right || lastCY != rc.bottom) //restart the timeout if the window is still being resized within the 3 seconds
            {
                reacquireStart = OSGetTimeMicroseconds();
                lastCX = rc.right;
                lastCY = rc.bottom;
            }

            if(OSGetTimeMicroseconds()-reacquireStart >= 3000000) { //3 seconds to reacquire
                RUNEVERYRESET logOutput << CurrentTimeString() << "reacquiring gl due to resize..." << endl;
                bReacquiring = false;
            } else {
                return;
            }
        }

        if(bCapturing && (!bHasTextures || rc.right != glcaptureInfo.cx || rc.bottom != glcaptureInfo.cy))
        {
            if (!rc.right || !rc.bottom)
                return;

            if(bHasTextures) //resizing
            {
                ClearGLData();
                bReacquiring = true;
                reacquireStart = OSGetTimeMicroseconds();
                lastCX = rc.right;
                lastCY = rc.bottom;
                return;
            }
            else
            {
                if(hwndOBS)
                    DoGLCPUHook(rc);
                else
                    ClearGLData();
            }
        }

        LONGLONG timeVal = OSGetTimeMicroseconds();

        //check keep alive state, dumb but effective
        if(bCapturing)
        {
            if (!keepAliveTime)
                keepAliveTime = timeVal;

            if((timeVal-keepAliveTime) > 5000000)
            {
                HANDLE hKeepAlive = OpenEvent(EVENT_ALL_ACCESS, FALSE, strKeepAlive.c_str());
                if (hKeepAlive) {
                    CloseHandle(hKeepAlive);
                } else {
                    ClearGLData();
                    logOutput << CurrentTimeString() << "Keepalive no longer found on gl, freeing capture data" << endl;
                    bCapturing = false;
                }

                keepAliveTime = timeVal;
            }
        }

        if(bHasTextures)
        {
            LONGLONG frameTime;
            if(bCapturing)
            {
                if(copyData)
                {
                    if(frameTime = copyData->frameTime)
                    {
                        LONGLONG timeElapsed = timeVal-lastTime;

                        if(timeElapsed >= frameTime)
                        {
                            lastTime += frameTime;
                            if(timeElapsed > frameTime*2)
                                lastTime = timeVal;

                            GLuint texture = gltextures[curCapture];
                            DWORD nextCapture = (curCapture == NUM_BUFFERS-1) ? 0 : (curCapture+1);

                            glReadBuffer(GL_BACK);
                            glBindBuffer(GL_PIXEL_PACK_BUFFER, texture);

                            if(glLockedTextures[curCapture])
                            {
                                OSEnterMutex(glDataMutexes[curCapture]);

                                glUnmapBuffer(GL_PIXEL_PACK_BUFFER);
                                glLockedTextures[curCapture] = false;

                                OSLeaveMutex(glDataMutexes[curCapture]);
                            }

                            glReadPixels(0, 0, glcaptureInfo.cx, glcaptureInfo.cy, GL_BGRA, GL_UNSIGNED_BYTE, 0);

                            //----------------------------------

                            glBindBuffer(GL_PIXEL_PACK_BUFFER, gltextures[nextCapture]);
                            pCopyData = (void*)glMapBuffer(GL_PIXEL_PACK_BUFFER, GL_READ_ONLY);
                            if(pCopyData)
                            {
                                curCPUTexture = nextCapture;
                                glLockedTextures[nextCapture] = true;

                                RUNEVERYRESET logOutput << CurrentTimeString() << "successfully capturing gl frames via RAM" << endl;

                                SetEvent(hCopyEvent);
                            } else {
                                RUNEVERYRESET logOutput << CurrentTimeString() << "one or more gl frames failed to capture for some reason" << endl;
                            }

                            //----------------------------------

                            glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);

                            curCapture = nextCapture;
                        }
                    }
                }
            }
            else {
                RUNEVERYRESET logOutput << CurrentTimeString() << "no longer capturing, terminating gl capture" << endl;
                ClearGLData();
            }
        }
    }
}
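
The capture path above relies on two pixel-pack buffer objects used in ping-pong fashion: glReadPixels targets one PBO (an asynchronous GPU-side copy), while the PBO filled on the previous frame is mapped and handed downstream. A minimal sketch of the same pattern, assuming the GL entry points are already loaded (e.g. via GLEW) and the PBOs were created with glBufferData(GL_PIXEL_PACK_BUFFER, cx*cy*4, NULL, GL_STREAM_READ):

#include <GL/glew.h>

// Two PBOs used in ping-pong fashion for asynchronous framebuffer readback.
static GLuint pbos[2];
static int    cur = 0;

template<class Fn>
void CaptureFrame(int cx, int cy, Fn consume)
{
    int next = (cur + 1) % 2;

    glReadBuffer(GL_BACK);

    // start an async GPU->PBO transfer of the back buffer; the call returns
    // immediately because the target is a buffer object, not client memory
    glBindBuffer(GL_PIXEL_PACK_BUFFER, pbos[cur]);
    glReadPixels(0, 0, cx, cy, GL_BGRA, GL_UNSIGNED_BYTE, 0);

    // map the *other* PBO, which the GPU filled during the previous frame
    glBindBuffer(GL_PIXEL_PACK_BUFFER, pbos[next]);
    if (void *p = glMapBuffer(GL_PIXEL_PACK_BUFFER, GL_READ_ONLY)) {
        consume(p);                            // hand last frame's pixels downstream
        glUnmapBuffer(GL_PIXEL_PACK_BUFFER);
    }

    glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);
    cur = next;
}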
Example No. 5
DWORD DeviceSource::SampleThread(DeviceSource *source)
{
    HANDLE hSampleMutex = source->hSampleMutex;
    LONGLONG lastTime = GetQPCTime100NS(), bufferTime = 0, frameWait = 0, curBufferTime = source->bufferTime;
    LONGLONG lastSampleTime = 0;

    bool bFirstFrame = true;
    bool bFirstDelay = true;

    while (WaitForSingleObject(source->hStopSampleEvent, 2) == WAIT_TIMEOUT) {
        LONGLONG t = GetQPCTime100NS();
        LONGLONG delta = t-lastTime;
        lastTime = t;

        OSEnterMutex(hSampleMutex);

        if (source->samples.Num()) {
            if (bFirstFrame) {
                bFirstFrame = false;
                lastSampleTime = source->samples[0]->timestamp;
            }

            //wait until the requested delay has been buffered before processing packets
            if (bufferTime >= source->bufferTime) {
                frameWait += delta;

                //if delay time was adjusted downward, remove packets accordingly
                bool bBufferTimeChanged = (curBufferTime != source->bufferTime);
                if (bBufferTimeChanged) {
                    if (curBufferTime > source->bufferTime) {
                        if (source->audioOut)
                            source->audioOut->FlushSamples();

                        LONGLONG lostTime = curBufferTime - source->bufferTime;
                        bufferTime -= lostTime;

                        if (source->samples.Num()) {
                            LONGLONG startTime = source->samples[0]->timestamp;

                            while (source->samples.Num()) {
                                SampleData *sample = source->samples[0];

                                if ((sample->timestamp - startTime) >= lostTime)
                                    break;

                                lastSampleTime = sample->timestamp;

                                sample->Release();
                                source->samples.Remove(0);
                            }
                        }
                    }

                    curBufferTime = source->bufferTime;
                }

                while (source->samples.Num()) {
                    SampleData *sample = source->samples[0];

                    LONGLONG timestamp = sample->timestamp;
                    LONGLONG sampleTime = timestamp - lastSampleTime;

                    //timestamps from low-quality devices can be wildly inconsistent,
                    //so bypass any unusual timestamp offsets.
                    if (sampleTime < -6000000 || sampleTime > 6000000) {
                        //OSDebugOut(TEXT("sample time: %lld\r\n"), sampleTime);
                        sampleTime = 0;
                    }

                    if (frameWait < sampleTime)
                        break;

                    if (sample->bAudio) {
                        if (source->audioOut)
                            source->audioOut->ReceiveAudio(sample->lpData, sample->dataLength);

                        sample->Release();
                    } else {
                        SafeRelease(source->latestVideoSample);
                        source->latestVideoSample = sample;
                    }

                    source->samples.Remove(0);

                    if (sampleTime > 0)
                        frameWait -= sampleTime;

                    lastSampleTime = timestamp;
                }
            }
        }

        OSLeaveMutex(hSampleMutex);

        if (!bFirstFrame && bufferTime < source->bufferTime)
            bufferTime += delta;
    }

    return 0;
}
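
The loop above is a token-bucket style pacer: elapsed wall-clock time is banked in frameWait and then spent on queued samples according to their timestamp gaps, with implausible gaps (beyond ±600 ms, i.e. ±6000000 in 100ns units) zeroed out. A condensed sketch of just that mechanism (hypothetical types; the real code also splits audio from video and holds hSampleMutex):

#include <deque>

// Minimal sketch of the pacing scheme in SampleThread above.
struct Sample { long long timestamp; };      // 100ns units, as in the code above

struct Pacer {
    long long frameWait = 0;                 // banked wall-clock time
    long long lastTS    = 0;                 // timestamp of last delivered sample

    // elapsed100ns: wall-clock time since the previous tick
    template<class Fn>
    void Tick(long long elapsed100ns, std::deque<Sample> &q, Fn deliver) {
        frameWait += elapsed100ns;
        while (!q.empty()) {
            long long gap = q.front().timestamp - lastTS;
            if (gap < -6000000 || gap > 6000000)
                gap = 0;                     // bypass absurd device timestamps
            if (frameWait < gap)
                break;                       // not yet time for this sample
            lastTS = q.front().timestamp;
            deliver(q.front());
            q.pop_front();
            if (gap > 0)
                frameWait -= gap;            // spend the banked time
        }
    }
};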
Example No. 6
void OBSAPIInterface::HandleHotkeys()
{
    List<DWORD> hitKeys;

    DWORD modifiers = 0;
    if(GetAsyncKeyState(VK_MENU) & 0x8000)
        modifiers |= HOTKEYF_ALT;
    if(GetAsyncKeyState(VK_CONTROL) & 0x8000)
        modifiers |= HOTKEYF_CONTROL;
    if(GetAsyncKeyState(VK_SHIFT) & 0x8000)
        modifiers |= HOTKEYF_SHIFT;

    OSEnterMutex(App->hHotkeyMutex);

    for(UINT i=0; i<hotkeys.Num(); i++)
    {
        HotkeyInfo &info = hotkeys[i];

        DWORD hotkeyVK          = LOBYTE(info.hotkey);
        DWORD hotkeyModifiers   = HIBYTE(info.hotkey);

        hotkeyModifiers &= ~(HOTKEYF_EXT);

        bool bModifiersMatch = false;
        if (GlobalConfig->GetInt(TEXT("General"), TEXT("AllowOtherHotkeyModifiers"), true))
            bModifiersMatch = ((hotkeyModifiers & modifiers) == hotkeyModifiers); //allows other modifiers to be pressed
        else 
            bModifiersMatch = (hotkeyModifiers == modifiers);

        if(hotkeyModifiers && !hotkeyVK) //modifier-only hotkey
        {
            if((hotkeyModifiers & modifiers) == hotkeyModifiers)
            {
                if(!info.bHotkeyDown)
                {
                    PostMessage(hwndMain, OBS_CALLHOTKEY, TRUE, info.hotkeyID);
                    info.bDownSent = true;
                    info.bHotkeyDown = true;
                }

                continue;
            }
        }
        else
        {
            if(bModifiersMatch)
            {
                short keyState   = GetAsyncKeyState(hotkeyVK);
                bool bDown       = (keyState & 0x8000) != 0;
                bool bWasPressed = (keyState & 0x1) != 0;

                if(bDown || bWasPressed)
                {
                    if(!info.bHotkeyDown && info.bModifiersDown) //only triggers the hotkey if the actual main key was pressed second
                    {
                        PostMessage(hwndMain, OBS_CALLHOTKEY, TRUE, info.hotkeyID);
                        info.bDownSent = true;
                    }

                    info.bHotkeyDown = true;
                    if(bDown)
                        continue;
                }
            }
        }

        info.bModifiersDown = bModifiersMatch;

        if(info.bHotkeyDown) //key up
        {
            if(info.bDownSent)
            {
                PostMessage(hwndMain, OBS_CALLHOTKEY, FALSE, info.hotkeyID);
                info.bDownSent = false;
            }

            info.bHotkeyDown = false;
        }
    }

    OSLeaveMutex(App->hHotkeyMutex);
}
Example No. 7
File: API.cpp Project: Aslai/OBS
void OBSAPIInterface::HandleHotkeys()
{
    List<DWORD> hitKeys;

    static bool allow_other_hotkey_modifiers;
    static bool uplay_overlay_compatibility;
    static bool set_vars = false;

    /* only query these config variables once */
    if (!set_vars)
    {
        allow_other_hotkey_modifiers = !!GlobalConfig->GetInt(TEXT("General"), TEXT("AllowOtherHotkeyModifiers"), true);
        uplay_overlay_compatibility = !!GlobalConfig->GetInt(L"General", L"UplayOverlayCompatibility", false);
        set_vars = true;
    }

    DWORD modifiers = 0;
    if(GetAsyncKeyState(VK_MENU) & 0x8000)
        modifiers |= HOTKEYF_ALT;
    if(GetAsyncKeyState(VK_CONTROL) & 0x8000)
        modifiers |= HOTKEYF_CONTROL;
    if (!uplay_overlay_compatibility)
        if (GetAsyncKeyState(VK_SHIFT) & 0x8000)
            modifiers |= HOTKEYF_SHIFT;

    OSEnterMutex(App->hHotkeyMutex);

    for(UINT i=0; i<hotkeys.Num(); i++)
    {
        HotkeyInfo &info = hotkeys[i];

        DWORD hotkeyVK          = LOBYTE(info.hotkey);
        DWORD hotkeyModifiers   = HIBYTE(info.hotkey);
        DWORD xinputNum         = LOWORD(info.hotkey);
        DWORD xinputButton      = HIWORD(info.hotkey);

        hotkeyModifiers &= ~(HOTKEYF_EXT);

        if(xinputButton)
        {
            XINPUT_STATE state = { 0 };

            if(XInputGetState(xinputNum, &state) == ERROR_SUCCESS)
            {
                if(state.Gamepad.bLeftTrigger >= 85)
                    state.Gamepad.wButtons |= XINPUT_GAMEPAD_LEFT_TRIGGER;

                if(state.Gamepad.bRightTrigger >= 85)
                    state.Gamepad.wButtons |= XINPUT_GAMEPAD_RIGHT_TRIGGER;

                if((state.Gamepad.wButtons & xinputButton) != 0 && !info.bHotkeyDown)
                {
                    PostMessage(hwndMain, OBS_CALLHOTKEY, TRUE, info.hotkeyID);
                    info.bDownSent = true;
                    info.bHotkeyDown = true;
                }
            }

            info.bModifiersDown = 0;
        }
        else
        {
            bool bModifiersMatch = false;
            if(allow_other_hotkey_modifiers)
                bModifiersMatch = ((hotkeyModifiers & modifiers) == hotkeyModifiers); //allows other modifiers to be pressed
            else
                bModifiersMatch = (hotkeyModifiers == modifiers);

            if(hotkeyModifiers && !hotkeyVK) //modifier-only hotkey
            {
                if((hotkeyModifiers & modifiers) == hotkeyModifiers)
                {
                    if(!info.bHotkeyDown)
                    {
                        PostMessage(hwndMain, OBS_CALLHOTKEY, TRUE, info.hotkeyID);
                        info.bDownSent = true;
                        info.bHotkeyDown = true;
                    }

                    continue;
                }
            }
            else
            {
                if (bModifiersMatch && !(uplay_overlay_compatibility && hotkeyVK == VK_F2))
                {
                    short keyState = GetAsyncKeyState(hotkeyVK);
                    bool bDown = (keyState & 0x8000) != 0;
                    bool bWasPressed = (keyState & 0x1) != 0;

                    if(bDown || bWasPressed)
                    {
                        if(!info.bHotkeyDown && info.bModifiersDown) //only triggers the hotkey if the actual main key was pressed second
                        {
                            PostMessage(hwndMain, OBS_CALLHOTKEY, TRUE, info.hotkeyID);
                            info.bDownSent = true;
                        }

                        info.bHotkeyDown = true;
                        if(bDown)
                            continue;
                    }
                }
            }

            info.bModifiersDown = bModifiersMatch;
        }

        if(info.bHotkeyDown) //key up
        {
            if(info.bDownSent)
            {
                PostMessage(hwndMain, OBS_CALLHOTKEY, FALSE, info.hotkeyID);
                info.bDownSent = false;
            }

            info.bHotkeyDown = false;
        }
    }

    OSLeaveMutex(App->hHotkeyMutex);
}
Example No. 8
void RTMPPublisher::SocketLoop()
{
    bool canWrite = false;

    int delayTime;
    int latencyPacketSize;

    WSANETWORKEVENTS networkEvents;

    SetThreadPriority(GetCurrentThread(), THREAD_PRIORITY_ABOVE_NORMAL);

    WSAEventSelect(rtmp->m_sb.sb_socket, hWriteEvent, FD_READ|FD_WRITE|FD_CLOSE);

    //Low latency mode works by delaying delayTime ms between calls to send() and only sending
    //a buffer as large as latencyPacketSize at once. This causes keyframes and other data bursts
    //to be sent over several sends instead of one large one.
    if (lowLatencyMode == LL_MODE_AUTO)
    {
        //Auto mode aims for a constant rate matching the stream bitrate and segments
        //sends into MTU-sized packets (test packet captures indicated that despite
        //Nagle's algorithm being enabled, the size of the send() buffer still
        //matters for some reason). Note that delays become very short at this rate,
        //and it can take a while for the buffer to empty after a keyframe.
        delayTime = 1400.0f / (dataBufferSize / 1000.0f);
        latencyPacketSize = 1460;
    }
    else if (lowLatencyMode == LL_MODE_FIXED)
    {
        //We use latencyFactor - 2 to guarantee we're always sending at a slightly higher
        //rate than the maximum expected data rate so we don't get backed up
        latencyPacketSize = dataBufferSize / (latencyFactor - 2);
        delayTime = 1000 / latencyFactor;
    }
    else
    {
        latencyPacketSize = dataBufferSize;
        delayTime = 0;
    }
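    //For illustration only (hypothetical numbers): with dataBufferSize = 700000
    //bytes, auto mode yields delayTime = 1400 / 700 = 2 ms between sends of at
    //most 1460 bytes (one MTU-ish segment), i.e. a drain rate of ~730 KB/s.
    //Fixed mode with latencyFactor = 20 sends dataBufferSize/18 bytes every 50 ms.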

    SetupSendBacklogEvent ();

    HANDLE hObjects[3];

    hObjects[0] = hWriteEvent;
    hObjects[1] = hBufferEvent;
    hObjects[2] = hSendBacklogEvent;

    for (;;)
    {
        if (bStopping && WaitForSingleObject(hSocketLoopExit, 0) != WAIT_TIMEOUT)
        {
            OSEnterMutex(hDataBufferMutex);
            if (curDataBufferLen == 0)
            {
                //OSDebugOut (TEXT("Exiting on empty buffer.\n"));
                OSLeaveMutex(hDataBufferMutex);
                break;
            }

            //OSDebugOut (TEXT("Want to exit, but %d bytes remain.\n"), curDataBufferLen);
            OSLeaveMutex(hDataBufferMutex);
        }

        int status = WaitForMultipleObjects (3, hObjects, FALSE, INFINITE);
        if (status == WAIT_ABANDONED || status == WAIT_FAILED)
        {
            Log(TEXT("RTMPPublisher::SocketLoop: Aborting due to WaitForMultipleObjects failure"));
            App->PostStopMessage();
            return;
        }

        if (status == WAIT_OBJECT_0)
        {
            //Socket event
            if (WSAEnumNetworkEvents (rtmp->m_sb.sb_socket, NULL, &networkEvents))
            {
                Log(TEXT("RTMPPublisher::SocketLoop: Aborting due to WSAEnumNetworkEvents failure, %d"), WSAGetLastError());
                App->PostStopMessage();
                return;
            }

            if (networkEvents.lNetworkEvents & FD_WRITE)
                canWrite = true;

            if (networkEvents.lNetworkEvents & FD_CLOSE)
            {
                if (bStopping)
                    Log(TEXT("RTMPPublisher::SocketLoop: Aborting due to FD_CLOSE during shutdown, buffered data lost, error %d"), networkEvents.iErrorCode[FD_CLOSE_BIT]);
                else
                    Log(TEXT("RTMPPublisher::SocketLoop: Aborting due to FD_CLOSE, error %d"), networkEvents.iErrorCode[FD_CLOSE_BIT]);
                FatalSocketShutdown ();
                return;
            }

            if (networkEvents.lNetworkEvents & FD_READ)
            {
                BYTE discard[16384];
                int ret, errorCode;
                BOOL fatalError = FALSE;

                for (;;)
                {
                    ret = recv(rtmp->m_sb.sb_socket, (char *)discard, sizeof(discard), 0);
                    if (ret == -1)
                    {
                        errorCode = WSAGetLastError();

                        if (errorCode == WSAEWOULDBLOCK)
                            break;

                        fatalError = TRUE;
                    }
                    else if (ret == 0)
                    {
                        errorCode = 0;
                        fatalError = TRUE;
                    }

                    if (fatalError)
                    {
                        Log(TEXT("RTMPPublisher::SocketLoop: Socket error, recv() returned %d, GetLastError() %d"), ret, errorCode);
                        FatalSocketShutdown ();
                        return;
                    }
                }
            }
        }
        else if (status == WAIT_OBJECT_0 + 2)
        {
            //Ideal send backlog event
            ULONG idealSendBacklog;

            if (!idealsendbacklogquery(rtmp->m_sb.sb_socket, &idealSendBacklog))
            {
                int curTCPBufSize, curTCPBufSizeSize = sizeof(curTCPBufSize);
                getsockopt (rtmp->m_sb.sb_socket, SOL_SOCKET, SO_SNDBUF, (char *)&curTCPBufSize, &curTCPBufSizeSize);

                if (curTCPBufSize < (int)idealSendBacklog)
                {
                    int bufferSize = (int)idealSendBacklog;
                    setsockopt(rtmp->m_sb.sb_socket, SOL_SOCKET, SO_SNDBUF, (const char *)&bufferSize, sizeof(bufferSize));
                    Log(TEXT("RTMPPublisher::Socketloop: Increasing socket send buffer to ISB %d"), idealSendBacklog);
                }
            }

            SetupSendBacklogEvent ();
            continue;
        }
        
        if (canWrite)
        {
            bool exitLoop = false;
            do
            {
                OSEnterMutex(hDataBufferMutex);

                if (!curDataBufferLen)
                {
                    //this is now an expected occasional condition due to the use of
                    //auto-reset events: the buffer may already have been emptied in a
                    //previous loop cycle, especially in low latency mode.
                    OSLeaveMutex(hDataBufferMutex);
                    //Log(TEXT("RTMPPublisher::SocketLoop: Trying to send, but no data available?!"));
                    break;
                }
                
                int ret;
                if (lowLatencyMode != LL_MODE_NONE)
                {
                    int sendLength = min (latencyPacketSize, curDataBufferLen);
                    ret = send(rtmp->m_sb.sb_socket, (const char *)dataBuffer, sendLength, 0);
                }
                else
                {
                    ret = send(rtmp->m_sb.sb_socket, (const char *)dataBuffer, curDataBufferLen, 0);
                }

                if (ret > 0)
                {
                    if (curDataBufferLen - ret)
                        memmove(dataBuffer, dataBuffer + ret, curDataBufferLen - ret);
                    curDataBufferLen -= ret;

                    bytesSent += ret;

                    SetEvent(hBufferSpaceAvailableEvent);
                }
                else
                {
                    int errorCode;
                    BOOL fatalError = FALSE;

                    if (ret == -1)
                    {
                        errorCode = WSAGetLastError();

                        if (errorCode == WSAEWOULDBLOCK)
                        {
                            canWrite = false;
                            OSLeaveMutex(hDataBufferMutex);
                            break;
                        }

                        fatalError = TRUE;
                    }
                    else if (ret == 0)
                    {
                        errorCode = 0;
                        fatalError = TRUE;
                    }

                    if (fatalError)
                    {
                        //connection closed, or connection was aborted / socket closed / etc, that's a fatal error for us.
                        Log(TEXT("RTMPPublisher::SocketLoop: Socket error, send() returned %d, GetLastError() %d"), ret, errorCode);
                        OSLeaveMutex(hDataBufferMutex);
                        FatalSocketShutdown ();
                        return;
                    }
                }

                //finish writing for now
                if (curDataBufferLen <= 1000)
                    exitLoop = true;

                OSLeaveMutex(hDataBufferMutex);

                if (delayTime)
                    Sleep (delayTime);
            } while (!exitLoop);
        }
    }

    Log(TEXT("RTMPPublisher::SocketLoop: Graceful loop exit"));
}
Example No. 9

void DeviceAudioSource::OnAudioDeviceChanged(const String &MonitorDevices, const String &SecMonitor)
{
	if (this->MonitorDevices.Compare(MonitorDevices.Array()) && this->SecMonitor.Compare(SecMonitor.Array()))
	{
		return;
	}

	Log::writeMessage(LOG_RTSPSERV, 1, "%s invoke begin!", __FUNCTION__);
	OSEnterMutex(hAudioMutex);

	bSameDevice = false;

	if (m_pAudioWaveOut)
	{
		m_pAudioWaveOut->Uninitialize();
	}

	if (NULL == m_pAudioWaveOut)
	{
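		// "停用" is Chinese for "Disable"; this code apparently matches whichever
		// label (English or Chinese) the UI supplies.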
		if (!MonitorDevices.Compare(TEXT("停用")) && !m_pAudioWaveOut)
		{
			m_pAudioWaveOut = new AudioWaveOut;
		}
	}
	else
	{
		if ((MonitorDevices.Compare(TEXT("Disable")) || MonitorDevices.Compare(TEXT("停用"))))
		{
			delete m_pAudioWaveOut;
			m_pAudioWaveOut = NULL;
		}
	}

	if (NULL != m_pAudioWaveOut)
	{
		m_pAudioWaveOut->Initialize(MonitorDevices.Array(), 2, device->audioFormat.nSamplesPerSec, device->audioFormat.wBitsPerSample);
	}

	if (m_pSecWaveOut)
	{
		m_pSecWaveOut->Uninitialize();
	}

	if (SecMonitor.CompareI(MonitorDevices.Array()) && (!SecMonitor.Compare(TEXT("Disable")) || !SecMonitor.Compare(TEXT("停用"))))
	{
		bSameDevice = true;
	}
	else if (!SecMonitor.Compare(TEXT("停用")) && !m_pSecWaveOut)
	{
		m_pSecWaveOut = new AudioWaveOut;
	}
	else if (m_pSecWaveOut)
	{
		if ((SecMonitor.Compare(TEXT("Disable")) || SecMonitor.Compare(TEXT("停用"))))
		{
			delete m_pSecWaveOut;
			m_pSecWaveOut = NULL;
		}
	}

	if (m_pSecWaveOut)
	{
		m_pSecWaveOut->Initialize(SecMonitor.Array(), 2, device->audioFormat.nSamplesPerSec, device->audioFormat.wBitsPerSample);
	}

	OSLeaveMutex(hAudioMutex);

	this->MonitorDevices = MonitorDevices;
	this->SecMonitor = SecMonitor;

	Log::writeMessage(LOG_RTSPSERV, 1, "%s invoke end!", __FUNCTION__);
}
Example No. 10
File: API.cpp Project: Aslai/OBS
bool OBS::SetScene(CTSTR lpScene)
{
    if(bDisableSceneSwitching)
        return false;

    HWND hwndScenes = GetDlgItem(hwndMain, ID_SCENES);
    UINT curSel = (UINT)SendMessage(hwndScenes, LB_GETCURSEL, 0, 0);

    //-------------------------

    if(curSel != LB_ERR)
    {
        UINT textLen = (UINT)SendMessage(hwndScenes, LB_GETTEXTLEN, curSel, 0);

        String strLBName;
        strLBName.SetLength(textLen);

        SendMessage(hwndScenes, LB_GETTEXT, curSel, (LPARAM)strLBName.Array());
        if(!strLBName.CompareI(lpScene))
        {
            UINT id = (UINT)SendMessage(hwndScenes, LB_FINDSTRINGEXACT, -1, (LPARAM)lpScene);
            if(id == LB_ERR)
                return false;

            SendMessage(hwndScenes, LB_SETCURSEL, id, 0);
        }
    }
    else
    {
        UINT id = (UINT)SendMessage(hwndScenes, LB_FINDSTRINGEXACT, -1, (LPARAM)lpScene);
        if(id == LB_ERR)
            return false;

        SendMessage(hwndScenes, LB_SETCURSEL, id, 0);
    }

    //-------------------------

    XElement *scenes = scenesConfig.GetElement(TEXT("scenes"));
    XElement *newSceneElement = scenes->GetElement(lpScene);
    if(!newSceneElement)
        return false;

    if(sceneElement == newSceneElement)
        return true;

    sceneElement = newSceneElement;

    CTSTR lpClass = sceneElement->GetString(TEXT("class"));
    if(!lpClass)
    {
        AppWarning(TEXT("OBS::SetScene: no class found for scene '%s'"), newSceneElement->GetName());
        return false;
    }

    DWORD sceneChangeStartTime;

    if(bRunning)
    {
        Log(TEXT("++++++++++++++++++++++++++++++++++++++++++++++++++++++"));
        Log(TEXT("  New Scene"));

        sceneChangeStartTime = OSGetTime();
    }

    XElement *sceneData = newSceneElement->GetElement(TEXT("data"));

    //-------------------------

    Scene *newScene = NULL;
    if(bRunning)
        newScene = CreateScene(lpClass, sceneData);

    //-------------------------

    HWND hwndSources = GetDlgItem(hwndMain, ID_SOURCES);

    SendMessage(hwndSources, WM_SETREDRAW, (WPARAM)FALSE, (LPARAM) 0);

    App->scaleItem = NULL;

    bChangingSources = true;
    ListView_DeleteAllItems(hwndSources);

    bool bSkipTransition = !performTransition;

    XElement *sources = sceneElement->GetElement(TEXT("sources"));
    if(sources)
    {
        UINT numSources = sources->NumElements();
        ListView_SetItemCount(hwndSources, numSources);

        for(UINT i=0; i<numSources; i++)
        {
            XElement *sourceElement = sources->GetElementByID(i);
            String className = sourceElement->GetString(TEXT("class"));

            if(className == "DeviceCapture") {
                // There's a capture device in the next scene that isn't a global source,
                // so let's skip the transition since it won't work anyway.
                bSkipTransition = true;
            }
        }

        for(UINT i=0; i<numSources; i++)
        {
            XElement *sourceElement = sources->GetElementByID(i);
            bool render = sourceElement->GetInt(TEXT("render"), 1) > 0;

            InsertSourceItem(i, (LPWSTR)sourceElement->GetName(), render);

            // Do not add image sources yet in case we're skipping the transition.
            // This fixes the issue where capture device sources that used the
            // same device as one in the previous scene would just go blank
            // after switching.
            if(bRunning && newScene && !bSkipTransition)
                newScene->AddImageSource(sourceElement);
        }
    }

    bChangingSources = false;
    SendMessage(hwndSources, WM_SETREDRAW, (WPARAM)TRUE, (LPARAM) 0);
    RedrawWindow(hwndSources, NULL, NULL, RDW_ERASE | RDW_FRAME | RDW_INVALIDATE | RDW_ALLCHILDREN);

    if(scene && newScene && newScene->HasMissingSources())
        OBSMessageBox(hwndMain, Str("Scene.MissingSources"), NULL, 0);

    if(bRunning)
    {
        //todo: cache scenes maybe?  undecided.  not really as necessary with global sources
        OSEnterMutex(hSceneMutex);

        UINT numSources;

        if (scene)
        {
            //shutdown previous scene, if any
            numSources = scene->sceneItems.Num();
            for(UINT i=0; i<numSources; i++)
            {
                XElement *source = scene->sceneItems[i]->GetElement();
                String className = source->GetString(TEXT("class"));
                if(scene->sceneItems[i]->bRender && className == "GlobalSource") {
                    XElement *globalSourceData = source->GetElement(TEXT("data"));
                    String globalSourceName = globalSourceData->GetString(TEXT("name"));
                    if(App->GetGlobalSource(globalSourceName) != NULL) {
                        App->GetGlobalSource(globalSourceName)->GlobalSourceLeaveScene();
                    }
                }
            }

            scene->EndScene();
        }

        Scene *previousScene = scene;
        scene = newScene;

        if(newScene && bSkipTransition) {
            // If we're skipping the transition because of a non-global
            // DirectShow device, delete the scene here and add the
            // ImageSources at this point instead.
            delete previousScene;

            if(sources)
            {
                UINT numSources = sources->NumElements();

                for(UINT i=0; i<numSources; i++)
                {
                    XElement *sourceElement = sources->GetElementByID(i);

                    if(newScene)
                        newScene->AddImageSource(sourceElement);
                }
            }
        }

        scene->BeginScene();

        numSources = scene->sceneItems.Num();
        for(UINT i=0; i<numSources; i++)
        {
            XElement *source = scene->sceneItems[i]->GetElement();

            String className = source->GetString(TEXT("class"));
            if(scene->sceneItems[i]->bRender && className == "GlobalSource") {
                XElement *globalSourceData = source->GetElement(TEXT("data"));
                String globalSourceName = globalSourceData->GetString(TEXT("name"));
                if(App->GetGlobalSource(globalSourceName) != NULL) {
                    App->GetGlobalSource(globalSourceName)->GlobalSourceEnterScene();
                }
            }
        }

        if(!bTransitioning && !bSkipTransition)
        {
            bTransitioning = true;
            transitionAlpha = 0.0f;
        }

        OSLeaveMutex(hSceneMutex);

        if(!bSkipTransition) {
            // The transition was not skipped, so the previous scene was not
            // deleted above; delete it here.
            delete previousScene;
        }

        DWORD sceneChangeTime = OSGetTime() - sceneChangeStartTime;
        if (sceneChangeTime >= 500)
            Log(TEXT("PERFORMANCE WARNING: Scene change took %u ms, maybe some sources should be global sources?"), sceneChangeTime);
    }

    if(API != NULL)
       ReportSwitchScenes(lpScene);

    return true;
}
Example No. 11

void DeviceAudioSource::FlushSamples()
{
    OSEnterMutex(hAudioMutex);
    sampleBuffer.Clear();
    OSLeaveMutex(hAudioMutex);
}
Example No. 12

void DeviceAudioSource::ReceiveAudio(LPBYTE lpData, UINT dataLength, float volume)
{
    if(lpData)
    {
		if (volume != 1.0f)
		{
			short *Tem = (short*)lpData;
			for (int i = 0; i < dataLength; i += 2)
			{
				long sVolume = Tem[i / 2];

				sVolume *= volume;

				if (sVolume > 0x7fff)
				{
					sVolume = 0x7fff;
				}
				else if (sVolume < -0x8000)
				{
					sVolume = -0x8000;
				}

				Tem[i / 2] = (short)sVolume;
			}
		}
		bool bPlayLive = false;
		if (bLiveInstance)
		{
			OSEnterMutex(hAudioMutex);
			sampleBuffer.AppendArray(lpData, dataLength);
			OSLeaveMutex(hAudioMutex);
			bPlayLive = m_bPlayPcmLive;
		}
		else
		{
			if (sampleBuffer.Num())
			{
				sampleBuffer.RemoveRange(0, sampleBuffer.Num());
			}
		}
		QWORD tmNow  = GetQPCMS();
		if (tmNow - m_timeStart > 200 && m_timeStart != 0)
		{
			if (!m_bErrorLog)
			{
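				// (log text translated: "dshow feed interval too large <%d>;
				// buffer size %d, incoming data %d")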
				Log(L"111111 dshow送入时间间隔过大<%d> 缓冲大小%d,送入数据%d", tmNow - m_timeStart, sampleBuffer.Num(), dataLength);
				m_bErrorLog = true;
			}			
		}
		else
		{
			m_bErrorLog = false;
		}

		m_timeStart = tmNow;

		OSEnterMutex(hAudioMutex);
		int Len = dataLength;
		if (m_pAudioWaveOut && (m_bPlayPcmLocal || bPlayLive))
		{
			char *OutBuffer;
			CaculateVolume((LPVOID)lpData, Len, (void**)&OutBuffer);

			m_pAudioWaveOut->push_pcm_data((char*)OutBuffer, Len);

			if (!bSameDevice && bProjector && m_pSecWaveOut)
				m_pSecWaveOut->push_pcm_data((char*)OutBuffer, Len);

		}
		else if (bProjector)
		{
			char *OutBuffer;
			CaculateVolume((LPVOID)lpData, Len, (void**)&OutBuffer);

			if (bSameDevice && m_pAudioWaveOut)
			{
				m_pAudioWaveOut->push_pcm_data((char*)OutBuffer, Len);
			}
			else if (m_pSecWaveOut)
			{
				m_pSecWaveOut->push_pcm_data((char*)OutBuffer, Len);
			}
		}
		OSLeaveMutex(hAudioMutex);
    }
}
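
The volume pass at the top of ReceiveAudio is a plain linear gain over interleaved signed 16-bit PCM, clamped so loud input saturates instead of wrapping around. The same idea as a standalone sketch:

#include <cstdint>
#include <cstddef>

// Linear gain over interleaved signed 16-bit PCM samples, clamped to full
// scale so overdriven input saturates rather than wraps.
void ApplyGain(int16_t *samples, size_t count, float volume)
{
    for (size_t i = 0; i < count; i++) {
        long v = (long)(samples[i] * volume);
        if (v >  32767) v =  32767;    //  0x7fff
        if (v < -32768) v = -32768;    // -0x8000
        samples[i] = (int16_t)v;
    }
}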
Example No. 13
void OBS::Start()
{
    if(bRunning) return;

    bStartingUp = true;

    OSEnterMutex (hStartupShutdownMutex);

    scenesConfig.Save();

    //-------------------------------------------------------------

    fps = AppConfig->GetInt(TEXT("Video"), TEXT("FPS"), 30);
    frameTime = 1000/fps;

    //-------------------------------------------------------------

    if(!bLoggedSystemStats)
    {
        LogSystemStats();
        bLoggedSystemStats = TRUE;
    }

    OSCheckForBuggyDLLs();

    //-------------------------------------------------------------
retryHookTest:
    bool alreadyWarnedAboutModules = false;
    if (OSIncompatibleModulesLoaded())
    {
        Log(TEXT("Incompatible modules (pre-D3D) detected."));
        int ret = MessageBox(hwndMain, Str("IncompatibleModules"), NULL, MB_ICONERROR | MB_ABORTRETRYIGNORE);
        if (ret == IDABORT)
        {
            OSLeaveMutex (hStartupShutdownMutex);
            bStartingUp = false;
            return;
        }
        else if (ret == IDRETRY)
        {
            goto retryHookTest;
        }

        alreadyWarnedAboutModules = true;
    }

    String strPatchesError;
    if (OSIncompatiblePatchesLoaded(strPatchesError))
    {
        OSLeaveMutex (hStartupShutdownMutex);
        MessageBox(hwndMain, strPatchesError.Array(), NULL, MB_ICONERROR);
        Log(TEXT("Incompatible patches detected."));
        bStartingUp = false;
        return;
    }

    //-------------------------------------------------------------

    String processPriority = AppConfig->GetString(TEXT("General"), TEXT("Priority"), TEXT("Normal"));
    if (!scmp(processPriority, TEXT("Idle")))
        SetPriorityClass(GetCurrentProcess(), IDLE_PRIORITY_CLASS);
    else if (!scmp(processPriority, TEXT("Above Normal")))
        SetPriorityClass(GetCurrentProcess(), ABOVE_NORMAL_PRIORITY_CLASS);
    else if (!scmp(processPriority, TEXT("High")))
        SetPriorityClass(GetCurrentProcess(), HIGH_PRIORITY_CLASS);

    int networkMode = AppConfig->GetInt(TEXT("Publish"), TEXT("Mode"), 2);
    DWORD delayTime = (DWORD)AppConfig->GetInt(TEXT("Publish"), TEXT("Delay"));

    String strError;

    bFirstConnect = !bReconnecting;

    if(bTestStream)
        network = CreateNullNetwork();
    else
    {
        switch(networkMode)
        {
        case 0: network = (delayTime > 0) ? CreateDelayedPublisher(delayTime) : CreateRTMPPublisher(); break;
        case 1: network = CreateNullNetwork(); break;
        }
    }

    if(!network)
    {
        OSLeaveMutex (hStartupShutdownMutex);

        if(!bReconnecting)
            MessageBox(hwndMain, strError, NULL, MB_ICONERROR);
        else
            DialogBox(hinstMain, MAKEINTRESOURCE(IDD_RECONNECTING), hwndMain, OBS::ReconnectDialogProc);
        bStartingUp = false;
        return;
    }

    bReconnecting = false;

    //-------------------------------------------------------------

    Log(TEXT("=====Stream Start: %s==============================================="), CurrentDateTimeString().Array());

    //-------------------------------------------------------------

    bEnableProjectorCursor = GlobalConfig->GetInt(L"General", L"EnableProjectorCursor", 1) != 0;
    bPleaseEnableProjector = bPleaseDisableProjector = false;

    int monitorID = AppConfig->GetInt(TEXT("Video"), TEXT("Monitor"));
    if(monitorID >= (int)monitors.Num())
        monitorID = 0;

    RECT &screenRect = monitors[monitorID].rect;
    int defCX = screenRect.right  - screenRect.left;
    int defCY = screenRect.bottom - screenRect.top;

    downscaleType = AppConfig->GetInt(TEXT("Video"), TEXT("Filter"), 0);
    downscale = AppConfig->GetFloat(TEXT("Video"), TEXT("Downscale"), 1.0f);
    baseCX = AppConfig->GetInt(TEXT("Video"), TEXT("BaseWidth"),  defCX);
    baseCY = AppConfig->GetInt(TEXT("Video"), TEXT("BaseHeight"), defCY);

    baseCX = MIN(MAX(baseCX, 128), 4096);
    baseCY = MIN(MAX(baseCY, 128), 4096);

    scaleCX = UINT(double(baseCX) / double(downscale));
    scaleCY = UINT(double(baseCY) / double(downscale));

    //align the width to 128 bits for fast SSE YUV 4:2:0 conversion
    outputCX = scaleCX & 0xFFFFFFFC;
    outputCY = scaleCY & 0xFFFFFFFE;
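    //e.g. scaleCX = 1366 -> outputCX = 1364 (a multiple of 4 BGRA pixels, i.e.
    //128 bits) and scaleCY = 769 -> outputCY = 768 (even, as 4:2:0 requires)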

    bUseMultithreadedOptimizations = AppConfig->GetInt(TEXT("General"), TEXT("UseMultithreadedOptimizations"), TRUE) != 0;
    Log(TEXT("  Multithreaded optimizations: %s"), (CTSTR)(bUseMultithreadedOptimizations ? TEXT("On") : TEXT("Off")));

    encoderSkipThreshold = GlobalConfig->GetInt(TEXT("Video"), TEXT("EncoderSkipThreshold"), fps/4);

    //------------------------------------------------------------------

    Log(TEXT("  Base resolution: %ux%u"), baseCX, baseCY);
    Log(TEXT("  Output resolution: %ux%u"), outputCX, outputCY);
    Log(TEXT("------------------------------------------"));

    //------------------------------------------------------------------

    GS = new D3D10System;
    GS->Init();

    //Thanks to ASUS OSD hooking the goddamn user mode driver framework (!!!!), we have to re-check for dangerous
    //hooks after initializing D3D.
retryHookTestV2:
    if (!alreadyWarnedAboutModules)
    {
        if (OSIncompatibleModulesLoaded())
        {
            Log(TEXT("Incompatible modules (post-D3D) detected."));
            int ret = MessageBox(hwndMain, Str("IncompatibleModules"), NULL, MB_ICONERROR | MB_ABORTRETRYIGNORE);
            if (ret == IDABORT)
            {
                //FIXME: really need a better way to abort startup than this...
                delete network;
                delete GS;

                OSLeaveMutex (hStartupShutdownMutex);
                bStartingUp = false;
                return;
            }
            else if (ret == IDRETRY)
            {
                goto retryHookTestV2;
            }
        }
    }

    //-------------------------------------------------------------

    mainVertexShader    = CreateVertexShaderFromFile(TEXT("shaders/DrawTexture.vShader"));
    mainPixelShader     = CreatePixelShaderFromFile(TEXT("shaders/DrawTexture.pShader"));

    solidVertexShader   = CreateVertexShaderFromFile(TEXT("shaders/DrawSolid.vShader"));
    solidPixelShader    = CreatePixelShaderFromFile(TEXT("shaders/DrawSolid.pShader"));

    if(!mainVertexShader || !mainPixelShader)
        CrashError(TEXT("Unable to load DrawTexture shaders"));

    if(!solidVertexShader || !solidPixelShader)
        CrashError(TEXT("Unable to load DrawSolid shaders"));

    //------------------------------------------------------------------

    CTSTR lpShader;
    if(CloseFloat(downscale, 1.0))
        lpShader = TEXT("shaders/DrawYUVTexture.pShader");
    else if(downscale < 2.01)
    {
        switch(downscaleType)
        {
            case 0: lpShader = TEXT("shaders/DownscaleBilinear1YUV.pShader"); break;
            case 1: lpShader = TEXT("shaders/DownscaleBicubicYUV.pShader"); break;
            case 2: lpShader = TEXT("shaders/DownscaleLanczos6tapYUV.pShader"); break;
        }
    }
    else if(downscale < 3.01)
        lpShader = TEXT("shaders/DownscaleBilinear9YUV.pShader");
    else
        CrashError(TEXT("Invalid downscale value (must be either 1.0, 1.5, 2.0, 2.25, or 3.0)"));

    yuvScalePixelShader = CreatePixelShaderFromFile(lpShader);
    if (!yuvScalePixelShader)
        CrashError(TEXT("Unable to create shader from file %s"), lpShader);

    //-------------------------------------------------------------

    for(UINT i=0; i<NUM_RENDER_BUFFERS; i++)
    {
        mainRenderTextures[i] = CreateRenderTarget(baseCX, baseCY, GS_BGRA, FALSE);
        yuvRenderTextures[i]  = CreateRenderTarget(outputCX, outputCY, GS_BGRA, FALSE);
    }

    //-------------------------------------------------------------

    D3D10_TEXTURE2D_DESC td;
    zero(&td, sizeof(td));
    td.Width            = outputCX;
    td.Height           = outputCY;
    td.Format           = DXGI_FORMAT_B8G8R8A8_UNORM;
    td.MipLevels        = 1;
    td.ArraySize        = 1;
    td.SampleDesc.Count = 1;
    td.ArraySize        = 1;
    td.Usage            = D3D10_USAGE_STAGING;
    td.CPUAccessFlags   = D3D10_CPU_ACCESS_READ;

    for(UINT i=0; i<NUM_RENDER_BUFFERS; i++)
    {
        HRESULT err = GetD3D()->CreateTexture2D(&td, NULL, &copyTextures[i]);
        if(FAILED(err))
        {
            CrashError(TEXT("Unable to create copy texture"));
            //todo - better error handling
        }
    }

    //------------------------------------------------------------------

    String strEncoder = AppConfig->GetString(TEXT("Audio Encoding"), TEXT("Codec"), TEXT("AAC"));
    BOOL isAAC = strEncoder.CompareI(TEXT("AAC"));

    UINT format = AppConfig->GetInt(L"Audio Encoding", L"Format", 1);

    if (!isAAC)
        format = 0;

    switch (format) {
    case 0: sampleRateHz = 44100; break;
    default:
    case 1: sampleRateHz = 48000; break;
    }

    Log(L"------------------------------------------");
    Log(L"Audio Format: %uhz", sampleRateHz);

    //------------------------------------------------------------------

    AudioDeviceList playbackDevices;
    bool useInputDevices = AppConfig->GetInt(L"Audio", L"UseInputDevices", false) != 0;
    GetAudioDevices(playbackDevices, useInputDevices ? ADT_RECORDING : ADT_PLAYBACK);

    String strPlaybackDevice = AppConfig->GetString(TEXT("Audio"), TEXT("PlaybackDevice"), TEXT("Default"));
    if(strPlaybackDevice.IsEmpty() || !playbackDevices.HasID(strPlaybackDevice))
    {
        AppConfig->SetString(TEXT("Audio"), TEXT("PlaybackDevice"), TEXT("Default"));
        strPlaybackDevice = TEXT("Default");
    }

    Log(TEXT("Playback device %s"), strPlaybackDevice.Array());
    playbackDevices.FreeData();

    desktopAudio = CreateAudioSource(false, strPlaybackDevice);

    if(!desktopAudio) {
        CrashError(TEXT("Cannot initialize desktop audio sound, more info in the log file."));
    }

    AudioDeviceList audioDevices;
    GetAudioDevices(audioDevices, ADT_RECORDING, false, true);

    String strDevice = AppConfig->GetString(TEXT("Audio"), TEXT("Device"), NULL);
    if(strDevice.IsEmpty() || !audioDevices.HasID(strDevice))
    {
        AppConfig->SetString(TEXT("Audio"), TEXT("Device"), TEXT("Disable"));
        strDevice = TEXT("Disable");
    }

    audioDevices.FreeData();

    String strDefaultMic;
    bool bHasDefault = GetDefaultMicID(strDefaultMic);

    if(strDevice.CompareI(TEXT("Disable")))
        EnableWindow(GetDlgItem(hwndMain, ID_MICVOLUME), FALSE);
    else
    {
        bool bUseDefault = strDevice.CompareI(TEXT("Default")) != 0;
        if(!bUseDefault || bHasDefault)
        {
            if(bUseDefault)
                strDevice = strDefaultMic;

            micAudio = CreateAudioSource(true, strDevice);

            if(!micAudio)
                MessageBox(hwndMain, Str("MicrophoneFailure"), NULL, 0);
            else
                micAudio->SetTimeOffset(AppConfig->GetInt(TEXT("Audio"), TEXT("MicTimeOffset"), 0));

            EnableWindow(GetDlgItem(hwndMain, ID_MICVOLUME), micAudio != NULL);
        }
        else
            EnableWindow(GetDlgItem(hwndMain, ID_MICVOLUME), FALSE);
    }

    //-------------------------------------------------------------

    bool bDisableEncoding = false;

    if (bTestStream)
        bDisableEncoding = GlobalConfig->GetInt(TEXT("General"), TEXT("DisablePreviewEncoding"), false) != 0;

    //-------------------------------------------------------------

    UINT bitRate = (UINT)AppConfig->GetInt(TEXT("Audio Encoding"), TEXT("Bitrate"), 96);

    if (bDisableEncoding)
        audioEncoder = CreateNullAudioEncoder();
    else
#ifdef USE_AAC
    if(isAAC) // && OSGetVersion() >= 7)
        audioEncoder = CreateAACEncoder(bitRate);
    else
#endif
        audioEncoder = CreateMP3Encoder(bitRate);

    //-------------------------------------------------------------

    desktopVol = AppConfig->GetFloat(TEXT("Audio"), TEXT("DesktopVolume"), 1.0f);
    micVol     = AppConfig->GetFloat(TEXT("Audio"), TEXT("MicVolume"),     1.0f);

    //-------------------------------------------------------------

    bRunning = true;

    if(sceneElement)
    {
        scene = CreateScene(sceneElement->GetString(TEXT("class")), sceneElement->GetElement(TEXT("data")));
        XElement *sources = sceneElement->GetElement(TEXT("sources"));
        if(sources)
        {
            UINT numSources = sources->NumElements();
            for(UINT i=0; i<numSources; i++)
            {
                SceneItem *item = scene->AddImageSource(sources->GetElementByID(i));
                if(item)
                {
                    if(ListView_GetItemState(GetDlgItem(hwndMain, ID_SOURCES), i, LVIS_SELECTED) > 0)
                        item->Select(true);
                }
            }
        }

        scene->BeginScene();
        unsigned int numSources = scene->sceneItems.Num();
        for(UINT i=0; i<numSources; i++)
        {
            XElement *source = scene->sceneItems[i]->GetElement();

            String className = source->GetString(TEXT("class"));
            if(scene->sceneItems[i]->bRender && className == "GlobalSource") {
                XElement *globalSourceData = source->GetElement(TEXT("data"));
                String globalSourceName = globalSourceData->GetString(TEXT("name"));
                if(App->GetGlobalSource(globalSourceName) != NULL) {
                    App->GetGlobalSource(globalSourceName)->GlobalSourceEnterScene();
                }
            }
        }
    }

    if(scene && scene->HasMissingSources())
        MessageBox(hwndMain, Str("Scene.MissingSources"), NULL, 0);

    //-------------------------------------------------------------

    int maxBitRate = AppConfig->GetInt   (TEXT("Video Encoding"), TEXT("MaxBitrate"), 1000);
    int bufferSize = AppConfig->GetInt   (TEXT("Video Encoding"), TEXT("BufferSize"), 1000);
    int quality    = AppConfig->GetInt   (TEXT("Video Encoding"), TEXT("Quality"),    8);
    String preset  = AppConfig->GetString(TEXT("Video Encoding"), TEXT("Preset"),     TEXT("veryfast"));
    bUsing444      = false;//AppConfig->GetInt   (TEXT("Video Encoding"), TEXT("Use444"),     0) != 0;
    bUseCFR        = AppConfig->GetInt(TEXT("Video Encoding"), TEXT("UseCFR"), 1) != 0;

    //-------------------------------------------------------------

    bufferingTime = GlobalConfig->GetInt(TEXT("General"), TEXT("SceneBufferingTime"), 700);
    Log(TEXT("Scene buffering time set to %u"), bufferingTime);

    //-------------------------------------------------------------

    bForceMicMono = AppConfig->GetInt(TEXT("Audio"), TEXT("ForceMicMono")) != 0;
    bRecievedFirstAudioFrame = false;

    //hRequestAudioEvent = CreateSemaphore(NULL, 0, 0x7FFFFFFFL, NULL);
    hSoundDataMutex = OSCreateMutex();
    hSoundThread = OSCreateThread((XTHREAD)OBS::MainAudioThread, NULL);

    //-------------------------------------------------------------

    if (!useInputDevices)
        StartBlankSoundPlayback(strPlaybackDevice);

    //-------------------------------------------------------------

    colorDesc.fullRange = false;
    colorDesc.primaries = ColorPrimaries_BT709;
    colorDesc.transfer  = ColorTransfer_IEC6196621;
    colorDesc.matrix    = outputCX >= 1280 || outputCY > 576 ? ColorMatrix_BT709 : ColorMatrix_SMPTE170M;
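    //heuristic: output at least 1280 wide or taller than 576 is tagged as HD
    //(BT.709), anything smaller as SD (SMPTE 170M), per broadcast convention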

    videoEncoder = nullptr;
    if (bDisableEncoding)
        videoEncoder = CreateNullVideoEncoder();
    else if(AppConfig->GetInt(TEXT("Video Encoding"), TEXT("UseQSV")) != 0)
        videoEncoder = CreateQSVEncoder(fps, outputCX, outputCY, quality, preset, bUsing444, colorDesc, maxBitRate, bufferSize, bUseCFR);
    else if(CheckNVENCHardwareSupport(true) && AppConfig->GetInt(TEXT("Video Encoding"), TEXT("UseNVENC")) != 0)
        videoEncoder = CreateNVENCEncoder(fps, outputCX, outputCY, quality, preset, bUsing444, colorDesc, maxBitRate, bufferSize, bUseCFR);

    if(!videoEncoder)
        videoEncoder = CreateX264Encoder(fps, outputCX, outputCY, quality, preset, bUsing444, colorDesc, maxBitRate, bufferSize, bUseCFR);


    //-------------------------------------------------------------

    // Ensure that the render frame is properly sized
    ResizeRenderFrame(true);

    //-------------------------------------------------------------

    StartRecording();

    //-------------------------------------------------------------

    curFramePic = NULL;
    bShutdownVideoThread = false;
    bShutdownEncodeThread = false;
    //ResetEvent(hVideoThread);
    hEncodeThread = OSCreateThread((XTHREAD)OBS::EncodeThread, NULL);
    hVideoThread = OSCreateThread((XTHREAD)OBS::MainCaptureThread, NULL);

    if(bTestStream)
    {
        EnableWindow(GetDlgItem(hwndMain, ID_STARTSTOP), FALSE);
        SetWindowText(GetDlgItem(hwndMain, ID_TESTSTREAM), Str("MainWindow.StopTest"));
    }
    else
    {
        EnableWindow(GetDlgItem(hwndMain, ID_TESTSTREAM), FALSE);
        SetWindowText(GetDlgItem(hwndMain, ID_STARTSTOP), Str("MainWindow.StopStream"));
    }

    EnableWindow(GetDlgItem(hwndMain, ID_SCENEEDITOR), TRUE);

    //-------------------------------------------------------------

    ReportStartStreamTrigger();
    
    SystemParametersInfo(SPI_SETSCREENSAVEACTIVE, 0, 0, 0);
    SetThreadExecutionState(ES_CONTINUOUS | ES_SYSTEM_REQUIRED | ES_AWAYMODE_REQUIRED | ES_DISPLAY_REQUIRED);

    UpdateRenderViewMessage();

    //update notification icon to reflect current status
    UpdateNotificationAreaIcon();

    OSLeaveMutex (hStartupShutdownMutex);

    bStartingUp = false;
}
Example No. 14
void RTMPPublisher::SendLoop()
{
    SetThreadPriority(GetCurrentThread(), THREAD_PRIORITY_ABOVE_NORMAL);
    while(WaitForSingleObject(hSendSempahore, INFINITE) == WAIT_OBJECT_0)
    {
        while(true)
        {
            OSEnterMutex(hDataMutex);
            if(queuedPackets.Num() == 0)
            {
                OSLeaveMutex(hDataMutex);
                break;
            }

            List<BYTE> packetData;
            PacketType type       = queuedPackets[0].type;
            DWORD      timestamp  = queuedPackets[0].timestamp;
            packetData.TransferFrom(queuedPackets[0].data);

            currentBufferSize -= packetData.Num();

            queuedPackets.Remove(0);

            OSLeaveMutex(hDataMutex);

            //--------------------------------------------

            RTMPPacket packet;
            packet.m_nChannel = (type == PacketType_Audio) ? 0x5 : 0x4;
            packet.m_headerType = RTMP_PACKET_SIZE_MEDIUM;
            packet.m_packetType = (type == PacketType_Audio) ? RTMP_PACKET_TYPE_AUDIO : RTMP_PACKET_TYPE_VIDEO;
            packet.m_nTimeStamp = timestamp;
            packet.m_nInfoField2 = rtmp->m_stream_id;
            packet.m_hasAbsTimestamp = TRUE;

            packet.m_nBodySize = packetData.Num()-RTMP_MAX_HEADER_SIZE;
            packet.m_body = (char*)packetData.Array()+RTMP_MAX_HEADER_SIZE;
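            //the queued buffer reserves RTMP_MAX_HEADER_SIZE bytes of headroom in
            //front of the payload; librtmp composes the chunk header into that gap,
            //which is why m_body points past it (the same layout RTMPPacket_Alloc uses)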

            //QWORD sendTimeStart = OSGetTimeMicroseconds();
            if(!RTMP_SendPacket(rtmp, &packet, FALSE))
            {
                //should never reach here with the new shutdown sequence.
                RUNONCE Log(TEXT("RTMP_SendPacket failure, should not happen!"));
                if(!RTMP_IsConnected(rtmp))
                {
                    App->PostStopMessage();
                    break;
                }
            }

            //----------------------------------------------------------

            /*outputRateSize += packetData.Num();
            packetSizeRecord << PacketTimeSize(timestamp, packetData.Num());
            if(packetSizeRecord.Num())
            {
                UINT packetID=0;
                for(; packetID<packetSizeRecord.Num(); packetID++)
                {
                    if(timestamp-packetSizeRecord[packetID].timestamp < outputRateWindowTime)
                        break;
                    else
                        outputRateSize -= packetSizeRecord[packetID].size;
                }

                if(packetID != 0)
                    packetSizeRecord.RemoveRange(0, packetID);
            }*/

            //bytesSent += packetData.Num();
        }

        if (bStopping && WaitForSingleObject(hSendLoopExit, 0) == WAIT_OBJECT_0)
            return;
    }
}
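
SendLoop() can point packet.m_body just past the first RTMP_MAX_HEADER_SIZE bytes because SendPacketForReal() (Example no. 16 below) stores every payload with that much slack in front, letting librtmp write the wire header in place instead of reallocating. A minimal sketch of that buffer layout, using std::vector in place of the List<BYTE> container; the constant's value (18 bytes in the librtmp headers this sketch assumes) and the helper name are illustrative:

#include <cstdint>
#include <cstring>
#include <vector>

// Illustrative stand-in for librtmp's RTMP_MAX_HEADER_SIZE.
static const size_t kMaxHeaderSize = 18;

// Copy a payload into a buffer that reserves header slack up front,
// mirroring how SendPacketForReal() builds paddedData so that
// SendLoop() can later do:
//     packet.m_body      = (char*)buf.data() + kMaxHeaderSize;
//     packet.m_nBodySize = buf.size() - kMaxHeaderSize;
std::vector<uint8_t> PadForRtmp(const uint8_t *data, size_t size)
{
    std::vector<uint8_t> padded(kMaxHeaderSize + size);
    std::memcpy(padded.data() + kMaxHeaderSize, data, size);
    return padded;
}
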
Example no. 15
bool OBS::SetScene(CTSTR lpScene)
{
    if(bDisableSceneSwitching)
        return false;

    HWND hwndScenes = GetDlgItem(hwndMain, ID_SCENES);
    UINT curSel = (UINT)SendMessage(hwndScenes, LB_GETCURSEL, 0, 0);

    //-------------------------

    if(curSel != LB_ERR)
    {
        UINT textLen = (UINT)SendMessage(hwndScenes, LB_GETTEXTLEN, curSel, 0);

        String strLBName;
        strLBName.SetLength(textLen);

        SendMessage(hwndScenes, LB_GETTEXT, curSel, (LPARAM)strLBName.Array());
        if(!strLBName.CompareI(lpScene))
        {
            UINT id = (UINT)SendMessage(hwndScenes, LB_FINDSTRINGEXACT, -1, (LPARAM)lpScene);
            if(id == LB_ERR)
                return false;

            SendMessage(hwndScenes, LB_SETCURSEL, id, 0);
        }
    }
    else
    {
        UINT id = (UINT)SendMessage(hwndScenes, LB_FINDSTRINGEXACT, -1, (LPARAM)lpScene);
        if(id == LB_ERR)
            return false;

        SendMessage(hwndScenes, LB_SETCURSEL, id, 0);
    }

    //-------------------------

    XElement *scenes = scenesConfig.GetElement(TEXT("scenes"));
    XElement *newSceneElement = scenes->GetElement(lpScene);
    if(!newSceneElement)
        return false;

    if(sceneElement == newSceneElement)
        return true;

    sceneElement = newSceneElement;

    CTSTR lpClass = sceneElement->GetString(TEXT("class"));
    if(!lpClass)
    {
        AppWarning(TEXT("OBS::SetScene: no class found for scene '%s'"), newSceneElement->GetName());
        return false;
    }

    if(bRunning)
    {
        Log(TEXT("++++++++++++++++++++++++++++++++++++++++++++++++++++++"));
        Log(TEXT("  New Scene"));
    }

    XElement *sceneData = newSceneElement->GetElement(TEXT("data"));

    //-------------------------

    Scene *newScene = NULL;
    if(bRunning)
        newScene = CreateScene(lpClass, sceneData);

    //-------------------------

    HWND hwndSources = GetDlgItem(hwndMain, ID_SOURCES);
    bChangingSources = true;
    ListView_DeleteAllItems(hwndSources);
    
    XElement *sources = sceneElement->GetElement(TEXT("sources"));
    if(sources)
    {
        UINT numSources = sources->NumElements();
        ListView_SetItemCount(hwndSources, numSources);

        for(UINT i=0; i<numSources; i++)
        {
            XElement *sourceElement = sources->GetElementByID(i);
            bool render = sourceElement->GetInt(TEXT("render"), 1) > 0;

            AddSourceItem((LPWSTR)sourceElement->GetName(), render, i);

            if(bRunning && newScene)
                newScene->AddImageSource(sourceElement);
        }
    }
    bChangingSources = false;

    if(scene && newScene && newScene->HasMissingSources())
        MessageBox(hwndMain, Str("Scene.MissingSources"), NULL, 0);

    if(bRunning)
    {
        //todo: cache scenes maybe?  undecided.  not really as necessary with global sources
        OSEnterMutex(hSceneMutex);

        if(scene)
            scene->EndScene();

        Scene *previousScene = scene;
        scene = newScene;

        scene->BeginScene();

        if(!bTransitioning)
        {
            bTransitioning = true;
            transitionAlpha = 0.0f;
        }

        OSLeaveMutex(hSceneMutex);

        delete previousScene;
    }

    return true;
}
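
Setting bTransitioning and zeroing transitionAlpha only arms the cross-fade; the render loop, which is not shown in this example, is what actually advances the blend each frame. A plausible per-frame update, assuming a linear fade; fadeSpeed and frameSeconds are illustrative names, not taken from this source:

// Advance the scene cross-fade each rendered frame; clamp at 1.0 and
// clear the flag once the incoming scene is fully opaque. fadeSpeed is
// in "full fades per second", so 5.0f would mean a 200 ms fade.
void AdvanceTransition(float &transitionAlpha, bool &bTransitioning,
                       float frameSeconds, float fadeSpeed)
{
    if(!bTransitioning)
        return;

    transitionAlpha += frameSeconds * fadeSpeed;
    if(transitionAlpha >= 1.0f)
    {
        transitionAlpha = 1.0f;
        bTransitioning = false;
    }
}
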
Example no. 16
void RTMPPublisher::SendPacketForReal(BYTE *data, UINT size, DWORD timestamp, PacketType type)
{
    //OSDebugOut (TEXT("SendPacketForReal (%08x)\n"), quickHash(data,size));
    //Log(TEXT("packet| timestamp: %u, type: %u, bytes: %u"), timestamp, (UINT)type, size);

    OSEnterMutex(hDataMutex);

    if(bConnected)
    {
        ProcessPackets();

        bool bSend = bSentFirstKeyframe;

        if(!bSentFirstKeyframe)
        {
            if(type == PacketType_VideoHighest)
            {
                bSend = true;

                OSDebugOut(TEXT("got keyframe: %u\r\n"), OSGetTime());
            }
        }

        if(bSend)
        {
            if(!bSentFirstAudio && type == PacketType_Audio)
            {
                timestamp = 0;
                bSentFirstAudio = true;
            }

            totalFrames++;
            if(type != PacketType_Audio)
                totalVideoFrames++;

            bool bAddPacket = false;
            if(type >= packetWaitType)
            {
                if(type != PacketType_Audio)
                    packetWaitType = PacketType_VideoDisposable;

                bAddPacket = true;
            }

            if(bAddPacket)
            {
                List<BYTE> paddedData;
                paddedData.SetSize(size+RTMP_MAX_HEADER_SIZE);
                mcpy(paddedData.Array()+RTMP_MAX_HEADER_SIZE, data, size);

                if(!bSentFirstKeyframe)
                {
                    DataPacket sei;
                    App->GetVideoEncoder()->GetSEI(sei);
                    paddedData.InsertArray(RTMP_MAX_HEADER_SIZE+5, sei.lpPacket, sei.size);

                    bSentFirstKeyframe = true;
                }

                currentBufferSize += paddedData.Num();

                UINT droppedFrameVal = queuedPackets.Num() ? queuedPackets.Last().distanceFromDroppedFrame+1 : 10000;

                UINT id = FindClosestQueueIndex(timestamp);

                NetworkPacket *queuedPacket = queuedPackets.InsertNew(id);
                queuedPacket->distanceFromDroppedFrame = droppedFrameVal;
                queuedPacket->data.TransferFrom(paddedData);
                queuedPacket->timestamp = timestamp;
                queuedPacket->type = type;
            }
            else
            {
                if(type < PacketType_VideoHigh)
                    numBFramesDumped++;
                else
                    numPFramesDumped++;
            }
        }
    }

    OSLeaveMutex(hDataMutex);
}
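
The type >= packetWaitType test is the whole frame-dropping policy: when congestion raises packetWaitType, lower-priority video packets are discarded, audio always passes, and any video packet that does pass resets the gate. A sketch of that gate in isolation, assuming the priority ordering disposable < low < high < highest < audio that the B/P-frame counters above imply:

// Assumed ordering; the real enum lives elsewhere in the source.
enum PacketType
{
    PacketType_VideoDisposable,
    PacketType_VideoLow,
    PacketType_VideoHigh,
    PacketType_VideoHighest,
    PacketType_Audio
};

// Returns true if the packet should be queued. When congestion raises
// waitType, lower-priority video is discarded (counted as a dumped B-
// or P-frame); the next video packet that passes reopens the gate.
bool ShouldQueue(PacketType type, PacketType &waitType)
{
    if(type < waitType)
        return false;

    if(type != PacketType_Audio)
        waitType = PacketType_VideoDisposable;

    return true;
}
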
Example no. 17
void OBS::Start()
{
    if(bRunning) return;

    OSEnterMutex (hStartupShutdownMutex);

    scenesConfig.Save();

    //-------------------------------------------------------------

    fps = AppConfig->GetInt(TEXT("Video"), TEXT("FPS"), 30);
    frameTime = 1000/fps;

    //-------------------------------------------------------------

    if(!bLoggedSystemStats)
    {
        LogSystemStats();
        bLoggedSystemStats = TRUE;
    }

    //-------------------------------------------------------------
retryHookTest:
    bool alreadyWarnedAboutModules = false;
    if (OSIncompatibleModulesLoaded())
    {
        Log(TEXT("Incompatible modules (pre-D3D) detected."));
        int ret = MessageBox(hwndMain, Str("IncompatibleModules"), NULL, MB_ICONERROR | MB_ABORTRETRYIGNORE);
        if (ret == IDABORT)
        {
            OSLeaveMutex (hStartupShutdownMutex);
            return;
        }
        else if (ret == IDRETRY)
        {
            goto retryHookTest;
        }

        alreadyWarnedAboutModules = true;
    }

    String strPatchesError;
    if (OSIncompatiblePatchesLoaded(strPatchesError))
    {
        OSLeaveMutex (hStartupShutdownMutex);
        MessageBox(hwndMain, strPatchesError.Array(), NULL, MB_ICONERROR);
        Log(TEXT("Incompatible patches detected."));
        return;
    }

    //-------------------------------------------------------------

    String processPriority = AppConfig->GetString(TEXT("General"), TEXT("Priority"), TEXT("Normal"));
    if (!scmp(processPriority, TEXT("Idle")))
        SetPriorityClass(GetCurrentProcess(), IDLE_PRIORITY_CLASS);
    else if (!scmp(processPriority, TEXT("Above Normal")))
        SetPriorityClass(GetCurrentProcess(), ABOVE_NORMAL_PRIORITY_CLASS);
    else if (!scmp(processPriority, TEXT("High")))
        SetPriorityClass(GetCurrentProcess(), HIGH_PRIORITY_CLASS);

    int networkMode = AppConfig->GetInt(TEXT("Publish"), TEXT("Mode"), 2);
    DWORD delayTime = (DWORD)AppConfig->GetInt(TEXT("Publish"), TEXT("Delay"));

    String strError;

    bFirstConnect = !bReconnecting;

    if(bTestStream)
        network = CreateNullNetwork();
    else
    {
        switch(networkMode)
        {
        case 0: network = (delayTime > 0) ? CreateDelayedPublisher(delayTime) : CreateRTMPPublisher(); break;
        case 1: network = CreateNullNetwork(); break;
        }
    }

    if(!network)
    {
        OSLeaveMutex (hStartupShutdownMutex);

        if(!bReconnecting)
            MessageBox(hwndMain, strError, NULL, MB_ICONERROR);
        else
            DialogBox(hinstMain, MAKEINTRESOURCE(IDD_RECONNECTING), hwndMain, OBS::ReconnectDialogProc);
        return;
    }

    bReconnecting = false;

    //-------------------------------------------------------------

    Log(TEXT("=====Stream Start: %s==============================================="), CurrentDateTimeString().Array());

    //-------------------------------------------------------------

    int monitorID = AppConfig->GetInt(TEXT("Video"), TEXT("Monitor"));
    if(monitorID >= (int)monitors.Num())
        monitorID = 0;

    RECT &screenRect = monitors[monitorID].rect;
    int defCX = screenRect.right  - screenRect.left;
    int defCY = screenRect.bottom - screenRect.top;

    downscaleType = AppConfig->GetInt(TEXT("Video"), TEXT("Filter"), 0);
    downscale = AppConfig->GetFloat(TEXT("Video"), TEXT("Downscale"), 1.0f);
    baseCX = AppConfig->GetInt(TEXT("Video"), TEXT("BaseWidth"),  defCX);
    baseCY = AppConfig->GetInt(TEXT("Video"), TEXT("BaseHeight"), defCY);

    baseCX = MIN(MAX(baseCX, 128), 4096);
    baseCY = MIN(MAX(baseCY, 128), 4096);

    scaleCX = UINT(double(baseCX) / double(downscale));
    scaleCY = UINT(double(baseCY) / double(downscale));

    //align width to 128 bits (4 BGRA pixels) for fast SSE YUV 4:2:0 conversion
    outputCX = scaleCX & 0xFFFFFFFC;
    outputCY = scaleCY & 0xFFFFFFFE;

    bUseMultithreadedOptimizations = AppConfig->GetInt(TEXT("General"), TEXT("UseMultithreadedOptimizations"), TRUE) != 0;
    Log(TEXT("  Multithreaded optimizations: %s"), (CTSTR)(bUseMultithreadedOptimizations ? TEXT("On") : TEXT("Off")));

    //------------------------------------------------------------------

    Log(TEXT("  Base resolution: %ux%u"), baseCX, baseCY);
    Log(TEXT("  Output resolution: %ux%u"), outputCX, outputCY);
    Log(TEXT("------------------------------------------"));

    //------------------------------------------------------------------

    GS = new D3D10System;
    GS->Init();

    //Thanks to ASUS OSD hooking the goddamn user mode driver framework (!!!!), we have to re-check for dangerous
    //hooks after initializing D3D.
retryHookTestV2:
    if (!alreadyWarnedAboutModules)
    {
        if (OSIncompatibleModulesLoaded())
        {
            Log(TEXT("Incompatible modules (post-D3D) detected."));
            int ret = MessageBox(hwndMain, Str("IncompatibleModules"), NULL, MB_ICONERROR | MB_ABORTRETRYIGNORE);
            if (ret == IDABORT)
            {
                //FIXME: really need a better way to abort startup than this...
                delete network;
                delete GS;

                OSLeaveMutex (hStartupShutdownMutex);
                return;
            }
            else if (ret == IDRETRY)
            {
                goto retryHookTestV2;
            }
        }
    }

    //-------------------------------------------------------------

    mainVertexShader    = CreateVertexShaderFromFile(TEXT("shaders/DrawTexture.vShader"));
    mainPixelShader     = CreatePixelShaderFromFile(TEXT("shaders/DrawTexture.pShader"));

    solidVertexShader   = CreateVertexShaderFromFile(TEXT("shaders/DrawSolid.vShader"));
    solidPixelShader    = CreatePixelShaderFromFile(TEXT("shaders/DrawSolid.pShader"));

    //------------------------------------------------------------------

    CTSTR lpShader;
    if(CloseFloat(downscale, 1.0))
        lpShader = TEXT("shaders/DrawYUVTexture.pShader");
    else if(downscale < 2.01)
    {
        switch(downscaleType)
        {
            case 0: lpShader = TEXT("shaders/DownscaleBilinear1YUV.pShader"); break;
            case 1: lpShader = TEXT("shaders/DownscaleBicubicYUV.pShader"); break;
            case 2: lpShader = TEXT("shaders/DownscaleLanczos6tapYUV.pShader"); break;
        }
    }
    else if(downscale < 3.01)
        lpShader = TEXT("shaders/DownscaleBilinear9YUV.pShader");
    else
        CrashError(TEXT("Invalid downscale value (must be either 1.0, 1.5, 2.0, 2.25, or 3.0)"));

    yuvScalePixelShader = CreatePixelShaderFromFile(lpShader);
    if (!yuvScalePixelShader)
        CrashError(TEXT("Unable to create shader from file %s"), lpShader);

    //-------------------------------------------------------------

    for(UINT i=0; i<NUM_RENDER_BUFFERS; i++)
    {
        mainRenderTextures[i] = CreateRenderTarget(baseCX, baseCY, GS_BGRA, FALSE);
        yuvRenderTextures[i]  = CreateRenderTarget(outputCX, outputCY, GS_BGRA, FALSE);
    }

    //-------------------------------------------------------------

    D3D10_TEXTURE2D_DESC td;
    zero(&td, sizeof(td));
    td.Width            = outputCX;
    td.Height           = outputCY;
    td.Format           = DXGI_FORMAT_B8G8R8A8_UNORM;
    td.MipLevels        = 1;
    td.ArraySize        = 1;
    td.SampleDesc.Count = 1;
    td.Usage            = D3D10_USAGE_STAGING;
    td.CPUAccessFlags   = D3D10_CPU_ACCESS_READ;

    for(UINT i=0; i<NUM_RENDER_BUFFERS; i++)
    {
        HRESULT err = GetD3D()->CreateTexture2D(&td, NULL, &copyTextures[i]);
        if(FAILED(err))
        {
            CrashError(TEXT("Unable to create copy texture"));
            //todo - better error handling
        }
    }

    //-------------------------------------------------------------

    AudioDeviceList playbackDevices;
    GetAudioDevices(playbackDevices, ADT_PLAYBACK);

    String strPlaybackDevice = AppConfig->GetString(TEXT("Audio"), TEXT("PlaybackDevice"), TEXT("Default"));
    if(strPlaybackDevice.IsEmpty() || !playbackDevices.HasID(strPlaybackDevice))
    {
        AppConfig->SetString(TEXT("Audio"), TEXT("PlaybackDevice"), TEXT("Default"));
        strPlaybackDevice = TEXT("Default");
    }

    Log(TEXT("Playback device %s"), strPlaybackDevice.Array());
    playbackDevices.FreeData();

    desktopAudio = CreateAudioSource(false, strPlaybackDevice);

    if(!desktopAudio)
        CrashError(TEXT("Cannot initialize desktop audio sound, more info in the log file."));

    AudioDeviceList audioDevices;
    GetAudioDevices(audioDevices, ADT_RECORDING);

    String strDevice = AppConfig->GetString(TEXT("Audio"), TEXT("Device"), NULL);
    if(strDevice.IsEmpty() || !audioDevices.HasID(strDevice))
    {
        AppConfig->SetString(TEXT("Audio"), TEXT("Device"), TEXT("Disable"));
        strDevice = TEXT("Disable");
    }

    audioDevices.FreeData();

    String strDefaultMic;
    bool bHasDefault = GetDefaultMicID(strDefaultMic);

    if(strDevice.CompareI(TEXT("Disable")))
        EnableWindow(GetDlgItem(hwndMain, ID_MICVOLUME), FALSE);
    else
    {
        bool bUseDefault = strDevice.CompareI(TEXT("Default")) != 0;
        if(!bUseDefault || bHasDefault)
        {
            if(bUseDefault)
                strDevice = strDefaultMic;

            micAudio = CreateAudioSource(true, strDevice);

            if(!micAudio)
                MessageBox(hwndMain, Str("MicrophoneFailure"), NULL, 0);
            else
                micAudio->SetTimeOffset(AppConfig->GetInt(TEXT("Audio"), TEXT("MicTimeOffset"), 0));

            EnableWindow(GetDlgItem(hwndMain, ID_MICVOLUME), micAudio != NULL);
        }
        else
            EnableWindow(GetDlgItem(hwndMain, ID_MICVOLUME), FALSE);
    }

    //-------------------------------------------------------------

    bool bDisableEncoding = false;

    if (bTestStream)
        bDisableEncoding = GlobalConfig->GetInt(TEXT("General"), TEXT("DisablePreviewEncoding"), false) != 0;

    //-------------------------------------------------------------

    UINT bitRate = (UINT)AppConfig->GetInt(TEXT("Audio Encoding"), TEXT("Bitrate"), 96);
    String strEncoder = AppConfig->GetString(TEXT("Audio Encoding"), TEXT("Codec"), TEXT("AAC"));

    if (bDisableEncoding)
        audioEncoder = CreateNullAudioEncoder();
    else
#ifdef USE_AAC
    if(strEncoder.CompareI(TEXT("AAC")))// && OSGetVersion() >= 7)
        audioEncoder = CreateAACEncoder(bitRate);
    else
#endif
        audioEncoder = CreateMP3Encoder(bitRate);

    //-------------------------------------------------------------

    desktopVol = AppConfig->GetFloat(TEXT("Audio"), TEXT("DesktopVolume"), 1.0f);
    micVol     = AppConfig->GetFloat(TEXT("Audio"), TEXT("MicVolume"),     1.0f);

    //-------------------------------------------------------------

    bRunning = true;

    if(sceneElement)
    {
        scene = CreateScene(sceneElement->GetString(TEXT("class")), sceneElement->GetElement(TEXT("data")));
        XElement *sources = sceneElement->GetElement(TEXT("sources"));
        if(sources)
        {
            UINT numSources = sources->NumElements();
            for(UINT i=0; i<numSources; i++)
            {
                SceneItem *item = scene->AddImageSource(sources->GetElementByID(i));
                if(item)
                {
                    if(ListView_GetItemState(GetDlgItem(hwndMain, ID_SOURCES), i, LVIS_SELECTED) > 0)
                        item->Select(true);
                }
            }
        }

        scene->BeginScene();
    }

    if(scene && scene->HasMissingSources())
        MessageBox(hwndMain, Str("Scene.MissingSources"), NULL, 0);

    //-------------------------------------------------------------

    int maxBitRate = AppConfig->GetInt   (TEXT("Video Encoding"), TEXT("MaxBitrate"), 1000);
    int bufferSize = AppConfig->GetInt   (TEXT("Video Encoding"), TEXT("BufferSize"), 1000);
    int quality    = AppConfig->GetInt   (TEXT("Video Encoding"), TEXT("Quality"),    8);
    String preset  = AppConfig->GetString(TEXT("Video Encoding"), TEXT("Preset"),     TEXT("veryfast"));
    bUsing444      = AppConfig->GetInt   (TEXT("Video Encoding"), TEXT("Use444"),     0) != 0;

    bDupeFrames = AppConfig->GetInt(TEXT("Video Encoding"), TEXT("DupeFrames"), 0) != 0;

    if(bUsing444)
        bDupeFrames = bUseCFR = false;
    else
    {
        bUseCFR = AppConfig->GetInt(TEXT("Video Encoding"), TEXT("UseCFR"), 0) != 0;
        if(bUseCFR) bDupeFrames = true;
    }

    //-------------------------------------------------------------

    bWriteToFile = networkMode == 1 || AppConfig->GetInt(TEXT("Publish"), TEXT("SaveToFile")) != 0;
    String strOutputFile = AppConfig->GetString(TEXT("Publish"), TEXT("SavePath"));

    strOutputFile.FindReplace(TEXT("\\"), TEXT("/"));

    if (bWriteToFile)
    {
        OSFindData ofd;
        HANDLE hFind = NULL;
        bool bUseDateTimeName = true;

        if((hFind = OSFindFirstFile(strOutputFile, ofd)) != NULL)
        {
            String strFileExtension = GetPathExtension(strOutputFile);
            String strFileWithoutExtension = GetPathWithoutExtension(strOutputFile);

            if(strFileExtension.IsValid() && !ofd.bDirectory)
            {
                String strNewFilePath;
                UINT curFile = 0;

                do 
                {
                    strNewFilePath.Clear() << strFileWithoutExtension << TEXT(" (") << FormattedString(TEXT("%02u"), ++curFile) << TEXT(").") << strFileExtension;
                } while(OSFileExists(strNewFilePath));

                strOutputFile = strNewFilePath;

                bUseDateTimeName = false;
            }

            if(ofd.bDirectory)
                strOutputFile.AppendChar('/');

            OSFindClose(hFind);
        }

        if(bUseDateTimeName)
        {
            String strFileName = GetPathFileName(strOutputFile);

            if(!strFileName.IsValid() || !IsSafeFilename(strFileName))
            {
                SYSTEMTIME st;
                GetLocalTime(&st);

                String strDirectory = GetPathDirectory(strOutputFile);
                strOutputFile = FormattedString(TEXT("%s/%u-%02u-%02u-%02u%02u-%02u.mp4"), strDirectory.Array(), st.wYear, st.wMonth, st.wDay, st.wHour, st.wMinute, st.wSecond);
            }
        }
    }

    //-------------------------------------------------------------

    bufferingTime = GlobalConfig->GetInt(TEXT("General"), TEXT("SceneBufferingTime"), 400);

    //-------------------------------------------------------------

    bForceMicMono = AppConfig->GetInt(TEXT("Audio"), TEXT("ForceMicMono")) != 0;
    bRecievedFirstAudioFrame = false;

    //hRequestAudioEvent = CreateSemaphore(NULL, 0, 0x7FFFFFFFL, NULL);
    hSoundDataMutex = OSCreateMutex();
    hSoundThread = OSCreateThread((XTHREAD)OBS::MainAudioThread, NULL);

    //-------------------------------------------------------------

    StartBlankSoundPlayback(strPlaybackDevice);

    //-------------------------------------------------------------

    ctsOffset = 0;
    videoEncoder = nullptr;
    if (bDisableEncoding)
        videoEncoder = CreateNullVideoEncoder();
    else if(AppConfig->GetInt(TEXT("Video Encoding"), TEXT("UseQSV")) != 0)
        videoEncoder = CreateQSVEncoder(fps, outputCX, outputCY, quality, preset, bUsing444, maxBitRate, bufferSize, bUseCFR, bDupeFrames);

    if(!videoEncoder)
        videoEncoder = CreateX264Encoder(fps, outputCX, outputCY, quality, preset, bUsing444, maxBitRate, bufferSize, bUseCFR, bDupeFrames);


    //-------------------------------------------------------------

    // Ensure that the render frame is properly sized
    ResizeRenderFrame(true);

    //-------------------------------------------------------------

    if(!bTestStream && bWriteToFile && strOutputFile.IsValid())
    {
        String strFileExtension = GetPathExtension(strOutputFile);
        if(strFileExtension.CompareI(TEXT("flv")))
            fileStream = CreateFLVFileStream(strOutputFile);
        else if(strFileExtension.CompareI(TEXT("mp4")))
            fileStream = CreateMP4FileStream(strOutputFile);

        if(!fileStream)
        {
            Log(TEXT("Warning - OBSCapture::Start: Unable to create the file stream. Check the file path in Broadcast Settings."));
            MessageBox(hwndMain, Str("Capture.Start.FileStream.Warning"), Str("Capture.Start.FileStream.WarningCaption"), MB_OK | MB_ICONWARNING);        
        }
    }

    //-------------------------------------------------------------

    hMainThread = OSCreateThread((XTHREAD)OBS::MainCaptureThread, NULL);

    if(bTestStream)
    {
        EnableWindow(GetDlgItem(hwndMain, ID_STARTSTOP), FALSE);
        SetWindowText(GetDlgItem(hwndMain, ID_TESTSTREAM), Str("MainWindow.StopTest"));
    }
    else
    {
        EnableWindow(GetDlgItem(hwndMain, ID_TESTSTREAM), FALSE);
        SetWindowText(GetDlgItem(hwndMain, ID_STARTSTOP), Str("MainWindow.StopStream"));
    }

    EnableWindow(GetDlgItem(hwndMain, ID_SCENEEDITOR), TRUE);

    //-------------------------------------------------------------

    ReportStartStreamTrigger();
    
    SystemParametersInfo(SPI_SETSCREENSAVEACTIVE, 0, 0, 0);
    SetThreadExecutionState(ES_CONTINUOUS | ES_SYSTEM_REQUIRED | ES_AWAYMODE_REQUIRED | ES_DISPLAY_REQUIRED);

    UpdateRenderViewMessage();

    //update notification icon to reflect current status
    UpdateNotificationAreaIcon();

    OSLeaveMutex (hStartupShutdownMutex);
}
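
The sizing math near the top of Start() clamps the base canvas, applies the downscale, then masks the output width down to a multiple of 4 pixels and the height down to an even number. That is what the "align width to 128 bits" comment refers to: 4 BGRA pixels are exactly one 128-bit SSE register, and 4:2:0 chroma subsampling needs even dimensions. The same computation as a standalone sketch (function name illustrative):

#include <algorithm>

// Mirrors the sizing math in OBS::Start().
void ComputeOutputSize(unsigned baseCX, unsigned baseCY, double downscale,
                       unsigned &outputCX, unsigned &outputCY)
{
    baseCX = std::min(std::max(baseCX, 128u), 4096u);
    baseCY = std::min(std::max(baseCY, 128u), 4096u);

    unsigned scaleCX = unsigned(double(baseCX) / downscale);
    unsigned scaleCY = unsigned(double(baseCY) / downscale);

    outputCX = scaleCX & ~3u;   // same effect as scaleCX & 0xFFFFFFFC
    outputCY = scaleCY & ~1u;   // same effect as scaleCY & 0xFFFFFFFE
}
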
Example no. 18
void DeviceSource::Preprocess()
{
    if(!bCapturing)
        return;

    //----------------------------------------

    if(bRequestVolume)
    {
        if(audioOut)
            audioOut->SetVolume(fNewVol);
        else if(audioFilter)
        {
            IBasicAudio *basicAudio;
            if(SUCCEEDED(audioFilter->QueryInterface(IID_IBasicAudio, (void**)&basicAudio)))
            {
                long lVol = long((double(fNewVol)*NEAR_SILENTf)-NEAR_SILENTf);
                if(lVol <= -NEAR_SILENT)
                    lVol = -10000;
                basicAudio->put_Volume(lVol);
                basicAudio->Release();
            }
        }
        bRequestVolume = false;
    }

    //----------------------------------------

    SampleData *lastSample = NULL;

    OSEnterMutex(hSampleMutex);

    lastSample = latestVideoSample;
    latestVideoSample = NULL;

    OSLeaveMutex(hSampleMutex);

    //----------------------------------------

    int numThreads = MAX(OSGetTotalCores()-2, 1);

    if(lastSample)
    {
        /*REFERENCE_TIME refTimeStart, refTimeFinish;
        lastSample->GetTime(&refTimeStart, &refTimeFinish);

        static REFERENCE_TIME lastRefTime = 0;
        Log(TEXT("refTimeStart: %llu, refTimeFinish: %llu, offset = %llu"), refTimeStart, refTimeFinish, refTimeStart-lastRefTime);
        lastRefTime = refTimeStart;*/

        if(colorType == DeviceOutputType_RGB)
        {
            if(texture)
            {
                texture->SetImage(lastSample->lpData, GS_IMAGEFORMAT_BGRX, renderCX*4);
                bReadyToDraw = true;
            }
        }
        else if(colorType == DeviceOutputType_I420 || colorType == DeviceOutputType_YV12)
        {
            if(bUseThreadedConversion)
            {
                if(!bFirstFrame)
                {
                    List<HANDLE> events;
                    for(int i=0; i<numThreads; i++)
                        events << convertData[i].hSignalComplete;

                    WaitForMultipleObjects(numThreads, events.Array(), TRUE, INFINITE);
                    texture->SetImage(lpImageBuffer, GS_IMAGEFORMAT_RGBX, texturePitch);

                    bReadyToDraw = true;
                }
                else
                    bFirstFrame = false;

                for(int i=0; i<numThreads; i++)
                    lastSample->AddRef();

                for(int i=0; i<numThreads; i++)
                {
                    convertData[i].input    = lastSample->lpData;
                    convertData[i].sample   = lastSample;
                    convertData[i].pitch    = texturePitch;
                    convertData[i].output   = lpImageBuffer;
                    SetEvent(convertData[i].hSignalConvert);
                }
            }
            else
            {
                LPBYTE lpData;
                UINT pitch;

                if(texture->Map(lpData, pitch))
                {
                    PackPlanar(lpData, lastSample->lpData, renderCX, renderCY, pitch, 0, renderCY);
                    texture->Unmap();
                }

                bReadyToDraw = true;
            }
        }
        else if(colorType == DeviceOutputType_YVYU || colorType == DeviceOutputType_YUY2)
        {
            LPBYTE lpData;
            UINT pitch;

            if(texture->Map(lpData, pitch))
            {
                Convert422To444(lpData, lastSample->lpData, pitch, true);
                texture->Unmap();
            }

            bReadyToDraw = true;
        }
        else if(colorType == DeviceOutputType_UYVY || colorType == DeviceOutputType_HDYC)
        {
            LPBYTE lpData;
            UINT pitch;

            if(texture->Map(lpData, pitch))
            {
                Convert422To444(lpData, lastSample->lpData, pitch, false);
                texture->Unmap();
            }

            bReadyToDraw = true;
        }

        lastSample->Release();
    }
}
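
In the threaded path, Preprocess() AddRef()s the sample once per worker and signals each convertData slot, relying on every slot having been assigned its own horizontal band of rows at startup (numThreads = max(cores - 2, 1), as above). One way such bands could be computed, keeping band heights even so each worker owns whole 4:2:0 chroma rows; the struct and helper names are illustrative:

#include <vector>

struct RowBand { int startY, endY; };

// Divide 'height' rows into numThreads contiguous bands. Heights are
// kept even (the last band absorbs any remainder) so no worker splits
// a chroma row with its neighbor.
std::vector<RowBand> SplitRows(int height, int numThreads)
{
    std::vector<RowBand> bands(numThreads);
    int y = 0;
    for(int i = 0; i < numThreads; i++)
    {
        int rows = (height - y) / (numThreads - i);
        rows &= ~1;                 // even band height
        bands[i].startY = y;
        bands[i].endY   = (i == numThreads - 1) ? height : y + rows;
        y = bands[i].endY;
    }
    return bands;
}
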
Example no. 19
void OBS::MainAudioLoop()
{
    DWORD taskID = 0;
    HANDLE hTask = AvSetMmThreadCharacteristics(TEXT("Pro Audio"), &taskID);

    bufferedAudioTimes.Clear();

    bPushToTalkOn = false;

    micMax = desktopMax = VOL_MIN;
    micPeak = desktopPeak = VOL_MIN;

    UINT audioFramesSinceMeterUpdate = 0;
    UINT audioFramesSinceMicMaxUpdate = 0;
    UINT audioFramesSinceDesktopMaxUpdate = 0;

    List<float> mixBuffer, levelsBuffer;
    mixBuffer.SetSize(audioSampleSize*2);
    levelsBuffer.SetSize(audioSampleSize*2);

    latestAudioTime = 0;

    //---------------------------------------------
    // the audio loop of doom

    while (true) {
        OSSleep(5); //screw it, just run it every 5ms

        if (!bRunning)
            break;

        //-----------------------------------------------

        float *desktopBuffer, *micBuffer;

        curDesktopVol = desktopVol * desktopBoost;

        if (bUsingPushToTalk)
            curMicVol = bPushToTalkOn ? micVol : 0.0f;
        else
            curMicVol = micVol;

        curMicVol *= micBoost;

        bool bDesktopMuted = (curDesktopVol < EPSILON);
        bool bMicEnabled   = (micAudio != NULL);

        while (QueryNewAudio()) {
            QWORD timestamp = bufferedAudioTimes[0];
            bufferedAudioTimes.Remove(0);

            zero(mixBuffer.Array(),    audioSampleSize*2*sizeof(float));
            zero(levelsBuffer.Array(), audioSampleSize*2*sizeof(float));

            //----------------------------------------------------------------------------
            // get latest sample for calculating the volume levels

            float *latestDesktopBuffer = NULL, *latestMicBuffer = NULL;

            desktopAudio->GetBuffer(&desktopBuffer, timestamp);
            desktopAudio->GetNewestFrame(&latestDesktopBuffer);

            if (micAudio != NULL) {
                micAudio->GetBuffer(&micBuffer, timestamp);
                micAudio->GetNewestFrame(&latestMicBuffer);
            }

            //----------------------------------------------------------------------------
            // mix desktop samples

            if (desktopBuffer)
                MixAudio(mixBuffer.Array(), desktopBuffer, audioSampleSize*2, false);

            if (latestDesktopBuffer)
                MixAudio(levelsBuffer.Array(), latestDesktopBuffer, audioSampleSize*2, false);

            //----------------------------------------------------------------------------
            // get latest aux volume level samples and mix

            OSEnterMutex(hAuxAudioMutex);

            for (UINT i=0; i<auxAudioSources.Num(); i++) {
                float *latestAuxBuffer;

                if(auxAudioSources[i]->GetNewestFrame(&latestAuxBuffer))
                    MixAudio(levelsBuffer.Array(), latestAuxBuffer, audioSampleSize*2, false);
            }

            //----------------------------------------------------------------------------
            // mix output aux sound samples with the desktop

            for (UINT i=0; i<auxAudioSources.Num(); i++) {
                float *auxBuffer;

                if(auxAudioSources[i]->GetBuffer(&auxBuffer, timestamp))
                    MixAudio(mixBuffer.Array(), auxBuffer, audioSampleSize*2, false);
            }

            OSLeaveMutex(hAuxAudioMutex);

            //----------------------------------------------------------------------------
            // multiply samples by volume and compute RMS and max of samples
            // Use 1.0f instead of curDesktopVol, since aux audio sources already have their volume set, and shouldn't be boosted anyway.

            float desktopRMS = 0, micRMS = 0, desktopMx = 0, micMx = 0;
            if (latestDesktopBuffer)
                CalculateVolumeLevels(levelsBuffer.Array(), audioSampleSize*2, 1.0f, desktopRMS, desktopMx);
            if (bMicEnabled && latestMicBuffer)
                CalculateVolumeLevels(latestMicBuffer, audioSampleSize*2, curMicVol, micRMS, micMx);

            //----------------------------------------------------------------------------
            // convert RMS and Max of samples to dB 

            desktopRMS = toDB(desktopRMS);
            micRMS = toDB(micRMS);
            desktopMx = toDB(desktopMx);
            micMx = toDB(micMx);

            //----------------------------------------------------------------------------
            // update running max: rise instantly, otherwise decay toward the sample

            float maxAlpha = 0.15f;
            UINT peakMeterDelayFrames = audioSamplesPerSec * 3;

            if (micMx > micMax)
                micMax = micMx;
            else
                micMax = maxAlpha * micMx + (1.0f - maxAlpha) * micMax;

            if(desktopMx > desktopMax)
                desktopMax = desktopMx;
            else
                desktopMax = maxAlpha * desktopMx + (1.0f - maxAlpha) * desktopMax;

            //----------------------------------------------------------------------------
            // update delayed peak meter

            if (micMax > micPeak || audioFramesSinceMicMaxUpdate > peakMeterDelayFrames) {
                micPeak = micMax;
                audioFramesSinceMicMaxUpdate = 0;
            } else {
                audioFramesSinceMicMaxUpdate += audioSampleSize;
            }

            if (desktopMax > desktopPeak || audioFramesSinceDesktopMaxUpdate > peakMeterDelayFrames) {
                desktopPeak = desktopMax;
                audioFramesSinceDesktopMaxUpdate = 0;
            } else {
                audioFramesSinceDesktopMaxUpdate += audioSampleSize;
            }

            //----------------------------------------------------------------------------
            // low pass the level sampling

            float rmsAlpha = 0.15f;
            desktopMag = rmsAlpha * desktopRMS + desktopMag * (1.0f - rmsAlpha);
            micMag = rmsAlpha * micRMS + micMag * (1.0f - rmsAlpha);

            //----------------------------------------------------------------------------
            // update the meter about every 50ms

            audioFramesSinceMeterUpdate += audioSampleSize;
            if (audioFramesSinceMeterUpdate >= (audioSampleSize*5)) {
                PostMessage(hwndMain, WM_COMMAND, MAKEWPARAM(ID_MICVOLUMEMETER, VOLN_METERED), 0);
                audioFramesSinceMeterUpdate = 0;
            }

            //----------------------------------------------------------------------------
            // mix mic and desktop sound
            // also, it's perfectly fine to just mix into the returned buffer

            if (bMicEnabled && micBuffer)
                MixAudio(mixBuffer.Array(), micBuffer, audioSampleSize*2, bForceMicMono);

            EncodeAudioSegment(mixBuffer.Array(), audioSampleSize, timestamp);
        }

        //-----------------------------------------------

        if (!bRecievedFirstAudioFrame && pendingAudioFrames.Num())
            bRecievedFirstAudioFrame = true;
    }

    desktopMag = desktopMax = desktopPeak = VOL_MIN;
    micMag = micMax = micPeak = VOL_MIN;

    PostMessage(hwndMain, WM_COMMAND, MAKEWPARAM(ID_MICVOLUMEMETER, VOLN_METERED), 0);

    for (UINT i=0; i<pendingAudioFrames.Num(); i++)
        pendingAudioFrames[i].audioData.Clear();

    AvRevertMmThreadCharacteristics(hTask);
}
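
The meter logic above combines an instant-attack/exponential-decay running max (alpha = 0.15) with a delayed peak that holds for peakMeterDelayFrames (three seconds of samples) unless a louder value arrives first. The same state machine as a compact sketch; the -96 dB floor stands in for VOL_MIN, whose actual value is assumed:

struct MeterState
{
    float curMax = -96.0f;   // assumed VOL_MIN stand-in, in dB
    float peak   = -96.0f;
    unsigned framesSincePeak = 0;
};

// sampleMaxDb is the max of the latest block, frameCount its length in
// samples, holdFrames the peak-hold window (e.g. 3 * samples/sec).
void UpdateMeter(MeterState &m, float sampleMaxDb,
                 unsigned frameCount, unsigned holdFrames)
{
    const float alpha = 0.15f;

    if(sampleMaxDb > m.curMax)
        m.curMax = sampleMaxDb;                       // instant attack
    else
        m.curMax = alpha * sampleMaxDb + (1.0f - alpha) * m.curMax;

    if(m.curMax > m.peak || m.framesSincePeak > holdFrames)
    {
        m.peak = m.curMax;                            // new held peak
        m.framesSincePeak = 0;
    }
    else
        m.framesSincePeak += frameCount;
}
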
Example no. 20
bool OBS::SetScene(CTSTR lpScene)
{
    if(bDisableSceneSwitching)
        return false;

    HWND hwndScenes = GetDlgItem(hwndMain, ID_SCENES);
    UINT curSel = (UINT)SendMessage(hwndScenes, LB_GETCURSEL, 0, 0);

    //-------------------------

    if(curSel != LB_ERR)
    {
        UINT textLen = (UINT)SendMessage(hwndScenes, LB_GETTEXTLEN, curSel, 0);

        String strLBName;
        strLBName.SetLength(textLen);

        SendMessage(hwndScenes, LB_GETTEXT, curSel, (LPARAM)strLBName.Array());
        if(!strLBName.CompareI(lpScene))
        {
            UINT id = (UINT)SendMessage(hwndScenes, LB_FINDSTRINGEXACT, -1, (LPARAM)lpScene);
            if(id == LB_ERR)
                return false;

            SendMessage(hwndScenes, LB_SETCURSEL, id, 0);
        }
    }
    else
    {
        UINT id = (UINT)SendMessage(hwndScenes, LB_FINDSTRINGEXACT, -1, (LPARAM)lpScene);
        if(id == LB_ERR)
            return false;

        SendMessage(hwndScenes, LB_SETCURSEL, id, 0);
    }

    //-------------------------

    XElement *scenes = scenesConfig.GetElement(TEXT("scenes"));
    XElement *newSceneElement = scenes->GetElement(lpScene);
    if(!newSceneElement)
        return false;

    if(sceneElement == newSceneElement)
        return true;

    sceneElement = newSceneElement;

    CTSTR lpClass = sceneElement->GetString(TEXT("class"));
    if(!lpClass)
    {
        AppWarning(TEXT("OBS::SetScene: no class found for scene '%s'"), newSceneElement->GetName());
        return false;
    }

    DWORD sceneChangeStartTime = 0;

    if(bRunning)
    {
        Log(TEXT("++++++++++++++++++++++++++++++++++++++++++++++++++++++"));
        Log(TEXT("  New Scene"));

        sceneChangeStartTime = OSGetTime();
    }

    XElement *sceneData = newSceneElement->GetElement(TEXT("data"));

    //-------------------------

    Scene *newScene = NULL;
    if(bRunning)
        newScene = CreateScene(lpClass, sceneData);

    //-------------------------

    HWND hwndSources = GetDlgItem(hwndMain, ID_SOURCES);

    SendMessage(hwndSources, WM_SETREDRAW, (WPARAM)FALSE, (LPARAM) 0);

    bChangingSources = true;
    ListView_DeleteAllItems(hwndSources);

    XElement *sources = sceneElement->GetElement(TEXT("sources"));
    if(sources)
    {
        UINT numSources = sources->NumElements();
        ListView_SetItemCount(hwndSources, numSources);

        for(UINT i=0; i<numSources; i++)
        {
            XElement *sourceElement = sources->GetElementByID(i);
            bool render = sourceElement->GetInt(TEXT("render"), 1) > 0;

            InsertSourceItem(i, (LPWSTR)sourceElement->GetName(), render);

            if(bRunning && newScene)
                newScene->AddImageSource(sourceElement);
        }
    }

    bChangingSources = false;
    SendMessage(hwndSources, WM_SETREDRAW, (WPARAM)TRUE, (LPARAM) 0);
    RedrawWindow(hwndSources, NULL, NULL, RDW_ERASE | RDW_FRAME | RDW_INVALIDATE | RDW_ALLCHILDREN);

    if(scene && newScene && newScene->HasMissingSources())
        MessageBox(hwndMain, Str("Scene.MissingSources"), NULL, 0);

    if(bRunning)
    {
        //todo: cache scenes maybe?  undecided.  not really as necessary with global sources
        OSEnterMutex(hSceneMutex);

        UINT numSources;

        if (scene)
        {
            //shutdown previous scene, if any
            numSources = scene->sceneItems.Num();
            for(UINT i=0; i<numSources; i++)
            {
                XElement *source = scene->sceneItems[i]->GetElement();
                String className = source->GetString(TEXT("class"));
                if(scene->sceneItems[i]->bRender && className == "GlobalSource") {
                    XElement *globalSourceData = source->GetElement(TEXT("data"));
                    String globalSourceName = globalSourceData->GetString(TEXT("name"));
                    if(App->GetGlobalSource(globalSourceName) != NULL) {
                        App->GetGlobalSource(globalSourceName)->GlobalSourceLeaveScene();
                    }
                }
            }

            scene->EndScene();
        }

        Scene *previousScene = scene;
        scene = newScene;

        scene->BeginScene();

        numSources = scene->sceneItems.Num();
        for(UINT i=0; i<numSources; i++)
        {
            XElement *source = scene->sceneItems[i]->GetElement();
            String className = source->GetString(TEXT("class"));
            if(scene->sceneItems[i]->bRender && className == "GlobalSource") {
                XElement *globalSourceData = source->GetElement(TEXT("data"));
                String globalSourceName = globalSourceData->GetString(TEXT("name"));
                if(App->GetGlobalSource(globalSourceName) != NULL) {
                    App->GetGlobalSource(globalSourceName)->GlobalSourceEnterScene();
                }
            }
        }

        if(!bTransitioning)
        {
            bTransitioning = true;
            transitionAlpha = 0.0f;
        }

        OSLeaveMutex(hSceneMutex);

        delete previousScene;

        DWORD sceneChangeTime = OSGetTime() - sceneChangeStartTime;
        if (sceneChangeTime >= 500)
            Log(TEXT("PERFORMANCE WARNING: Scene change took %u ms, maybe some sources should be global sources?"), sceneChangeTime);
    }

    if(API != NULL)
        ReportSwitchScenes(lpScene);

    return true;
}
Example no. 21
void RTMPPublisher::SendLoop()
{
    SetThreadPriority(GetCurrentThread(), THREAD_PRIORITY_ABOVE_NORMAL);
    while(WaitForSingleObject(hSendSempahore, INFINITE) == WAIT_OBJECT_0)
    {
        /*//--------------------------------------------
        // read

        DWORD pendingBytes = 0;
        ioctlsocket(rtmp->m_sb.sb_socket, FIONREAD, &pendingBytes);
        if(pendingBytes)
        {
            RTMPPacket packet;
            zero(&packet, sizeof(packet));

            while(RTMP_ReadPacket(rtmp, &packet) && !RTMPPacket_IsReady(&packet) && RTMP_IsConnected(rtmp));

            if(!RTMP_IsConnected(rtmp))
            {
                App->PostStopMessage();
                bStopping = true;
                break;
            }

            RTMPPacket_Free(&packet);
        }*/

        //--------------------------------------------
        // send

        while(true)
        {
            if(bStopping)
                return;

            OSEnterMutex(hDataMutex);
            if(queuedPackets.Num() == 0)
            {
                OSLeaveMutex(hDataMutex);
                break;
            }

            List<BYTE> packetData;
            PacketType type       = queuedPackets[0].type;
            DWORD      timestamp  = queuedPackets[0].timestamp;
            packetData.TransferFrom(queuedPackets[0].data);

            currentBufferSize -= packetData.Num();

            queuedPackets.Remove(0);

            OSLeaveMutex(hDataMutex);

            //--------------------------------------------

            RTMPPacket packet;
            packet.m_nChannel = (type == PacketType_Audio) ? 0x5 : 0x4;
            packet.m_headerType = RTMP_PACKET_SIZE_MEDIUM;
            packet.m_packetType = (type == PacketType_Audio) ? RTMP_PACKET_TYPE_AUDIO : RTMP_PACKET_TYPE_VIDEO;
            packet.m_nTimeStamp = timestamp;
            packet.m_nInfoField2 = rtmp->m_stream_id;
            packet.m_hasAbsTimestamp = TRUE;

            packet.m_nBodySize = packetData.Num()-RTMP_MAX_HEADER_SIZE;
            packet.m_body = (char*)packetData.Array()+RTMP_MAX_HEADER_SIZE;

            //QWORD sendTimeStart = OSGetTimeMicroseconds();
            if(!RTMP_SendPacket(rtmp, &packet, FALSE))
            {
                if(!RTMP_IsConnected(rtmp))
                {
                    App->PostStopMessage();
                    bStopping = true;
                    break;
                }

                RUNONCE Log(TEXT("okay, this is strange"));
            }

            //----------------------------------------------------------

            /*outputRateSize += packetData.Num();
            packetSizeRecord << PacketTimeSize(timestamp, packetData.Num());
            if(packetSizeRecord.Num())
            {
                UINT packetID=0;
                for(; packetID<packetSizeRecord.Num(); packetID++)
                {
                    if(timestamp-packetSizeRecord[packetID].timestamp < outputRateWindowTime)
                        break;
                    else
                        outputRateSize -= packetSizeRecord[packetID].size;
                }

                if(packetID != 0)
                    packetSizeRecord.RemoveRange(0, packetID);
            }*/

            //bytesSent += packetData.Num();
        }
    }
}
Example no. 22
void OBS::SendFrame(VideoSegment &curSegment, QWORD firstFrameTime)
{
    if(!bSentHeaders)
    {
        network->BeginPublishing();
        bSentHeaders = true;
    }

    OSEnterMutex(hSoundDataMutex);

    if(pendingAudioFrames.Num())
    {
        while(pendingAudioFrames.Num())
        {
            if(firstFrameTime < pendingAudioFrames[0].timestamp)
            {
                UINT audioTimestamp = UINT(pendingAudioFrames[0].timestamp-firstFrameTime);

                //stop sending audio packets when we reach an audio timestamp greater than the video timestamp
                if(audioTimestamp > curSegment.timestamp)
                    break;

                if(audioTimestamp == 0 || audioTimestamp > lastAudioTimestamp)
                {
                    List<BYTE> &audioData = pendingAudioFrames[0].audioData;
                    if(audioData.Num())
                    {
                        //Log(TEXT("a:%u, %llu"), audioTimestamp, frameInfo.firstFrameTime+audioTimestamp);

                        network->SendPacket(audioData.Array(), audioData.Num(), audioTimestamp, PacketType_Audio);
                        if(fileStream)
                            fileStream->AddPacket(audioData.Array(), audioData.Num(), audioTimestamp, PacketType_Audio);

                        audioData.Clear();

                        lastAudioTimestamp = audioTimestamp;
                    }
                }
            }
            else
                nop();

            pendingAudioFrames[0].audioData.Clear();
            pendingAudioFrames.Remove(0);
        }
    }

    OSLeaveMutex(hSoundDataMutex);

    for(UINT i=0; i<curSegment.packets.Num(); i++)
    {
        VideoPacketData &packet = curSegment.packets[i];

        if(packet.type == PacketType_VideoHighest)
            bRequestKeyframe = false;

        //Log(TEXT("v:%u, %llu"), curSegment.timestamp, frameInfo.firstFrameTime+curSegment.timestamp);

        network->SendPacket(packet.data.Array(), packet.data.Num(), curSegment.timestamp, packet.type);
        if(fileStream)
            fileStream->AddPacket(packet.data.Array(), packet.data.Num(), curSegment.timestamp, packet.type);
    }
}
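
The audio loop in SendFrame() enforces the interleaving rule: queued audio is flushed only while its timestamp, made relative to firstFrameTime, does not exceed the current video segment's timestamp, and anything stamped at or before the first frame is discarded outright. A container-level sketch of that rule, with the payload, the duplicate-timestamp check, and the send call elided:

#include <cstdint>
#include <deque>

struct PendingAudio { uint64_t timestamp; /* encoded payload elided */ };

// Flush queued audio that does not run past the current video segment,
// mirroring the loop in SendFrame().
void FlushAudioUpTo(std::deque<PendingAudio> &pending,
                    uint64_t firstFrameTime, uint32_t videoTimestamp)
{
    while(!pending.empty())
    {
        uint64_t absTime = pending.front().timestamp;
        if(absTime > firstFrameTime)
        {
            uint32_t audioTimestamp = uint32_t(absTime - firstFrameTime);
            if(audioTimestamp > videoTimestamp)
                break;              // belongs to a later video segment
            // ...send/write the audio packet here...
        }
        pending.pop_front();        // sent, or predates the first frame
    }
}
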
Example no. 23
void RTMPPublisher::SocketLoop()
{
    int delayTime;
    int latencyPacketSize;

    WSANETWORKEVENTS networkEvents;

    SetThreadPriority(GetCurrentThread(), THREAD_PRIORITY_ABOVE_NORMAL);

    WSAEventSelect(rtmp->m_sb.sb_socket, hWriteEvent, FD_READ|FD_WRITE);

    //Low latency mode works by delaying delayTime ms between calls to send() and only sending
    //a buffer as large as latencyPacketSize at once. This causes keyframes and other data bursts
    //to be sent over several sends instead of one large one.
    if (lowLatencyMode == LL_MODE_AUTO)
    {
        //Auto mode aims for a constant rate of whatever the stream bitrate is and segments into
        //MTU sized packets (test packet captures indicated that despite nagling being enabled,
        //the size of the send() buffer is still important for some reason). Note that delays
        //become very short at this rate, and it can take a while for the buffer to empty after
        //a keyframe.
        delayTime = int(1400.0f / (dataBufferSize / 1000.0f));
        latencyPacketSize = 1460;
    }
    else if (lowLatencyMode == LL_MODE_FIXED)
    {
        //We use latencyFactor - 2 to guarantee we're always sending at a slightly higher
        //rate than the maximum expected data rate so we don't get backed up
        latencyPacketSize = dataBufferSize / (latencyFactor - 2);
        delayTime = 1000 / latencyFactor;
    }
    else
    {
        latencyPacketSize = dataBufferSize;
        delayTime = 0;
    }

    do
    {
        int status = WaitForSingleObject(hWriteEvent, INFINITE);
        if (status == WAIT_ABANDONED || status == WAIT_FAILED)
        {
            Log(TEXT("RTMPPublisher::SocketLoop: Aborting due to hWriteEvent mutex"));
            return;
        }

        if (WSAEnumNetworkEvents (rtmp->m_sb.sb_socket, NULL, &networkEvents))
        {
            //App->SetStreamReport(FormattedString(TEXT("Disconnected: WSAEnumNetworkEvents failed, %d"), WSAGetLastError()).Array());
            Log(TEXT("RTMPPublisher::SocketLoop: Aborting due to WSAEnumNetworkEvents failure, %d"), WSAGetLastError());
            App->PostStopMessage();
            bStopping = true;
            return;
        }

        if (networkEvents.lNetworkEvents & FD_CLOSE)
        {
            //App->SetStreamReport(FormattedString(TEXT("Disconnected: Socket was closed, error %d"), networkEvents.iErrorCode[FD_CLOSE_BIT]).Array());
            Log(TEXT("RTMPPublisher::SocketLoop: Aborting due to FD_CLOSE, error %d"), networkEvents.iErrorCode[FD_CLOSE_BIT]);
            App->PostStopMessage();
            bStopping = true;
            return;
        }

        if (networkEvents.lNetworkEvents & FD_READ)
        {
            BYTE discard[16384];
            int ret, errorCode;
            BOOL fatalError = FALSE;

            for (;;)
            {
                ret = recv(rtmp->m_sb.sb_socket, (char *)discard, sizeof(discard), 0);
                if (ret == -1)
                {
                    errorCode = WSAGetLastError();

                    if (errorCode == WSAEWOULDBLOCK)
                        break;

                    fatalError = TRUE;
                }
                else if (ret == 0)
                {
                    errorCode = 0;
                    fatalError = TRUE;
                }

                if (fatalError)
                {
                    Log(TEXT("RTMPPublisher::SocketLoop: Socket error, recv() returned %d, GetLastError() %d"), ret, errorCode);
                    //App->SetStreamReport(FormattedString(TEXT("Disconnected: Socket receive failed, error %d"), errorCode));
                    App->PostStopMessage();
                    bStopping = true;
                    return;
                }
            }

            /*RTMPPacket packet;
            zero(&packet, sizeof(packet));

            do
            {
                RTMP_ReadPacket(rtmp, &packet);
            } while (!RTMPPacket_IsReady(&packet) && RTMP_IsConnected(rtmp));

            if(!RTMP_IsConnected(rtmp))
            {
                App->PostStopMessage();
                bStopping = true;
            }

            RTMPPacket_Free(&packet);*/
        }
        
        if (networkEvents.lNetworkEvents & FD_WRITE)
        {
            while (!bStopping)
            {
                status = WaitForSingleObject(hBufferEvent, INFINITE);
                if (status == WAIT_ABANDONED || status == WAIT_FAILED)
                {
                    Log(TEXT("RTMPPublisher::SocketLoop: Aborting due to hBufferEvent mutex"));
                    return;
                }

                if (bStopping)
                {
                    Log(TEXT("RTMPPublisher::SocketLoop: Aborting due to bStopping"));
                    return;
                }

                OSEnterMutex(hDataBufferMutex);

                if (!curDataBufferLen)
                {
                    OSLeaveMutex(hDataBufferMutex);
                    Log(TEXT("RTMPPublisher::SocketLoop: Trying to send, but no data available?!"));
                    continue;
                }
                
                int ret;
                if (lowLatencyMode != LL_MODE_NONE)
                {
                    int sendLength = min (latencyPacketSize, curDataBufferLen);
                    ret = send(rtmp->m_sb.sb_socket, (const char *)dataBuffer, sendLength, 0);
                }
                else
                {
                    ret = send(rtmp->m_sb.sb_socket, (const char *)dataBuffer, curDataBufferLen, 0);
                }

                if (ret > 0)
                {
                    if (curDataBufferLen - ret)
                        memmove(dataBuffer, dataBuffer + ret, curDataBufferLen - ret);
                    curDataBufferLen -= ret;

                    bytesSent += ret;

                    SetEvent(hBufferSpaceAvailableEvent);
                }
                else
                {
                    int errorCode;
                    BOOL fatalError = FALSE;

                    if (ret == -1)
                    {
                        errorCode = WSAGetLastError();

                        if (errorCode == WSAEWOULDBLOCK)
                        {
                            OSLeaveMutex(hDataBufferMutex);
                            break;
                        }

                        fatalError = TRUE;
                    }
                    else if (ret == 0)
                    {
                        errorCode = 0;
                        fatalError = TRUE;
                    }

                    if (fatalError)
                    {
                        //connection closed, or connection was aborted / socket closed / etc, that's a fatal error for us.
                        Log(TEXT("RTMPPublisher::SocketLoop: Socket error, send() returned %d, GetLastError() %d"), ret, errorCode);
                        //App->SetStreamReport(FormattedString(TEXT("Disconnected: Socket send failed, error %d"), errorCode));
                        OSLeaveMutex(hDataBufferMutex);
                        App->PostStopMessage();
                        bStopping = true;
                        return;
                    }
                }

                if (curDataBufferLen <= 1000)
                    ResetEvent(hBufferEvent);

                OSLeaveMutex(hDataBufferMutex);

                if (delayTime)
                    Sleep (delayTime);
            }
        }
    } while (!bStopping);

    Log(TEXT("RTMPPublisher::SocketLoop: Aborting due to loop exit"));
}
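
The three pacing modes at the top of SocketLoop() reduce to two numbers: how many bytes each send() may move, and how long to sleep between sends. Restating that setup as a standalone function makes the arithmetic easier to audit; the enum values mirror the constants used above, and, as in the original, LL_MODE_FIXED assumes latencyFactor > 2:

enum LowLatencyMode { LL_MODE_NONE, LL_MODE_AUTO, LL_MODE_FIXED };

// Sketch of the pacing setup in SocketLoop(). In auto mode this assumes
// dataBufferSize is sized to roughly one second of stream data, so
// sending one ~MTU chunk per delayTime ms tracks the stream bitrate.
void ComputePacing(LowLatencyMode mode, int dataBufferSize,
                   int latencyFactor, int &delayTime, int &latencyPacketSize)
{
    if(mode == LL_MODE_AUTO)
    {
        // one MTU-sized chunk, then wait long enough for the buffer to
        // drain at the stream's data rate
        delayTime = int(1400.0f / (dataBufferSize / 1000.0f));
        latencyPacketSize = 1460;
    }
    else if(mode == LL_MODE_FIXED)
    {
        // latencyFactor - 2 keeps the send rate slightly above the
        // expected data rate so the buffer never backs up
        latencyPacketSize = dataBufferSize / (latencyFactor - 2);
        delayTime = 1000 / latencyFactor;
    }
    else
    {
        latencyPacketSize = dataBufferSize;  // one big send, no pacing
        delayTime = 0;
    }
}
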
Example no. 24
void OBS::Stop()
{
    if(!bRunning) return;

    OSEnterMutex(hStartupShutdownMutex);

    ReportStopStreamTrigger();

    bRunning = false;
    if(hMainThread)
    {
        OSTerminateThread(hMainThread, 20000);
        hMainThread = NULL;
    }

    for(UINT i=0; i<globalSources.Num(); i++)
        globalSources[i].source->EndScene();

    if(scene)
        scene->EndScene();

    //-------------------------------------------------------------

    if(hSoundThread)
    {
        //ReleaseSemaphore(hRequestAudioEvent, 1, NULL);
        OSTerminateThread(hSoundThread, 20000);
    }

    //if(hRequestAudioEvent)
    //    CloseHandle(hRequestAudioEvent);
    if(hSoundDataMutex)
        OSCloseMutex(hSoundDataMutex);

    hSoundThread = NULL;
    //hRequestAudioEvent = NULL;
    hSoundDataMutex = NULL;

    //-------------------------------------------------------------

    StopBlankSoundPlayback();

    //-------------------------------------------------------------

    delete fileStream;
    fileStream = NULL;

    delete network;
    network = NULL;

    delete micAudio;
    micAudio = NULL;

    delete desktopAudio;
    desktopAudio = NULL;

    delete audioEncoder;
    audioEncoder = NULL;

    delete videoEncoder;
    videoEncoder = NULL;

    //-------------------------------------------------------------

    for(UINT i=0; i<pendingAudioFrames.Num(); i++)
        pendingAudioFrames[i].audioData.Clear();
    pendingAudioFrames.Clear();

    //-------------------------------------------------------------

    if(GS)
        GS->UnloadAllData();

    //-------------------------------------------------------------

    delete scene;
    scene = NULL;

    for(UINT i=0; i<globalSources.Num(); i++)
        globalSources[i].FreeData();
    globalSources.Clear();

    //-------------------------------------------------------------

    for(UINT i=0; i<auxAudioSources.Num(); i++)
        delete auxAudioSources[i];
    auxAudioSources.Clear();

    //-------------------------------------------------------------

    for(UINT i=0; i<NUM_RENDER_BUFFERS; i++)
    {
        delete mainRenderTextures[i];
        delete yuvRenderTextures[i];

        mainRenderTextures[i] = NULL;
        yuvRenderTextures[i] = NULL;
    }

    for(UINT i=0; i<NUM_RENDER_BUFFERS; i++)
    {
        SafeRelease(copyTextures[i]);
    }

    delete transitionTexture;
    transitionTexture = NULL;

    //-------------------------------------------------------------

    delete mainVertexShader;
    delete mainPixelShader;
    delete yuvScalePixelShader;

    delete solidVertexShader;
    delete solidPixelShader;

    mainVertexShader = NULL;
    mainPixelShader = NULL;
    yuvScalePixelShader = NULL;

    solidVertexShader = NULL;
    solidPixelShader = NULL;

    //-------------------------------------------------------------

    delete GS;
    GS = NULL;

    //-------------------------------------------------------------

    ResizeRenderFrame(false);
    RedrawWindow(hwndRenderFrame, NULL, NULL, RDW_INVALIDATE|RDW_UPDATENOW);

    //-------------------------------------------------------------

    AudioDeviceList audioDevices;
    GetAudioDevices(audioDevices, ADT_RECORDING);

    String strDevice = AppConfig->GetString(TEXT("Audio"), TEXT("Device"), NULL);
    if(strDevice.IsEmpty() || !audioDevices.HasID(strDevice))
    {
        AppConfig->SetString(TEXT("Audio"), TEXT("Device"), TEXT("Disable"));
        strDevice = TEXT("Disable");
    }

    audioDevices.FreeData();
    EnableWindow(GetDlgItem(hwndMain, ID_MICVOLUME), !strDevice.CompareI(TEXT("Disable")));

    //-------------------------------------------------------------

    ClearStreamInfo();

    Log(TEXT("=====Stream End: %s================================================="), CurrentDateTime().Array());

    //update notification icon to reflect current status
    UpdateNotificationAreaIcon();

    SetWindowText(GetDlgItem(hwndMain, ID_TESTSTREAM), Str("MainWindow.TestStream"));
    EnableWindow(GetDlgItem(hwndMain, ID_STARTSTOP), TRUE);
    SetWindowText(GetDlgItem(hwndMain, ID_STARTSTOP), Str("MainWindow.StartStream"));
    EnableWindow(GetDlgItem(hwndMain, ID_TESTSTREAM), TRUE);

    bEditMode = false;
    SendMessage(GetDlgItem(hwndMain, ID_SCENEEDITOR), BM_SETCHECK, BST_UNCHECKED, 0);
    EnableWindow(GetDlgItem(hwndMain, ID_SCENEEDITOR), FALSE);
    ClearStatusBar();

    InvalidateRect(hwndRenderFrame, NULL, TRUE);

    for(UINT i=0; i<bufferedVideo.Num(); i++)
        bufferedVideo[i].Clear();
    bufferedVideo.Clear();

    SystemParametersInfo(SPI_SETSCREENSAVEACTIVE, 1, 0, 0);
    SetThreadExecutionState(ES_CONTINUOUS);

    bTestStream = false;

    UpdateRenderViewMessage();

    OSLeaveMutex(hStartupShutdownMutex);
}
Example no. 25
void ClearGLData()
{
    if(copyData)
        copyData->lastRendered = -1;

    if(hCopyThread)
    {
        bKillThread = true;
        SetEvent(hCopyEvent);
        if(WaitForSingleObject(hCopyThread, 500) != WAIT_OBJECT_0)
            TerminateThread(hCopyThread, -1);

        CloseHandle(hCopyThread);
        CloseHandle(hCopyEvent);

        hCopyThread = NULL;
        hCopyEvent = NULL;
    }

    for(int i=0; i<NUM_BUFFERS; i++)
    {
        if(glLockedTextures[i])
        {
            OSEnterMutex(glDataMutexes[i]);

            glBindBuffer(GL_PIXEL_PACK_BUFFER, gltextures[i]);
            glUnmapBuffer(GL_PIXEL_PACK_BUFFER);
            glBindBuffer(GL_PIXEL_PACK_BUFFER, 0);

            glLockedTextures[i] = false;

            OSLeaveMutex(glDataMutexes[i]);
        }
    }

    if(bHasTextures)
    {
        glDeleteBuffers(NUM_BUFFERS, gltextures);
        bHasTextures = false;

        ZeroMemory(gltextures, sizeof(gltextures));
    }

    for(int i=0; i<NUM_BUFFERS; i++)
    {
        if(glDataMutexes[i])
        {
            OSCloseMutex(glDataMutexes[i]);
            glDataMutexes[i] = NULL;
        }
    }

    DestroySharedMemory();
    copyData = NULL;
    copyWait = 0;
    lastTime = 0;
    curCapture = 0;
    curCPUTexture = 0;
    keepAliveTime = 0;
    resetCount++;
    pCopyData = NULL;

    logOutput << CurrentTimeString() << "---------------------- Cleared OpenGL Capture ----------------------" << endl;
}