Example #1
File: API.cpp Project: Aslai/OBS
void OBS::CallHotkey(DWORD hotkeyID, bool bDown)
{
    OBSAPIInterface *apiInterface = (OBSAPIInterface*)API;
    OBSHOTKEYPROC hotkeyProc = NULL;
    DWORD hotkey = 0;
    UPARAM param = NULL;

    OSEnterMutex(hHotkeyMutex);

    for(UINT i=0; i<apiInterface->hotkeys.Num(); i++)
    {
        HotkeyInfo &hi = apiInterface->hotkeys[i];
        if(hi.hotkeyID == hotkeyID)
        {
            if (!hi.hotkeyProc)
            {
                OSLeaveMutex(hHotkeyMutex);
                return;
            }

            hotkeyProc  = hi.hotkeyProc;
            param       = hi.param;
            hotkey      = hi.hotkey;
            break;
        }
    }

    OSLeaveMutex(hHotkeyMutex);

    if (!hotkeyProc)
        return;

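    //the handler is invoked only after the hotkey mutex has been released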
    hotkeyProc(hotkey, param, bDown);
}
Example #2
bool DeviceAudioSource::GetNextBuffer(void **buffer, UINT *numFrames, QWORD *timestamp)
{
    if(sampleBuffer.Num() >= sampleSegmentSize)
    {
        OSEnterMutex(hAudioMutex);

        if (!sampleBuffer.Num())
        {
            OSLeaveMutex(hAudioMutex);
            return false;
        }

        mcpy(outputBuffer.Array(), sampleBuffer.Array(), sampleSegmentSize);
        sampleBuffer.RemoveRange(0, sampleSegmentSize);

        OSLeaveMutex(hAudioMutex);

        *buffer = outputBuffer.Array();
        *numFrames = sampleFrameCount;

        *timestamp = 0; //assign the timestamp
        if (!lastTimestamp || LastTimeTimeStamp > lastTimestamp + 10) {
            *timestamp = LastTimeTimeStamp;
        }
        else {
            *timestamp = lastTimestamp + 10;
        }
        //Log(TEXT("hhhhhhhBLiveGetAudioTime() = %llu,*timestamp = %llu"), BLiveGetAudioTime(), *timestamp);
        lastTimestamp = *timestamp;
        return true;
    }

    return false;
}
Example #3
void PipeAudioSource::ReceiveAudio(LPBYTE lpData, UINT dataLength,long long pts)
{
	if (lpData)
    {
		bool bPlayLive = false;
		if (bLiveIntance)
		{
			OSEnterMutex(hAudioMutex);
			sampleBuffer.AppendArray(lpData, dataLength);
// 			AudioTimestamp audioTimestamp;
// 			audioTimestamp.count = dataLength / m_iBlockSize;
// 			audioTimestamp.pts = pts;
// 			sampleBufferPts.push_back(audioTimestamp);
// 			if (pts - m_iLastPts > 50 || m_iLastPts - pts > 50)
// 			{
// 			Log::writeMessage(LOG_RTSPSERV, 1, "LINE:%d,FUNC:%s audio data jitter detected: LastPts : %lld. CurrentPts : %lld. diff = %lld.",
// 				__LINE__, __FUNCTION__, m_lTimeDiffWithAPI + m_iLastPts, m_lTimeDiffWithAPI + pts, pts - m_iLastPts);
// 			}
// 			m_iLastPts = pts;
			OSLeaveMutex(hAudioMutex);
			bPlayLive = m_bPlayPcmLive;
		}
		else
		{
			OSEnterMutex(hAudioMutex);
			sampleBuffer.RemoveRange(0, sampleBuffer.Num());
			OSLeaveMutex(hAudioMutex);
		}

		OSEnterMutex(hAudioMutex);
		int Len = dataLength;
		if (m_pAudioWaveOut && (m_bPlayPcmLocal || bPlayLive))
		{
			char *OutBuffer;
			CaculateVolume((LPVOID)lpData, Len, (void**)&OutBuffer);

			m_pAudioWaveOut->push_pcm_data((char*)OutBuffer, Len);

			if (!bSameDevice && bProjector && m_pSecWaveOut)
				m_pSecWaveOut->push_pcm_data((char*)OutBuffer, Len);

		}
		else if (bProjector)
		{
			char *OutBuffer;
			CaculateVolume((LPVOID)lpData, Len, (void**)&OutBuffer);

			if (bSameDevice && m_pAudioWaveOut)
			{
				m_pAudioWaveOut->push_pcm_data((char*)OutBuffer, Len);
			}
			else if (m_pSecWaveOut)
			{
				m_pSecWaveOut->push_pcm_data((char*)OutBuffer, Len);
			}
		}
		OSLeaveMutex(hAudioMutex);
    }
}
Example #4
int RTMPPublisher::BufferedSend(RTMPSockBuf *sb, const char *buf, int len, RTMPPublisher *network)
{
    bool bWasEmpty = false;
    bool bComplete = false;
    int fullLen = len;

retrySend:

    OSEnterMutex(network->hDataBufferMutex);

    if (network->curDataBufferLen + len >= network->dataBufferSize)
    {
        ULONG idealSendBacklog;

        //Log(TEXT("RTMPPublisher::BufferedSend: Socket buffer is full (%d / %d bytes), waiting to send %d bytes"), network->curDataBufferLen, network->dataBufferSize, len);
        ++network->totalTimesWaited;
        network->totalBytesWaited += len;

        OSLeaveMutex(network->hDataBufferMutex);

        if (!idealsendbacklogquery(sb->sb_socket, &idealSendBacklog))
        {
            int curTCPBufSize, curTCPBufSizeSize = sizeof(curTCPBufSize);
            getsockopt (sb->sb_socket, SOL_SOCKET, SO_SNDBUF, (char *)&curTCPBufSize, &curTCPBufSizeSize);

            if (curTCPBufSize < (int)idealSendBacklog)
            {
                int bufferSize = (int)idealSendBacklog;
                setsockopt(sb->sb_socket, SOL_SOCKET, SO_SNDBUF, (const char *)&bufferSize, sizeof(bufferSize));
                Log(TEXT("RTMPPublisher::BufferedSend: Increasing socket send buffer to ISB %d (was %d)"), idealSendBacklog, curTCPBufSize);
            }
        }

        int status = WaitForSingleObject(network->hBufferSpaceAvailableEvent, INFINITE);
        if (status == WAIT_ABANDONED || status == WAIT_FAILED || network->bStopping)
            return 0;
        goto retrySend;
    }

    if (network->curDataBufferLen <= 1000)
        bWasEmpty = true;

    mcpy(network->dataBuffer + network->curDataBufferLen, buf, len);
    network->curDataBufferLen += len;

    if (bWasEmpty)
        SetEvent (network->hBufferEvent);

    OSLeaveMutex(network->hDataBufferMutex);

    return len;
}
Example #5
void MemoryCapture::Destroy()
{
    bInitialized = false;

    if(hMemoryMutex)
        OSEnterMutex(hMemoryMutex);

    copyData = NULL;
    textureBuffers[0] = NULL;
    textureBuffers[1] = NULL;
    delete texture;
    texture = NULL;

    if(sharedMemory)
        UnmapViewOfFile(sharedMemory);

    if(hFileMap)
        CloseHandle(hFileMap);

    if(hMemoryMutex)
    {
        OSLeaveMutex(hMemoryMutex);
        OSCloseMutex(hMemoryMutex);
    }
}
Example #6
void AgentSource::Render(const Vect2 &pos, const Vect2 &size, Texture *texture, bool bScaleFull, bool bIsLiveC)
{
    OSEnterMutex(HLock);
    if (globalSource)
        globalSource->Render(pos, size, texture, bScaleFull);
    OSLeaveMutex(HLock);
}
Example #7
File: OBS.cpp Project: SeargeDP/OBS
void OBS::SetStatusBarData()
{
    if (bRunning && OSTryEnterMutex(hStartupShutdownMutex))
    {
        if (!App->network)
        {
            OSLeaveMutex(hStartupShutdownMutex); //release the mutex before the early return
            return;
        }

        HWND hwndStatusBar = GetDlgItem(hwndMain, ID_STATUS);

        SendMessage(hwndStatusBar, WM_SETREDRAW, 0, 0);
        SendMessage(hwndStatusBar, SB_SETTEXT, 0 | SBT_OWNERDRAW, NULL);
        SendMessage(hwndStatusBar, SB_SETTEXT, 1 | SBT_OWNERDRAW, NULL);
        SendMessage(hwndStatusBar, SB_SETTEXT, 2 | SBT_OWNERDRAW, NULL);
        SendMessage(hwndStatusBar, SB_SETTEXT, 3 | SBT_OWNERDRAW, NULL);
        SendMessage(hwndStatusBar, SB_SETTEXT, 4 | SBT_OWNERDRAW, NULL);

        SendMessage(hwndStatusBar, WM_SETREDRAW, 1, 0);
        InvalidateRect(hwndStatusBar, NULL, FALSE);
    
        if(bRunning)
        {
            ReportStreamStatus(bRunning, bTestStream, 
                (UINT) App->bytesPerSec, App->curStrain, 
                (UINT)this->totalStreamTime, (UINT)App->network->NumTotalVideoFrames(),
                (UINT)App->curFramesDropped, (UINT) App->captureFPS);
        }

        OSLeaveMutex(hStartupShutdownMutex);
    }
}
Example #8
UINT OBSAPIInterface::CreateHotkey(DWORD hotkey, OBSHOTKEYPROC hotkeyProc, UPARAM param)
{
    if(!hotkey)
        return 0;

    //FIXME: vk and fsModifiers aren't used?
    DWORD vk = LOBYTE(hotkey);
    DWORD modifier = HIBYTE(hotkey);
    DWORD fsModifiers = 0;

    if(modifier & HOTKEYF_ALT)
        fsModifiers |= MOD_ALT;
    if(modifier & HOTKEYF_CONTROL)
        fsModifiers |= MOD_CONTROL;
    if(modifier & HOTKEYF_SHIFT)
        fsModifiers |= MOD_SHIFT;

    OSEnterMutex(App->hHotkeyMutex);
    HotkeyInfo &hi      = *hotkeys.CreateNew();
    hi.hotkeyID         = ++curHotkeyIDVal;
    hi.hotkey           = hotkey;
    hi.hotkeyProc       = hotkeyProc;
    hi.param            = param;
    hi.bModifiersDown   = false;
    hi.bHotkeyDown      = false;
    OSLeaveMutex(App->hHotkeyMutex);

    return curHotkeyIDVal;
}
Example #9
bool OBS::QueryNewAudio()
{
    bool bAudioBufferFilled = false;

    while (!bAudioBufferFilled) {
        bool bGotAudio = false;

        if ((desktopAudio->QueryAudio2(curDesktopVol)) != NoAudioAvailable) {
            QueryAudioBuffers(true);
            bGotAudio = true;
        }

        bAudioBufferFilled = desktopAudio->GetBufferedTime() >= App->bufferingTime;

        if (!bGotAudio && bAudioBufferFilled)
            QueryAudioBuffers(false);

        if (bAudioBufferFilled || !bGotAudio)
            break;
    }

    /* wait until buffers are completely filled before accounting for burst */
    if (!bAudioBufferFilled)
    {
        QWORD timestamp;
        int burst = 0;

        // No more desktop data, drain auxiliary/mic buffers until they're dry to prevent burst data
        OSEnterMutex(hAuxAudioMutex);
        for(UINT i=0; i<auxAudioSources.Num(); i++)
        {
            while (auxAudioSources[i]->QueryAudio2(auxAudioSources[i]->GetVolume(), true) != NoAudioAvailable)
                burst++;

            if (auxAudioSources[i]->GetLatestTimestamp(timestamp))
                auxAudioSources[i]->SortAudio(timestamp);

            /*if (burst > 10)
                Log(L"Burst happened for %s", auxAudioSources[i]->GetDeviceName2());*/
        }

        OSLeaveMutex(hAuxAudioMutex);

        burst = 0;

        if (micAudio)
        {
            while (micAudio->QueryAudio2(curMicVol, true) != NoAudioAvailable)
                burst++;

            /*if (burst > 10)
                Log(L"Burst happened for %s", micAudio->GetDeviceName2());*/

            if (micAudio->GetLatestTimestamp(timestamp))
                micAudio->SortAudio(timestamp);
        }
    }

    return bAudioBufferFilled;
}
Example #10
bool OBS::QueryAudioBuffers(bool bQueriedDesktopDebugParam)
{
    bool bGotSomeAudio = false;

    if (!latestAudioTime) {
        desktopAudio->GetEarliestTimestamp(latestAudioTime); //will always return true
    } else {
        QWORD latestDesktopTimestamp;
        if (desktopAudio->GetLatestTimestamp(latestDesktopTimestamp)) {
            if ((latestAudioTime+10) > latestDesktopTimestamp)
                return false;
        }
        latestAudioTime += 10;
    }

    bufferedAudioTimes << latestAudioTime;

    OSEnterMutex(hAuxAudioMutex);
    for(UINT i=0; i<auxAudioSources.Num(); i++)
    {
        if (auxAudioSources[i]->QueryAudio2(auxAudioSources[i]->GetVolume(), true) != NoAudioAvailable)
            bGotSomeAudio = true;
    }

    OSLeaveMutex(hAuxAudioMutex);

    if(micAudio != NULL)
    {
        if (micAudio->QueryAudio2(curMicVol, true) != NoAudioAvailable)
            bGotSomeAudio = true;
    }

    return bGotSomeAudio;
}
Example #11
void WebSocketOBSTriggerHandler::ScenesChanged()
{
    json_t* update = json_object();
    json_object_set_new(update, "update-type", json_string("ScenesChanged"));
    
    OSEnterMutex(this->updateQueueMutex);
    this->updates.Add(update);
    OSLeaveMutex(this->updateQueueMutex);
}
Example #12
void WebSocketOBSTriggerHandler::ScenesSwitching(CTSTR scene)
{
    json_t* update = json_object();
    json_object_set_new(update, "update-type", json_string("SwitchScenes"));
    json_object_set_new(update, "scene-name", json_string_wchar(scene));

    OSEnterMutex(this->updateQueueMutex);
    this->updates.Add(update);
    OSLeaveMutex(this->updateQueueMutex);
}
Example #13
void WebSocketOBSTriggerHandler::StreamStopping(bool previewOnly)
{
    json_t* update = json_object();
    json_object_set_new(update, "update-type", json_string("StreamStopping"));
    json_object_set_new(update, "preview-only", json_boolean(previewOnly));

    OSEnterMutex(this->updateQueueMutex);
    this->updates.Add(update);
    OSLeaveMutex(this->updateQueueMutex);
}
Example #14
Texture* MemoryCapture::LockTexture()
{
    LPVOID address = NULL;
    if(!bInitialized || !copyData || !texture)
        return NULL;

    OSEnterMutex(hMemoryMutex);

    curTexture = copyData->lastRendered;

    if(curTexture < 2)
    {
        DWORD nextTexture = (curTexture == 1) ? 0 : 1;
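        //try to lock the texture buffer that was rendered last, falling back to the other buffer if it is busy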
    
        if(WaitForSingleObject(textureMutexes[curTexture], 0) == WAIT_OBJECT_0)
            hMutex = textureMutexes[curTexture];
        else if(WaitForSingleObject(textureMutexes[nextTexture], 0) == WAIT_OBJECT_0)
        {
            hMutex = textureMutexes[nextTexture];
            curTexture = nextTexture;
        }

        if(hMutex)
        {
            BYTE *lpData;
            UINT texPitch;

            if(texture->Map(lpData, texPitch))
            {
                if(pitch == texPitch)
                    SSECopy(lpData, textureBuffers[curTexture], pitch*height);
                else
                {
                    UINT bestPitch = MIN(pitch, texPitch);
                    LPBYTE input = textureBuffers[curTexture];
                    for(UINT y=0; y<height; y++)
                    {
                        LPBYTE curInput  = ((LPBYTE)input)  + (pitch*y);
                        LPBYTE curOutput = ((LPBYTE)lpData) + (texPitch*y);

                        SSECopy(curOutput, curInput, bestPitch);
                    }
                }

                texture->Unmap();
            }
            ReleaseMutex(hMutex);
        }

        hMutex = NULL;
    }
    OSLeaveMutex(hMemoryMutex);

    return texture; 
}
Example #15
void DeviceAudioSource::SetLiveInstance(bool bLiveInstance)
{
	this->bLiveInstance = bLiveInstance;

	if (!bLiveInstance && hAudioMutex)
	{
		OSEnterMutex(hAudioMutex);
		sampleBuffer.RemoveRange(0, sampleBuffer.Num());
		OSLeaveMutex(hAudioMutex);
	}
}
Example #16
static void MergeProfileInfo(ProfileNodeInfo &info)
{
    OSEnterMutex(hProfilerMutex);
    ProfileNodeInfo *sum = FindProfile(info.lpName);
    if(!sum)
    {
        sum = profilerData.CreateNew();
        sum->lpName = info.lpName;
    }
    sum->MergeProfileInfo(&info, info.lastCall, sum->lastCall + info.lastCall);
    OSLeaveMutex(hProfilerMutex);
}
Example #17
void DeviceSource::Preprocess()
{
    if(!bCapturing)
        return;

    //----------------------------------------

    if(bRequestVolume)
    {
        if(audioOut)
            audioOut->SetVolume(fNewVol);
        else if(audioFilter)
        {
            IBasicAudio *basicAudio;
            if(SUCCEEDED(audioFilter->QueryInterface(IID_IBasicAudio, (void**)&basicAudio)))
            {
                long lVol = long((double(fNewVol)*NEAR_SILENTf)-NEAR_SILENTf);
                if(lVol <= -NEAR_SILENT)
                    lVol = -10000;
                basicAudio->put_Volume(lVol);
                basicAudio->Release();
            }
        }
        bRequestVolume = false;
    }

    //----------------------------------------

    SampleData *lastSample = NULL;

    OSEnterMutex(hSampleMutex);

    lastSample = latestVideoSample;
    latestVideoSample = NULL;

    OSLeaveMutex(hSampleMutex);

    //----------------------------------------

    int numThreads = MAX(OSGetTotalCores()-2, 1);

    if(lastSample)
    {
        /*REFERENCE_TIME refTimeStart, refTimeFinish;
        lastSample->GetTime(&refTimeStart, &refTimeFinish);

        static REFERENCE_TIME lastRefTime = 0;
        //Log(TEXT("refTimeStart: %llu, refTimeFinish: %llu, offset = %llu"), refTimeStart, refTimeFinish, refTimeStart-lastRefTime);
        lastRefTime = refTimeStart;*/

        lastSample->Release();
    }
}
Example #18
json_t* WebSocketOBSTriggerHandler::popUpdate()
{
    OSEnterMutex(this->updateQueueMutex);
    json_t* ret = NULL;
    if(this->updates.Num() > 0)
    {
        ret = this->updates.GetElement(0);
        this->updates.Remove(0);
    }
    OSLeaveMutex(this->updateQueueMutex);

    return ret;
}
Example #19
int RTMPPublisher::BufferedSend(RTMPSockBuf *sb, const char *buf, int len, RTMPPublisher *network)
{
    bool bComplete = false;
    int fullLen = len;

    //NOTE: This function is called from the SendLoop thread, be careful of race conditions.

retrySend:

    //We may have been disconnected mid-shutdown or something, just pretend we wrote the data
    //to avoid blocking if the socket loop exited.
    if (!RTMP_IsConnected(network->rtmp))
        return len;

    OSEnterMutex(network->hDataBufferMutex);

    if (network->curDataBufferLen + len >= network->dataBufferSize)
    {
        //Log(TEXT("RTMPPublisher::BufferedSend: Socket buffer is full (%d / %d bytes), waiting to send %d bytes"), network->curDataBufferLen, network->dataBufferSize, len);
        ++network->totalTimesWaited;
        network->totalBytesWaited += len;

        OSLeaveMutex(network->hDataBufferMutex);

        int status = WaitForSingleObject(network->hBufferSpaceAvailableEvent, INFINITE);
        if (status == WAIT_ABANDONED || status == WAIT_FAILED)
            return 0;
        goto retrySend;
    }

    mcpy(network->dataBuffer + network->curDataBufferLen, buf, len);
    network->curDataBufferLen += len;

    OSLeaveMutex(network->hDataBufferMutex);

    SetEvent (network->hBufferEvent);

    return len;
}
Example #20
UINT OBS::AddStreamInfo(CTSTR lpInfo, StreamInfoPriority priority)
{
    OSEnterMutex(hInfoMutex);

    StreamInfo &streamInfo = *streamInfoList.CreateNew();
    UINT id = streamInfo.id = ++streamInfoIDCounter;
    streamInfo.priority = priority;
    streamInfo.strInfo = lpInfo;

    OSLeaveMutex(hInfoMutex);

    return id;
}
Example #21
void OBSAPIInterface::DeleteHotkey(UINT hotkeyID)
{
    OSEnterMutex(App->hHotkeyMutex);
    for(UINT i=0; i<hotkeys.Num(); i++)
    {
        if(hotkeys[i].hotkeyID == hotkeyID)
        {
            hotkeys.Remove(i);
            break;
        }
    }
    OSLeaveMutex(App->hHotkeyMutex);
}
Example #22
void DeviceSource::ReceiveVideo(IMediaSample *sample)
{
    if(bCapturing)
    {
        OSEnterMutex(hSampleMutex);

        SafeRelease(curSample);
        curSample = sample;
        curSample->AddRef();

        OSLeaveMutex(hSampleMutex);
    }
}
Example #23
Vect2 AgentSource::GetSize() const
{
    float cx = data["cx"].asInt();
    float cy = data["cy"].asInt();
    Vect2 Value(cx, cy);

    OSEnterMutex(HLock);
    if (globalSource)
        Value = globalSource->GetSize();
    OSLeaveMutex(HLock);

    return Value;
}
Example #24
void OBS::EncodeAudioSegment(float *buffer, UINT numFrames, QWORD timestamp)
{
    DataPacket packet;
    if(audioEncoder->Encode(buffer, numFrames, packet, timestamp))
    {
        OSEnterMutex(hSoundDataMutex);

        FrameAudio *frameAudio = pendingAudioFrames.CreateNew();
        frameAudio->audioData.CopyArray(packet.lpPacket, packet.size);
        frameAudio->timestamp = timestamp;

        OSLeaveMutex(hSoundDataMutex);
    }
}
Example #25
void WebSocketOBSTriggerHandler::DesktopVolumeChanged(float level, bool muted, bool finalValue)
{
    json_t* update = json_object();
    json_object_set_new(update, "update-type", json_string("VolumeChanged"));
    
    json_object_set_new(update, "channel", json_string("desktop"));
    json_object_set_new(update, "volume", json_real(level));
    json_object_set_new(update, "muted", json_boolean(muted));
    json_object_set_new(update, "finalValue", json_boolean(finalValue));

    OSEnterMutex(this->updateQueueMutex);
    this->updates.Add(update);
    OSLeaveMutex(this->updateQueueMutex);
} 
Example #26
int RTMPPublisher::FlushDataBuffer()
{
    unsigned long zero = 0;
    
    //make it blocking again
    ioctlsocket(rtmp->m_sb.sb_socket, FIONBIO, &zero);

    OSEnterMutex(hDataBufferMutex);
    int ret = send(rtmp->m_sb.sb_socket, (const char *)dataBuffer, curDataBufferLen, 0);
    curDataBufferLen = 0;
    OSLeaveMutex(hDataBufferMutex);

    return ret;
}
Example #27
void * __restrict DebugAlloc::_Allocate(size_t dwSize)
{
    if(!dwSize) return NULL;

    OSEnterMutex(hDebugMutex);

    LPVOID lpRet;

    if(bEnableTracking)
    {
        ++allocationCounter;
        ++totalAllocations;
    }

    if(bEnableTracking && allocationCounter == memoryBreakID)
        ProgramBreak();

    if((lpRet=FastAlloc::_Allocate(dwSize)) && bEnableTracking)
    {
        Allocation allocTemp;

        Allocation *new_array = (Allocation*)FastAlloc::_Allocate(sizeof(Allocation)*++numAllocations);
        zero(new_array, sizeof(Allocation)*numAllocations);


        allocTemp.Address = lpRet;
        if(lpAllocCurFile)
            scpy(allocTemp.lpFile, lpAllocCurFile);
        allocTemp.dwLine = dwAllocCurLine;

        if(bEnableTracking)
            allocTemp.allocationID = allocationCounter;
        else
            allocTemp.allocationID = INVALID;

        if(AllocationList)
            mcpy(new_array, AllocationList, sizeof(Allocation)*(numAllocations-1));

        FastAlloc::_Free(AllocationList);

        AllocationList = new_array;

        mcpy(&AllocationList[numAllocations-1], &allocTemp, sizeof(Allocation));
    }

    OSLeaveMutex(hDebugMutex);

    return lpRet;
}
Example #28
void WebSocketOBSTriggerHandler::SourceChanged(CTSTR sourceName, XElement* source)
{
    json_t* update = json_object();
    json_object_set_new(update, "update-type", json_string("SourceChanged"));

    XElement* xsources = OBSGetSceneElement()->GetElement(TEXT("sources"));
    
    json_object_set_new(update, "source-name", json_string_wchar(sourceName));
    
    json_object_set_new(update, "source", getSourceJson(source));

    OSEnterMutex(this->updateQueueMutex);
    this->updates.Add(update);
    OSLeaveMutex(this->updateQueueMutex);
}
Example #29
void OBS::SetStreamInfoPriority(UINT infoID, StreamInfoPriority priority)
{
    OSEnterMutex(hInfoMutex);

    for(UINT i=0; i<streamInfoList.Num(); i++)
    {
        if(streamInfoList[i].id == infoID)
        {
            streamInfoList[i].priority = priority;
            break;
        }
    }

    OSLeaveMutex(hInfoMutex);
}
Example #30
void OBS::SetStreamInfo(UINT infoID, CTSTR lpInfo)
{
    OSEnterMutex(hInfoMutex);

    for(UINT i=0; i<streamInfoList.Num(); i++)
    {
        if(streamInfoList[i].id == infoID)
        {
            streamInfoList[i].strInfo = lpInfo;
            break;
        }
    }

    OSLeaveMutex(hInfoMutex);
}
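A note on the pattern these examples share: each OSEnterMutex (or successful OSTryEnterMutex) is expected to be balanced by an OSLeaveMutex on the same handle, and Example #7 has to release the mutex explicitly before its early return. The sketch below is a hypothetical RAII guard built on the same two calls; it is not part of the OBS codebase, and the HANDLE parameter type is an assumption about the OS* wrapper signatures.

//Hypothetical scope guard, not from the OBS sources; assumes OSEnterMutex/OSLeaveMutex take a HANDLE
class ScopedOSMutex
{
public:
    explicit ScopedOSMutex(HANDLE hMutex) : hMutex(hMutex) { OSEnterMutex(hMutex); }
    ~ScopedOSMutex() { OSLeaveMutex(hMutex); }

private:
    ScopedOSMutex(const ScopedOSMutex&);            //non-copyable
    ScopedOSMutex& operator=(const ScopedOSMutex&); //non-assignable

    HANDLE hMutex;
};

//Usage sketch: the mutex is released on every path out of the scope, including early returns
//void OBS::SetStreamInfo(UINT infoID, CTSTR lpInfo)
//{
//    ScopedOSMutex lock(hInfoMutex);
//    ...
//}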