DWORD DeviceSource::SampleThread(DeviceSource *source)
{
    HANDLE hSampleMutex = source->hSampleMutex;
    LONGLONG lastTime = GetQPCTime100NS(), bufferTime = 0, frameWait = 0, curBufferTime = source->bufferTime;
    LONGLONG lastSampleTime = 0;

    bool bFirstFrame = true;
    bool bFirstDelay = true;

    while (WaitForSingleObject(source->hStopSampleEvent, 2) == WAIT_TIMEOUT) {
        LONGLONG t = GetQPCTime100NS();
        LONGLONG delta = t-lastTime;
        lastTime = t;

        OSEnterMutex(hSampleMutex);

        if (source->samples.Num()) {
            if (bFirstFrame) {
                bFirstFrame = false;
                lastSampleTime = source->samples[0]->timestamp;
            }

            //wait until the requested delay has been buffered before processing packets
            if (bufferTime >= source->bufferTime) {
                frameWait += delta;

                //if delay time was adjusted downward, remove packets accordingly
                bool bBufferTimeChanged = (curBufferTime != source->bufferTime);
                if (bBufferTimeChanged) {
                    if (curBufferTime > source->bufferTime) {
                        if (source->audioOut)
                            source->audioOut->FlushSamples();

                        LONGLONG lostTime = curBufferTime - source->bufferTime;
                        bufferTime -= lostTime;

                        if (source->samples.Num()) {
                            LONGLONG startTime = source->samples[0]->timestamp;

                            while (source->samples.Num()) {
                                SampleData *sample = source->samples[0];

                                if ((sample->timestamp - startTime) >= lostTime)
                                    break;

                                lastSampleTime = sample->timestamp;

                                sample->Release();
                                source->samples.Remove(0);
                            }
                        }
                    }

                    curBufferTime = source->bufferTime;
                }

                while (source->samples.Num()) {
                    SampleData *sample = source->samples[0];
                    LONGLONG timestamp = sample->timestamp;
                    LONGLONG sampleTime = timestamp - lastSampleTime;

                    //sometimes timestamps can go to shit with horrible garbage devices.
                    //so, bypass any unusual timestamp offsets.
                    if (sampleTime < -6000000 || sampleTime > 6000000) {
                        //OSDebugOut(TEXT("sample time: %lld\r\n"), sampleTime);
                        sampleTime = 0;
                    }

                    if (frameWait < sampleTime)
                        break;

                    if (sample->bAudio) {
                        if (source->audioOut)
                            source->audioOut->ReceiveAudio(sample->lpData, sample->dataLength);

                        sample->Release();
                    } else {
                        SafeRelease(source->latestVideoSample);
                        source->latestVideoSample = sample;
                    }

                    source->samples.Remove(0);

                    if (sampleTime > 0)
                        frameWait -= sampleTime;

                    lastSampleTime = timestamp;
                }
            }
        }

        OSLeaveMutex(hSampleMutex);

        if (!bFirstFrame && bufferTime < source->bufferTime)
            bufferTime += delta;
    }

    return 0;
}
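//----------------------------------------------------------------------------
// Editor's sketch: SampleThread above consumes reference-counted SampleData
// records queued by the capture callback. That type is defined elsewhere in
// the plugin and is not part of this excerpt; the layout below is a minimal,
// assumption-based reconstruction covering only the members the thread
// actually touches (timestamp, bAudio, lpData, dataLength, AddRef/Release).
// The ref-count member and the deallocation call are assumptions, not the
// project's real definition.

struct SampleData {
    LPBYTE   lpData;      //frame or audio payload
    long     dataLength;  //payload size in bytes
    LONGLONG timestamp;   //capture timestamp in 100ns units (same clock as GetQPCTime100NS)
    bool     bAudio;      //true for audio packets, false for video frames

    volatile LONG refs;   //assumed: starts at 1, shared between the capture callback and SampleThread

    inline void AddRef()  {InterlockedIncrement(&refs);}
    inline void Release()
    {
        if(!InterlockedDecrement(&refs))
        {
            free(lpData); //assumed: must match however the capture callback allocated lpData
            delete this;
        }
    }
};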
void DeviceSource::Preprocess()
{
    if(!bCapturing)
        return;

    //----------------------------------------

    if(bRequestVolume)
    {
        if(audioOut)
            audioOut->SetVolume(fNewVol);
        else if(audioFilter)
        {
            IBasicAudio *basicAudio;
            if(SUCCEEDED(audioFilter->QueryInterface(IID_IBasicAudio, (void**)&basicAudio)))
            {
                long lVol = long((double(fNewVol)*NEAR_SILENTf)-NEAR_SILENTf);
                if(lVol <= -NEAR_SILENT)
                    lVol = -10000;
                basicAudio->put_Volume(lVol);
                basicAudio->Release();
            }
        }
        bRequestVolume = false;
    }

    //----------------------------------------

    SampleData *lastSample = NULL;

    OSEnterMutex(hSampleMutex);

    lastSample = latestVideoSample;
    latestVideoSample = NULL;

    OSLeaveMutex(hSampleMutex);

    //----------------------------------------

    int numThreads = MAX(OSGetTotalCores()-2, 1);

    if(lastSample)
    {
        /*REFERENCE_TIME refTimeStart, refTimeFinish;
        lastSample->GetTime(&refTimeStart, &refTimeFinish);

        static REFERENCE_TIME lastRefTime = 0;
        Log(TEXT("refTimeStart: %llu, refTimeFinish: %llu, offset = %llu"), refTimeStart, refTimeFinish, refTimeStart-lastRefTime);
        lastRefTime = refTimeStart;*/

        if(colorType == DeviceOutputType_RGB)
        {
            if(texture)
            {
                texture->SetImage(lastSample->lpData, GS_IMAGEFORMAT_BGRX, renderCX*4);
                bReadyToDraw = true;
            }
        }
        else if(colorType == DeviceOutputType_I420 || colorType == DeviceOutputType_YV12)
        {
            if(bUseThreadedConversion)
            {
                if(!bFirstFrame)
                {
                    List<HANDLE> events;
                    for(int i=0; i<numThreads; i++)
                        events << convertData[i].hSignalComplete;

                    WaitForMultipleObjects(numThreads, events.Array(), TRUE, INFINITE);
                    texture->SetImage(lpImageBuffer, GS_IMAGEFORMAT_RGBX, texturePitch);

                    bReadyToDraw = true;
                }
                else
                    bFirstFrame = false;

                for(int i=0; i<numThreads; i++)
                    lastSample->AddRef();

                for(int i=0; i<numThreads; i++)
                {
                    convertData[i].input  = lastSample->lpData;
                    convertData[i].sample = lastSample;
                    convertData[i].pitch  = texturePitch;
                    convertData[i].output = lpImageBuffer;
                    SetEvent(convertData[i].hSignalConvert);
                }
            }
            else
            {
                LPBYTE lpData;
                UINT pitch;

                if(texture->Map(lpData, pitch))
                {
                    PackPlanar(lpData, lastSample->lpData, renderCX, renderCY, pitch, 0, renderCY);
                    texture->Unmap();
                }

                bReadyToDraw = true;
            }
        }
        else if(colorType == DeviceOutputType_YVYU || colorType == DeviceOutputType_YUY2)
        {
            LPBYTE lpData;
            UINT pitch;

            if(texture->Map(lpData, pitch))
            {
                Convert422To444(lpData, lastSample->lpData, pitch, true);
                texture->Unmap();
            }

            bReadyToDraw = true;
        }
        else if(colorType == DeviceOutputType_UYVY || colorType == DeviceOutputType_HDYC)
        {
            LPBYTE lpData;
            UINT pitch;

            if(texture->Map(lpData, pitch))
            {
                Convert422To444(lpData, lastSample->lpData, pitch, false);
                texture->Unmap();
            }

            bReadyToDraw = true;
        }

        lastSample->Release();
    }
}
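//----------------------------------------------------------------------------
// Editor's sketch: the threaded I420/YV12 path in Preprocess() only shows the
// producer side. It fills convertData[i] and signals hSignalConvert, then on
// the next frame waits on every hSignalComplete before uploading lpImageBuffer.
// The worker side is not part of this excerpt, so the code below is an
// assumption-based illustration of such a worker. ConvertData's extra members
// (width, height, startY, endY, bKillThread) and the PackPlanarThread name are
// hypothetical; only input/output/pitch/sample and the two events appear in
// Preprocess() above. It also assumes PackPlanar is callable as a free
// function with the (output, input, width, height, pitch, startY, endY)
// argument order used in the unthreaded path.

struct ConvertData {
    LPBYTE      input, output;    //source planar frame and shared RGBX destination buffer
    UINT        width, height;    //full frame dimensions (assumed, set once at startup)
    UINT        pitch;            //destination pitch, matches texturePitch
    UINT        startY, endY;     //row slice this worker converts (assumed)
    SampleData  *sample;          //sample holding the reference taken for this worker
    HANDLE      hSignalConvert;   //set by Preprocess() when new input is ready
    HANDLE      hSignalComplete;  //set by the worker when its slice is done
    bool        bKillThread;      //assumed shutdown flag, set before signaling hSignalConvert
};

DWORD WINAPI PackPlanarThread(LPVOID lpParam)
{
    ConvertData *data = (ConvertData*)lpParam;

    for(;;)
    {
        WaitForSingleObject(data->hSignalConvert, INFINITE);
        if(data->bKillThread)
            break;

        //convert this worker's slice of rows, then drop the reference taken in Preprocess()
        PackPlanar(data->output, data->input, data->width, data->height, data->pitch, data->startY, data->endY);
        data->sample->Release();

        SetEvent(data->hSignalComplete);
    }

    return 0;
}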