// Pops the next ready sample off the scheduling queue.
// Returns 0 when the queue is empty or the head sample is not yet due;
// otherwise returns the sample (caller receives its own reference) and
// sets *eos to whether this was the final sample of the stream.
IMediaSample *DirectShowSampleScheduler::takeSample(bool *eos)
{
    QMutexLocker locker(&m_mutex);

    // Guard: nothing queued, or the head's presentation time hasn't arrived.
    if (!m_head || !m_head->isReady(m_clock))
        return 0;

    IMediaSample *readySample = m_head->sample();
    readySample->AddRef();       // the caller owns a reference on the returned sample

    *eos = m_head->isLast();     // report end-of-stream only when a sample is taken

    // Unlink the head node; keep the tail pointer consistent when the queue empties.
    m_head = m_head->remove();
    if (!m_head)
        m_tail = 0;

    m_semaphore.release(1);      // free one queue slot for the producer side
    return readySample;
}
//---------------------------------------------------------------------------- //! @brief 次のサンプルを得る //! @param pSample : サンプルを返すポインタのポインタ //! @return エラーコード //---------------------------------------------------------------------------- HRESULT CWMOutput::GetNextSample( IMediaSample **pSample ) { HRESULT hr; if( m_StreamNum == 0 || pSample == NULL ) return S_FALSE; // このストリームはない INSSBuffer *pWMSample = NULL; QWORD cnsSampleTime; QWORD cnsDuration; DWORD dwFlags; if( FAILED(hr = WMReader()->GetNextSample( m_StreamNum, &pWMSample, &cnsSampleTime, &cnsDuration, &dwFlags, NULL, NULL )) ) { if( hr == NS_E_NO_MORE_SAMPLES ) return S_FALSE; return hr; } REFERENCE_TIME startTime = (REFERENCE_TIME)cnsSampleTime; REFERENCE_TIME endTime = (REFERENCE_TIME)(cnsSampleTime + cnsDuration); IMediaSample *pOutSample = reinterpret_cast<CWMBuffer*>(pWMSample)->GetSample(); pOutSample->AddRef(); pWMSample->Release(); pOutSample->SetMediaTime(&startTime, &endTime); #if 0 if( startTime < Reader()->m_StartTime ) pOutSample->SetPreroll(TRUE); else pOutSample->SetPreroll(FALSE); #endif startTime -= Reader()->m_StartTime; endTime -= Reader()->m_StartTime; pOutSample->SetTime(&startTime, &endTime); pOutSample->SetSyncPoint(dwFlags & WM_SF_CLEANPOINT); *pSample = pOutSample; return hr; }
STDMETHODIMP CGraphConnector::Receive(IMediaSample *pSample) { //!!!make sure to be after buffer fetch or it will deadlock ! //!!!do not use Autolock // CAutoLock lock( &m_QueueLock ); #ifdef DEBUGGING_IN_TESTBENCH LONGLONG start,end; pSample->GetTime( &start, &end ); DlogVideo6("%d)Video input sample size %d for connector %p, queued %d, start %d, end %d\n", PacketCounterIn++, pSample->GetActualDataLength(), this, m_pSampleQueue[0].size(),(int)start,(int)end ); DlogAudio6("%d)Audio input sample size %d for connector %p, queued %d, start %d, end %d\n", PacketCounterIn++, pSample->GetActualDataLength(), this, m_pSampleQueue[0].size(),(int)start,(int)end ); #endif IMediaSample *pLocalSample = NULL; if( m_nAllocatorType == GCAT_USE_FROM_INPUT ) { pLocalSample = pSample; pLocalSample->AddRef(); } else if( m_nAllocatorType == GCAT_USE_INTERNAL ) { if( m_nAllocatorType == GCAT_USE_INTERNAL && m_bBufferDecided == FALSE ) SetAllocatorProperties(); if( m_bBufferDecided == FALSE ) { Dlog("Error : Could not set allocator properties 5\n"); return S_FALSE; } //!!! this can deadlock until fetchbuffer will release some buffers. Make sure to be outside of our list lock ! int RetryCount = 1; HRESULT hr; do{ hr = m_pAllocator->GetBuffer( &pLocalSample, NULL, NULL, 0 ); if( FAILED(hr) ) { Dlog("Error : Could not get free buffer to store sample! 
Should we block ?\n"); return hr; } // copy the input sample content into the local sample hr = CopySample( pSample, pLocalSample ); if ( hr != S_OK ) { pLocalSample->Release(); pLocalSample = NULL; HRESULT hr1 = IncreaseBufferSizeRuntime( pSample->GetActualDataLength() * 2 + 1024 ); if( hr1 != S_OK ) Dlog1("Error : Could not resize buffer pool, buffer sizes to %d\n", pSample->GetActualDataLength() + 1024); RetryCount--; } }while( hr != S_OK && RetryCount >= 0 ); if( hr != S_OK ) { Dlog("Error : Could not copy input into output buffer\n"); return S_OK; } } #ifdef DEBUGGING_IN_TESTBENCH /* DumpSampleInfo( pSample, this, " Connector input(from src) " ); if( pLocalSample != pSample ) DumpSampleInfo( pLocalSample, this, " Connector input(local pooled) " ); Dlog("Graph Connector received a media packet % d \n", PacketCounterIn++); */ #endif m_QueueLock.Lock(); if( m_Type == GCB_THROW_PACKET_UNLESS_OUTPUT_CONNECTED ) { for( int i=0;i<m_nNumOutputs;i++) if( m_pOutputPin[i]->IsConnected() == TRUE ) { pLocalSample->AddRef(); m_pSampleQueue[i].push_front( pLocalSample ); } } else if( m_Type == GCB_KEEP_PACKET_UNTIL_PUSHED ) { for( int i=0;i<m_nNumOutputs;i++) { pLocalSample->AddRef(); m_pSampleQueue[i].push_front( pLocalSample ); } } pLocalSample->Release(); m_QueueLock.Unlock(); return S_OK; }
// Per-frame preprocessing for the capture device: applies any pending volume
// change, then takes the most recent captured sample and uploads/converts it
// into the output texture according to the device's color format.
void DeviceSource::Preprocess()
{
	if(!bCapturing)
		return;

	//----------------------------------------
	// Apply a deferred volume request, either through our own audio output or
	// directly on the capture filter's IBasicAudio interface.
	if(bRequestVolume)
	{
		if(audioOut)
			audioOut->SetVolume(fNewVol);
		else if(audioFilter)
		{
			IBasicAudio *basicAudio;
			if(SUCCEEDED(audioFilter->QueryInterface(IID_IBasicAudio, (void**)&basicAudio)))
			{
				// Map the 0..1 volume to IBasicAudio's attenuation scale;
				// clamp anything at/below the near-silent threshold to full
				// mute (-10000 = -100 dB).
				long lVol = long((double(fNewVol)*NEAR_SILENTf)-NEAR_SILENTf);
				if(lVol <= -NEAR_SILENT)
					lVol = -10000;
				basicAudio->put_Volume(lVol);
				basicAudio->Release();
			}
		}
		bRequestVolume = false;
	}

	//----------------------------------------
	// Atomically claim the latest sample deposited by the capture callback;
	// we now hold its reference and must Release() it when done.
	IMediaSample *lastSample = NULL;
	OSEnterMutex(hSampleMutex);
	if(curSample)
	{
		lastSample = curSample;
		curSample = NULL;
	}
	OSLeaveMutex(hSampleMutex);

	// Leave two cores for the rest of the app, but always use at least one.
	int numThreads = MAX(OSGetTotalCores()-2, 1);

	if(lastSample)
	{
		REFERENCE_TIME refTimeStart, refTimeFinish;
		lastSample->GetTime(&refTimeStart, &refTimeFinish);

		// Log the inter-frame timestamp delta for diagnostics.
		// NOTE(review): %llu on REFERENCE_TIME (signed) prints negative
		// offsets as huge unsigned values — confirm intended.
		static REFERENCE_TIME lastRefTime = 0;
		Log(TEXT("refTimeStart: %llu, refTimeFinish: %llu, offset = %llu"), refTimeStart, refTimeFinish, refTimeStart-lastRefTime);
		lastRefTime = refTimeStart;

		BYTE *lpImage = NULL;
		if(colorType == DeviceOutputType_RGB)
		{
			// RGB frames upload directly; no conversion pass needed.
			if(texture)
			{
				if(SUCCEEDED(lastSample->GetPointer(&lpImage)))
					texture->SetImage(lpImage, GS_IMAGEFORMAT_BGRX, renderCX*4);
				bReadyToDraw = true;
			}
		}
		else if(colorType == DeviceOutputType_I420 || colorType == DeviceOutputType_YV12)
		{
			// Planar YUV: converted to RGB either on worker threads or inline.
			if(bUseThreadedConversion)
			{
				// Pipeline: first wait for the worker threads to finish the
				// PREVIOUS frame's conversion and upload that result...
				if(!bFirstFrame)
				{
					List<HANDLE> events;
					for(int i=0; i<numThreads; i++)
						events << convertData[i].hSignalComplete;
					WaitForMultipleObjects(numThreads, events.Array(), TRUE, INFINITE);
					texture->SetImage(lpImageBuffer, GS_IMAGEFORMAT_RGBX, texturePitch);
					bReadyToDraw = true;
				}
				else
					bFirstFrame = false;

				// ...then hand the CURRENT frame to the workers. One AddRef
				// per thread: each worker presumably releases its reference
				// when done (TODO confirm in the worker thread code).
				if(SUCCEEDED(lastSample->GetPointer(&lpImage)))
				{
					for(int i=0; i<numThreads; i++)
						lastSample->AddRef();
					for(int i=0; i<numThreads; i++)
					{
						convertData[i].input = lpImage;
						convertData[i].pitch = texturePitch;
						convertData[i].output = lpImageBuffer;
						convertData[i].sample = lastSample;
						SetEvent(convertData[i].hSignalConvert);
					}
				}
			}
			else
			{
				// Single-threaded path: pack the planes straight into the
				// mapped texture.
				if(SUCCEEDED(lastSample->GetPointer(&lpImage)))
				{
					LPBYTE lpData;
					UINT pitch;
					if(texture->Map(lpData, pitch))
					{
						PackPlanar(lpData, lpImage, renderCX, renderCY, pitch, 0, renderCY);
						texture->Unmap();
					}
				}
				bReadyToDraw = true;
			}
		}
		else if(colorType == DeviceOutputType_YVYU || colorType == DeviceOutputType_YUY2)
		{
			// Packed 4:2:2, Y-first ordering (flag = true).
			if(SUCCEEDED(lastSample->GetPointer(&lpImage)))
			{
				LPBYTE lpData;
				UINT pitch;
				if(texture->Map(lpData, pitch))
				{
					Convert422To444(lpData, lpImage, pitch, true);
					texture->Unmap();
				}
			}
			bReadyToDraw = true;
		}
		else if(colorType == DeviceOutputType_UYVY || colorType == DeviceOutputType_HDYC)
		{
			// Packed 4:2:2, chroma-first ordering (flag = false).
			if(SUCCEEDED(lastSample->GetPointer(&lpImage)))
			{
				LPBYTE lpData;
				UINT pitch;
				if(texture->Map(lpData, pitch))
				{
					Convert422To444(lpData, lpImage, pitch, false);
					texture->Unmap();
				}
			}
			bReadyToDraw = true;
		}

		// Drop this function's reference; worker threads hold their own.
		lastSample->Release();
	}
}