REFERENCE_TIME RtspSourcePin::SynchronizeTimestamp(const MediaPacketSample& mediaSample)
{
    // Re-anchor the RTP presentation-time -> DirectShow stream-time mapping
    // at the given sample. The stream-time baseline is pushed forward by the
    // configured latency (milliseconds converted to 100ns units).
    auto rebaseline = [this](const MediaPacketSample& sample) {
        CRefTime now;
        m_pFilter->StreamTime(now);
        uint32_t latencyMSecs = static_cast<RtspSourceFilter*>(m_pFilter)->_latencyMSecs;
        _streamTimeBaseline = now.GetUnits() + latencyMSecs * 10000i64;
        _rtpPresentationTimeBaseline = sample.timestamp();
    };

    if (_firstSample)
    {
        rebaseline(mediaSample);
        _firstSample = false;
        // With luck the very first sample already carries RTCP synchronization.
        _rtcpSynced = mediaSample.isRtcpSynced();
    }
    else if (!_rtcpSynced)
    {
        // First sample was not RTCP-synced; re-anchor as soon as a synced
        // sample shows up so absolute wall-clock alignment is restored.
        _rtcpSynced = mediaSample.isRtcpSynced();
        if (_rtcpSynced)
            rebaseline(mediaSample);
    }

    // Translate this sample's RTP presentation time onto the graph's stream clock.
    return mediaSample.timestamp() - _rtpPresentationTimeBaseline + _streamTimeBaseline;
}
// Fills one media sample with a screen capture frame.
// Called repeatedly by the base source-stream worker thread; returns S_OK to
// deliver the sample, S_FALSE to stop the stream, or a failure HRESULT.
// Scheduling: frames are paced against the filter-graph stream clock at a
// fixed m_nAvgTimePerFrame period; late frames are skipped, early arrival is
// absorbed by a busy-wait (see NOTE below).
HRESULT CScreenCaptureSourcePin::FillBuffer(IMediaSample *pSample)
{
    // Check the thread-control state first so Stop/error requests are honored
    // before any capture work is done.
    FTL::FTLThreadWaitType waitType = _GetWaitType(INFINITE);
    switch (waitType)
    {
    case FTL::ftwtStop:
        return S_FALSE; //quit
    case FTL::ftwtError:
        return E_UNEXPECTED;
    //case FTL::ftwtContinue:
    //case FTL::ftwtTimeOut:
    default:
        //just continue
        break;
    }
#if 0
    // --- Dead code: an earlier clock-advise based scheduling implementation,
    // --- kept disabled by the original author. Not compiled.
    //FUNCTION_BLOCK_TRACE(1);
    CheckPointer(pSample, E_POINTER);
    ASSERT(m_mt.formattype == FORMAT_VideoInfo);
    m_nFrameNumber++;
    //make the samples scheduling
    HRESULT hr = S_OK;
    REFERENCE_TIME rtLatency = 0;
    if (FAILED(GetLatency(&rtLatency)))
    {
        rtLatency = UNITS / DEFAULT_FPS;
    }
    REFERENCE_TIME rtStart, rtStop;
    BOOL bShouldDeliver = FALSE;
    do
    {
        if (m_dwAdviseToken == 0)
        {
            DX_VERIFY(m_pClock->GetTime(&m_rtClockStart));
            //fixed frame rate, so can use AdvisePeriodic
            DX_VERIFY(m_pClock->AdvisePeriodic(m_rtClockStart + rtLatency, rtLatency,
                (HSEMAPHORE)m_hSemaphore, &m_dwAdviseToken));
        }
        else
        {
            DWORD dwResult = WaitForSingleObject(m_hSemaphore, INFINITE);
        }
        bShouldDeliver = TRUE;
        rtStart = m_rtStart;
        rtStop = m_rtStart + 1;
        DX_VERIFY(pSample->SetTime(&rtStart, &rtStop));
        FTLASSERT(m_pScreenCaptureImpl);
        if (m_pScreenCaptureImpl)
        {
            LPBYTE pBuffer = NULL;
            DX_VERIFY(pSample->GetPointer(&pBuffer));
            VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER*)m_mt.pbFormat;
            //int nSize = min(pVih->bmiHeader.biSizeImage, (DWORD)cbData);
            HBITMAP hDIB = m_pScreenCaptureImpl->CopyScreenToBitmap(&m_rcCapture, pBuffer,
                (BITMAPINFO *) &(pVih->bmiHeader));
            DeleteObject(hDIB);
        }
        DX_VERIFY(m_pClock->GetTime(&m_rtClockStop));
        DX_VERIFY(pSample->GetTime(&rtStart, &rtStop));
        if (rtLatency > 0 && rtLatency * 3 < m_rtClockStop - m_rtClockStart)
        {
            //Why?
            m_rtClockStop = m_rtClockStart + rtLatency;
        }
        rtStop = rtStart + (m_rtClockStop - m_rtClockStart);
        m_rtStart = rtStop;
        //lock (m_csPinLock)
        {
            rtStart -= m_rtStreamOffset;
            rtStop -= m_rtStreamOffset;
        }
        DX_VERIFY(pSample->SetMediaTime(&m_nFrameNumber, &m_nFrameNumber));
        DX_VERIFY(pSample->SetTime(&rtStart, &rtStop));
        m_rtClockStart = m_rtClockStop;
        bShouldDeliver = ((rtStart >= 0) && (rtStop >= 0));
        if (bShouldDeliver)
        {
            //lock (m_csPinLock)
            if (m_rtStartAt != -1)
            {
                if (m_rtStartAt > rtStart)
                {
                    bShouldDeliver = FALSE;
                }
                else
                {
                    if (m_dwStartCookie != 0 && !m_bStartNotified)
                    {
                        m_bStartNotified = TRUE;
                        DX_VERIFY(m_pFilter->NotifyEvent(EC_STREAM_CONTROL_STARTED,
                            (LONG_PTR)this, m_dwStartCookie));
                        if (FAILED(hr))
                        {
                            return hr;
                        }
                    }
                }
            }
            if (!bShouldDeliver)
            {
                //Why?
                continue;
            }
            if (m_rtStopAt != -1)
            {
                if (m_rtStopAt < rtStart)
                {
                    if (!m_bStopNotified)
                    {
                        m_bStopNotified = TRUE;
                        if (m_dwStopCookie != 0)
                        {
                            DX_VERIFY(m_pFilter->NotifyEvent(EC_STREAM_CONTROL_STOPPED,
                                (LONG_PTR)this, m_dwStopCookie));
                            if (FAILED(hr))
                            {
                                return hr;
                            }
                        }
                        bShouldDeliver = m_bShouldFlush;
                    }
                    else
                    {
                        bShouldDeliver = FALSE;
                    }
                    // EOS -- EndOfStream
                    if (!bShouldDeliver)
                    {
                        return S_FALSE;
                    }
                }
            }
        }
    } while (!bShouldDeliver);
    return hr;

    //DX_VERIFY(m_pFilter->StreamTime(rtStart));
    //LONGLONG llStartTime = m_ElapseCounter.GetElapseTime();
    //REFERENCE_TIME rtStreamTime = m_rtSampleTime;// llStartTime / 100; // rfStreamTime.GetUnits();
    //loop:
    //REFERENCE_TIME rtStart = rtStreamTime; //m_iFrameNumber * m_rtFrameLength;
    //REFERENCE_TIME rtStop = rtStart + m_rtFrameLength;
    //if (rtStreamTime > rtStop)
    //{
    //    OutputDebugString(L"lost capture \r\n");
    //    ++m_iFrameNumber;
    //    goto loop;
    //}
    //while (rtStreamTime < rtStart)
    //{
    //    m_pFilter->StreamTime(rfStreamTime);
    //    rtStreamTime = rfStreamTime.GetUnits();
    //    // REFERENCE_TIME rtWaitTime = rtStart - rtStreamTime;
    //    // ::WaitForSingleObject(m_hWaitEvent, rtWaitTime/10000);
    //}
    BYTE *pData = NULL;
    long cbData = 0;
    {
        DX_VERIFY(pSample->GetPointer(&pData));
        cbData = pSample->GetSize();
        //if (m_bZeroMemory)
        //{
        //    ZeroMemory(pData, cbData);
        //}
        {
            CAutoLock cAutoLockShared(&m_cSharedState);
            ASSERT(m_mt.formattype == FORMAT_VideoInfo);
            VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER*)m_mt.pbFormat;
            int nSize = min(pVih->bmiHeader.biSizeImage, (DWORD)cbData);
            //*
            HBITMAP hDib = m_pScreenCaptureImpl->CopyScreenToBitmap(&m_rcCapture, pData,
                (BITMAPINFO *) &(pVih->bmiHeader));
            if (hDib)
            {
                DeleteObject(hDib);
            }
            //CRefTime rtStart = rfStreamTime; //m_rtSampleTime;
            //m_rtSampleTime += (LONG) m_iRepeatTime;
            CRefTime rtStop;// = m_ElapseCounter.GetElapseTime() / 100;
            DX_VERIFY(m_pFilter->StreamTime(rtStop));
            //m_rtSampleTime = rtStop;
            //ATLTRACE(TEXT("CScreenCaptureSourcePin::FillBuffer , start=%lld(%f ms), stop=%lld(%f ms)\n"),
            //    rtStart, float(rtStart) / 10000, rtStop, float(rtStop) / 10000);
            DX_VERIFY(pSample->SetTime((REFERENCE_TIME *)&rtStart, (REFERENCE_TIME *)&rtStop));
            // every frame is a sync point (translated from Chinese original)
            DX_VERIFY(pSample->SetSyncPoint(TRUE));
            BOOL bWait = FALSE;
            DWORD dwWillWaitTime = 0;
            //LONGLONG llElapseTime = rtStop.GetUnits() - rtStart.GetUnits();
            //    //m_ElapseCounter.GetElapseTime() - llStartTime;
            //if ( llElapseTime < MILLISECONDS_TO_100NS_UNITS(m_iRepeatTime))
            //{
            //    bWait = TRUE;
            //    dwWillWaitTime = (MILLISECONDS_TO_100NS_UNITS(m_iRepeatTime) - llElapseTime) / 10000;
            //    if (dwWillWaitTime > 1)
            //    {
            //        //WaitForSingleObject(m_hStopEvent, dwWillWaitTime );
            //    }
            //}
        }
    }
    //FTLTRACE(TEXT("llElapseTime = %lld, bWait=%d, dwWillWaitTime=%d\n"), llElapseTime, bWait, dwWillWaitTime);
#endif

    // ---------------- Active implementation starts here ----------------
    CheckPointer(pSample, E_POINTER);
    HRESULT hr = E_FAIL;
    CRefTime rfStreamTime;
    {
        //CAutoLock cObjectLock(m_pLock);
        // Current position on the filter-graph reference clock (100ns units).
        DX_VERIFY(m_pFilter->StreamTime(rfStreamTime));
    }
    REFERENCE_TIME rtStreamTime = rfStreamTime.GetUnits();
    // Enforce the optional maximum recording time: notify the app and end the
    // stream once it is exceeded.
    if (m_rfMaxRecordTime != 0 && rtStreamTime > m_rfMaxRecordTime)
    {
        //max time over
        //if there is preview window, just return S_FALSE is OK
        //if there is NOT preview window, can not stop graph automatic
        m_pFilter->NotifyEvent(TIME_OVER,
            static_cast<LONG_PTR>(m_rfMaxRecordTime / (UNITS / MILLISECONDS)), 0);
        return S_FALSE;
    }
    REFERENCE_TIME rtStart = 0;
    REFERENCE_TIME rtStop = 0;
    // Compute this frame's [rtStart, rtStop) slot from the frame counter.
    // If the stream clock has already passed the slot, the frame is counted
    // as lost and we skip forward until the slot catches up with the clock.
    do
    {
        rtStart = m_nFrameNumber * m_nAvgTimePerFrame;
        rtStop = rtStart + m_nAvgTimePerFrame;
        if( rtStreamTime > rtStop)
        {
            OutputDebugString(L"lost capture \r\n");
            ++m_nFrameNumber;
        }
    } while (rtStreamTime > rtStop);
    // NOTE(review): this is a busy-wait that spins a CPU core until the slot
    // start time arrives; the commented-out WaitForSingleObject suggests a
    // blocking wait was intended — consider restoring it.
    while (rtStreamTime < rtStart)
    {
        m_pFilter->StreamTime(rfStreamTime);
        rtStreamTime = rfStreamTime.GetUnits();
        // REFERENCE_TIME rtWaitTime = rtStart - rtStreamTime;
        // ::WaitForSingleObject(m_hWaitEvent, rtWaitTime/10000);
    }
    BYTE *pData = NULL;
    long cbData = 0;
    // Serialize access to shared pin state for the remainder of the call.
    CAutoLock cAutoLockShared(&m_cSharedState);
    DX_VERIFY(pSample->GetPointer(&pData));
    cbData = pSample->GetSize();
    ASSERT(m_mt.formattype == FORMAT_VideoInfo);
    VIDEOINFOHEADER *pVih = (VIDEOINFOHEADER*)m_mt.pbFormat;
    // NOTE(review): nSize is computed but never used below — CopyScreenToBitmap
    // presumably sizes the copy from the BITMAPINFO header; confirm.
    int nSize = min(pVih->bmiHeader.biSizeImage, (DWORD)cbData);
    // Capture the screen region directly into the sample buffer; the returned
    // HBITMAP is a by-product that must be released here...
    HBITMAP hDib = m_pScreenCaptureImpl->CopyScreenToBitmap(&m_rcCapture, pData,
        (BITMAPINFO *) &(pVih->bmiHeader));
    if (hDib)
    {
        if (m_bFirstFrame)
        {
            // ...except for the first frame, whose HBITMAP is handed to the
            // application via FIRST_FRAME. Ownership appears to transfer to
            // the event receiver, which must DeleteObject it — TODO confirm.
            m_bFirstFrame = FALSE;
            DX_VERIFY(m_pFilter->NotifyEvent(FIRST_FRAME, (LONG_PTR)(hDib), NULL));
        }
        else
        {
            DeleteObject(hDib);
        }
    }
    //REFERENCE_TIME rtTemp;
    //IReferenceClock *pClock;
    //m_pFilter->GetSyncSource(&pClock);
    //pClock->GetTime(&rtTemp);
    //pClock->Release();
    //ST_FRAME_TIME *pTmp = new ST_FRAME_TIME();
    //pTmp->llStartTime = rtStart;
    //pTmp->llStopTime = rtStop;
    //pTmp->nFrameIndex = m_iFrameNumber;
    //m_pFilter->NotifyEvent(FRAME_TIME, (LONG_PTR)pTmp, NULL);
    DX_VERIFY(pSample->SetTime(&rtStart, &rtStop));
    m_nFrameNumber++;
    // Uncompressed frames: every sample is a sync point.
    DX_VERIFY(pSample->SetSyncPoint(TRUE));
    return S_OK;
    // NOTE(review): unreachable — dead code after the return above.
    return hr;
}
// Delivers one RTSP media packet to the graph as an IMediaSample.
// For H.264 elementary stream ('h264') each NALU is prefixed with an
// Annex-B start code, and the out-of-band SPS/PPS from the media type is
// prepended to the very first sample. For 'avc1' each NALU is prefixed with
// a 4-byte big-endian length field. Anything else is passed through raw.
// Returns S_FALSE when the packet queue signals end-of-stream.
HRESULT RtspSourcePin::FillBuffer(IMediaSample* pSample)
{
    MediaPacketSample mediaSample;
    _mediaPacketQueue.pop(mediaSample);
    // An invalid sample is the queue's end-of-stream sentinel.
    if (mediaSample.invalid())
    {
        DebugLog("%S pin: End of streaming!\n", m_pName);
        return S_FALSE;
    }

    BYTE* pData;
    HRESULT hr = pSample->GetPointer(&pData);
    if (FAILED(hr))
        return hr;
    long length = pSample->GetSize();

    if (_codecFourCC == DWORD('h264'))
    {
        // Total number of bytes prepended before the NALU payload; must be
        // reflected in the actual data length reported to the decoder.
        long prefixLength = 0;
        // Append SPS and PPS to the first packet (they come out-of-band)
        if (_firstSample)
        {
            // Decoder-specific data sits right after the VIDEOINFOHEADER2
            // in the media type's format buffer.
            BYTE* decoderSpecific = (BYTE*)(((VIDEOINFOHEADER2*)_mediaType.Format()) + 1);
            ULONG decoderSpecificLength = _mediaType.FormatLength() - sizeof(VIDEOINFOHEADER2);
            memcpy_s(pData, length, decoderSpecific, decoderSpecificLength);
            pData += decoderSpecificLength;
            length -= decoderSpecificLength;
            prefixLength += decoderSpecificLength;
        }
        // 4-byte Annex-B start code 00 00 00 01 preceding each NALU
        // (0x01000000 stored on a little-endian host yields 00 00 00 01).
        ((uint32_t*)pData)[0] = 0x01000000;
        pData += startCodesSize;
        length -= startCodesSize;
        prefixLength += startCodesSize;
        // Finally copy media packet contents to IMediaSample
        memcpy_s(pData, length, mediaSample.data(), mediaSample.size());
        // BUG FIX: the original reported mediaSample.size() + startCodesSize
        // even on the first sample, silently truncating the prepended SPS/PPS
        // bytes off the delivered sample; count the full prefix instead.
        pSample->SetActualDataLength(mediaSample.size() + prefixLength);
        pSample->SetSyncPoint(IsIdrFrame(mediaSample));
    }
    else if (_codecFourCC == DWORD('avc1'))
    {
        // Append 4-byte length field (network byte order) that precedes each NALU
        uint32_t lengthField = static_cast<uint32_t>(mediaSample.size());
        pData[0] = ((uint8_t*)&lengthField)[3];
        pData[1] = ((uint8_t*)&lengthField)[2];
        pData[2] = ((uint8_t*)&lengthField)[1];
        pData[3] = ((uint8_t*)&lengthField)[0];
        pData += lengthFieldSize;
        length -= lengthFieldSize;
        // Finally copy media packet contents to IMediaSample
        memcpy_s(pData, length, mediaSample.data(), mediaSample.size());
        pSample->SetActualDataLength(mediaSample.size() + lengthFieldSize);
        pSample->SetSyncPoint(IsIdrFrame(mediaSample));
    }
    else
    {
        // No prefixing - just copy raw packet data.
        memcpy_s(pData, length, mediaSample.data(), mediaSample.size());
        pSample->SetActualDataLength(mediaSample.size());
        pSample->SetSyncPoint(FALSE);
    }

    // Stamp only the start time; a NULL stop time marks it as unknown.
    REFERENCE_TIME ts = SynchronizeTimestamp(mediaSample);
    pSample->SetTime(&ts, NULL);

    // Calculate current play time (does not include offset from initial time seek)
    CRefTime streamTime;
    m_pFilter->StreamTime(streamTime);
    uint32_t latencyMSecs = static_cast<RtspSourceFilter*>(m_pFilter)->_latencyMSecs;
    // Subtract the latency-adjusted baseline (ms converted to 100ns units).
    _currentPlayTime = streamTime.GetUnits() - (_streamTimeBaseline - latencyMSecs * 10000i64);
    return S_OK;
}