Example #1
    dsnerror_t Decode(const BYTE *src, int size, double pts, double *newpts, BYTE *pImage, int keyframe)
    {
        IMediaSample* sample = NULL;
        REFERENCE_TIME start = PTS2RT(pts); /* sometimes I get x99999 instead of y00000 */
        REFERENCE_TIME stoptime = start + m_frametime;
        BYTE *ptr;

        DSN_CHECK(m_pAll->GetBuffer(&sample, 0, 0, 0), DSN_FAIL_DECODESAMPLE);

        DSN_CHECK(sample->SetActualDataLength(size), DSN_FAIL_DECODESAMPLE);
        DSN_CHECK(sample->GetPointer(&ptr), DSN_FAIL_DECODESAMPLE);
        memcpy(ptr, src, size);
        DSN_CHECK(sample->SetTime(&start, &stoptime), DSN_FAIL_DECODESAMPLE);
        DSN_CHECK(sample->SetSyncPoint(keyframe), DSN_FAIL_DECODESAMPLE);
        DSN_CHECK(sample->SetPreroll(pImage ? 0 : 1), DSN_FAIL_DECODESAMPLE);
        DSN_CHECK(sample->SetDiscontinuity(m_discontinuity), DSN_FAIL_DECODESAMPLE);
        m_discontinuity = 0;

        m_pOurOutput->SetPointer(pImage);
        DSN_CHECK(m_pImp->Receive(sample), DSN_FAIL_RECEIVE);
        sample->Release();

        *newpts = RT2PTS(m_pOurOutput->GetPTS());
        return DSN_OK;
    }
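The inline note about getting x99999 instead of y00000 is the classic symptom of truncating a floating-point PTS while converting it to 100 ns REFERENCE_TIME units. A minimal, self-contained sketch (the PTS2RT variants below are assumed stand-ins for the macro used above, which is not shown) reproduces the artifact and the usual fix of rounding instead of truncating:

    #include <cmath>
    #include <cstdint>
    #include <cstdio>

    typedef int64_t REFERENCE_TIME;                 // DirectShow time, 100 ns units

    // Truncating conversion, as a plain cast would do.
    static REFERENCE_TIME PTS2RT_Truncate(double pts) { return (REFERENCE_TIME)(pts * 1e7); }
    // Rounding to the nearest 100 ns tick avoids the ...99999 artifact.
    static REFERENCE_TIME PTS2RT_Round(double pts)    { return (REFERENCE_TIME)std::llround(pts * 1e7); }

    int main()
    {
        double pts = 0.0;
        for (int i = 0; i < 10; ++i)
            pts += 0.1;                             // ten 0.1 s frames, nominally 1.0 s
        std::printf("%lld %lld\n",
                    (long long)PTS2RT_Truncate(pts),   // prints 9999999
                    (long long)PTS2RT_Round(pts));     // prints 10000000
        return 0;
    }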
Example #2
STDMETHODIMP FakeOutputPin::PushBuffer(byte *buffer,
                                       __int64 start, __int64 stop,
                                       unsigned int size, bool discont)
{
    IMediaSample *pSample = NULL;

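    /* GStreamer timestamps are nanoseconds while REFERENCE_TIME uses 100 ns
       units, which is presumably what the division by 100 below converts. */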
    if (start != -1) {
        start /= 100;
        stop /= 100;
    }

    HRESULT hres = GetDeliveryBuffer(&pSample, NULL, NULL, 0);
    if (hres == S_OK && pSample)
    {
        BYTE *sample_buffer;
        pSample->GetPointer(&sample_buffer);
        if(sample_buffer)
        {
            memcpy (sample_buffer, buffer, size);
            pSample->SetActualDataLength(size);
        }
        pSample->SetDiscontinuity(discont);

        pSample->SetSyncPoint(TRUE);
        pSample->SetPreroll(FALSE);

        if (start != -1)
            pSample->SetTime(&start, &stop);

        hres = Deliver(pSample);
        pSample->Release();
    }

    return S_OK;
}
Example #3
HRESULT CWavPackSplitterFilterInputPin::DeliverOneFrame(WavPack_parser* wpp)
{
	IMediaSample *pSample;
	BYTE *Buffer = NULL;
	HRESULT hr;
	unsigned long FrameLenBytes = 0, FrameLenSamples = 0, FrameIndex = 0;

	// Get a new media sample
	hr = m_pParentFilter->m_pOutputPin->GetDeliveryBuffer(&pSample, NULL, NULL, 0);
	if (FAILED(hr)) {
		return hr;
	}

	hr = pSample->GetPointer(&Buffer);
	if (FAILED(hr)) {
		pSample->Release();
		return hr;
	}

	FrameLenBytes = wavpack_parser_read_frame(wpp, Buffer,
											  &FrameIndex, &FrameLenSamples);
	if (!FrameLenBytes) {
		// Something bad happened, let's end here
		pSample->Release();
		m_pParentFilter->m_pOutputPin->DeliverEndOfStream();
		// TODO : check if we need to stop the thread
		return hr;
	}
	pSample->SetActualDataLength(FrameLenBytes);

	REFERENCE_TIME rtStart, rtStop;
	rtStart = FrameIndex;
	rtStop = rtStart + FrameLenSamples;
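	// scale the sample offsets to REFERENCE_TIME (100 ns units)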
	rtStart = (rtStart * 10000000) / wpp->sample_rate;
	rtStop = (rtStop * 10000000) / wpp->sample_rate;

	rtStart -= m_pParentFilter->m_rtStart;
	rtStop  -= m_pParentFilter->m_rtStart;

	pSample->SetTime(&rtStart, &rtStop);
	pSample->SetPreroll(FALSE);
	pSample->SetDiscontinuity(m_bDiscontinuity);
	if (m_bDiscontinuity) {
		m_bDiscontinuity = FALSE;
	}
	pSample->SetSyncPoint(TRUE);

	// Deliver the sample
	hr = m_pParentFilter->m_pOutputPin->Deliver(pSample);
	pSample->Release();
	pSample = NULL;
	if (FAILED(hr)) {
		return hr;
	}

	return S_OK;
}
Example #4
HRESULT CAMROutputPin::DeliverDataPacketAMR(DataPacketAMR &packet)
{
	IMediaSample *sample;
	HRESULT hr = GetDeliveryBuffer(&sample, NULL, NULL, 0);
	if (FAILED(hr)) {
		return E_FAIL;
	}

	// we should have enough space in there
	long lsize = sample->GetSize();
	ASSERT(lsize >= packet.size);

	BYTE *buf;
	sample->GetPointer(&buf);

	memcpy(buf, packet.buf, packet.size);
	sample->SetActualDataLength(packet.size);

	// sync point, discontinuity ?
	if (packet.sync_point) {
		sample->SetSyncPoint(TRUE);
	} else {
		sample->SetSyncPoint(FALSE);
	}

	if (packet.discontinuity) { 
		sample->SetDiscontinuity(TRUE); 
	} else { 
		sample->SetDiscontinuity(FALSE); 
	}

	// do we have a time stamp ?
	if (packet.has_time) { 
		sample->SetTime(&packet.rtStart, &packet.rtStop); 
	}

	// deliver the sample
	hr = Deliver(sample);	
	sample->Release();
	return hr;
}
Example #5
//----------------------------------------------------------------------------
//! @brief	  	Get the next sample
//! @param		pSample : pointer to a pointer that receives the sample
//! @return		Error code
//----------------------------------------------------------------------------
HRESULT CWMOutput::GetNextSample( IMediaSample **pSample )
{
	HRESULT hr;
	if( m_StreamNum == 0 || pSample == NULL )
		return S_FALSE;	// this stream does not exist

	INSSBuffer	*pWMSample = NULL;
	QWORD	cnsSampleTime;
	QWORD	cnsDuration;
	DWORD	dwFlags;

	if( FAILED(hr = WMReader()->GetNextSample( m_StreamNum, &pWMSample, &cnsSampleTime, &cnsDuration, &dwFlags, NULL, NULL )) )
	{
		if( hr == NS_E_NO_MORE_SAMPLES ) return S_FALSE;
		return hr;
	}

	REFERENCE_TIME	startTime = (REFERENCE_TIME)cnsSampleTime;
	REFERENCE_TIME	endTime = (REFERENCE_TIME)(cnsSampleTime + cnsDuration);
	IMediaSample *pOutSample = reinterpret_cast<CWMBuffer*>(pWMSample)->GetSample();
	pOutSample->AddRef();
	pWMSample->Release();
	pOutSample->SetMediaTime(&startTime, &endTime);
#if 0
	if( startTime < Reader()->m_StartTime )
		pOutSample->SetPreroll(TRUE);
	else
		pOutSample->SetPreroll(FALSE);
#endif
	startTime -= Reader()->m_StartTime;
	endTime -= Reader()->m_StartTime;
	pOutSample->SetTime(&startTime, &endTime);
	pOutSample->SetSyncPoint(dwFlags & WM_SF_CLEANPOINT);
	*pSample = pOutSample;

	return hr;
}
Example #6
// Set up our output sample
HRESULT
CTransformFilter::InitializeOutputSample(IMediaSample *pSample, IMediaSample **ppOutSample) {
    IMediaSample *pOutSample;

    // default - times are the same

    AM_SAMPLE2_PROPERTIES * const pProps = m_pInput->SampleProps();
    DWORD dwFlags = m_bSampleSkipped ? AM_GBF_PREVFRAMESKIPPED : 0;

    // This will prevent the image renderer from switching us to DirectDraw
    // when we can't do it without skipping frames because we're not on a
    // keyframe.  If it really has to switch us, it still will, but then we
    // will have to wait for the next keyframe
    if(!(pProps->dwSampleFlags & AM_SAMPLE_SPLICEPOINT)) {
        dwFlags |= AM_GBF_NOTASYNCPOINT;
    }

    ASSERT(m_pOutput->m_pAllocator != NULL);
    HRESULT hr = m_pOutput->m_pAllocator->GetBuffer(&pOutSample
        , pProps->dwSampleFlags & AM_SAMPLE_TIMEVALID ?
        &pProps->tStart : NULL
        , pProps->dwSampleFlags & AM_SAMPLE_STOPVALID ?
        &pProps->tStop : NULL
        , dwFlags);
    *ppOutSample = pOutSample;
    if(FAILED(hr)) {
        return hr;
    }

    ASSERT(pOutSample);
    IMediaSample2 *pOutSample2;
    if(SUCCEEDED(pOutSample->QueryInterface(IID_IMediaSample2,
        (void **)&pOutSample2))) {
        /*  Modify it */
        AM_SAMPLE2_PROPERTIES OutProps;
        EXECUTE_ASSERT(SUCCEEDED(pOutSample2->GetProperties(FIELD_OFFSET(AM_SAMPLE2_PROPERTIES, tStart), (PBYTE)&OutProps)));
        OutProps.dwTypeSpecificFlags = pProps->dwTypeSpecificFlags;
        OutProps.dwSampleFlags =
            (OutProps.dwSampleFlags & AM_SAMPLE_TYPECHANGED) |
            (pProps->dwSampleFlags & ~AM_SAMPLE_TYPECHANGED);

        OutProps.tStart = pProps->tStart;
        OutProps.tStop  = pProps->tStop;
        OutProps.cbData = FIELD_OFFSET(AM_SAMPLE2_PROPERTIES, dwStreamId);

        hr = pOutSample2->SetProperties(FIELD_OFFSET(AM_SAMPLE2_PROPERTIES, dwStreamId),
            (PBYTE)&OutProps);
        if(pProps->dwSampleFlags & AM_SAMPLE_DATADISCONTINUITY) {
            m_bSampleSkipped = FALSE;
        }
        pOutSample2->Release();
    }
    else {
        if(pProps->dwSampleFlags & AM_SAMPLE_TIMEVALID) {
            pOutSample->SetTime(&pProps->tStart,
                &pProps->tStop);
        }
        if(pProps->dwSampleFlags & AM_SAMPLE_SPLICEPOINT) {
            pOutSample->SetSyncPoint(TRUE);
        }
        if(pProps->dwSampleFlags & AM_SAMPLE_DATADISCONTINUITY) {
            pOutSample->SetDiscontinuity(TRUE);
            m_bSampleSkipped = FALSE;
        }
        // Copy the media times

        LONGLONG MediaStart, MediaEnd;
        if(pSample->GetMediaTime(&MediaStart,&MediaEnd) == NOERROR) {
            pOutSample->SetMediaTime(&MediaStart,&MediaEnd);
        }
    }
    return S_OK;
}
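InitializeOutputSample only prepares the output sample; for context, here is a condensed sketch of how CTransformFilter::Receive uses it before delivering (simplified from the DirectShow base classes, not a verbatim copy):

    HRESULT CTransformFilter::Receive(IMediaSample *pSample)
    {
        // Pass non-media streams (e.g. stream control) straight through.
        AM_SAMPLE2_PROPERTIES * const pProps = m_pInput->SampleProps();
        if (pProps->dwStreamId != AM_STREAM_MEDIA) {
            return m_pOutput->Deliver(pSample);
        }

        IMediaSample *pOutSample = NULL;
        HRESULT hr = InitializeOutputSample(pSample, &pOutSample);
        if (FAILED(hr)) {
            return hr;
        }

        hr = Transform(pSample, pOutSample);        // derived filter fills the output sample
        if (hr == S_OK) {
            hr = m_pOutput->Deliver(pOutSample);    // only deliver when data was produced
        }
        pOutSample->Release();
        return hr;
    }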
Example #7
//
// Copy
//
// return a pointer to an identical copy of pSample
IMediaSample * CTransInPlaceFilter::Copy(IMediaSample *pSource)
{
    IMediaSample * pDest;

    HRESULT hr;
    REFERENCE_TIME tStart, tStop;
    const BOOL bTime = S_OK == pSource->GetTime( &tStart, &tStop);

    // this may block for an indeterminate amount of time
    hr = OutputPin()->PeekAllocator()->GetBuffer(
              &pDest
              , bTime ? &tStart : NULL
              , bTime ? &tStop : NULL
              , m_bSampleSkipped ? AM_GBF_PREVFRAMESKIPPED : 0
              );

    if (FAILED(hr)) {
        return NULL;
    }

    ASSERT(pDest);
    IMediaSample2 *pSample2;
    if (SUCCEEDED(pDest->QueryInterface(IID_IMediaSample2, (void **)&pSample2))) {
        HRESULT hr = pSample2->SetProperties(
            FIELD_OFFSET(AM_SAMPLE2_PROPERTIES, pbBuffer),
            (PBYTE)m_pInput->SampleProps());
        pSample2->Release();
        if (FAILED(hr)) {
            pDest->Release();
            return NULL;
        }
    } else {
        if (bTime) {
            pDest->SetTime(&tStart, &tStop);
        }

        if (S_OK == pSource->IsSyncPoint()) {
            pDest->SetSyncPoint(TRUE);
        }
        if (S_OK == pSource->IsDiscontinuity() || m_bSampleSkipped) {
            pDest->SetDiscontinuity(TRUE);
        }
        if (S_OK == pSource->IsPreroll()) {
            pDest->SetPreroll(TRUE);
        }

        // Copy the media type
        AM_MEDIA_TYPE *pMediaType;
        if (S_OK == pSource->GetMediaType(&pMediaType)) {
            pDest->SetMediaType(pMediaType);
            DeleteMediaType( pMediaType );
        }

    }

    m_bSampleSkipped = FALSE;

    // Copy the sample media times
    REFERENCE_TIME TimeStart, TimeEnd;
    if (pSource->GetMediaTime(&TimeStart,&TimeEnd) == NOERROR) {
        pDest->SetMediaTime(&TimeStart,&TimeEnd);
    }

    // Copy the actual data length and the actual data.
    {
        const long lDataLength = pSource->GetActualDataLength();
        pDest->SetActualDataLength(lDataLength);

        // Copy the sample data
        {
            BYTE *pSourceBuffer, *pDestBuffer;
            long lSourceSize  = pSource->GetSize();
            long lDestSize = pDest->GetSize();

            ASSERT(lDestSize >= lSourceSize && lDestSize >= lDataLength);

            pSource->GetPointer(&pSourceBuffer);
            pDest->GetPointer(&pDestBuffer);
            ASSERT(lDestSize == 0 || pSourceBuffer != NULL && pDestBuffer != NULL);

            CopyMemory( (PVOID) pDestBuffer, (PVOID) pSourceBuffer, lDataLength );
        }
    }

    return pDest;

} // Copy
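Copy only comes into play when the in-place filter could not share one allocator across its pins; a condensed sketch of the surrounding CTransInPlaceFilter::Receive logic (again simplified from the DirectShow base classes, not verbatim) shows where it fits:

    HRESULT CTransInPlaceFilter::Receive(IMediaSample *pSample)
    {
        AM_SAMPLE2_PROPERTIES * const pProps = m_pInput->SampleProps();
        if (pProps->dwStreamId != AM_STREAM_MEDIA) {
            return m_pOutput->Deliver(pSample);
        }

        // If the input and output pins use different allocators, the upstream
        // buffer cannot be pushed downstream directly, so clone it first.
        if (UsingDifferentAllocators()) {
            pSample = Copy(pSample);
            if (pSample == NULL) {
                return E_UNEXPECTED;
            }
        }

        HRESULT hr = Transform(pSample);            // modify the data in place
        if (hr == S_OK) {
            hr = m_pOutput->Deliver(pSample);
        }

        if (UsingDifferentAllocators()) {
            pSample->Release();                     // release our private copy
        }
        return hr;
    }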
Example #8
HRESULT CWavPackDSSplitterInputPin::DeliverOneFrame(WavPack_parser* wpp)
{
    IMediaSample *pSample;
    BYTE *Buffer = NULL;
    HRESULT hr;
    unsigned long FrameLenBytes = 0, FrameLenSamples = 0, FrameIndex = 0;

    // Get a new media sample
    hr = m_pParentFilter->m_pOutputPin->GetDeliveryBuffer(&pSample, NULL, NULL, 0); 
    if (FAILED(hr))
    {
        DebugLog("CWavPackDSSplitterInputPin::DoProcessingLoop GetDeliveryBuffer failed 0x%08X",hr);
        return hr;
    }
    
    hr = pSample->GetPointer(&Buffer);
    if (FAILED(hr))
    {
        DebugLog("CWavPackDSSplitterInputPin::DoProcessingLoop GetPointer failed 0x%08X",hr);
        pSample->Release();
        return hr;
    }
    
    FrameLenBytes = wavpack_parser_read_frame(wpp, Buffer,
        &FrameIndex, &FrameLenSamples);
    if(!FrameLenBytes)
    {
        // Something bad happened, let's end here
        pSample->Release();
        m_pParentFilter->m_pOutputPin->DeliverEndOfStream();
        // TODO : check if we need to stop the thread
        DebugLog("CWavPackDSSplitterInputPin::DoProcessingLoop wavpack_parser_read_frame error");
        return hr;
    }
    pSample->SetActualDataLength(FrameLenBytes);
    
    if(wpp->is_correction == TRUE)
    {    
        IMediaSample2 *pSample2;
        if (SUCCEEDED(pSample->QueryInterface(IID_IMediaSample2, (void **)&pSample2)))
        {
            AM_SAMPLE2_PROPERTIES ams2p;
            ZeroMemory(&ams2p, sizeof(AM_SAMPLE2_PROPERTIES));
            hr = pSample2->GetProperties(sizeof(AM_SAMPLE2_PROPERTIES), (PBYTE)&ams2p);
            if(SUCCEEDED(hr))
            {            
                ams2p.dwStreamId = AM_STREAM_BLOCK_ADDITIONNAL;
                pSample2->SetProperties(sizeof(AM_SAMPLE2_PROPERTIES), (PBYTE)&ams2p);
            }
            pSample2->Release();
            pSample2 = NULL;
        }
    }
    
    REFERENCE_TIME rtStart, rtStop;
    rtStart = FrameIndex;
    rtStop = rtStart + FrameLenSamples;
    rtStart = (rtStart * 10000000) / wpp->sample_rate;
    rtStop = (rtStop * 10000000) / wpp->sample_rate;
    
    rtStart -= m_pParentFilter->m_rtStart;
    rtStop  -= m_pParentFilter->m_rtStart;
    
    pSample->SetTime(&rtStart, &rtStop);
    pSample->SetPreroll(FALSE);
    pSample->SetDiscontinuity(m_bDiscontinuity);
    if(m_bDiscontinuity)
    {
        m_bDiscontinuity = FALSE;
    }
    pSample->SetSyncPoint(TRUE);
    
    // Deliver the sample
    hr = m_pParentFilter->m_pOutputPin->Deliver(pSample);
    pSample->Release();
    pSample = NULL;
    if (FAILED(hr))
    {
        DebugLog("CWavPackDSSplitterInputPin::DoProcessingLoop Deliver failed 0x%08X",hr);
        return hr;
    }

    return S_OK;
}
Example #9
HRESULT CAudioDecFilter::Transform(IMediaSample *pIn, IMediaSample *pOut)
{
	// Get the input data pointer
	const DWORD InSize = pIn->GetActualDataLength();
	BYTE *pInData = NULL;
	HRESULT hr = pIn->GetPointer(&pInData);
	if (FAILED(hr))
		return hr;

	{
		CAutoLock Lock(&m_cPropLock);

		/*
			If multiple audio formats are to be supported, the format detection goes around here
		*/
		if (!m_pDecoder) {
			m_pDecoder = new CAacDecoder();
			m_pDecoder->Open();
		}

		REFERENCE_TIME rtStart, rtEnd;
		hr = pIn->GetTime(&rtStart, &rtEnd);
		if (FAILED(hr))
			rtStart = -1;
		if (pIn->IsDiscontinuity() == S_OK) {
			m_bDiscontinuity = true;
			m_bInputDiscontinuity = true;
		} else if (hr == S_OK || hr == VFW_S_NO_STOP_TIME) {
			if (!m_bJitterCorrection) {
				m_StartTime = rtStart;
			} else if (m_StartTime >= 0
					&& _abs64(rtStart - m_StartTime) > MAX_JITTER) {
				TRACE(TEXT("Resync audio stream time (%lld -> %lld [%f])\n"),
					  m_StartTime, rtStart,
					  (double)(rtStart - m_StartTime) / (double)REFERENCE_TIME_SECOND);
				m_StartTime = rtStart;
			}
		}
		if (m_StartTime < 0 || m_bDiscontinuity) {
			TRACE(TEXT("Initialize audio stream time (%lld)\n"), rtStart);
			m_StartTime = rtStart;
		}

		m_BitRateCalculator.Update(InSize);
	}

	DWORD InDataPos = 0;
	FrameSampleInfo SampleInfo;
	SampleInfo.pData = &m_OutData;

	hr = S_OK;

	while (InDataPos < InSize) {
		{
			CAutoLock Lock(&m_cPropLock);

			CAudioDecoder::DecodeFrameInfo FrameInfo;
			const DWORD DataSize = InSize - InDataPos;
			DWORD DecodeSize = DataSize;
			if (!m_pDecoder->Decode(&pInData[InDataPos], &DecodeSize, &FrameInfo)) {
				if (DecodeSize < DataSize) {
					InDataPos += DecodeSize;
					continue;
				}
				break;
			}
			InDataPos += DecodeSize;

			if (FrameInfo.bDiscontinuity)
				m_bDiscontinuity = true;

			SampleInfo.bMediaTypeChanged = false;

			hr = OnFrame(FrameInfo.pData, FrameInfo.Samples, FrameInfo.Info,
						 &SampleInfo);
		}

		if (SUCCEEDED(hr)) {
			if (SampleInfo.bMediaTypeChanged) {
				hr = ReconnectOutput(SampleInfo.MediaBufferSize, SampleInfo.MediaType);
				if (FAILED(hr))
					break;
				OutputLog(TEXT("Updating the output media type.\r\n"));
				hr = m_pOutput->SetMediaType(&SampleInfo.MediaType);
				if (FAILED(hr)) {
					OutputLog(TEXT("Failed to set the output media type. (%08x)\r\n"), hr);
					break;
				}
				m_MediaType = SampleInfo.MediaType;
				m_bDiscontinuity = true;
				m_bInputDiscontinuity = true;
			}

			IMediaSample *pOutSample = NULL;
			hr = m_pOutput->GetDeliveryBuffer(&pOutSample, NULL, NULL, 0);
			if (FAILED(hr)) {
				OutputLog(TEXT("Failed to get an output media sample. (%08x)\r\n"), hr);
				break;
			}

			if (SampleInfo.bMediaTypeChanged)
				pOutSample->SetMediaType(&m_MediaType);

			// Get the output buffer pointer
			BYTE *pOutBuff = NULL;
			hr = pOutSample->GetPointer(&pOutBuff);
			if (FAILED(hr)) {
				OutputLog(TEXT("Failed to get the output sample buffer. (%08x)\r\n"), hr);
				pOutSample->Release();
				break;
			}

			::CopyMemory(pOutBuff, m_OutData.GetData(), m_OutData.GetSize());
			pOutSample->SetActualDataLength(m_OutData.GetSize());

			if (m_StartTime >= 0) {
				REFERENCE_TIME rtDuration, rtStart, rtEnd;
				rtDuration = REFERENCE_TIME_SECOND * (LONGLONG)SampleInfo.Samples / FREQUENCY;
				rtStart = m_StartTime;
				m_StartTime += rtDuration;
				// Time shift for audio/video sync correction
				if (m_DelayAdjustment > 0) {
					// Delay by stretching the duration to at most 2x
					if (rtDuration >= m_DelayAdjustment) {
						rtDuration += m_DelayAdjustment;
						m_DelayAdjustment = 0;
					} else {
						m_DelayAdjustment -= rtDuration;
						rtDuration *= 2;
					}
				} else if (m_DelayAdjustment < 0) {
					// Advance by shrinking the duration to no less than 1/2
					if (rtDuration >= -m_DelayAdjustment * 2) {
						rtDuration += m_DelayAdjustment;
						m_DelayAdjustment = 0;
					} else {
						m_DelayAdjustment += rtDuration;
						rtDuration /= 2;
					}
				} else {
					rtStart += m_Delay;
				}
				rtEnd = rtStart + rtDuration;
				pOutSample->SetTime(&rtStart, &rtEnd);
			}
			pOutSample->SetMediaTime(NULL, NULL);
			pOutSample->SetPreroll(FALSE);
#if 0
			// Setting Discontinuity here seems to break double-speed playback
			pOutSample->SetDiscontinuity(m_bDiscontinuity);
#else
			pOutSample->SetDiscontinuity(m_bInputDiscontinuity);
#endif
			m_bDiscontinuity = false;
			m_bInputDiscontinuity = false;
			pOutSample->SetSyncPoint(TRUE);

			hr = m_pOutput->Deliver(pOutSample);
#ifdef _DEBUG
			if (FAILED(hr)) {
				OutputLog(TEXT("Failed to deliver the sample. (%08x)\r\n"), hr);
				if (m_bPassthrough && !m_bPassthroughError) {
					m_bPassthroughError = true;
					if (m_pEventHandler)
						m_pEventHandler->OnSpdifPassthroughError(hr);
				}
			}
#endif
			pOutSample->Release();
			if (FAILED(hr))
				break;
		}
	}

	return hr;
}
Example #10
HRESULT CLAVOutputPin::DeliverPacket(Packet *pPacket)
{
  HRESULT hr = S_OK;
  IMediaSample *pSample = nullptr;

  long nBytes = (long)pPacket->GetDataSize();

  if(nBytes == 0) {
    goto done;
  }

  CHECK_HR(hr = GetDeliveryBuffer(&pSample, nullptr, nullptr, 0));

  if (m_bPacketAllocator) {
    ILAVMediaSample *pLAVSample = nullptr;
    CHECK_HR(hr = pSample->QueryInterface(&pLAVSample));
    CHECK_HR(hr = pLAVSample->SetPacket(pPacket));
    SafeRelease(&pLAVSample);
  } else {
    // Resize buffer if it is too small
    // This can cause a playback hiccup; we should avoid it if possible by setting a big enough buffer size
    if(nBytes > pSample->GetSize()) {
      SafeRelease(&pSample);
      ALLOCATOR_PROPERTIES props, actual;
      CHECK_HR(hr = m_pAllocator->GetProperties(&props));
      // Give us 2 times the requested size, so we don't resize every time
      props.cbBuffer = nBytes*2;
      if(props.cBuffers > 1) {
        CHECK_HR(hr = __super::DeliverBeginFlush());
        CHECK_HR(hr = __super::DeliverEndFlush());
      }
      CHECK_HR(hr = m_pAllocator->Decommit());
      CHECK_HR(hr = m_pAllocator->SetProperties(&props, &actual));
      CHECK_HR(hr = m_pAllocator->Commit());
      CHECK_HR(hr = GetDeliveryBuffer(&pSample, nullptr, nullptr, 0));
    }

    // Fill the sample
    BYTE* pData = nullptr;
    if(FAILED(hr = pSample->GetPointer(&pData)) || !pData) goto done;

    memcpy(pData, pPacket->GetData(), nBytes);
  }

  if(pPacket->pmt) {
    DbgLog((LOG_TRACE, 10, L"::DeliverPacket() - sending new media type to decoder"));
    pSample->SetMediaType(pPacket->pmt);
    pPacket->bDiscontinuity = true;

    CAutoLock cAutoLock(m_pLock);
    CMediaType pmt = *(pPacket->pmt);
    m_mts.clear();
    m_mts.push_back(pmt);
    pPacket->pmt = nullptr;

    SetMediaType(&pmt);
  }

  bool fTimeValid = pPacket->rtStart != Packet::INVALID_TIME;

  // IBitRateInfo
  m_BitRate.nBytesSinceLastDeliverTime += nBytes;

  if (fTimeValid) {
    if (m_BitRate.rtLastDeliverTime == Packet::INVALID_TIME) {
      m_BitRate.rtLastDeliverTime = pPacket->rtStart;
      m_BitRate.nBytesSinceLastDeliverTime = 0;
    }

    if (m_BitRate.rtLastDeliverTime + 10000000 < pPacket->rtStart) {
      REFERENCE_TIME rtDiff = pPacket->rtStart - m_BitRate.rtLastDeliverTime;

      double dSecs, dBits;

      dSecs = rtDiff / 10000000.0;
      dBits = 8.0 * m_BitRate.nBytesSinceLastDeliverTime;
      m_BitRate.nCurrentBitRate = (DWORD)(dBits / dSecs);

      m_BitRate.rtTotalTimeDelivered += rtDiff;
      m_BitRate.nTotalBytesDelivered += m_BitRate.nBytesSinceLastDeliverTime;

      dSecs = m_BitRate.rtTotalTimeDelivered / 10000000.0;
      dBits = 8.0 * m_BitRate.nTotalBytesDelivered;
      m_BitRate.nAverageBitRate = (DWORD)(dBits / dSecs);

      m_BitRate.rtLastDeliverTime = pPacket->rtStart;
      m_BitRate.nBytesSinceLastDeliverTime = 0;
    }
  }

  CHECK_HR(hr = pSample->SetActualDataLength(nBytes));
  CHECK_HR(hr = pSample->SetTime(fTimeValid ? &pPacket->rtStart : nullptr, fTimeValid ? &pPacket->rtStop : nullptr));
  CHECK_HR(hr = pSample->SetMediaTime(nullptr, nullptr));
  CHECK_HR(hr = pSample->SetDiscontinuity(pPacket->bDiscontinuity));
  CHECK_HR(hr = pSample->SetSyncPoint(pPacket->bSyncPoint));
  CHECK_HR(hr = pSample->SetPreroll(fTimeValid && pPacket->rtStart < 0));
  // Deliver
  CHECK_HR(hr = Deliver(pSample));

done:
  if (!m_bPacketAllocator || !pSample)
    SAFE_DELETE(pPacket);
  SafeRelease(&pSample);
  return hr;
}
Example #11
HRESULT CMPVlcSourceStream::DoBufferProcessingLoop(void)
{
    Command com;
    HRESULT result = S_OK;
    BOOL bStop = false;

    OnThreadStartPlay();

    LogInfo("Starting grabber thread");

    if (m_exec)
    {
        LogInfo("Executing external command: %s %s", m_exec, m_exec_opt);
        ::ShellExecuteA(0, NULL, m_exec, m_exec_opt, NULL, SW_HIDE);
        Sleep(m_exec_wait);
    }

    //libvlc_vlm_seek_media(m_vlc, "vlc_ds_stream", 0);

    if (libvlc_vlm_play_media (m_vlc, "vlc_ds_stream") != 0)
    {
        LogError("libvlc_vlm_play_media failed");
        return S_FALSE;
    }

    OVERLAPPED o;
    o.hEvent = CreateEvent( NULL, FALSE, FALSE, NULL);
    o.Internal = o.InternalHigh = o.Offset = o.OffsetHigh = 0;

    ConnectNamedPipe(m_hPipe, &o);

    WaitForSingleObject(o.hEvent, 20000);
    BOOL fConnected = HasOverlappedIoCompleted(&o);

    SetThreadPriority(m_hThread, THREAD_PRIORITY_TIME_CRITICAL);

    if (!fConnected)
    {
        LogError("ConnectNamedPipe failed");
        CancelIo(m_hPipe);
        CloseHandle(o.hEvent);
        return S_FALSE;
    }
    else do
        {
            BOOL requestAvail = FALSE;
            while ((requestAvail = CheckRequest(&com)) == FALSE)
            {
                //LogDebug ("Command: %d", com);

                IMediaSample *pSample;

                HRESULT hr = GetDeliveryBuffer(&pSample,NULL,NULL,0);
                if (FAILED(hr))
                    continue;

                // fill buffer
                // ------------------------------------------------------------------------------------
                hr = S_OK;
                BYTE *pData;
                DWORD cbData;

                CheckPointer(pSample, E_POINTER);
                // Access the sample's data buffer
                pSample->GetPointer((BYTE **)&pData);
                cbData = pSample->GetSize();

                DWORD startRecvTime = GetTickCount();
                m_buffsize = 0;

                do
                {
                    DWORD cbBytesRead = 0;
                    ResetEvent(o.hEvent);
                    o.Internal = o.InternalHigh = o.Offset = o.OffsetHigh = 0;
                    BOOL fSuccess = ReadFile( m_hPipe,  pData + m_buffsize,  cbData - m_buffsize,  &cbBytesRead, &o);

                    if (GetLastError() == ERROR_IO_PENDING)
                    {
                        for (int n=0; n < 20; n++)
                        {
                            if ((requestAvail = CheckRequest(&com)) == TRUE)
                                break;
                            if (WaitForSingleObject(o.hEvent, 1000) == WAIT_OBJECT_0)
                                break;
                        }
                        fSuccess = GetOverlappedResult(m_hPipe, &o, &cbBytesRead, false);
                    }

                    if (!fSuccess || cbBytesRead == 0)
                    {
                        CancelIo(m_hPipe);
                        break;
                    }
                    m_buffsize += cbBytesRead;

                } while ( !requestAvail && m_buffsize < (cbData  * 3 / 4)
                          && abs((signed long)(GetTickCount() - startRecvTime)) < 100);

                // ------------------------------------------------------------------------------------

                if (m_buffsize != 0 && !(requestAvail && com == CMD_STOP))
                {
                    LogDebug("Posting %d / %d bytes", m_buffsize, pSample->GetSize());

                    REFERENCE_TIME rtStart = startRecvTime;
                    REFERENCE_TIME rtStop  = GetTickCount();

                    pSample->SetTime(&rtStart, &rtStop);
                    pSample->SetActualDataLength(m_buffsize);
                    pSample->SetSyncPoint(TRUE);

                    hr = Deliver(pSample);
                    // downstream filter returns S_FALSE if it wants us to
                    // stop or an error if it's reporting an error.
                    if(hr != S_OK)
                    {
                        LogInfo("Deliver() returned %08x; stopping", hr);
                        bStop = true;
                    }

                } else
                {
                    // FillBuffer returned false
                    bStop = true;
                    DeliverEndOfStream();
                }

                pSample->Release();

                if (bStop)
                    break;
            }

            if (requestAvail)
            {
                LogInfo("Received command: %d", com);
                if (com == CMD_RUN || com == CMD_PAUSE) {
                    Reply(NOERROR);
                } else if (com != CMD_STOP) {
                    Reply((DWORD) E_UNEXPECTED);
                    LogDebug("Unexpected command %d!!!", com);
                }
            }
        } while (com != CMD_STOP && bStop == false);

    //DeliverEndOfStream();
    LogDebug("end loop");

    HANDLE hSDThread = CreateThread(NULL, 0, &VlcStreamDiscardThread, LPVOID(m_hPipe), 0, 0);

    libvlc_vlm_stop_media(m_vlc, "vlc_ds_stream");
    LogDebug("libvlc_vlm_stop_media");

    if (WaitForSingleObject(hSDThread, 30000) == WAIT_TIMEOUT)
    {
        DWORD ec;
        LogError("Terminating StreamDiscardThread!");
        GetExitCodeThread(hSDThread, &ec);
        TerminateThread(hSDThread, ec);
    }
    DisconnectNamedPipe(m_hPipe);
    LogDebug("DoBufferProcessingLoop end");

    CloseHandle(o.hEvent);
    return result;
}
Example #12
GstFlowReturn VideoFakeSrcPin::PushBuffer(GstBuffer *buffer)
{
    IMediaSample *pSample = NULL;
    byte *data = GST_BUFFER_DATA (buffer);
    int attempts = 0;
    HRESULT hres;
    BYTE *sample_buffer;
    AM_MEDIA_TYPE *mediatype;

    StartUsingOutputPin();

    while (attempts < MAX_ATTEMPTS)
    {
        hres = GetDeliveryBuffer(&pSample, NULL, NULL, 0);
        if (SUCCEEDED (hres))
            break;
        attempts++;
        Sleep(100);
    }

    if (FAILED (hres))
    {
        StopUsingOutputPin();
        GST_WARNING ("Could not get sample for delivery to sink: %x", hres);
        return GST_FLOW_ERROR;
    }

    pSample->GetPointer(&sample_buffer);
    pSample->GetMediaType(&mediatype);
    if (mediatype)
        SetMediaType (mediatype);

    if(sample_buffer)
    {
        /* Copy to the destination stride.
         * This is not just a simple memcpy because of the different strides.
         * TODO: optimise for the same-stride case and avoid the copy entirely.
         */
        CopyToDestinationBuffer (data, sample_buffer);
    }

    pSample->SetDiscontinuity(FALSE); /* Decoded frame; unimportant */
    pSample->SetSyncPoint(TRUE); /* Decoded frame; always a valid syncpoint */
    pSample->SetPreroll(FALSE); /* For non-displayed frames.
                                 Not used in GStreamer */

    /* Disable synchronising on this sample. We instead let GStreamer handle
     * this at a higher level, inside BaseSink. */
    pSample->SetTime(NULL, NULL);

    while (attempts < MAX_ATTEMPTS)
    {
        hres = Deliver(pSample);
        if (SUCCEEDED (hres))
            break;
        attempts++;
        Sleep(100);
    }

    pSample->Release();

    StopUsingOutputPin();

    if (SUCCEEDED (hres))
        return GST_FLOW_OK;
    else {
        GST_WARNING_OBJECT (this, "Failed to deliver sample: %x", hres);
        if (hres == VFW_E_NOT_CONNECTED)
            return GST_FLOW_NOT_LINKED;
        else
            return GST_FLOW_ERROR;
    }
}
Example #13
HRESULT CWavPackDSDecoder::Receive(IMediaSample *pSample)
{
    //  Check for other streams and pass them on 
    AM_SAMPLE2_PROPERTIES * const pProps = m_pInput->SampleProps(); 
    if ((pProps->dwStreamId != AM_STREAM_MEDIA) &&
        (pProps->dwStreamId != AM_STREAM_BLOCK_ADDITIONNAL))
    {
        return m_pOutput->Deliver(pSample);
    }
    
    ASSERT(pSample);
    // If no output to deliver to then no point sending us data 
    ASSERT(m_pOutput != NULL);

    HRESULT hr = S_OK;
    BYTE *pSrc, *pDst;
    DWORD SrcLength = pSample->GetActualDataLength();
    hr = pSample->GetPointer(&pSrc);
    if(FAILED(hr))
        return hr;
    
     // Check for minimal block size
    if(SrcLength < (3 * sizeof(uint32_t)))
    {
        return S_OK;
    }

    WAVEFORMATEX* pwfx = (WAVEFORMATEX*)m_pInput->CurrentMediaType().Format();
    BOOL bSeveralBlocks = (pwfx->nChannels > 2);
 
    if(pProps->dwStreamId == AM_STREAM_MEDIA)
    {
        REFERENCE_TIME rtStop;
        if(pSample->IsSyncPoint() == S_OK)
        {
            pSample->GetTime(&m_rtFrameStart, &rtStop);
            m_TotalSamples = 0;
        }

        m_MainBlockDiscontinuity = (pSample->IsDiscontinuity() == S_OK);

        reconstruct_wavpack_frame(
            m_MainFrame,
            &m_CommonFrameData,
            (char*)pSrc,
            SrcLength,            
            TRUE,
            bSeveralBlocks,
            m_PrivateData.version);

        if(m_HybridMode == TRUE)
        {
            // Stop here and wait for correction data
            return S_OK;
        }
    }
    
    if((m_HybridMode == TRUE) && 
       (pProps->dwStreamId == AM_STREAM_BLOCK_ADDITIONNAL))
    {
        // rebuild correction data block
        reconstruct_wavpack_frame(
            m_CorrectionFrame,
            &m_CommonFrameData,
            (char*)pSrc,
            SrcLength,
            FALSE,
            bSeveralBlocks,
            m_PrivateData.version);
    }

    if(wavpack_buffer_decoder_load_frame(m_Codec, m_MainFrame->data, m_MainFrame->len,
        m_HybridMode ? m_CorrectionFrame->data : NULL, m_CorrectionFrame->len) == 0)
    {
        // Something is wrong
        return S_FALSE;
    }
   
    // We can now determine the exact decoding mode
    if(m_HybridMode == FALSE)
    {
        if(m_CommonFrameData.array_flags[0] & WV_HYBRID_FLAG)
        {
            m_DecodingMode = DECODING_MODE_LOSSY;
        }
        else
        {
            m_DecodingMode = DECODING_MODE_LOSSLESS;
        }
    }

    uint32_t samplesLeft = m_CommonFrameData.block_samples;
    while(samplesLeft > 0)
    {
        // Set up the output sample
        IMediaSample *pOutSample;
        hr = InitializeOutputSample(pSample, &pOutSample);
        if(FAILED(hr))
        {
            break;
        }
    
        DWORD DstLength = pOutSample->GetSize();
        hr = pOutSample->GetPointer(&pDst);
        if(FAILED(hr))
        {
            pOutSample->Release();
            break;
        }

        DstLength &= 0xFFFFFFF8;
    
        long samples = wavpack_buffer_decoder_unpack(m_Codec,(int32_t *)pDst, m_SamplesPerBuffer);
        if(samples)
        {
            wavpack_buffer_format_samples(m_Codec,
                (uchar *) pDst,
                (long*) pDst,
                samples);
            
            DstLength = samples *
                WavpackGetBytesPerSample(m_Codec->wpc) *
                WavpackGetNumChannels (m_Codec->wpc);

            pOutSample->SetActualDataLength(DstLength);
            
            REFERENCE_TIME rtStart, rtStop;
            rtStart = m_rtFrameStart + (REFERENCE_TIME)(((double)m_TotalSamples / WavpackGetSampleRate(m_Codec->wpc)) * 10000000);
            m_TotalSamples += samples;
            rtStop = m_rtFrameStart + (REFERENCE_TIME)(((double)m_TotalSamples / WavpackGetSampleRate(m_Codec->wpc)) * 10000000);

            if(rtStart < 0 && rtStop < 0)
            {
                // No need to deliver this sample; it will be skipped
                pOutSample->Release();
                continue;
            }
            pOutSample->SetTime(&rtStart, &rtStop);
            pOutSample->SetSyncPoint(TRUE);
            pOutSample->SetDiscontinuity(m_MainBlockDiscontinuity);
            if(m_MainBlockDiscontinuity == TRUE)
            {
                m_MainBlockDiscontinuity = FALSE;
            }

            hr = m_pOutput->Deliver(pOutSample);
            if(FAILED(hr))
            {
                pOutSample->Release();
                break;
            }
            pOutSample->Release();
        }
        else
        {
            pOutSample->Release();
            break;
        }
        samplesLeft -= samples;
    }
    
    m_DecodedFrames++;
    m_CrcError = WavpackGetNumErrors(m_Codec->wpc);
    
    return S_OK;
}
Example #14
        IMediaSample *QMemInputPin::duplicateSampleForOutput(IMediaSample *sample, IMemAllocator *alloc)
        {
            LONG length = sample->GetActualDataLength();

            HRESULT hr = alloc->Commit();
            if (hr == HRESULT(VFW_E_SIZENOTSET)) {
                ALLOCATOR_PROPERTIES prop = getDefaultAllocatorProperties();
                prop.cbBuffer = qMax(prop.cbBuffer, length);
                ALLOCATOR_PROPERTIES actual;
                //we just try to set the properties...
                alloc->SetProperties(&prop, &actual);
                hr = alloc->Commit();
            }

            Q_ASSERT(SUCCEEDED(hr));

            IMediaSample *out;
            hr = alloc->GetBuffer(&out, 0, 0, AM_GBF_NOTASYNCPOINT);
            Q_ASSERT(SUCCEEDED(hr));

            //let's copy the sample
            {
                REFERENCE_TIME start, end;
                sample->GetTime(&start, &end);
                out->SetTime(&start, &end);
            }

            hr = out->SetActualDataLength(length);
            Q_ASSERT(SUCCEEDED(hr));
            hr = out->SetDiscontinuity(sample->IsDiscontinuity());
            Q_ASSERT(SUCCEEDED(hr));

            {
                LONGLONG start, end;
                hr = sample->GetMediaTime(&start, &end);
                if (hr != HRESULT(VFW_E_MEDIA_TIME_NOT_SET)) {
                    hr = out->SetMediaTime(&start, &end);
                    Q_ASSERT(SUCCEEDED(hr));
                }
            }

            AM_MEDIA_TYPE *type = 0;
            hr = sample->GetMediaType(&type);
            Q_ASSERT(SUCCEEDED(hr));
            hr = out->SetMediaType(type);
            Q_ASSERT(SUCCEEDED(hr));

            hr = out->SetPreroll(sample->IsPreroll());
            Q_ASSERT(SUCCEEDED(hr));
            hr = out->SetSyncPoint(sample->IsSyncPoint());
            Q_ASSERT(SUCCEEDED(hr));

            BYTE *dest = 0, *src = 0;
            hr = out->GetPointer(&dest);
            Q_ASSERT(SUCCEEDED(hr));
            hr = sample->GetPointer(&src);
            Q_ASSERT(SUCCEEDED(hr));

            qMemCopy(dest, src, sample->GetActualDataLength());

            return out;
        }
Example #15
HRESULT CLAVAudio::DeliverBitstream(AVCodecID codec, const BYTE *buffer, DWORD dwSize, DWORD dwFrameSize, REFERENCE_TIME rtStartInput, REFERENCE_TIME rtStopInput)
{
  HRESULT hr = S_OK;

  CMediaType mt = CreateBitstreamMediaType(codec, m_bsParser.m_dwSampleRate);
  WAVEFORMATEX* wfe = (WAVEFORMATEX*)mt.Format();

  if(FAILED(hr = ReconnectOutput(dwSize, mt))) {
    return hr;
  }

  IMediaSample *pOut;
  BYTE *pDataOut = NULL;
  if(FAILED(GetDeliveryBuffer(&pOut, &pDataOut))) {
    return E_FAIL;
  }

  REFERENCE_TIME rtStart = m_rtStart, rtStop = AV_NOPTS_VALUE;
  // TrueHD timings
  // Since the SPDIF muxer takes 24 frames and puts them into one IEC61937 frame, we use the cached timestamp from before.
  if (codec == AV_CODEC_ID_TRUEHD) {
    // long-term cache is valid
    if (m_rtBitstreamCache != AV_NOPTS_VALUE)
      rtStart = m_rtBitstreamCache;
    // Duration - stop time of the current frame is valid
    if (rtStopInput != AV_NOPTS_VALUE)
      rtStop = rtStopInput;
    else // no actual time of the current frame, use typical TrueHD frame size, 24 * 0.83333ms
      rtStop = rtStart + (REFERENCE_TIME)(200000 / m_dRate);
    m_rtStart = rtStop;
  } else {
    double dDuration = DBL_SECOND_MULT * (double)m_bsParser.m_dwSamples / m_bsParser.m_dwSampleRate / m_dRate;
    m_dStartOffset += fmod(dDuration, 1.0);

    // Add rounded duration to rtStop
    rtStop = rtStart + (REFERENCE_TIME)(dDuration + 0.5);
    // and unrounded to m_rtStart..
    m_rtStart += (REFERENCE_TIME)dDuration;
    // and accumulate error..
    if (m_dStartOffset > 0.5) {
      m_rtStart++;
      m_dStartOffset -= 1.0;
    }
  }

  REFERENCE_TIME rtJitter = rtStart - m_rtBitstreamCache;
  m_faJitter.Sample(rtJitter);

  REFERENCE_TIME rtJitterMin = m_faJitter.AbsMinimum();
  if (m_settings.AutoAVSync && abs(rtJitterMin) > m_JitterLimit && m_bHasVideo) {
    DbgLog((LOG_TRACE, 10, L"::Deliver(): corrected A/V sync by %I64d", rtJitterMin));
    m_rtStart -= rtJitterMin;
    m_faJitter.OffsetValues(-rtJitterMin);
    m_bDiscontinuity = TRUE;
  }

#ifdef DEBUG
  DbgLog((LOG_CUSTOM5, 20, L"Bitstream Delivery, rtStart(calc): %I64d, rtStart(input): %I64d, duration: %I64d, diff: %I64d", rtStart, m_rtBitstreamCache, rtStop-rtStart, rtJitter));

  if (m_faJitter.CurrentSample() == 0) {
    DbgLog((LOG_TRACE, 20, L"Jitter Stats: min: %I64d - max: %I64d - avg: %I64d", rtJitterMin, m_faJitter.AbsMaximum(), m_faJitter.Average()));
  }
#endif
  m_rtBitstreamCache = AV_NOPTS_VALUE;

  if(m_settings.AudioDelayEnabled) {
    REFERENCE_TIME rtDelay = (REFERENCE_TIME)((m_settings.AudioDelay * 10000i64) / m_dRate);
    rtStart += rtDelay;
    rtStop += rtDelay;
  }

  pOut->SetTime(&rtStart, &rtStop);
  pOut->SetMediaTime(NULL, NULL);

  pOut->SetPreroll(FALSE);
  pOut->SetDiscontinuity(m_bDiscontinuity);
  m_bDiscontinuity = FALSE;
  pOut->SetSyncPoint(TRUE);

  pOut->SetActualDataLength(dwSize);

  memcpy(pDataOut, buffer, dwSize);

  if(hr == S_OK) {
    hr = m_pOutput->GetConnected()->QueryAccept(&mt);
    if (hr == S_FALSE && m_nCodecId == AV_CODEC_ID_DTS && m_bDTSHD) {
      DbgLog((LOG_TRACE, 1, L"DTS-HD Media Type failed with %0#.8x, trying fallback to DTS core", hr));
      m_bForceDTSCore = TRUE;
      UpdateBitstreamContext();
      goto done;
    }
    DbgLog((LOG_TRACE, 1, L"Sending new Media Type (QueryAccept: %0#.8x)", hr));
    m_pOutput->SetMediaType(&mt);
    pOut->SetMediaType(&mt);
  }

  hr = m_pOutput->Deliver(pOut);
  if (FAILED(hr)) {
    DbgLog((LOG_ERROR, 10, L"::DeliverBitstream failed with code: %0#.8x", hr));
  }

done:
  SafeRelease(&pOut);
  return hr;
}
Example #16
HRESULT CLAVOutputPin::DeliverPacket(Packet *pPacket)
{
  HRESULT hr = S_OK;
  IMediaSample *pSample = NULL;

  long nBytes = (long)pPacket->GetDataSize();

  if(nBytes == 0) {
    goto done;
  }

  CHECK_HR(hr = GetDeliveryBuffer(&pSample, NULL, NULL, 0));

  if (m_bPacketAllocator) {
    ILAVMediaSample *pLAVSample = NULL;
    CHECK_HR(hr = pSample->QueryInterface(&pLAVSample));
    CHECK_HR(hr = pLAVSample->SetPacket(pPacket));
    SafeRelease(&pLAVSample);
  } else {
    // Resize buffer if it is too small
    // This can cause a playback hiccup; we should avoid it if possible by setting a big enough buffer size
    if(nBytes > pSample->GetSize()) {
      SafeRelease(&pSample);
      ALLOCATOR_PROPERTIES props, actual;
      CHECK_HR(hr = m_pAllocator->GetProperties(&props));
      // Give us 2 times the requested size, so we don't resize every time
      props.cbBuffer = nBytes*2;
      if(props.cBuffers > 1) {
        CHECK_HR(hr = __super::DeliverBeginFlush());
        CHECK_HR(hr = __super::DeliverEndFlush());
      }
      CHECK_HR(hr = m_pAllocator->Decommit());
      CHECK_HR(hr = m_pAllocator->SetProperties(&props, &actual));
      CHECK_HR(hr = m_pAllocator->Commit());
      CHECK_HR(hr = GetDeliveryBuffer(&pSample, NULL, NULL, 0));
    }

    // Fill the sample
    BYTE* pData = NULL;
    if(FAILED(hr = pSample->GetPointer(&pData)) || !pData) goto done;

    memcpy(pData, pPacket->GetData(), nBytes);
  }

  if(pPacket->pmt) {
    DbgLog((LOG_TRACE, 10, L"::DeliverPacket() - sending new media type to decoder"));
    pSample->SetMediaType(pPacket->pmt);
    pPacket->bDiscontinuity = true;

    CAutoLock cAutoLock(m_pLock);
    CMediaType pmt = *(pPacket->pmt);
    m_mts.clear();
    m_mts.push_back(pmt);
    pPacket->pmt = NULL;

    SetMediaType(&pmt);
  }

  bool fTimeValid = pPacket->rtStart != Packet::INVALID_TIME;

  CHECK_HR(hr = pSample->SetActualDataLength(nBytes));
  CHECK_HR(hr = pSample->SetTime(fTimeValid ? &pPacket->rtStart : NULL, fTimeValid ? &pPacket->rtStop : NULL));
  CHECK_HR(hr = pSample->SetMediaTime(NULL, NULL));
  CHECK_HR(hr = pSample->SetDiscontinuity(pPacket->bDiscontinuity));
  CHECK_HR(hr = pSample->SetSyncPoint(pPacket->bSyncPoint));
  CHECK_HR(hr = pSample->SetPreroll(fTimeValid && pPacket->rtStart < 0));
  // Deliver
  CHECK_HR(hr = Deliver(pSample));

done:
  if (!m_bPacketAllocator)
    SAFE_DELETE(pPacket);
  SafeRelease(&pSample);
  return hr;
}
Example #17
HRESULT CTTASplitterInputPin::DoProcessingLoop(void)
{
	DWORD com;
	IMediaSample *pSample;
	BYTE *Buffer;
	HRESULT hr;
	unsigned long FrameLenBytes, FrameLenSamples, FrameIndex;

	Reply(NOERROR);
	m_bAbort = FALSE;

	m_pParentFilter->m_pOutputPin->DeliverNewSegment(0,
	m_pParentFilter->m_rtStop - m_pParentFilter->m_rtStart,
	m_pParentFilter->m_dRateSeeking);

	do {
		if (m_pIACBW->StreamPos >= m_pIACBW->StreamLen || tta_parser_eof(m_pTTAParser)) {
			m_pParentFilter->m_pOutputPin->DeliverEndOfStream();
			return NOERROR;
		}

		hr = m_pParentFilter->m_pOutputPin->GetDeliveryBuffer(&pSample, NULL, NULL, 0); 
		if (FAILED(hr)) {
			return hr;
		}

		hr = pSample->GetPointer(&Buffer);
		if (FAILED(hr)) {
			pSample->Release();
			return hr;
		}

		FrameLenBytes = tta_parser_read_frame(m_pTTAParser, Buffer, &FrameIndex, &FrameLenSamples);
		if (!FrameLenBytes) {
			pSample->Release();
			m_pParentFilter->m_pOutputPin->DeliverEndOfStream();
			return hr;
		}

		pSample->SetActualDataLength(FrameLenBytes);

		REFERENCE_TIME rtStart, rtStop;
		rtStart	= (FrameIndex * m_pTTAParser->FrameLen);
		rtStop	= rtStart + FrameLenSamples;
		rtStart	= (rtStart * 10000000) / m_pTTAParser->TTAHeader.SampleRate;
		rtStop	= (rtStop * 10000000) / m_pTTAParser->TTAHeader.SampleRate;

		rtStart	-= m_pParentFilter->m_rtStart;
		rtStop	-= m_pParentFilter->m_rtStart;

		pSample->SetTime(&rtStart, &rtStop);
		pSample->SetPreroll(FALSE);
		pSample->SetDiscontinuity(m_bDiscontinuity);

		if (m_bDiscontinuity) {
			m_bDiscontinuity = FALSE;
		}
		pSample->SetSyncPoint(TRUE);

		hr = m_pParentFilter->m_pOutputPin->Deliver(pSample);
		pSample->Release();
		pSample = NULL;

		if (FAILED(hr)) {
			return hr;
		}
	} while (!CheckRequest((DWORD*)&com) && !m_bAbort);

	return NOERROR;
}