Example #1
HRESULT CBaseVideoFilter::GetDeliveryBuffer(int w, int h, IMediaSample** ppOut)
{
    CheckPointer(ppOut, E_POINTER);

    HRESULT hr;

    if (FAILED(hr = ReconnectOutput(w, h))) {
        return hr;
    }

    if (FAILED(hr = m_pOutput->GetDeliveryBuffer(ppOut, NULL, NULL, 0))) {
        return hr;
    }

    AM_MEDIA_TYPE* pmt;
    if (SUCCEEDED((*ppOut)->GetMediaType(&pmt)) && pmt) {
        CMediaType mt = *pmt;
        m_pOutput->SetMediaType(&mt);
        DeleteMediaType(pmt);
    }

    (*ppOut)->SetDiscontinuity(FALSE);
    (*ppOut)->SetSyncPoint(TRUE);

    // FIXME: for reasons unknown, the overlay mixer becomes very skippy without this
    // (don't enable it for other renderers; the old renderer, for example, will go crazy if you do)
    if (GetCLSID(m_pOutput->GetConnected()) == CLSID_OverlayMixer) {
        (*ppOut)->SetDiscontinuity(TRUE);
    }

    return S_OK;
}
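
A minimal usage sketch, not taken from the original source, showing how a CBaseVideoFilter-derived class might call this helper from its frame-delivery path. CMyVideoFilter, m_w, m_h, and CopyFrameToSample are assumed names used only for illustration; GetPointer, SetActualDataLength, GetSize, and Deliver are standard DirectShow base-class calls.

// Hypothetical caller: deliver one video frame downstream.
HRESULT CMyVideoFilter::DeliverFrame(const BYTE* pFrame)
{
    IMediaSample* pOut = NULL;

    // Ask the base class for a correctly sized output sample (reconnecting the pin if needed).
    HRESULT hr = GetDeliveryBuffer(m_w, m_h, &pOut);
    if (FAILED(hr)) {
        return hr;
    }

    BYTE* pDataOut = NULL;
    if (SUCCEEDED(hr = pOut->GetPointer(&pDataOut))) {
        // CopyFrameToSample stands in for the real copy/convert step.
        CopyFrameToSample(pFrame, pDataOut, m_w, m_h);
        pOut->SetActualDataLength(pOut->GetSize());
        hr = m_pOutput->Deliver(pOut);
    }

    pOut->Release();
    return hr;
}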
Example #2
HRESULT CLAVAudio::DeliverBitstream(AVCodecID codec, const BYTE *buffer, DWORD dwSize, DWORD dwFrameSize, REFERENCE_TIME rtStartInput, REFERENCE_TIME rtStopInput)
{
  HRESULT hr = S_OK;

  CMediaType mt = CreateBitstreamMediaType(codec, m_bsParser.m_dwSampleRate);
  WAVEFORMATEX* wfe = (WAVEFORMATEX*)mt.Format();

  if(FAILED(hr = ReconnectOutput(dwSize, mt))) {
    return hr;
  }

  IMediaSample *pOut;
  BYTE *pDataOut = NULL;
  if(FAILED(GetDeliveryBuffer(&pOut, &pDataOut))) {
    return E_FAIL;
  }

  REFERENCE_TIME rtStart = m_rtStart, rtStop = AV_NOPTS_VALUE;
  // TrueHD timings
  // Since the SPDIF muxer takes 24 frames and puts them into one IEC61937 frame, we use the cached timestamp from before.
  if (codec == AV_CODEC_ID_TRUEHD) {
    // long-term cache is valid
    if (m_rtBitstreamCache != AV_NOPTS_VALUE)
      rtStart = m_rtBitstreamCache;
    // Duration: the stop time of the current frame is valid
    if (rtStopInput != AV_NOPTS_VALUE)
      rtStop = rtStopInput;
    else // no stop time for the current frame; use the typical TrueHD frame duration, 24 * 0.83333 ms
      rtStop = rtStart + (REFERENCE_TIME)(200000 / m_dRate);
    m_rtStart = rtStop;
  } else {
    double dDuration = DBL_SECOND_MULT * (double)m_bsParser.m_dwSamples / m_bsParser.m_dwSampleRate / m_dRate;
    m_dStartOffset += fmod(dDuration, 1.0);

    // Add the rounded duration to rtStop
    rtStop = rtStart + (REFERENCE_TIME)(dDuration + 0.5);
    // ...and the unrounded duration to m_rtStart,
    m_rtStart += (REFERENCE_TIME)dDuration;
    // folding the accumulated fractional error back in once it exceeds half a tick
    if (m_dStartOffset > 0.5) {
      m_rtStart++;
      m_dStartOffset -= 1.0;
    }
  }

  REFERENCE_TIME rtJitter = rtStart - m_rtBitstreamCache;
  m_faJitter.Sample(rtJitter);

  REFERENCE_TIME rtJitterMin = m_faJitter.AbsMinimum();
  if (m_settings.AutoAVSync && abs(rtJitterMin) > m_JitterLimit && m_bHasVideo) {
    DbgLog((LOG_TRACE, 10, L"::Deliver(): corrected A/V sync by %I64d", rtJitterMin));
    m_rtStart -= rtJitterMin;
    m_faJitter.OffsetValues(-rtJitterMin);
    m_bDiscontinuity = TRUE;
  }

#ifdef DEBUG
  DbgLog((LOG_CUSTOM5, 20, L"Bitstream Delivery, rtStart(calc): %I64d, rtStart(input): %I64d, duration: %I64d, diff: %I64d", rtStart, m_rtBitstreamCache, rtStop-rtStart, rtJitter));

  if (m_faJitter.CurrentSample() == 0) {
    DbgLog((LOG_TRACE, 20, L"Jitter Stats: min: %I64d - max: %I64d - avg: %I64d", rtJitterMin, m_faJitter.AbsMaximum(), m_faJitter.Average()));
  }
#endif
  m_rtBitstreamCache = AV_NOPTS_VALUE;

  if(m_settings.AudioDelayEnabled) {
    REFERENCE_TIME rtDelay = (REFERENCE_TIME)((m_settings.AudioDelay * 10000i64) / m_dRate);
    rtStart += rtDelay;
    rtStop += rtDelay;
  }

  pOut->SetTime(&rtStart, &rtStop);
  pOut->SetMediaTime(NULL, NULL);

  pOut->SetPreroll(FALSE);
  pOut->SetDiscontinuity(m_bDiscontinuity);
  m_bDiscontinuity = FALSE;
  pOut->SetSyncPoint(TRUE);

  pOut->SetActualDataLength(dwSize);

  memcpy(pDataOut, buffer, dwSize);

  if(hr == S_OK) {
    hr = m_pOutput->GetConnected()->QueryAccept(&mt);
    if (hr == S_FALSE && m_nCodecId == AV_CODEC_ID_DTS && m_bDTSHD) {
      DbgLog((LOG_TRACE, 1, L"DTS-HD Media Type failed with %0#.8x, trying fallback to DTS core", hr));
      m_bForceDTSCore = TRUE;
      UpdateBitstreamContext();
      goto done;
    }
    DbgLog((LOG_TRACE, 1, L"Sending new Media Type (QueryAccept: %0#.8x)", hr));
    m_pOutput->SetMediaType(&mt);
    pOut->SetMediaType(&mt);
  }

  hr = m_pOutput->Deliver(pOut);
  if (FAILED(hr)) {
    DbgLog((LOG_ERROR, 10, L"::DeliverBitstream failed with code: %0#.8x", hr));
  }

done:
  SafeRelease(&pOut);
  return hr;
}
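
The rounded/unrounded bookkeeping above is what keeps the output timestamps from drifting: the truncated duration advances m_rtStart each frame, the fractional remainder accumulates in m_dStartOffset, and once it exceeds half a 100 ns tick it is folded back in. A standalone sketch of the same idea, with TimestampAccumulator and Advance as assumed names (not part of LAV Audio):

#include <cmath>

typedef long long REFERENCE_TIME; // 100 ns units, as in DirectShow

struct TimestampAccumulator {
  REFERENCE_TIME rtStart = 0; // running start time of the next frame
  double fraction = 0.0;      // accumulated sub-tick remainder

  // dDuration is the exact frame duration in 100 ns units (possibly fractional).
  // Returns the start time of the frame being consumed.
  REFERENCE_TIME Advance(double dDuration) {
    REFERENCE_TIME rtFrameStart = rtStart;
    rtStart += (REFERENCE_TIME)dDuration;   // truncated (unrounded) part
    fraction += std::fmod(dDuration, 1.0);  // keep the remainder instead of discarding it
    if (fraction > 0.5) {                   // fold a whole tick back in before the error grows
      rtStart++;
      fraction -= 1.0;
    }
    return rtFrameStart;
  }
};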
Example #3
HRESULT CAudioDecFilter::Transform(IMediaSample *pIn, IMediaSample *pOut)
{
	// Get the input data pointer
	const DWORD InSize = pIn->GetActualDataLength();
	BYTE *pInData = NULL;
	HRESULT hr = pIn->GetPointer(&pInData);
	if (FAILED(hr))
		return hr;

	{
		CAutoLock Lock(&m_cPropLock);

		/*
			When supporting multiple audio formats, the format should be determined around here
		*/
		if (!m_pDecoder) {
			m_pDecoder = new CAacDecoder();
			m_pDecoder->Open();
		}

		REFERENCE_TIME rtStart, rtEnd;
		hr = pIn->GetTime(&rtStart, &rtEnd);
		if (FAILED(hr))
			rtStart = -1;
		if (pIn->IsDiscontinuity() == S_OK) {
			m_bDiscontinuity = true;
			m_bInputDiscontinuity = true;
		} else if (hr == S_OK || hr == VFW_S_NO_STOP_TIME) {
			if (!m_bJitterCorrection) {
				m_StartTime = rtStart;
			} else if (m_StartTime >= 0
					&& _abs64(rtStart - m_StartTime) > MAX_JITTER) {
				TRACE(TEXT("Resync audio stream time (%lld -> %lld [%f])\n"),
					  m_StartTime, rtStart,
					  (double)(rtStart - m_StartTime) / (double)REFERENCE_TIME_SECOND);
				m_StartTime = rtStart;
			}
		}
		if (m_StartTime < 0 || m_bDiscontinuity) {
			TRACE(TEXT("Initialize audio stream time (%lld)\n"), rtStart);
			m_StartTime = rtStart;
		}

		m_BitRateCalculator.Update(InSize);
	}

	DWORD InDataPos = 0;
	FrameSampleInfo SampleInfo;
	SampleInfo.pData = &m_OutData;

	hr = S_OK;

	while (InDataPos < InSize) {
		{
			CAutoLock Lock(&m_cPropLock);

			CAudioDecoder::DecodeFrameInfo FrameInfo;
			const DWORD DataSize = InSize - InDataPos;
			DWORD DecodeSize = DataSize;
			if (!m_pDecoder->Decode(&pInData[InDataPos], &DecodeSize, &FrameInfo)) {
				if (DecodeSize < DataSize) {
					InDataPos += DecodeSize;
					continue;
				}
				break;
			}
			InDataPos += DecodeSize;

			if (FrameInfo.bDiscontinuity)
				m_bDiscontinuity = true;

			SampleInfo.bMediaTypeChanged = false;

			hr = OnFrame(FrameInfo.pData, FrameInfo.Samples, FrameInfo.Info,
						 &SampleInfo);
		}

		if (SUCCEEDED(hr)) {
			if (SampleInfo.bMediaTypeChanged) {
				hr = ReconnectOutput(SampleInfo.MediaBufferSize, SampleInfo.MediaType);
				if (FAILED(hr))
					break;
				OutputLog(TEXT("出力メディアタイプを更新します。\r\n"));
				hr = m_pOutput->SetMediaType(&SampleInfo.MediaType);
				if (FAILED(hr)) {
					OutputLog(TEXT("出力メディアタイプを設定できません。(%08x)\r\n"), hr);
					break;
				}
				m_MediaType = SampleInfo.MediaType;
				m_bDiscontinuity = true;
				m_bInputDiscontinuity = true;
			}

			IMediaSample *pOutSample = NULL;
			hr = m_pOutput->GetDeliveryBuffer(&pOutSample, NULL, NULL, 0);
			if (FAILED(hr)) {
				OutputLog(TEXT("出力メディアサンプルを取得できません。(%08x)\r\n"), hr);
				break;
			}

			if (SampleInfo.bMediaTypeChanged)
				pOutSample->SetMediaType(&m_MediaType);

			// Get the output buffer pointer
			BYTE *pOutBuff = NULL;
			hr = pOutSample->GetPointer(&pOutBuff);
			if (FAILED(hr)) {
				OutputLog(TEXT("Failed to get the output sample buffer. (%08x)\r\n"), hr);
				pOutSample->Release();
				break;
			}

			::CopyMemory(pOutBuff, m_OutData.GetData(), m_OutData.GetSize());
			pOutSample->SetActualDataLength(m_OutData.GetSize());

			if (m_StartTime >= 0) {
				REFERENCE_TIME rtDuration, rtStart, rtEnd;
				rtDuration = REFERENCE_TIME_SECOND * (LONGLONG)SampleInfo.Samples / FREQUENCY;
				rtStart = m_StartTime;
				m_StartTime += rtDuration;
				// Time shift for audio sync correction
				if (m_DelayAdjustment > 0) {
					// Delay playback, stretching the duration to at most 2x
					if (rtDuration >= m_DelayAdjustment) {
						rtDuration += m_DelayAdjustment;
						m_DelayAdjustment = 0;
					} else {
						m_DelayAdjustment -= rtDuration;
						rtDuration *= 2;
					}
				} else if (m_DelayAdjustment < 0) {
					// Advance playback, shrinking the duration to no less than 1/2
					if (rtDuration >= -m_DelayAdjustment * 2) {
						rtDuration += m_DelayAdjustment;
						m_DelayAdjustment = 0;
					} else {
						m_DelayAdjustment += rtDuration;
						rtDuration /= 2;
					}
				} else {
					rtStart += m_Delay;
				}
				rtEnd = rtStart + rtDuration;
				pOutSample->SetTime(&rtStart, &rtEnd);
			}
			pOutSample->SetMediaTime(NULL, NULL);
			pOutSample->SetPreroll(FALSE);
#if 0
			// Setting Discontinuity here appears to break double-speed playback
			pOutSample->SetDiscontinuity(m_bDiscontinuity);
#else
			pOutSample->SetDiscontinuity(m_bInputDiscontinuity);
#endif
			m_bDiscontinuity = false;
			m_bInputDiscontinuity = false;
			pOutSample->SetSyncPoint(TRUE);

			hr = m_pOutput->Deliver(pOutSample);
#ifdef _DEBUG
			if (FAILED(hr)) {
				OutputLog(TEXT("Failed to deliver the sample. (%08x)\r\n"), hr);
				if (m_bPassthrough && !m_bPassthroughError) {
					m_bPassthroughError = true;
					if (m_pEventHandler)
						m_pEventHandler->OnSpdifPassthroughError(hr);
				}
			}
#endif
			pOutSample->Release();
			if (FAILED(hr))
				break;
		}
	}

	return hr;
}
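
The m_DelayAdjustment branch above spreads a pending audio-delay correction across successive frames, stretching a frame to at most twice its duration or shrinking it to no less than half. A standalone sketch of that policy, with ApplyDelayAdjustment as an assumed name:

typedef long long REFERENCE_TIME; // 100 ns units, as in DirectShow

// Consume part of a pending delay adjustment by stretching or shrinking one frame's duration.
// A positive adjustment delays audio (duration grows to at most 2x); a negative one advances it
// (duration shrinks to no less than 1/2). The unconsumed remainder stays in rtAdjustment.
REFERENCE_TIME ApplyDelayAdjustment(REFERENCE_TIME rtDuration, REFERENCE_TIME &rtAdjustment)
{
	if (rtAdjustment > 0) {
		if (rtDuration >= rtAdjustment) {
			rtDuration += rtAdjustment;
			rtAdjustment = 0;
		} else {
			rtAdjustment -= rtDuration;
			rtDuration *= 2;
		}
	} else if (rtAdjustment < 0) {
		if (rtDuration >= -rtAdjustment * 2) {
			rtDuration += rtAdjustment;
			rtAdjustment = 0;
		} else {
			rtAdjustment += rtDuration;
			rtDuration /= 2;
		}
	}
	return rtDuration;
}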