// Drains time-stretched audio frames into DirectShow output samples, delivers
// them downstream via OutputSample(), and finally reports the input/output
// duration ratio to the audio clock so A/V sync can be corrected.
//
// nInFrames           - number of source frames consumed to produce this batch
// nOutFrames          - frames currently available from the time stretcher
// dBias, dAdjustment, dAVMult - clock-correction factors, passed through
//                       unchanged to m_pClock->AudioResampled()
// bFlushPartialSample - forwarded to OutputSample(); presumably forces
//                       delivery of a partially filled buffer (TODO confirm)
void CTimeStretchFilter::CreateOutput(UINT32 nInFrames, UINT32 nOutFrames, double dBias, double dAdjustment, double dAVMult, bool bFlushPartialSample)
{
  HRESULT hr = S_OK;
  // Capacity of one output media sample, expressed in frames.
  UINT32 maxBufferFrames = DEFAULT_OUT_BUFFER_SIZE / m_pOutputFormat->Format.nBlockAlign;
  UINT32 nOutFramesTotal = 0;

  while (nOutFrames > 0)
  {
    // try to get an output buffer if none available
    if (!m_pNextOutSample && FAILED(hr = RequestNextOutBuffer(m_rtInSampleTime)))
    {
      Log("CTimeStretchFilter::timestretch thread - Failed to get next output sample!");
      break;
    }

    BYTE* pOutData = NULL;
    m_pNextOutSample->GetPointer(&pOutData);
              
    if (pOutData)
    {
      // The sample may already hold data from a previous pass; append after it.
      UINT32 nOffset = m_pNextOutSample->GetActualDataLength();
      UINT32 nOffsetInFrames = nOffset / m_pOutputFormat->Format.nBlockAlign;
                
      // Clamp to the space remaining in the current buffer.
      // NOTE(review): all quantities are unsigned — if nOffsetInFrames ever
      // exceeded maxBufferFrames the subtraction would wrap; presumably
      // OutputSample() guarantees nOffset never exceeds the buffer size.
      if (nOutFrames > maxBufferFrames - nOffsetInFrames)
        nOutFrames = maxBufferFrames - nOffsetInFrames;

      m_pNextOutSample->SetActualDataLength(nOffset + nOutFrames * m_pOutputFormat->Format.nBlockAlign);
      pOutData += nOffset;
      // Copy nOutFrames frames from the time stretcher into the sample buffer.
      receiveSamplesInternal((short*)pOutData, nOutFrames);
      nOutFramesTotal += nOutFrames;

      // Attach a pending format change, if any, to the outgoing sample.
      if (m_pMediaType)
        m_pNextOutSample->SetMediaType(m_pMediaType);

      OutputSample(bFlushPartialSample);
      // Re-query the stretcher: more frames may be ready for the next pass;
      // the loop exits when it reports zero.
      nOutFrames = numSamples();
    }
  }

  if (nOutFramesTotal > 0)
  {
    // Convert frame counts to reference time (UNITS = 100-ns ticks per second)
    // and feed the measured in/out durations to the clock for drift correction.
    double rtSampleDuration = (double)nInFrames * (double)UNITS / (double)m_pOutputFormat->Format.nSamplesPerSec;
    double rtProcessedSampleDuration = (double)(nOutFramesTotal) * (double)UNITS / (double)m_pOutputFormat->Format.nSamplesPerSec;

    m_pClock->AudioResampled(rtProcessedSampleDuration, rtSampleDuration, dBias, dAdjustment, dAVMult);

    //Log(m_pClock->DebugData());
  }
}
// Drains time-stretched audio frames into DirectShow output samples and
// delivers them downstream via OutputSample(). Buffer capacity is taken from
// the configurable m_nOutBufferSize member.
//
// nInFrames           - number of source frames consumed (unused in this
//                       variant; kept for signature compatibility)
// nOutFrames          - frames currently available from the time stretcher
// dBias, dAdjustment, dAVMult - clock-correction factors (unused in this
//                       variant; kept for signature compatibility)
// bFlushPartialSample - forwarded to OutputSample(); presumably forces
//                       delivery of a partially filled buffer (TODO confirm)
void CTimeStretchFilter::CreateOutput(UINT32 nInFrames, UINT32 nOutFrames, double dBias, double dAdjustment, double dAVMult, bool bFlushPartialSample)
{
  HRESULT hr = S_OK;
  // Capacity of one output media sample, expressed in frames.
  UINT32 maxBufferFrames = m_nOutBufferSize / m_pOutputFormat->Format.nBlockAlign;
  UINT32 nOutFramesTotal = 0;

  while (nOutFrames > 0)
  {
    // try to get an output buffer if none available
    if (!m_pNextOutSample && FAILED(hr = RequestNextOutBuffer(m_rtInSampleTime)))
    {
      Log("CTimeStretchFilter::timestretch thread - Failed to get next output sample!");
      break;
    }

    BYTE* pOutData = NULL;
    m_pNextOutSample->GetPointer(&pOutData);
              
    if (pOutData)
    {
      // The sample may already hold data from a previous pass; append after it.
      UINT32 nOffset = m_pNextOutSample->GetActualDataLength();
      UINT32 nOffsetInFrames = nOffset / m_pOutputFormat->Format.nBlockAlign;
                
      // Clamp to the space remaining in the current buffer.
      // NOTE(review): unsigned arithmetic — wraps if nOffsetInFrames ever
      // exceeded maxBufferFrames; presumably OutputSample() prevents that.
      if (nOutFrames > maxBufferFrames - nOffsetInFrames)
        nOutFrames = maxBufferFrames - nOffsetInFrames;

      m_pNextOutSample->SetActualDataLength(nOffset + nOutFrames * m_pOutputFormat->Format.nBlockAlign);
      pOutData += nOffset;
      // Copy nOutFrames frames from the time stretcher into the sample buffer.
      receiveSamplesInternal((short*)pOutData, nOutFrames);
      nOutFramesTotal += nOutFrames;

      // Attach a pending format change, if any, to the outgoing sample.
      if (m_pMediaType)
        m_pNextOutSample->SetMediaType(m_pMediaType);

      OutputSample(bFlushPartialSample);
        
      // Re-query the stretcher: more frames may be ready for the next pass;
      // the loop exits when it reports zero.
      nOutFrames = numSamples();
    }
  }
}
Example #3
0
// Flushes the effect: drains every sample still held in the delay buffer list
// through the automatic volume control and writes it out, then resets the
// list-tail pointer and chains to the base-class End().
//
// Bug fix: the IAVC_INLINE path #define'd IAVC_GETNEXTSAMPLE but then
// #undef'd IAVC_SETNEXTSAMPLE, so IAVC_GETNEXTSAMPLE stayed defined after the
// include and would corrupt any later inline inclusion of iAVC.cpp. The
// #undef now matches the #define.
void EffectAvcCompressor::End()
{
	IAVCSAMPLETYPE left;
	IAVCSAMPLETYPE right = 0;

	// We now need to output any samples still waiting because
	while ( mpBufferList != NULL )
	{
		#ifdef IAVC_INLINE
			// use inline GetNextSample()
			#define IAVC_GETNEXTSAMPLE
			#include "../../lib-src/iAVC/iAVC.cpp"
			#undef  IAVC_GETNEXTSAMPLE
		#else
			// call GetNextSample()
			mAutoVolCtrl.GetNextSample(left, right);
		#endif

		// OutputSample() consumes buffer-list nodes, advancing mpBufferList
		// toward NULL and terminating the loop.
		OutputSample ( left, right );
	}
	mpBufferPrevious = NULL;

	EffectSimplePairedTwoTrack<IAVCSAMPLETYPE,AVCCOMPSAMPLETYPE>::End();
}
Example #4
0
// Processes one paired (left/right) buffer of samples through the automatic
// volume control. The buffers are appended to the delay list, every sample is
// fed to the AVC, and — once the configured delay has elapsed — the
// gain-adjusted samples are written back in place and emitted via
// OutputSample().
//
// bufferLeft  - left-channel samples (IAVCSAMPLETYPE*, passed as void*)
// bufferRight - right-channel samples; may be 0 for mono input
// len         - number of samples in each buffer
// Returns true (presumably "continue processing" — TODO confirm against
// EffectSimplePairedTwoTrack's contract).
//
// Bug fix: the inline GetNextSample block #define'd IAVC_GETNEXTSAMPLE but
// #undef'd IAVC_SETNEXTSAMPLE, leaving IAVC_GETNEXTSAMPLE defined after the
// include so the next iteration's SetNextSample include would also expand the
// GetNextSample code. The #undef now matches the #define.
bool EffectAvcCompressor::ProcessSimplePairedTwoTrack(/*IAVCSAMPLETYPE*/ void *bufferLeft,
													  /*IAVCSAMPLETYPE*/ void *bufferRight, // may be 0
													  sampleCount len)
{
	// build new iAVCBufferList node and link it at the tail of the list;
	// nodes are released later by OutputSample()/End().
	iAVCBufferList *  pBufferNode = new iAVCBufferList;
	if ( mpBufferPrevious != NULL )
		mpBufferPrevious->mpNext = pBufferNode;	// link to end of list
	else
		mpBufferList = pBufferNode;				// have first node in list
	mpBufferPrevious = pBufferNode;				// this node now the last added to list
	pBufferNode->mpNext = NULL;
	pBufferNode->mpLeftBuffer = bufferLeft;
	pBufferNode->mpRightBuffer = bufferRight;
	pBufferNode->mnLen = len;
	pBufferNode->mnNext = 0;

	// process samples in these buffer(s)
	IAVCSAMPLETYPE* typedBufferLeft  = (IAVCSAMPLETYPE*)bufferLeft;
	IAVCSAMPLETYPE* typedBufferRight = (IAVCSAMPLETYPE*)bufferRight;
	sampleCount i;
	IAVCSAMPLETYPE left;
	IAVCSAMPLETYPE right = 0;	// stays 0 for mono input

	for ( i = 0 ; i < len ; ++i ) {
		left = typedBufferLeft[i];
		if ( typedBufferRight )
			right = typedBufferRight[i];
		#ifdef IAVC_INLINE
			// use inline SetNextSample()
			#define	IAVC_SETNEXTSAMPLE
			#include "../../lib-src/iAVC/iAVC.cpp"
			#undef  IAVC_SETNEXTSAMPLE
			// use inline GetNextSample()
			if ( mnDelay <= 0 )
			{	// get a value only if past desired delay
				#define IAVC_GETNEXTSAMPLE
				#include "../../lib-src/iAVC/iAVC.cpp"
				#undef  IAVC_GETNEXTSAMPLE
			}
		#else
			// call SetNextSample() and GetNextSample()
			mAutoVolCtrl.SetNextSample(left, right);
			if ( mnDelay <= 0 )
			{	// get a value only if past desired delay
				mAutoVolCtrl.GetNextSample(left, right);
			}
		#endif
			if ( mnDelay <= 0 )
			{	// past the delay: emit and overwrite the input in place
				OutputSample ( left, right );
				typedBufferLeft[i] = left;
				if ( typedBufferRight )
					typedBufferRight[i] = right;
			}
			else
			{	// count down the delay amount
				--mnDelay;
			}
	}
	return true;
}