예제 #1
0
STDMETHODIMP CMpcAudioRenderer::Run(REFERENCE_TIME tStart)
{
    // Transition the renderer into the running state. In WASAPI mode only the
    // audio client is validated here (the actual Start() is deferred until the
    // first buffer is fed); in DirectSound mode the current playback rate is
    // applied before delegating to the base class.
    if (m_State == State_Running) {
        return NOERROR;
    }

    HRESULT hr;

    if (m_useWASAPI) {
        hr = CheckAudioClient(m_pWaveFileFormat);
        if (FAILED(hr)) {
            TRACE(_T("CMpcAudioRenderer::Run Error on check audio client\n"));
            return hr;
        }
        // Rather start the client at the last moment when the buffer is fed
        /*hr = pAudioClient->Start();
        if (FAILED (hr))
        {
        TRACE(_T("CMpcAudioRenderer::Run Start error"));
        return hr;
        }*/
    } else {
        const bool bCanApplyRate = m_pDSBuffer
                                   && m_pPosition
                                   && m_pWaveFileFormat
                                   && SUCCEEDED(m_pPosition->GetRate(&m_dRate));
        if (bCanApplyRate) {
            if (m_dRate < 1.0) {
                // Slow playback: resample by lowering the DirectSound frequency.
                hr = m_pDSBuffer->SetFrequency((long)(m_pWaveFileFormat->nSamplesPerSec * m_dRate));
                if (FAILED(hr)) {
                    return hr;
                }
            } else {
                // Normal or fast playback: nominal frequency, with tempo
                // adjustment delegated to SoundTouch.
                hr = m_pDSBuffer->SetFrequency((long)m_pWaveFileFormat->nSamplesPerSec);
                m_pSoundTouch->setRateChange((float)(m_dRate - 1.0) * 100);

                if (m_bMuteFastForward) {
                    // Mute while fast-forwarding; restore volume at normal speed.
                    if (m_dRate == 1.0) {
                        m_pDSBuffer->SetVolume(m_lVolume);
                    } else {
                        m_pDSBuffer->SetVolume(DSBVOLUME_MIN);
                    }
                }
            }
        }

        ClearBuffer();
    }

    return CBaseRenderer::Run(tStart);
}
예제 #2
0
HRESULT CMpcAudioRenderer::SetMediaType(const CMediaType *pmt)
{
    // Store a private copy of the negotiated wave format. When WASAPI is
    // active and the render client is already initialized, the client is
    // re-initialized for the new format. In DirectSound mode SoundTouch is
    // reset to neutral tempo/pitch for mono/stereo streams.
    if (!pmt) {
        return E_POINTER;
    }
    TRACE(_T("CMpcAudioRenderer::SetMediaType"));

    if (useWASAPI && pRenderClient != NULL) {
        // New media type set but render client already initialized => reset it
        WAVEFORMATEX *pNewWf = (WAVEFORMATEX *)pmt->Format();
        TRACE(_T("CMpcAudioRenderer::SetMediaType Render client already initialized. Reinitialization..."));
        CheckAudioClient(pNewWf);
    }

    // Drop any previously stored format before copying in the new one.
    if (m_pWaveFileFormat) {
        BYTE *pOld = (BYTE *)m_pWaveFileFormat;
        SAFE_DELETE_ARRAY(pOld);
    }
    m_pWaveFileFormat = NULL;

    WAVEFORMATEX *pwf = (WAVEFORMATEX *)pmt->Format();
    if (pwf != NULL) {
        // The copy must include the variable-length extra bytes (cbSize).
        const int size = sizeof(WAVEFORMATEX) + pwf->cbSize;

        m_pWaveFileFormat = (WAVEFORMATEX *)new BYTE[size];
        if (!m_pWaveFileFormat) {
            return E_OUTOFMEMORY;
        }
        memcpy(m_pWaveFileFormat, pwf, size);

        // SoundTouch is only used for mono/stereo in the DirectSound path.
        if (!useWASAPI && m_pSoundTouch && (pwf->nChannels <= 2)) {
            m_pSoundTouch->setSampleRate(pwf->nSamplesPerSec);
            m_pSoundTouch->setChannels(pwf->nChannels);
            m_pSoundTouch->setTempoChange(0);
            m_pSoundTouch->setPitchSemiTones(0);
        }
    }

    return CBaseRenderer::SetMediaType(pmt);
}
예제 #3
0
HRESULT	CMpcAudioRenderer::CheckMediaType(const CMediaType *pmt)
{
    // Accept only WAVEFORMATEX-described audio. In WASAPI mode the exclusive
    // endpoint must additionally accept the exact format; in the DirectSound
    // path plain PCM is required.
    if (pmt == NULL) {
        return E_INVALIDARG;
    }
    TRACE(_T("CMpcAudioRenderer::CheckMediaType"));

    WAVEFORMATEX *pwfx = (WAVEFORMATEX *)pmt->Format();
    if (pwfx == NULL) {
        return VFW_E_TYPE_NOT_ACCEPTED;
    }

    const bool bIsWaveAudio = (pmt->majortype == MEDIATYPE_Audio)
                              && (pmt->formattype == FORMAT_WaveFormatEx);
    if (!bIsWaveAudio) {
        TRACE(_T("CMpcAudioRenderer::CheckMediaType Not supported"));
        return VFW_E_TYPE_NOT_ACCEPTED;
    }

    if (useWASAPI) {
        HRESULT hr = CheckAudioClient((WAVEFORMATEX *)NULL);
        if (FAILED(hr)) {
            TRACE(_T("CMpcAudioRenderer::CheckMediaType Error on check audio client"));
            return hr;
        }
        if (!pAudioClient) {
            TRACE(_T("CMpcAudioRenderer::CheckMediaType Error, audio client not loaded"));
            return VFW_E_CANNOT_CONNECT;
        }

        // Exclusive mode requires an exact format match from the endpoint.
        if (pAudioClient->IsFormatSupported(AUDCLNT_SHAREMODE_EXCLUSIVE, pwfx, NULL) != S_OK) {
            TRACE(_T("CMpcAudioRenderer::CheckMediaType WASAPI client refused the format"));
            return VFW_E_TYPE_NOT_ACCEPTED;
        }
        TRACE(_T("CMpcAudioRenderer::CheckMediaType WASAPI client accepted the format"));
    } else if (pwfx->wFormatTag != WAVE_FORMAT_PCM) {
        return VFW_E_TYPE_NOT_ACCEPTED;
    }

    return S_OK;
}
예제 #4
0
HRESULT	CMpcAudioRenderer::DoRenderSampleWasapi(IMediaSample *pMediaSample)
{
    // Feed the sample's payload to the WASAPI render client, chunked to the
    // room available in the endpoint buffer, sleeping roughly half a buffer
    // duration between feeds to pace delivery. The audio client is started
    // lazily once the first chunk has been submitted.
    //
    // Returns S_OK on success, or the failing HRESULT from the media-sample /
    // audio-client calls.
    //
    // Fixes vs. previous revision:
    //  - the AM_MEDIA_TYPE returned by GetMediaType() was leaked when
    //    CheckAudioClient() failed (early return skipped DeleteMediaType);
    //  - the bytes->frames conversion round-tripped through float, which can
    //    lose precision for counts above 2^24; exact integer division has the
    //    same truncation semantics.
    HRESULT	hr	= S_OK;
    REFERENCE_TIME	rtStart			= 0;
    REFERENCE_TIME	rtStop			= 0;
    BYTE *pMediaBuffer		= NULL;
    BYTE *pInputBufferPointer = NULL;
    BYTE *pInputBufferEnd = NULL;
    BYTE *pData;
    bufferSize = pMediaSample->GetActualDataLength();
    const long	lSize = bufferSize;
    pMediaSample->GetTime (&rtStart, &rtStop);

    // If the sample carries a new media type, (re)initialize the audio client
    // for it before rendering.
    AM_MEDIA_TYPE *pmt;
    if (SUCCEEDED(pMediaSample->GetMediaType(&pmt)) && pmt!=NULL) {
        CMediaType mt(*pmt);
        if ((WAVEFORMATEXTENSIBLE*)mt.Format() != NULL) {
            hr=CheckAudioClient(&(((WAVEFORMATEXTENSIBLE*)mt.Format())->Format));
        } else {
            hr=CheckAudioClient((WAVEFORMATEX*)mt.Format());
        }
        // Free the media type on every path — the early-return used to leak it.
        DeleteMediaType(pmt);
        pmt=NULL;
        if (FAILED(hr)) {
            TRACE(_T("CMpcAudioRenderer::DoRenderSampleWasapi Error while checking audio client with input media type\n"));
            return hr;
        }
    }

    // Initialization
    hr = pMediaSample->GetPointer(&pMediaBuffer);
    if (FAILED (hr)) {
        return hr;
    }

    pInputBufferPointer=&pMediaBuffer[0];
    pInputBufferEnd=&pMediaBuffer[0]+lSize;

    WORD frameSize = m_pWaveFileFormat->nBlockAlign;


    // Sleep for half the buffer duration since last buffer feed.
    // NOTE(review): despite its name, hnsActualDuration holds milliseconds by
    // the time it is used here (see the REFTIMES_PER_MILLISEC conversions
    // below), so Sleep() receives milliseconds as expected.
    DWORD currentTime=GetTickCount();
    if (lastBufferTime!=0 && hnsActualDuration!= 0 && lastBufferTime<currentTime && (currentTime-lastBufferTime)<hnsActualDuration) {
        hnsActualDuration=hnsActualDuration-(currentTime-lastBufferTime);
        Sleep(hnsActualDuration);
    }

    // Each loop iteration fills whatever room the endpoint buffer has.
    while (pInputBufferPointer < pInputBufferEnd) {
        UINT32 numFramesPadding=0;
        pAudioClient->GetCurrentPadding(&numFramesPadding);
        UINT32 numFramesAvailable = nFramesInBuffer - numFramesPadding;

        UINT32 nAvailableBytes=numFramesAvailable*frameSize;
        UINT32 nBytesToWrite=nAvailableBytes;
        // More room than enough in the output buffer
        if (nAvailableBytes > (size_t)(pInputBufferEnd - pInputBufferPointer)) {
            nBytesToWrite=(UINT32)(pInputBufferEnd - pInputBufferPointer);
            // Exact integer division (truncating), replacing a lossy
            // float round-trip.
            numFramesAvailable=nBytesToWrite/frameSize;
        }

        // Grab the next empty buffer from the audio device.
        hr = pRenderClient->GetBuffer(numFramesAvailable, &pData);
        if (FAILED (hr)) {
            TRACE(_T("CMpcAudioRenderer::DoRenderSampleWasapi GetBuffer failed with size %ld : (error %lx)\n"),nFramesInBuffer,hr);
            return hr;
        }

        // Load the buffer with data from the audio source.
        if (pData != NULL) {
            memcpy(&pData[0], pInputBufferPointer, nBytesToWrite);
            pInputBufferPointer += nBytesToWrite;
        } else {
            TRACE(_T("CMpcAudioRenderer::DoRenderSampleWasapi Output buffer is NULL\n"));
        }

        hr = pRenderClient->ReleaseBuffer(numFramesAvailable, 0); // no flags
        if (FAILED (hr)) {
            TRACE(_T("CMpcAudioRenderer::DoRenderSampleWasapi ReleaseBuffer failed with size %ld (error %lx)\n"),nFramesInBuffer,hr);
            return hr;
        }

        // Defer Start() until the first chunk is queued so the device never
        // plays an empty buffer.
        if (!isAudioClientStarted) {
            TRACE(_T("CMpcAudioRenderer::DoRenderSampleWasapi Starting audio client\n"));
            pAudioClient->Start();
            isAudioClientStarted=true;
        }

        if (pInputBufferPointer >= pInputBufferEnd) {
            lastBufferTime=GetTickCount();
            // This is the duration of the filled buffer (in 100-ns units)...
            hnsActualDuration=(double)REFTIMES_PER_SEC * numFramesAvailable / m_pWaveFileFormat->nSamplesPerSec;
            // ...converted to milliseconds; sleep time is half this duration.
            hnsActualDuration=(DWORD)(hnsActualDuration/REFTIMES_PER_MILLISEC/2);
            break;
        }

        // Buffer not completely filled, sleep for half buffer capacity duration
        hnsActualDuration=(double)REFTIMES_PER_SEC * nFramesInBuffer / m_pWaveFileFormat->nSamplesPerSec;
        // Sleep time is half this duration
        hnsActualDuration=(DWORD)(hnsActualDuration/REFTIMES_PER_MILLISEC/2);
        Sleep(hnsActualDuration);
    }
    return hr;
}