Beispiel #1
0
/// Feeds one chunk of input PCM to the Media Foundation resampler transform
/// and collects whatever converted PCM it produces so far.
/// @param buff input PCM in m_inputFormat. Not modified; the cast below only
///        wraps it in a non-owning WWMFSampleData.
/// @param bytes number of valid bytes in buff.
/// @param sampleData_return receives the converted PCM. Must be empty
///        (data == NULL) on entry; caller must Release() it when done.
/// @return S_OK on success (including when the transform simply needs more
///         input before it can emit data), a failure HRESULT otherwise.
HRESULT
WWMFResampler::Resample(const BYTE *buff, DWORD bytes, WWMFSampleData *sampleData_return)
{
    HRESULT hr = E_FAIL;
    IMFSample *pSample = NULL;
    WWMFSampleData tmpData;
    // Non-owning view of the caller's buffer; Forget() at "end" prevents it
    // from being freed here.
    WWMFSampleData inputData((BYTE*)buff, bytes);
    DWORD dwStatus;
    // Estimate output size from the byte-rate ratio of the two formats
    // (widened to int64_t to avoid 32-bit overflow).
    DWORD cbOutputBytes = (DWORD)((int64_t)bytes * m_outputFormat.BytesPerSec() / m_inputFormat.BytesPerSec());
    // cbOutputBytes must be a multiple of the output frame size: round up.
    cbOutputBytes = (cbOutputBytes + (m_outputFormat.FrameBytes()-1)) / m_outputFormat.FrameBytes() * m_outputFormat.FrameBytes();
    // add extra receive size (16 frames of slack)
    cbOutputBytes += 16 * m_outputFormat.FrameBytes();

    assert(sampleData_return);
    assert(NULL == sampleData_return->data);

    HRG(ConvertWWSampleDataToMFSample(inputData, &pSample));

    // Refuse to push input unless the transform is ready to accept it.
    HRG(m_pTransform->GetInputStatus(0, &dwStatus));
    if ( MFT_INPUT_STATUS_ACCEPT_DATA != dwStatus) {
        dprintf("E: ApplyTransform() pTransform->GetInputStatus() not accept data.\n");
        hr = E_FAIL;
        goto end;
    }

    HRG(m_pTransform->ProcessInput(0, pSample, 0));

    // set sampleData_return->bytes = 0
    sampleData_return->Forget();
    // Pull converted chunks until the transform reports it needs more input.
    for (;;) {
        tmpData.bytes = cbOutputBytes;
        hr = GetSampleDataFromMFTransform(&tmpData);
        if (MF_E_TRANSFORM_NEED_MORE_INPUT == hr) {
            // Normal termination: everything currently available is collected.
            hr = S_OK;
            goto end;
        }
        if (FAILED(hr)) {
            goto end;
        }
        // Accumulate (or steal, when still empty) tmpData into the result.
        sampleData_return->MoveAdd(tmpData);
        tmpData.Release();
    }

end:
    tmpData.Release();
    inputData.Forget(); //< does not own the caller's buffer
    SafeRelease(&pSample);
    return hr;
}
Beispiel #2
0
    /**
     * Appends rhs content to this instance when it already holds data;
     * otherwise steals rhs's buffer, leaving rhs empty.
     * Either way the caller must still invoke rhs.Release() afterwards.
     */
    HRESULT MoveAdd(WWMFSampleData &rhs) {
        if (0 == bytes) {
            // Empty: take ownership of rhs's pointer directly — a plain
            // member copy is far cheaper than the allocating Add() path.
            assert(NULL == data);
            *this = rhs;
            rhs.Forget();
            return S_OK;
        }

        // Already holds data: fall back to the copying concatenation.
        return Add(rhs);
    }
Beispiel #3
0
/// Flushes the resampler: notifies the MF transform of end-of-stream, issues
/// a drain command and collects all remaining converted PCM data.
/// @param resampleInputBytes size of a typical input chunk; used only to size
///        the receive buffer from the byte-rate ratio of the two formats.
/// @param sampleData_return receives the drained PCM. Must be empty
///        (data == NULL) on entry; caller must Release() it when done.
/// @return S_OK on success, a failure HRESULT otherwise.
HRESULT
WWMFResampler::Drain(DWORD resampleInputBytes, WWMFSampleData *sampleData_return)
{
    HRESULT hr = S_OK;
    WWMFSampleData tmpData;
    // Estimate the output chunk size from the byte-rate ratio (widened to
    // int64_t to avoid 32-bit overflow).
    DWORD cbOutputBytes = (DWORD)((int64_t)resampleInputBytes * m_outputFormat.BytesPerSec() / m_inputFormat.BytesPerSec());
    // cbOutputBytes must be a multiple of the output frame size: round up.
    cbOutputBytes = (cbOutputBytes + (m_outputFormat.FrameBytes()-1)) / m_outputFormat.FrameBytes() * m_outputFormat.FrameBytes();

    assert(sampleData_return);
    assert(NULL == sampleData_return->data);

    HRG(m_pTransform->ProcessMessage(MFT_MESSAGE_NOTIFY_END_OF_STREAM, NULL));
    HRG(m_pTransform->ProcessMessage(MFT_MESSAGE_COMMAND_DRAIN, NULL));

    // set sampleData_return->bytes = 0
    sampleData_return->Forget();
    // Pull converted chunks until the transform runs dry.
    for (;;) {
        tmpData.bytes = cbOutputBytes;
        hr = GetSampleDataFromMFTransform(&tmpData);
        if (MF_E_TRANSFORM_NEED_MORE_INPUT == hr) {
            // end of drained data: tell the transform streaming is finished
            HRG(m_pTransform->ProcessMessage(MFT_MESSAGE_NOTIFY_END_STREAMING, NULL));
            goto end;
        }
        if (FAILED(hr)) {
            goto end;
        }
        // Accumulate (or steal, when still empty) tmpData into the result.
        sampleData_return->MoveAdd(tmpData);
        tmpData.Release();
    }

end:
    tmpData.Release();
    return hr;
}
/// Command-line WAV resampler.
/// Usage: exe <input.wav> <output.wav> <sampleRate> <bits> <quality>
///   bits: 16/24 (integer PCM) or 32 (float PCM); quality: resampler quality.
/// @return 0 on success, 1 on any failure.
int wmain(int argc, wchar_t *argv[])
{
    // _CrtSetBreakAlloc(35);
    // COM leak cannot be detected by debug heap manager ...
    _CrtSetDbgFlag(_CRTDBG_ALLOC_MEM_DF | _CRTDBG_LEAK_CHECK_DF);

    HRESULT hr = S_OK;
    bool bCoInitialize = false;
    FILE *fpr = NULL;
    FILE *fpw = NULL;
    errno_t ercd;
    BYTE *buff = NULL;
    DWORD buffBytes = 0;
    DWORD readBytes = 0;
    DWORD remainBytes = 0;
    DWORD expectedOutputDataBytes = 0;
    DWORD result = 0;
    DWORD writeDataTotalBytes = 0;
    int conversionQuality = 60;
    WWMFResampler resampler;
    WWMFPcmFormat inputFormat;
    WWMFPcmFormat outputFormat;
    WWMFSampleData sampleData;

    if (argc != 6) {
        PrintUsage(argv[0]);
        return 1;
    }

    HRG(CoInitializeEx(NULL, COINIT_APARTMENTTHREADED | COINIT_DISABLE_OLE1DDE));
    bCoInitialize = true;

    ercd = _wfopen_s(&fpr, argv[1], L"rb");
    if (0 != ercd) {
        printf("file open error %S\n", argv[1]);
        PrintUsage(argv[0]);
        hr = E_FAIL;
        goto end;
    }

    ercd = _wfopen_s(&fpw, argv[2], L"wb");
    if (0 != ercd) {
        printf("file open error %S\n", argv[2]);
        PrintUsage(argv[0]);
        hr = E_FAIL;
        goto end;
    }

    HRG(ReadWavHeader(fpr, &inputFormat, &remainBytes));

    // Output format: same channel layout as input, new rate and bit depth.
    outputFormat = inputFormat;
    outputFormat.sampleRate = _wtoi(argv[3]);
    outputFormat.bits = (short)_wtoi(argv[4]);

    conversionQuality = _wtoi(argv[5]);

    // _wtoi() returns 0 on parse failure, so 0 doubles as the error value.
    if (0 == outputFormat.sampleRate ||
        0 == conversionQuality) {
        PrintUsage(argv[0]);
        hr = E_FAIL;
        goto end;
    }

    outputFormat.validBitsPerSample = outputFormat.bits;

    switch (outputFormat.bits) {
    case 16:
    case 24:
        outputFormat.sampleFormat = WWMFBitFormatInt;
        break;
    case 32:
        outputFormat.sampleFormat = WWMFBitFormatFloat;
        break;
    default:
        PrintUsage(argv[0]);
        hr = E_FAIL;
        goto end;
    }

    // Widen before multiplying to avoid 32-bit overflow; explicit narrowing
    // cast for consistency with the other size computations in this file.
    expectedOutputDataBytes = (DWORD)((int64_t)remainBytes
        * outputFormat.BytesPerSec()
        / inputFormat.BytesPerSec());

    HRG(WriteWavHeader(fpw, outputFormat, expectedOutputDataBytes));

    HRG(resampler.Initialize(inputFormat, outputFormat, conversionQuality));

    // Process the input in chunks of 128Ki frames.
    buffBytes = 128 * 1024 * inputFormat.FrameBytes();
    buff = new BYTE[buffBytes];

    for (;;) {
        // read PCM data from file
        readBytes = buffBytes;
        if (remainBytes < readBytes) {
            readBytes = remainBytes;
        }
        remainBytes -= readBytes;

        result = (DWORD)fread(buff, 1, readBytes, fpr);
        if (result != readBytes) {
            printf("file read error\n");
            hr = E_FAIL;
            goto end;
        }

        // convert
        HRG(resampler.Resample(buff, readBytes, &sampleData));

        // write to file
        result = (DWORD)fwrite(sampleData.data, 1, sampleData.bytes, fpw);
        if (result != sampleData.bytes) {
            printf("file write error\n");
            hr = E_FAIL;
            goto end;
        }
        writeDataTotalBytes += sampleData.bytes;
        sampleData.Release();

        if (remainBytes == 0) {
            // end of input: flush the remaining data out of the resampler
            HRG(resampler.Drain(buffBytes, &sampleData));

            // write remaining PCM data to file
            result = (DWORD)fwrite(sampleData.data, 1, sampleData.bytes, fpw);
            if (result != sampleData.bytes) {
                printf("file write error\n");
                hr = E_FAIL;
                goto end;
            }
            writeDataTotalBytes += sampleData.bytes;
            sampleData.Release();
            break;
        }
    }

    // RIFF data chunk must be padded to an even byte count (2-byte alignment).
    if (writeDataTotalBytes & 1) {
        // fputc returns the written character (0 here) on success, EOF on error.
        if (0 != fputc(0, fpw)) {
            printf("file write error\n");
            hr = E_FAIL;
            goto end;
        }
        ++writeDataTotalBytes;
    }
    HRG(FixWavHeader(fpw, writeDataTotalBytes));

    hr = S_OK;

end:
    resampler.Finalize();

    if (bCoInitialize) {
        CoUninitialize();
        bCoInitialize = false;
    }

    delete[] buff;
    buff = NULL;

    if (fpw != NULL) {
        fclose(fpw);
        fpw = NULL;
    }
    if (fpr != NULL) {
        fclose(fpr);
        fpr = NULL;
    }

    return SUCCEEDED(hr) ? 0 : 1;
}
Beispiel #5
0
// Render thread entry point.
//
// Pulls messages from the pipeline and feeds decoded audio to the WASAPI
// render client each period, handling MMCSS registration, audio-session
// disconnect and volume changes. Runs until iQuit is set (via pipeline
// message processing) or the audio client fails to start.
void AudioDriver::AudioThread()
{
    HANDLE mmcssHandle    = NULL;
    DWORD  mmcssTaskIndex = 0;

    // NOTE(review): S_FALSE (COM already initialized on this thread) is
    // treated as a failure here — confirm that is intended.
    HRESULT hr = CoInitializeEx(NULL, COINIT_MULTITHREADED);
    if (hr != S_OK)
    {
        Log::Print("Unable to initialize COM in render thread: %x\n", hr);
        return;
    }

    // Gain access to the system multimedia audio endpoint and associate an
    // audio client object with it.
    // NOTE(review): this goto (and the one below) jumps over the
    // initialization of waitArray declared later in this scope — formally
    // ill-formed C++; relies on compiler leniency. Verify the build accepts it.
    if (InitializeAudioClient() == false)
    {
        goto Exit;
    }

    // Hook up to the Multimedia Class Scheduler Service to prioritise
    // our render activities.
    mmcssHandle = AvSetMmThreadCharacteristics(L"Audio", &mmcssTaskIndex);
    if (mmcssHandle == NULL)
    {
        Log::Print("Unable to enable MMCSS on render thread: %d\n",
                   GetLastError());

        goto Exit;
    }

    // Native events waited on in this thread.
    HANDLE waitArray[2] = {iAudioSessionDisconnectedEvent,
                           iAudioSamplesReadyEvent};

    // Pipeline processing loop.
    try {
        for (;;) {
#ifdef _TIMINGS_DEBUG
            LARGE_INTEGER StartingTime, EndingTime, ElapsedMicroseconds;
            LARGE_INTEGER Frequency;

            QueryPerformanceFrequency(&Frequency);
            QueryPerformanceCounter(&StartingTime);
#endif /* _TIMINGS_DEBUG */

            TUint32 padding                  = 0;

            //
            //  Calculate the number of bytes in the render buffer
            //  for this period.
            //
            //  This is the maximum we will pull from the pipeline.
            //
            //  If the Audio Engine has not been initialized yet stick with
            //  the default value.
            //
            if (iAudioEngineInitialised && ! iAudioSessionDisconnected)
            {
                hr = iAudioClient->GetCurrentPadding(&padding);
                if (hr == S_OK)
                {
                    // Free space (in frames) times frame size = bytes we may render.
                    iRenderBytesThisPeriod = (iBufferSize - padding) *
                                              iFrameSize;
                }
                else
                {
                    Log::Print("ERROR: Couldn't read render buffer padding\n");
                    iRenderBytesThisPeriod = 0;
                }

                iRenderBytesRemaining = iRenderBytesThisPeriod;
            }

            //
            // Process pipeline messages until we've reached the maximum for
            // this period.
            //
            // The pull will block if there are no messages.
            //
            for(;;)
            {
                if (iPlayable != NULL) {
                    // Left-over data from a previous period takes priority.
                    ProcessAudio(iPlayable);
                }
                else {
                    // Message processing dispatches back into this class
                    // (e.g. ProcessAudio) and consumes the message.
                    Msg* msg = iPipeline.Pull();
                    ASSERT(msg != NULL);
                    msg = msg->Process(*this);
                    ASSERT(msg == NULL);
                }

                //
                // Have we reached the data limit for this period or been told
                // to exit ?
                //
                // (iPlayable non-NULL here means ProcessAudio deferred the
                // data to the next period.)
                //
                if (iPlayable != NULL || iQuit)
                {
                    break;
                }
            }

            if (iQuit)
            {
                break;
            }

            // Log some interesting data if we can't fill at least half
            // of the available space in the render buffer.
            if (iRenderBytesThisPeriod * 0.5 < iRenderBytesRemaining)
            {
                Log::Print("Audio period: Requested Bytes [%u] : Returned Bytes"
                           " [%u]\n",
                           iRenderBytesThisPeriod,
                           iRenderBytesThisPeriod - iRenderBytesRemaining);

                if (iPlayable)
                {
                    TUint bytes = iPlayable->Bytes();

                    if (iResamplingInput)
                    {
                        // Calculate the bytes that will be generated by the
                        // translation.
                        long long tmp = (long long)bytes *
                                        (long long)iResampleOutputBps /
                                        (long long)iResampleInputBps;

                        bytes = TUint(tmp);

                        // Round up to the nearest frame.
                        bytes += iMixFormat->nBlockAlign;
                        bytes -= bytes % iMixFormat->nBlockAlign;
                    }

                    Log::Print("  Available Bytes [%u]\n", bytes);
                }
                else
                {
                    Log::Print("  Available Bytes [0]\n");
                }

                if (iAudioEngineInitialised)
                {
                    Log::Print(" Period Start Frames In Buffer [%u]\n",
                               padding);

                    hr = iAudioClient->GetCurrentPadding(&padding);
                    if (hr == S_OK)
                    {
                        Log::Print(" Current Frames In Buffer [%u]\n",
                                   padding);
                    }
                }
            }

#ifdef _TIMINGS_DEBUG
            QueryPerformanceCounter(&EndingTime);
            ElapsedMicroseconds.QuadPart = EndingTime.QuadPart -
                                           StartingTime.QuadPart;

            //
            // We now have the elapsed number of ticks, along with the
            // number of ticks-per-second. We use these values
            // to convert to the number of elapsed microseconds.
            // To guard against loss-of-precision, we convert
            // to microseconds *before* dividing by ticks-per-second.
            //
            ElapsedMicroseconds.QuadPart *= 1000000;
            ElapsedMicroseconds.QuadPart /= Frequency.QuadPart;

            Log::Print("Time To Process Messages This Audio Period [%lld us]\n",
                       ElapsedMicroseconds.QuadPart);
#endif /* _TIMINGS_DEBUG */

            // The audio client isn't capable of playing this stream.
            // Continue to pull from pipeline until the next playable
            // stream is available.
            if (! iStreamFormatSupported)
            {
                continue;
            }

            // The audio session has been disconnected.
            // Continue to pull from pipeline until we are instructed to quit.
            if (iAudioSessionDisconnected)
            {
                continue;
            }

            //
            // Start the Audio client once we have pre-loaded some
            // data to the render buffer.
            //
            // This will prevent any initial audio glitches..
            //
            if (! iAudioClientStarted)
            {
                // There was no data read this period so try again next period.
                if (iRenderBytesThisPeriod == iRenderBytesRemaining)
                {
                    continue;
                }

                hr = iAudioClient->Start();
                if (hr != S_OK)
                {
                    Log::Print("Unable to start render client: %x.\n", hr);
                    break;
                }

                iAudioClientStarted = true;
            }

            // Apply any volume changes
            if (iAudioClientStarted && iVolumeChanged)
            {
                iAudioSessionVolume->SetMasterVolume(iVolumeLevel, NULL);
                iVolumeChanged = false;
            }

            // Wait for a kick from the native audio engine.
            DWORD waitResult =
                WaitForMultipleObjects(2, waitArray, FALSE, INFINITE);

            switch (waitResult) {
                case WAIT_OBJECT_0 + 0:     // iAudioSessionDisconnectedEvent

                    // Stop the audio client
                    iAudioClient->Stop();
                    iAudioClient->Reset();
                    iAudioClientStarted = false;

                    iAudioSessionDisconnected = true;
                    break;
                case WAIT_OBJECT_0 + 1:     // iAudioSamplesReadyEvent
                    break;
                default:
                    Log::Print("ERROR: Unexpected event received  [%d]\n",
                               waitResult);
            }
        }
    }
    catch (ThreadKill&) {}

Exit:
    // Complete any previous resampling session.
    if (iResamplingInput)
    {
        WWMFSampleData sampleData;

        hr = iResampler.Drain((iBufferSize * iFrameSize), &sampleData);

        if (hr == S_OK)
        {
            Log::Print("Resampler drained correctly [%d bytes].\n",
                       sampleData.bytes);

            sampleData.Release();
        }
        else
        {
            Log::Print("Resampler drain failed.\n");
        }
    }

    iResampler.Finalize();

    // Now we've stopped reading the pipeline, stop the native audio.
    StopAudioEngine();

    // Free up native resources.
    ShutdownAudioEngine();

    //  Unhook from MMCSS.
    // NOTE(review): mmcssHandle may still be NULL here (registration failed
    // or was skipped via goto) — confirm AvRevertMmThreadCharacteristics
    // tolerates a NULL handle.
    AvRevertMmThreadCharacteristics(mmcssHandle);

    CoUninitialize();
}
Beispiel #6
0
// Renders one pipeline audio message into the WASAPI render buffer,
// optionally resampling it to the system mix format first.
//
// If the data does not fit in the space remaining this period (or the render
// buffer cannot be obtained), the message is parked in iPlayable and retried
// next period. Otherwise the message reference is released before returning.
void AudioDriver::ProcessAudio(MsgPlayable* aMsg)
{
    BYTE    *pData;
    HRESULT  hr;
    TUint    bytes;

    iPlayable = NULL;

    // If the native audio system is not available yet just throw
    // the data away.
    if (! iStreamFormatSupported || iAudioSessionDisconnected)
    {
        aMsg->RemoveRef();
        return;
    }

    bytes = aMsg->Bytes();

    if (iResamplingInput)
    {
        // Calculate the bytes that will be generated by the translation.
        long long tmp = (long long)bytes * (long long)iResampleOutputBps /
                        (long long)iResampleInputBps;

        bytes = TUint(tmp);

        // Round up to the nearest frame.
        // NOTE(review): this is a prediction — the actual number of bytes
        // the resampler emits below may differ slightly; the render buffer
        // is reserved with this predicted size. Confirm the prediction is
        // always an upper bound.
        bytes += iMixFormat->nBlockAlign;
        bytes -= bytes % iMixFormat->nBlockAlign;
    }

    TUint framesToWrite = bytes / iFrameSize;

    if (bytes > iRenderBytesRemaining)
    {
        // We've passed enough data for this period. Hold on to the data
        // for the next render period.
        iPlayable = aMsg;
        return;
    }

    // Reserve space in the endpoint render buffer.
    hr = iRenderClient->GetBuffer(framesToWrite, &pData);
    if (hr != S_OK)
    {
        Log::Print("ERROR: Can't get render buffer");

        switch (hr)
        {
            case AUDCLNT_E_BUFFER_ERROR:
                Log::Print("[AUDCLNT_E_BUFFER_ERROR]\n");
                break;
            case AUDCLNT_E_BUFFER_TOO_LARGE:
                Log::Print("[AUDCLNT_E_BUFFER_TOO_LARGE]: %d\n", framesToWrite);
                break;
            case AUDCLNT_E_BUFFER_SIZE_ERROR:
                Log::Print("[AUDCLNT_E_BUFFER_SIZE_ERROR]\n");
                break;
            case AUDCLNT_E_OUT_OF_ORDER:
                Log::Print("[AUDCLNT_E_OUT_OF_ORDER]\n");
                break;
            case AUDCLNT_E_DEVICE_INVALIDATED:
                Log::Print("[AUDCLNT_E_DEVICE_INVALIDATED]\n");
                break;
            case AUDCLNT_E_BUFFER_OPERATION_PENDING:
                Log::Print("[AUDCLNT_E_BUFFER_OPERATION_PENDING]\n");
                break;
            case AUDCLNT_E_SERVICE_NOT_RUNNING:
                Log::Print("[AUDCLNT_E_SERVICE_NOT_RUNNING]\n");
                break;
            case E_POINTER:
                Log::Print("[E_POINTER]\n");
                break;
            default:
                Log::Print("[UNKNOWN]\n");
                break;
        }

        // Can't get render buffer. Hold on to the data for the next
        // render period.
        iPlayable = aMsg;
        iRenderClient->ReleaseBuffer(0, 0);
        return;
    }

    // Get the message data. This converts the pipeline data into a format
    // suitable for the native audio system.
    ProcessorPcmBufWASAPI pcmProcessor;

    aMsg->Read(pcmProcessor);

    // Modify sample rate/bit to match system mix format, if required.
    if (iResamplingInput)
    {
        WWMFSampleData sampleData;

        hr = iResampler.Resample(pcmProcessor.Ptr(),
                                 aMsg->Bytes(),
                                 &sampleData);

        if (hr == S_OK)
        {
            // Copy to the render buffer.
            CopyMemory(pData, sampleData.data, sampleData.bytes);

            // Release with the frame count actually produced, which may be
            // less than the reserved amount.
            framesToWrite = sampleData.bytes / iFrameSize;

            // Release the render buffer.
            hr = iRenderClient->ReleaseBuffer(framesToWrite, 0);

            if (hr != S_OK)
            {
                Log::Print("ReleaseBuffer failed Reserved [%d] Written [%d]\n",
                           bytes, sampleData.bytes);
                Log::Print("aMsg [%d] InBps [%d] OutBps [%d]\n",
                            aMsg->Bytes(),
                            iResampleInputBps ,
                            iResampleOutputBps);
            }

            iRenderBytesRemaining -= sampleData.bytes;

            sampleData.Release();
        }
        else
        {
            Log::Print("ERROR: ProcessFragment16: Resample failed.\n");
        }
    }
    else
    {
        Brn buf(pcmProcessor.Buf());

        // Copy to the render buffer.
        CopyMemory(pData, buf.Ptr(), buf.Bytes());

        framesToWrite = buf.Bytes() / iFrameSize;

        // Release the render buffer.
        hr = iRenderClient->ReleaseBuffer(framesToWrite, 0);

        if (hr != S_OK)
        {
            Log::Print("ReleaseBuffer failed Reserverd [%d] Written [%d]\n",
                       bytes, buf.Bytes());
        }

        iRenderBytesRemaining -= buf.Bytes();
    }

    // Release the source buffer.
    aMsg->RemoveRef();
}
Beispiel #7
0
/// Checks whether the audio engine supports the supplied stream format in
/// shared mode. If not, attempts to set up a Media Foundation resampler to
/// translate the stream to the current system mix format.
///
/// Side effects: drains/finalizes any previous resampling session, may
/// (re)allocate iMixFormat, and on the translation path initializes
/// iResampler and sets iResamplingInput / iResampleInputBps /
/// iResampleOutputBps.
///
/// @param aSampleRate  stream sample rate in Hz.
/// @param aNumChannels stream channel count.
/// @param aBitDepth    stream bit depth.
/// @return true if the stream can be played (directly or via translation).
TBool AudioDriver::CheckMixFormat(TUint aSampleRate,
                                  TUint aNumChannels,
                                  TUint aBitDepth)
{
    HRESULT       hr;
    // Initialized to NULL so CoTaskMemFree() at 'end:' is always safe, even
    // on paths where IsFormatSupported fails without setting the out param.
    WAVEFORMATEX *closestMix = NULL;
    TBool         retVal = false;

    // Complete any previous resampling session.
    if (iResamplingInput)
    {
        WWMFSampleData sampleData;

        hr = iResampler.Drain((iBufferSize * iFrameSize), &sampleData);

        if (hr == S_OK)
        {
            Log::Print("Resampler drained correctly [%d bytes].\n",
                       sampleData.bytes);

            sampleData.Release();
        }
        else
        {
            Log::Print("Resample drain failed.\n");
        }

        iResampler.Finalize();
    }

    iResamplingInput  = false;

    // Verify the Audio Engine supports the stream format.
    if (iMixFormat == NULL)
    {
        hr = iAudioClient->GetMixFormat(&iMixFormat);
        if (hr != S_OK)
        {
            Log::Print("ERROR: Could not obtain mix system format.\n");
            return false;
        }
    }

    // Describe the incoming stream as plain PCM in the mix-format slot so we
    // can ask the engine whether it is directly supported.
    iMixFormat->wFormatTag      = WAVE_FORMAT_PCM;
    iMixFormat->nChannels       = (WORD)aNumChannels;
    iMixFormat->nSamplesPerSec  = aSampleRate;
    iMixFormat->nBlockAlign     = WORD((aNumChannels * aBitDepth)/8);
    iMixFormat->nAvgBytesPerSec = DWORD(aSampleRate * iMixFormat->nBlockAlign);
    iMixFormat->wBitsPerSample  = (WORD)aBitDepth;
    iMixFormat->cbSize          = 0;

    hr = iAudioClient->IsFormatSupported(AUDCLNT_SHAREMODE_SHARED,
                                         iMixFormat,
                                        &closestMix);

    if (hr != S_OK)
    {
        // The stream format isn't suitable as it stands.
        //
        // Use a media foundation translation to convert to the current
        // mix format.

        //
        // Load the active mix format.
        //
        CoTaskMemFree(iMixFormat);

        hr = iAudioClient->GetMixFormat(&iMixFormat);

        if (hr == S_OK)
        {
            iMixFormat->wFormatTag = WAVE_FORMAT_PCM;
            iMixFormat->cbSize     = 0;

            // Confirm the mix format is valid.
            CoTaskMemFree(closestMix);

            hr = iAudioClient->IsFormatSupported(AUDCLNT_SHAREMODE_SHARED,
                                                 iMixFormat,
                                                &closestMix);

            if (hr != S_OK)
            {
                Log::Print("ERROR: Cannot obtain valid mix format for stream "
                           "translation\n");

                retVal = false;

                goto end;
            }

            // Setup the translation.

            // Input stream format.
            WWMFPcmFormat inputFormat;

            inputFormat.sampleFormat       = WWMFBitFormatInt;
            inputFormat.nChannels          = (WORD)aNumChannels;
            inputFormat.sampleRate         = aSampleRate;
            inputFormat.bits               = (WORD)aBitDepth;
            inputFormat.validBitsPerSample = (WORD)aBitDepth;

            // System mix format.
            WWMFPcmFormat outputFormat;

            outputFormat.sampleFormat       = WWMFBitFormatInt;
            outputFormat.nChannels          = iMixFormat->nChannels;
            outputFormat.sampleRate         = iMixFormat->nSamplesPerSec;
            outputFormat.bits               = iMixFormat->wBitsPerSample;
            outputFormat.validBitsPerSample = iMixFormat->wBitsPerSample;

            // Store bytes per second values for later calculations around
            // the amount of data generated by the translation.
            iResampleInputBps  = inputFormat.BytesPerSec();
            iResampleOutputBps = outputFormat.BytesPerSec();

            if (iResampler.Initialize(inputFormat,
                                      outputFormat, 60) == S_OK)
            {
                iResamplingInput  = true;
                retVal            = true;
            }
            else
            {
                Log::Print("ERROR: Stream Transaltion Failed.\n");

                Log::Print("Transalte From:\n\n");

                Log::Print("\tSample Rate:        %6u\n", aSampleRate);
                Log::Print("\tNumber Of Channels: %6u\n", aNumChannels);
                Log::Print("\tBit Depth:          %6u\n", aBitDepth);

                Log::Print("Translate To:\n\n");

                Log::Print("\tSample Rate:        %6u\n",
                           iMixFormat->nSamplesPerSec);
                Log::Print("\tNumber Of Channels: %6u\n",
                           iMixFormat->nChannels);
                Log::Print("\tBit Depth:          %6u\n",
                           iMixFormat->wBitsPerSample);
            }
        }
    }
    else
    {
        retVal = true;
    }

end:
    CoTaskMemFree(closestMix);

    return retVal;
}
Beispiel #8
0
/// Resamples every PCM chunk in m_playPcmDataList to targetFmt using the
/// Media Foundation resampler, replacing the list contents in place and
/// updating m_pcmFormat. If the conversion produced float samples outside
/// [-1, +1], all chunks are scaled down to fit.
///
/// Converted chunks are first appended at indices n..2n-1, then moved down
/// over the originals and the list is resized back to n.
///
/// @param targetFmt desired sample format / rate / channel count.
/// @param conversionQuality MF resampler quality, 1..60.
/// @return S_OK on success, E_OUTOFMEMORY / E_FAIL / other HRESULT on error.
HRESULT
WWPlayPcmGroup::DoResample(WWPcmFormat &targetFmt, int conversionQuality)
{
    HRESULT hr = S_OK;
    WWMFResampler resampler;
    size_t n = m_playPcmDataList.size();
    const int PROCESS_FRAMES = 128 * 1024;
    BYTE *buff = new BYTE[PROCESS_FRAMES * m_pcmFormat.BytesPerFrame()];
    // Indices (into m_playPcmDataList) of destination chunks not yet filled.
    std::list<size_t> toPcmDataIdxList;
    size_t numConvertedPcmData = 0;
    assert(1 <= conversionQuality && conversionQuality <= 60);

    // NOTE(review): plain new[] normally throws on failure rather than
    // returning nullptr — this guard only fires in a nothrow/custom-new
    // build; confirm which applies here.
    if (nullptr == buff) {
        hr = E_OUTOFMEMORY;
        goto end;
    }

    // Sample rate conversion for shared mode.
    HRG(resampler.Initialize(
        WWMFPcmFormat(
            (WWMFBitFormatType)WWPcmDataSampleFormatTypeIsFloat(m_pcmFormat.sampleFormat),
            (WORD)m_pcmFormat.numChannels,
            (WORD)WWPcmDataSampleFormatTypeToBitsPerSample(m_pcmFormat.sampleFormat),
            m_pcmFormat.sampleRate,
            0, //< TODO: target dwChannelMask
            (WORD)WWPcmDataSampleFormatTypeToValidBitsPerSample(m_pcmFormat.sampleFormat)),
        WWMFPcmFormat(
            WWMFBitFormatFloat,
            (WORD)targetFmt.numChannels,
            32,
            targetFmt.sampleRate,
            0, //< TODO: target dwChannelMask
            32),
        conversionQuality));

    for (size_t i=0; i<n; ++i) {
        WWPcmData *pFrom = &m_playPcmDataList[i];
        WWPcmData pcmDataTo;

        // Allocate the destination chunk sized by the sample-rate ratio.
        if (!pcmDataTo.Init(pFrom->id, targetFmt.sampleFormat, targetFmt.numChannels,
                (int64_t)(((double)targetFmt.sampleRate / m_pcmFormat.sampleRate) * pFrom->nFrames),
                targetFmt.numChannels * WWPcmDataSampleFormatTypeToBitsPerSample(targetFmt.sampleFormat)/8, WWPcmDataContentMusicData, m_pcmFormat.streamType)) {
            dprintf("E: %s malloc failed. pcm id=%d\n", __FUNCTION__, pFrom->id);
            hr = E_OUTOFMEMORY;
            goto end;
        }
        m_playPcmDataList.push_back(pcmDataTo);
        // push_back may reallocate the vector; re-fetch the source pointer.
        pFrom = &m_playPcmDataList[i];

        toPcmDataIdxList.push_back(n+i);

        dprintf("D: pFrom stream=%p nFrames=%lld\n", pFrom->stream, pFrom->nFrames);

        // Feed the source chunk through the resampler PROCESS_FRAMES at a time.
        for (size_t posFrames=0; ; posFrames += PROCESS_FRAMES) {
            WWMFSampleData mfSampleData;
            DWORD consumedBytes = 0;

            int buffBytes = pFrom->GetBufferData(posFrames * m_pcmFormat.BytesPerFrame(), PROCESS_FRAMES * m_pcmFormat.BytesPerFrame(), buff);
            dprintf("D: pFrom->GetBufferData posBytes=%Iu bytes=%d rv=%d\n",
                    posFrames * m_pcmFormat.BytesPerFrame(), PROCESS_FRAMES * m_pcmFormat.BytesPerFrame(), buffBytes);
            if (0 == buffBytes) {
                break;
            }

            HRG(resampler.Resample(buff, buffBytes, &mfSampleData));
            dprintf("D: resampler.Resample mfSampleData.bytes=%u\n",
                    mfSampleData.bytes);
            // Distribute the converted bytes across the pending destination
            // chunks; a full chunk is finalized and popped from the list.
            consumedBytes = 0;
            while (0 < toPcmDataIdxList.size() && consumedBytes < mfSampleData.bytes) {
                size_t toIdx = toPcmDataIdxList.front();
                WWPcmData *pTo = &m_playPcmDataList[toIdx];
                assert(pTo);
                int rv = pTo->FillBufferAddData(&mfSampleData.data[consumedBytes], mfSampleData.bytes - consumedBytes);
                dprintf("D: consumedBytes=%d/%d FillBufferAddData() pTo->stream=%p pTo->nFrames=%lld rv=%d\n",
                        consumedBytes, mfSampleData.bytes, pTo->stream, pTo->nFrames, rv);
                consumedBytes += rv;
                if (0 == rv) {
                    pTo->FillBufferEnd();
                    ++numConvertedPcmData;
                    toPcmDataIdxList.pop_front();
                }
            }
            mfSampleData.Release();
        }
        // Source chunk fully consumed; free its buffer.
        pFrom->Term();
    }

    // Flush remaining data out of the resampler into the pending chunks.
    {
        WWMFSampleData mfSampleData;
        DWORD consumedBytes = 0;

        HRG(resampler.Drain(PROCESS_FRAMES * m_pcmFormat.BytesPerFrame(), &mfSampleData));
        consumedBytes = 0;
        while (0 < toPcmDataIdxList.size() && consumedBytes < mfSampleData.bytes) {
            size_t toIdx = toPcmDataIdxList.front();
            WWPcmData *pTo = &m_playPcmDataList[toIdx];
            assert(pTo);
            int rv = pTo->FillBufferAddData(&mfSampleData.data[consumedBytes], mfSampleData.bytes - consumedBytes);
            consumedBytes += rv;
            if (0 == rv) {
                pTo->FillBufferEnd();
                ++numConvertedPcmData;
                toPcmDataIdxList.pop_front();
            }
        }
        mfSampleData.Release();
    }

    // Finalize any destination chunks the resampler did not completely fill
    // (the size estimate above is only approximate).
    while (0 < toPcmDataIdxList.size()) {
        size_t toIdx = toPcmDataIdxList.front();
        WWPcmData *pTo = &m_playPcmDataList[toIdx];
        assert(pTo);

        pTo->FillBufferEnd();
        if (0 == pTo->nFrames) {
            hr = E_FAIL;
            goto end;
        }
        ++numConvertedPcmData;
        toPcmDataIdxList.pop_front();
    }

    assert(n == numConvertedPcmData);

    // Move converted chunks (indices n..2n-1) down over the originals;
    // Forget() prevents double-free when the tail entries are dropped.
    for (size_t i=0; i<n; ++i) {
        m_playPcmDataList[i] = m_playPcmDataList[n+i];
        m_playPcmDataList[n+i].Forget();
    }

    m_playPcmDataList.resize(numConvertedPcmData);

    // update pcm format info
    m_pcmFormat.sampleFormat  = targetFmt.sampleFormat;
    m_pcmFormat.sampleRate    = targetFmt.sampleRate;
    m_pcmFormat.numChannels   = targetFmt.numChannels;
    m_pcmFormat.dwChannelMask = targetFmt.dwChannelMask;

    // reduce volume level when out of range sample value is found
    {
        float maxV = 0.0f;
        float minV = 0.0f;
        const float  SAMPLE_VALUE_MAX_FLOAT  =  1.0f;
        const float  SAMPLE_VALUE_MIN_FLOAT  = -1.0f;

        // Find the global min/max sample value across all chunks.
        for (size_t i=0; i<n; ++i) {
            float currentMax = 0.0f;
            float currentMin = 0.0f;
            m_playPcmDataList[i].FindSampleValueMinMax(&currentMin, &currentMax);
            if (currentMin < minV) {
                minV = currentMin;
            }
            if (maxV < currentMax) {
                maxV = currentMax;
            }
        }

        // Pick the smaller scale needed to bring both extremes into range.
        float scale = 1.0f;
        if (SAMPLE_VALUE_MAX_FLOAT < maxV) {
            scale = SAMPLE_VALUE_MAX_FLOAT / maxV;
        }
        if (minV < SAMPLE_VALUE_MIN_FLOAT && SAMPLE_VALUE_MIN_FLOAT / minV < scale) {
            scale = SAMPLE_VALUE_MIN_FLOAT / minV;
        }
        if (scale < 1.0f) {
            for (size_t i=0; i<n; ++i) {
                m_playPcmDataList[i].ScaleSampleValue(scale);
            }
        }
    }

end:
    resampler.Finalize();
    delete [] buff;
    buff = nullptr;
    return hr;
}