Example No. 1
// Stop recording speech
int StopRecord(HWAVEIN* hWaveIn, MMTIME* mmTime)
{
	int res;

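	// Query the final capture position. The caller is expected to set
	// mmTime->wType to the desired format (e.g. TIME_BYTES) before calling.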
	res = waveInGetPosition(*hWaveIn, mmTime, sizeof(MMTIME));
	if(res != MMSYSERR_NOERROR)
	{
		_debug_print("Get Position of wave in FAILED!",1);
		return -1;
	}
	else
	{
		_debug_print("Get Position of wave in SUCCEED!");
	}

	res = waveInStop(*hWaveIn);
	if(res != MMSYSERR_NOERROR)
	{
		_debug_print("Stop recording FAILED!",1);
		return -1;
	}
	else
	{
		_debug_print("Stop recording SUCCEED!");
	}

	res = waveInReset(*hWaveIn);
	if(res != MMSYSERR_NOERROR)
	{
		_debug_print("Reset wave in memory FAILED!",1);
		return -1;
	}
	else
	{
		_debug_print("Reset wave in memory SUCCEED!");
	}

	return 0;
}
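A minimal usage sketch (the surrounding device setup is not shown in this example, so the variable names below are assumptions): StopRecord leaves mmTime->wType untouched, so the caller picks the position format before the call, and the final waveInReset also returns any still-pending buffers to the application.

// Hypothetical caller; hWaveIn is assumed to be an open, recording device.
MMTIME mmTime;
mmTime.wType = TIME_BYTES;                 // ask for the position in bytes
if (StopRecord(&hWaveIn, &mmTime) == 0)
{
	// If the driver honoured TIME_BYTES, mmTime.u.cb holds the bytes captured.
	printf("Captured %lu bytes\n", (unsigned long)mmTime.u.cb);
}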
Example No. 2
int getwave(void)
{
	int r;
	static int pos=-1024;

	if (ifp == NULL)
	{
		// WaveIn
		do
		{
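			// The position is read below as a byte count (u.cb), so mmt.wType
			// is presumably set to TIME_BYTES wherever recording is started.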
			mmr = waveInGetPosition(hwi,&mmt, sizeof(mmt));
			now_ptr = mmt.u.cb;
			if (now_ptr == BUFSIZE)
			{
				die("\nWAV Buffer Full!\n");
			}
			
			if (kbhit())
			{
				die("\nAbort...\n");
			}
			Sleep(0);
		} while (now_ptr<(read_ptr+0x1000));

#ifndef USE_RECORD_16BIT
//		8bit
		r = record_buf[read_ptr++];
#else
//		16bit 
		r = (record_buf[read_ptr+1] << 8) | record_buf[read_ptr];
		read_ptr += 2;
		r >>= 8;
#endif
	}
	else
	{
		// from WAV File
		if ( pos+1024 <= ftell(ifp) )
Example No. 3
static void check_position(int device, HWAVEIN win, DWORD bytes,
                           LPWAVEFORMATEX pwfx )
{
    MMTIME mmtime;
    MMRESULT rc;
    DWORD returned;

    mmtime.wType = TIME_BYTES;
    rc=waveInGetPosition(win, &mmtime, sizeof(mmtime));
    ok(rc==MMSYSERR_NOERROR,
       "waveInGetPosition(%s): rc=%s\n",dev_name(device),wave_in_error(rc));
    if (mmtime.wType != TIME_BYTES && winetest_debug > 1)
        trace("waveInGetPosition(%s): TIME_BYTES not supported, returned %s\n",
              dev_name(device),wave_time_format(mmtime.wType));
    returned = time_to_bytes(&mmtime, pwfx);
    ok(returned == bytes, "waveInGetPosition(%s): returned %d bytes, "
       "should be %d\n", dev_name(device), returned, bytes);

    mmtime.wType = TIME_SAMPLES;
    rc=waveInGetPosition(win, &mmtime, sizeof(mmtime));
    ok(rc==MMSYSERR_NOERROR,
       "waveInGetPosition(%s): rc=%s\n",dev_name(device),wave_in_error(rc));
    if (mmtime.wType != TIME_SAMPLES && winetest_debug > 1)
        trace("waveInGetPosition(%s): TIME_SAMPLES not supported, "
              "returned %s\n",dev_name(device),wave_time_format(mmtime.wType));
    returned = time_to_bytes(&mmtime, pwfx);
    ok(returned == bytes, "waveInGetPosition(%s): returned %d samples, "
       "should be %d\n", dev_name(device), bytes_to_samples(returned, pwfx),
       bytes_to_samples(bytes, pwfx));

    mmtime.wType = TIME_MS;
    rc=waveInGetPosition(win, &mmtime, sizeof(mmtime));
    ok(rc==MMSYSERR_NOERROR,
       "waveInGetPosition(%s): rc=%s\n",dev_name(device),wave_in_error(rc));
    if (mmtime.wType != TIME_MS && winetest_debug > 1)
        trace("waveInGetPosition(%s): TIME_MS not supported, returned %s\n",
              dev_name(device), wave_time_format(mmtime.wType));
    returned = time_to_bytes(&mmtime, pwfx);
    ok(returned == bytes, "waveInGetPosition(%s): returned %d ms, "
       "should be %d\n", dev_name(device), bytes_to_ms(returned, pwfx),
       bytes_to_ms(bytes, pwfx));

    mmtime.wType = TIME_SMPTE;
    rc=waveInGetPosition(win, &mmtime, sizeof(mmtime));
    ok(rc==MMSYSERR_NOERROR,
       "waveInGetPosition(%s): rc=%s\n",dev_name(device),wave_in_error(rc));
    if (mmtime.wType != TIME_SMPTE && winetest_debug > 1)
        trace("waveInGetPosition(%s): TIME_SMPTE not supported, returned %s\n",
              dev_name(device),wave_time_format(mmtime.wType));
    returned = time_to_bytes(&mmtime, pwfx);
    ok(returned == bytes, "waveInGetPosition(%s): SMPTE test failed\n",
       dev_name(device));

    mmtime.wType = TIME_MIDI;
    rc=waveInGetPosition(win, &mmtime, sizeof(mmtime));
    ok(rc==MMSYSERR_NOERROR,
       "waveInGetPosition(%s): rc=%s\n",dev_name(device),wave_in_error(rc));
    if (mmtime.wType != TIME_MIDI && winetest_debug > 1)
        trace("waveInGetPosition(%s): TIME_MIDI not supported, returned %s\n",
              dev_name(device),wave_time_format(mmtime.wType));
    returned = time_to_bytes(&mmtime, pwfx);
    ok(returned == bytes, "waveInGetPosition(%s): MIDI test failed\n",
       dev_name(device));

    mmtime.wType = TIME_TICKS;
    rc=waveInGetPosition(win, &mmtime, sizeof(mmtime));
    ok(rc==MMSYSERR_NOERROR,
       "waveInGetPosition(%s): rc=%s\n",dev_name(device),wave_in_error(rc));
    if (mmtime.wType != TIME_TICKS && winetest_debug > 1)
        trace("waveInGetPosition(%s): TIME_TICKS not supported, returned %s\n",
              dev_name(device),wave_time_format(mmtime.wType));
    returned = time_to_bytes(&mmtime, pwfx);
    ok(returned == bytes, "waveInGetPosition(%s): TICKS test failed\n",
       dev_name(device));
}
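The helpers time_to_bytes(), bytes_to_samples() and bytes_to_ms() are not shown in this excerpt. A minimal sketch of how such a normalisation could be written from the WAVEFORMATEX fields (the name and the SMPTE handling here are assumptions, not the test suite's actual implementation):

static DWORD example_time_to_bytes(const MMTIME *mmtime, LPWAVEFORMATEX pwfx)
{
    switch (mmtime->wType)
    {
    case TIME_BYTES:
        return mmtime->u.cb;
    case TIME_SAMPLES:
        return mmtime->u.sample * pwfx->nBlockAlign;
    case TIME_MS:
        return mmtime->u.ms * pwfx->nAvgBytesPerSec / 1000;
    case TIME_SMPTE:
        /* hours:minutes:seconds plus frames; convert elapsed time to bytes */
        return (mmtime->u.smpte.hour * 3600 + mmtime->u.smpte.min * 60 +
                mmtime->u.smpte.sec) * pwfx->nAvgBytesPerSec +
               mmtime->u.smpte.frame * pwfx->nAvgBytesPerSec /
               (mmtime->u.smpte.fps ? mmtime->u.smpte.fps : 25);
    default:
        return 0; /* TIME_MIDI and TIME_TICKS have no byte mapping here */
    }
}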
Example No. 4
static void wave_in_test_deviceIn(int device, LPWAVEFORMATEX pwfx, DWORD format, DWORD flags, LPWAVEINCAPS pcaps)
{
    HWAVEIN win;
    HANDLE hevent;
    WAVEHDR frag;
    MMRESULT rc;
    DWORD res;
    MMTIME mmt;
    WORD nChannels = pwfx->nChannels;
    WORD wBitsPerSample = pwfx->wBitsPerSample;
    DWORD nSamplesPerSec = pwfx->nSamplesPerSec;

    hevent=CreateEvent(NULL,FALSE,FALSE,NULL);
    ok(hevent!=NULL,"CreateEvent(): error=%d\n",GetLastError());
    if (hevent==NULL)
        return;

    win=NULL;
    rc=waveInOpen(&win,device,pwfx,(DWORD_PTR)hevent,0,CALLBACK_EVENT|flags);
    /* Note: Win9x doesn't know WAVE_FORMAT_DIRECT */
    ok(rc==MMSYSERR_NOERROR || rc==MMSYSERR_BADDEVICEID ||
       rc==MMSYSERR_NOTENABLED || rc==MMSYSERR_NODRIVER ||
       rc==MMSYSERR_ALLOCATED ||
       ((rc==WAVERR_BADFORMAT || rc==MMSYSERR_NOTSUPPORTED) &&
       (flags & WAVE_FORMAT_DIRECT) && !(pcaps->dwFormats & format)) ||
       ((rc==WAVERR_BADFORMAT || rc==MMSYSERR_NOTSUPPORTED) &&
       (!(flags & WAVE_FORMAT_DIRECT) || (flags & WAVE_MAPPED)) &&
       !(pcaps->dwFormats & format)) ||
       (rc==MMSYSERR_INVALFLAG && (flags & WAVE_FORMAT_DIRECT)),
       "waveInOpen(%s): format=%dx%2dx%d flags=%x(%s) rc=%s\n",
       dev_name(device),pwfx->nSamplesPerSec,pwfx->wBitsPerSample,
       pwfx->nChannels,CALLBACK_EVENT|flags,
       wave_open_flags(CALLBACK_EVENT|flags),wave_in_error(rc));
    if ((rc==WAVERR_BADFORMAT || rc==MMSYSERR_NOTSUPPORTED) &&
       (flags & WAVE_FORMAT_DIRECT) && (pcaps->dwFormats & format))
        trace(" Reason: The device lists this format as supported in it's "
              "capabilities but opening it failed.\n");
    if ((rc==WAVERR_BADFORMAT || rc==MMSYSERR_NOTSUPPORTED) &&
       !(pcaps->dwFormats & format))
        trace("waveInOpen(%s): format=%dx%2dx%d %s rc=%s failed but format "
              "not supported so OK.\n",dev_name(device),pwfx->nSamplesPerSec,
              pwfx->wBitsPerSample,pwfx->nChannels,
              flags & WAVE_FORMAT_DIRECT ? "flags=WAVE_FORMAT_DIRECT" :
              flags & WAVE_MAPPED ? "flags=WAVE_MAPPED" : "", mmsys_error(rc));
    if (rc!=MMSYSERR_NOERROR) {
        CloseHandle(hevent);
        return;
    }
    res=WaitForSingleObject(hevent,1000);
    ok(res==WAIT_OBJECT_0,"WaitForSingleObject failed for open\n");

    ok(pwfx->nChannels==nChannels &&
       pwfx->wBitsPerSample==wBitsPerSample &&
       pwfx->nSamplesPerSec==nSamplesPerSec,
       "got the wrong format: %dx%2dx%d instead of %dx%2dx%d\n",
       pwfx->nSamplesPerSec, pwfx->wBitsPerSample,
       pwfx->nChannels, nSamplesPerSec, wBitsPerSample, nChannels);

    /* Check that the position is 0 at start */
    check_position(device, win, 0, pwfx);

    frag.lpData=HeapAlloc(GetProcessHeap(), 0, pwfx->nAvgBytesPerSec);
    frag.dwBufferLength=pwfx->nAvgBytesPerSec;
    frag.dwBytesRecorded=0;
    frag.dwUser=0;
    frag.dwFlags=0;
    frag.dwLoops=0;
    frag.lpNext=0;

    rc=waveInPrepareHeader(win, &frag, sizeof(frag));
    ok(rc==MMSYSERR_NOERROR, "waveInPrepareHeader(%s): rc=%s\n",
       dev_name(device),wave_in_error(rc));
    ok(frag.dwFlags&WHDR_PREPARED,"waveInPrepareHeader(%s): prepared flag "
       "not set\n",dev_name(device));

    if (winetest_interactive && rc==MMSYSERR_NOERROR) {
        trace("Recording for 1 second at %5dx%2dx%d %s %s\n",
              pwfx->nSamplesPerSec, pwfx->wBitsPerSample,pwfx->nChannels,
              get_format_str(pwfx->wFormatTag),
              flags & WAVE_FORMAT_DIRECT ? "WAVE_FORMAT_DIRECT" :
              flags & WAVE_MAPPED ? "WAVE_MAPPED" : "");
        rc=waveInAddBuffer(win, &frag, sizeof(frag));
        ok(rc==MMSYSERR_NOERROR,"waveInAddBuffer(%s): rc=%s\n",
           dev_name(device),wave_in_error(rc));

        /* Check that the position is 0 at start */
        check_position(device, win, 0, pwfx);

        rc=waveInStart(win);
        ok(rc==MMSYSERR_NOERROR,"waveInStart(%s): rc=%s\n",
           dev_name(device),wave_in_error(rc));

        res = WaitForSingleObject(hevent,1200);
        ok(res==WAIT_OBJECT_0,"WaitForSingleObject failed for header\n");
        ok(frag.dwFlags&WHDR_DONE,"WHDR_DONE not set in frag.dwFlags\n");
        ok(frag.dwBytesRecorded==pwfx->nAvgBytesPerSec,
           "frag.dwBytesRecorded=%d, should=%d\n",
           frag.dwBytesRecorded,pwfx->nAvgBytesPerSec);

        mmt.wType = TIME_SAMPLES;
        rc=waveInGetPosition(win, &mmt, sizeof(mmt));
        ok(rc==MMSYSERR_NOERROR,"waveInGetPosition(%s): rc=%s\n",
           dev_name(device),wave_in_error(rc));
        ok(mmt.u.cb == frag.dwBytesRecorded, "Got wrong position: %u\n", mmt.u.cb);

        /* stop playing on error */
        if (res!=WAIT_OBJECT_0) {
            rc=waveInStop(win);
            ok(rc==MMSYSERR_NOERROR,
               "waveInStop(%s): rc=%s\n",dev_name(device),wave_in_error(rc));
        }
    }

    rc=waveInUnprepareHeader(win, &frag, sizeof(frag));
    ok(rc==MMSYSERR_NOERROR,"waveInUnprepareHeader(%s): rc=%s\n",
       dev_name(device),wave_in_error(rc));

    rc=waveInClose(win);
    ok(rc==MMSYSERR_NOERROR,
       "waveInClose(%s): rc=%s\n",dev_name(device),wave_in_error(rc));
    res=WaitForSingleObject(hevent,1000);
    ok(res==WAIT_OBJECT_0,"WaitForSingleObject failed for close\n");

    if (winetest_interactive)
    {
        /*
         * Now play back what we recorded
         */
        HWAVEOUT wout;

        trace("Playing back recorded sound\n");
        rc=waveOutOpen(&wout,WAVE_MAPPER,pwfx,(DWORD_PTR)hevent,0,CALLBACK_EVENT);
        ok(rc==MMSYSERR_NOERROR || rc==MMSYSERR_BADDEVICEID ||
           rc==MMSYSERR_NOTENABLED || rc==MMSYSERR_NODRIVER ||
           rc==MMSYSERR_ALLOCATED ||
           ((rc==WAVERR_BADFORMAT || rc==MMSYSERR_NOTSUPPORTED) &&
            !(pcaps->dwFormats & format)),
           "waveOutOpen(%s) format=%dx%2dx%d flags=%x(%s) rc=%s\n",
           dev_name(device),pwfx->nSamplesPerSec,pwfx->wBitsPerSample,
           pwfx->nChannels,CALLBACK_EVENT|flags,
           wave_open_flags(CALLBACK_EVENT),wave_out_error(rc));
        if (rc==MMSYSERR_NOERROR)
        {
            rc=waveOutPrepareHeader(wout, &frag, sizeof(frag));
            ok(rc==MMSYSERR_NOERROR,"waveOutPrepareHeader(%s): rc=%s\n",
               dev_name(device),wave_out_error(rc));

            if (rc==MMSYSERR_NOERROR)
            {
                WaitForSingleObject(hevent,INFINITE);
                rc=waveOutWrite(wout, &frag, sizeof(frag));
                ok(rc==MMSYSERR_NOERROR,"waveOutWrite(%s): rc=%s\n",
                   dev_name(device),wave_out_error(rc));
                WaitForSingleObject(hevent,INFINITE);

                rc=waveOutUnprepareHeader(wout, &frag, sizeof(frag));
                ok(rc==MMSYSERR_NOERROR,"waveOutUnprepareHeader(%s): rc=%s\n",
                   dev_name(device),wave_out_error(rc));
            }
            rc=waveOutClose(wout);
            ok(rc==MMSYSERR_NOERROR,"waveOutClose(%s): rc=%s\n",
               dev_name(device),wave_out_error(rc));
        }
        else
            trace("Unable to play back the recorded sound\n");
    }

    HeapFree(GetProcessHeap(), 0, frag.lpData);
    CloseHandle(hevent);
}
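For reference, a stripped-down sketch of the call sequence this test exercises, without the winetest assertions; the 8 kHz 8-bit mono format and the minimal error handling are assumptions made for brevity:

#include <windows.h>
#include <mmsystem.h>
#include <stdio.h>

int record_one_second(void)
{
    WAVEFORMATEX wfx = {0};
    WAVEHDR frag = {0};
    HWAVEIN win;
    MMTIME mmt;
    HANDLE hevent = CreateEvent(NULL, FALSE, FALSE, NULL);

    wfx.wFormatTag = WAVE_FORMAT_PCM;
    wfx.nChannels = 1;
    wfx.nSamplesPerSec = 8000;
    wfx.wBitsPerSample = 8;
    wfx.nBlockAlign = wfx.nChannels * wfx.wBitsPerSample / 8;
    wfx.nAvgBytesPerSec = wfx.nSamplesPerSec * wfx.nBlockAlign;

    if (waveInOpen(&win, WAVE_MAPPER, &wfx, (DWORD_PTR)hevent, 0,
                   CALLBACK_EVENT) != MMSYSERR_NOERROR)
        return -1;
    WaitForSingleObject(hevent, 1000);          /* event set for WIM_OPEN */

    frag.lpData = HeapAlloc(GetProcessHeap(), 0, wfx.nAvgBytesPerSec);
    frag.dwBufferLength = wfx.nAvgBytesPerSec;  /* one second of audio */

    waveInPrepareHeader(win, &frag, sizeof(frag));
    waveInAddBuffer(win, &frag, sizeof(frag));
    waveInStart(win);

    WaitForSingleObject(hevent, 1200);          /* buffer marked WHDR_DONE */

    mmt.wType = TIME_BYTES;
    waveInGetPosition(win, &mmt, sizeof(mmt));
    printf("recorded %lu bytes, reported position %lu\n",
           (unsigned long)frag.dwBytesRecorded, (unsigned long)mmt.u.cb);

    waveInUnprepareHeader(win, &frag, sizeof(frag));
    waveInClose(win);
    HeapFree(GetProcessHeap(), 0, frag.lpData);
    CloseHandle(hevent);
    return 0;
}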
Example No. 5
// This is run by a Java thread; the context needs to be the JNI environment
// pointer valid for that thread.
static void PV_AudioWaveInFrameThread(void* threadContext)
{
    WAVEHDR         waveHeader[BAE_WAVEIN_NUM_BUFFERS];
    MMTIME          audioStatus;

    long            count, currentPos, lastPos, framesToRead, bytesToRead, buffersToRead, error;

    long            waveHeaderCount;    // current index in the array of waveheaders
    LPWAVEHDR       pCurrentWaveHdr;

    bytesToRead = g_audioBytesPerBuffer; 
    framesToRead = g_audioFramesPerBuffer;  

    memset(&waveHeader, 0, sizeof(WAVEHDR) * BAE_WAVEIN_NUM_BUFFERS);
    memset(&audioStatus, 0, (long)sizeof(MMTIME));
    audioStatus.wType = TIME_BYTES; // get byte position
    
    error = waveInGetPosition(g_captureSound, &audioStatus, sizeof(MMTIME));
    currentPos = audioStatus.u.cb;
    lastPos = currentPos + g_audioBytesPerBuffer;

    // set up all the capture buffers
    for (count = 0; count < BAE_WAVEIN_NUM_BUFFERS; count++)
    {
        waveHeader[count].lpData = (char *)g_audioBufferBlock[count];
        waveHeader[count].dwBufferLength = g_audioBytesPerBuffer;
        waveHeader[count].dwFlags       = 0;
        waveHeader[count].dwLoops       = 0;
        error = waveInPrepareHeader(g_captureSound, &waveHeader[count], (long)sizeof(WAVEHDR));
    }

    // add all the capture buffers
    for (count = 0; count < BAE_WAVEIN_NUM_BUFFERS; count++)
    {
        error = waveInAddBuffer(g_captureSound, &waveHeader[count], sizeof(WAVEHDR));
    }


    // now run this loop to do the capture:
    // wait for enough samples to be captured to fill one capture buffer,
    // call back with the captured data, and put the buffer back in the queue.

    waveHeaderCount = 0; // which buffer we're processing
    while (g_captureShutdown == FALSE)
    {   
        // wait for the device to record enough data to fill our capture buffer

        while (currentPos < lastPos)
        {
            if (g_captureShutdown == TRUE)
            {
                break;
            }

            BAE_SleepFrameThread(threadContext, BAE_WAVEIN_SOUND_PERIOD);       // in ms
                
            error = waveInGetPosition(g_captureSound, &audioStatus, sizeof(MMTIME));
            currentPos = audioStatus.u.cb;
        }

        lastPos += bytesToRead;


        if (g_captureShutdown == FALSE)
        {
            // then process the captured data

            // this is the data buffer for the current capture buffer
            pCurrentWaveHdr = &waveHeader[waveHeaderCount];

            if (pCurrentWaveHdr->dwFlags & WHDR_DONE)
            {
                DWORD dwBytesRecorded = pCurrentWaveHdr->dwBytesRecorded;
                LPSTR lpData = pCurrentWaveHdr->lpData;

                // callback with the captured data
                (*g_captureDoneProc)(threadContext, DATA_READY_CAPTURE, &lpData, (void *)&dwBytesRecorded);

                // add the buffer back into the queue
                error = waveInAddBuffer(g_captureSound, pCurrentWaveHdr, sizeof(WAVEHDR));

                // increment to the next wavehdr
                waveHeaderCount++;
                if (waveHeaderCount == BAE_WAVEIN_NUM_BUFFERS)
                {
                    waveHeaderCount = 0;
                }
            }
        }
    } // while

    waveInReset(g_captureSound);        // stop all audio before unpreparing headers

    // unprepare headers
    for (count = 0; count < BAE_WAVEIN_NUM_BUFFERS; count++)
    {
        error = waveInUnprepareHeader(g_captureSound, &waveHeader[count], (long)sizeof(WAVEHDR));
    }
    // clear the callback pointer here, now that it can no longer be invoked.
    g_captureDoneProc = NULL;
}
Example No. 6
static	DWORD	widGetPosition(WAVEMAPDATA* wim, LPMMTIME lpTime, DWORD dwParam2)
{
    DWORD       val;
    MMTIME      timepos;
    TRACE("(%p %p %08x)\n", wim, lpTime, dwParam2);

    timepos = *lpTime;

    /* For TIME_MS, we're going to recalculate using TIME_BYTES */
    if (lpTime->wType == TIME_MS)
        timepos.wType = TIME_BYTES;

    /* This can change timepos.wType if the requested type is not supported */
    val = waveInGetPosition(wim->u.in.hInnerWave, &timepos, dwParam2);

    if (timepos.wType == TIME_BYTES)
    {
        DWORD dwInnerSamplesPerOuter = wim->nSamplesPerSecInner / wim->nSamplesPerSecOuter;
        if (dwInnerSamplesPerOuter > 0)
        {
            DWORD dwInnerBytesPerSample = wim->avgSpeedInner / wim->nSamplesPerSecInner;
            DWORD dwInnerBytesPerOuterSample = dwInnerBytesPerSample * dwInnerSamplesPerOuter;
            DWORD remainder = 0;

            /* If we are up sampling (going from lower sample rate to higher),
            **   we need to make a special accommodation for times when we've
            **   written a partial output sample.  This happens frequently
            **   to us because we use msacm to do our up sampling, and it
            **   will up sample on an unaligned basis.
            ** For example, if you convert a 2 byte wide 8,000 'outer'
            **   buffer to a 2 byte wide 48,000 inner device, you would
            **   expect 2 bytes of input to produce 12 bytes of output.
            **   Instead, msacm will produce 8 bytes of output.
            **   But reporting our position as 1 byte of output is
            **   nonsensical; the output buffer position needs to be
            **   aligned on outer sample size, and aggressively rounded up.
            */
            remainder = timepos.u.cb % dwInnerBytesPerOuterSample;
            if (remainder > 0)
            {
                timepos.u.cb -= remainder;
                timepos.u.cb += dwInnerBytesPerOuterSample;
            }
        }

        lpTime->u.cb = MulDiv(timepos.u.cb, wim->avgSpeedOuter, wim->avgSpeedInner);

        /* Once we have the TIME_BYTES right, we can easily convert to TIME_MS */
        if (lpTime->wType == TIME_MS)
            lpTime->u.ms = MulDiv(lpTime->u.cb, 1000, wim->avgSpeedOuter);
        else
            lpTime->wType = TIME_BYTES;
    }
    else if (lpTime->wType == TIME_SAMPLES && timepos.wType == TIME_SAMPLES)
        lpTime->u.sample = MulDiv(timepos.u.sample, wim->nSamplesPerSecOuter, wim->nSamplesPerSecInner);
    else
        /* other time types don't require conversion */
        lpTime->u = timepos.u;

    return val;
}
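A worked example of the scaling above, using hypothetical rates: if the inner (device) format runs at 96000 bytes/s and the outer (application) format at 16000 bytes/s, an inner position of 96000 bytes maps back to 16000 outer bytes and, for TIME_MS, to 1000 ms:

#include <windows.h>
#include <stdio.h>

int main(void)
{
    /* Hypothetical figures: inner device 48 kHz 16-bit mono, outer 8 kHz 16-bit mono */
    DWORD avgSpeedInner = 96000;  /* bytes per second at the real device */
    DWORD avgSpeedOuter = 16000;  /* bytes per second seen by the caller */
    DWORD innerBytes    = 96000;  /* device position after one second    */

    DWORD outerBytes = MulDiv(innerBytes, avgSpeedOuter, avgSpeedInner);
    DWORD outerMs    = MulDiv(outerBytes, 1000, avgSpeedOuter);

    printf("outer position: %lu bytes = %lu ms\n",
           (unsigned long)outerBytes, (unsigned long)outerMs);
    return 0;
}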