Exemple #1
0
// stop the capture hardware
// Signals the capture thread to shut down, pauses the device, waits for the
// thread to exit, then destroys the frame thread.
// returns 0 for success, -1 for failure
int HAE_StopAudioCapture(void* context) {
    INT32 error = -1;

    //fprintf(stderr, ">> HAE_API_SolarisOS_Capture: HAE_StopAudioCapture()\n");

    if (g_captureSound) {
	// tell the capture thread (PV_AudioWaveInFrameThread) to exit its loop
	g_captureShutdown = TRUE;

	// stop streaming data
	error = HAE_PauseAudioCapture();

	// wait for thread to complete
	while (g_activeWaveInThread) {
	    HAE_SleepFrameThread(context, 10);
	}
    }

    // $$kk: 04.13.99: should do this regardless of error value??
    if (error == 0) {
	// destroy the audio capture thread
	error = HAE_DestroyFrameThread(NULL);
    }

    //fprintf(stderr, "<< HAE_API_SolarisOS_Capture: HAE_StopAudioCapture() returning %d\n", error);
    return (error == 0) ? 0 : -1;
}
Exemple #2
0
// Release and free audio card.
// Shuts down the playback frame thread, waits (bounded) for it to exit,
// closes the wave device, and frees the render buffer.
// return 0 if ok, -1 if failed.
int HAE_ReleaseAudioCard(void *context) {
    int ctr = 50;	// bounded wait: 50 sleep periods before giving up
    g_shutDownDoubleBuffer = TRUE;	// signal thread to stop
    HAE_DestroyFrameThread(context);

    // $$fb 2002-04-17: wait until PV_AudioWaveOutFrameThread exits
    // fix for 4498848 Sound causes crashes on Linux
    while (g_activeDoubleBuffer && --ctr) {
	TRACE1("Waiting %d...\r", ctr);
	// the following call MUST allow the FrameThread to continue 
	// (i.e. to exit the PV_AudioWaveOutFrameThread function)
	HAE_SleepFrameThread(context, HAE_SOLARIS_SOUND_PERIOD);
    }
    if (!ctr) {
	ERROR0("Timed out waiting for frame thread to die!\n");
    }

    if (g_waveDevice) {
	close(g_waveDevice);
	g_waveDevice = 0;
    }

    if (g_audioBufferBlock) {
	HAE_Deallocate(g_audioBufferBlock);
	g_audioBufferBlock = NULL;
    }
#ifdef USE_RAWDATA_CHECK
    HAE_FileClose(debugrawfile); debugrawfile = 0;
#endif
    return 0;
}
Exemple #3
0
// stop the capture hardware
// Signals the capture thread to die, pauses the device, waits for the thread
// to finish, then frees the per-buffer capture blocks.  Always returns 0.
int HAE_StopAudioCapture(void* context) {
    int i;

    TRACE0("> HAE_StopAudioCapture\n");
    if (g_captureSound) {
	    // tell the thread to die
	    // the thread will also reset the device
	    g_captureShutdown = TRUE;

	    // stop streaming data
	    // (return value was previously stored in an unused MMRESULT;
	    // the call is kept for its side effect only)
	    HAE_PauseAudioCapture();

	    // destroy the audio capture thread. 
    
	    /* $$fb:
	     * this is a dummy operation! It wouldn't even
	     * distinguish between playback thread and capture thread...
	     */
	    HAE_DestroyFrameThread(NULL);

	    //printf("  waiting for thread to complete\n");
	    // wait for thread to complete
	    while (g_activeWaveInThread) {
		HAE_SleepFrameThread(context, 10);
	    }

	    // deallocate the capture data buffers
	    for (i = 0; i < HAE_WAVEIN_NUM_BUFFERS; i++) {
		HAE_Deallocate(g_audioBufferBlock[i]);
	    } // for
	}
    TRACE0("< HAE_StopAudioCapture\n");
    return 0;
}
Exemple #4
0
// Release and free audio card.
// Tears down the playback frame thread (with a bounded wait for it to exit),
// closes the playback device, and frees the render buffer.
// return 0 if ok, -1 if failed.
int HAE_ReleaseAudioCard(void *context)
{
    int attempts = 50;

    /* ask the render thread to wind down, then destroy it */
    g_shutDownDoubleBuffer = TRUE;
    HAE_DestroyFrameThread(context);

    // $$fb 2002-04-17: wait until PV_AudioWaveOutFrameThread exits
    // fix for 4498848 Sound causes crashes on Linux
    while (g_activeDoubleBuffer && --attempts) {
        TRACE1("Waiting %d...\r", attempts);
        // the sleep below MUST give the FrameThread a chance to run
        // (i.e. to leave the PV_AudioWaveOutFrameThread function)
        HAE_SleepFrameThread(context, HAE_LINUX_SOUND_PERIOD);
    }
    if (attempts == 0) {
        ERROR0("Timed out waiting for frame thread to die!\n");
    }

    HAE_CloseSoundCard(0); // Close for playback

    if (g_audioBufferBlock != NULL) {
        HAE_Deallocate(g_audioBufferBlock);
        g_audioBufferBlock = NULL;
    }

    return 0;
}
Exemple #5
0
// Release and free the capture device.
// Stops capture if still running, then closes the waveIn handle, retrying
// while the driver still reports buffers in flight.
// return 0 if ok, -1 if failed.
int HAE_ReleaseAudioCapture(void *context) {
    TRACE0("> HAE_ReleaseAudioCapture\n");
    if (g_captureSound != NULL) {
        // play it safe: stop capture (and its thread) if not already done
        if (g_captureShutdown == FALSE) {
            HAE_StopAudioCapture(context);
        }

        // keep trying to close until the driver has drained its buffers
        while (waveInClose(g_captureSound) == WAVERR_STILLPLAYING) {
            HAE_SleepFrameThread(context, 10); // in millis
        }
        g_waveInStarted = FALSE;
        g_captureSound = NULL;
    }
    TRACE0("< HAE_ReleaseAudioCapture\n");
    return 0;
}
Exemple #6
0
// This proc drives audio capture.  This proc both feeds buffers to the device, into
// which data is captured, and makes callbacks to deliver the captured data.
// It may be run in a separate thread.  In the case of Solaris, a separate thread is
// required  because read() is (supposed to be) a blocking call.
// When used with a Java thread, the context needs to be the  JNI environment pointer
// valid for the thread running the proc.
//
// Loop: read buffersToRead device-sized chunks into g_captureBufferBlock, then
// deliver them via g_captureDoneProc.  Exits when g_captureShutdown is set;
// clears g_activeWaveInThread on exit so HAE_StopAudioCapture can join.
void PV_AudioWaveInFrameThread(void* context) {
    audio_info_t	sunAudioHeader;
    uint_t		deviceByteBufferSize;
    char		*pFillBuffer;
    int			currentBytesRead;
    int			totalBytesRead;
    INT32		currentPos, lastPos;
    INT32		count;

    INT32		framesToRead;		// number of sample frames to read per callback
    INT32		bytesToRead;		// number of bytes to read per callback
    INT32		buffersToRead;		// number of buffers to read from the device per callback
    INT32		bytesToReadPerBuffer;	// number of bytes to read from the device per buffer


    // $$kk: 10.14.98: need to make sure our capture buffer isn't larger than the system
    // one; otherwise we will overflow and miss data

    //fprintf(stderr, "> PV_AudioWaveInFrameThread, context: %d\n", context);

    g_activeWaveInThread = TRUE;

    // size our reads from the device's own record buffer size
    ioctl(g_captureSound, AUDIO_GETINFO, &sunAudioHeader);
    deviceByteBufferSize = sunAudioHeader.record.buffer_size;
    buffersToRead = g_captureByteBufferSize / deviceByteBufferSize;

    if (buffersToRead == 0) {
	// capture buffer smaller than the device buffer: do one partial read
	buffersToRead = 1;
	bytesToReadPerBuffer = g_captureByteBufferSize;
    } else {
	bytesToReadPerBuffer = deviceByteBufferSize;
    }

    bytesToRead = buffersToRead * bytesToReadPerBuffer;
    framesToRead = bytesToRead / (g_channels * g_bitSize / 8);


    // flush anything we might have accumulated in the capture queue before starting the capture thread
    ioctl(g_captureSound, I_FLUSH, FLUSHR);

    currentPos = sunAudioHeader.record.samples;

    // $$kk: 11.03.98: this is a bit of a hack.
    // we're not keeping track of our position very well.
    // here, if the device is not newly opened, we make sure to back off
    // our currentPos enough that we can reach our lastPos.  otherwise,
    // when the device is paused and resumed (when the capture stream is
    // stopped and restarted), we get stuck in the while (currentPos < lastPos)
    // loop below....
    if ( (currentPos - framesToRead) >= 0 ) {
	currentPos -= framesToRead;
    }

    // NOTE(review): lastPos is only consumed by the commented-out pacing loop
    // below; it is kept so that code can be re-enabled unchanged.
    lastPos = currentPos + framesToRead;

    while (g_captureShutdown == FALSE) {
	pFillBuffer = (char *)g_captureBufferBlock;
	totalBytesRead = 0;
	currentBytesRead = 0;


	// $$kk: 08.12.99: i'm taking this code out for now.  it's deadlock
	// prone and increases latency.  without it, we make a lot more
	// callbacks.  is the performance hit worth it?  perhaps!
	/*
	  // wait for the device to record enough data to fill our capture buffer
	  while (currentPos < lastPos) {
	  if (g_captureShutdown == TRUE) {
				// do this here, when we can't call it anymore.
				g_captureDoneProc = NULL;

				// release read buffer
				HAE_Deallocate(g_captureBufferBlock);
				return;
				}

				// $$kk: 04.23.99: if we are paused, update the pos, drop out of this loop, 
				// and block down below on the read instead.  otherwise we might get stuck
				// here forever even once the device is resumed.

				if (g_paused)  {
				lastPos = currentPos;
				break;
				}

				HAE_SleepFrameThread(context, HAE_SOLARIS_SOUND_CAPTURE_PERIOD);		// in ms
				
				ioctl(g_captureSound, AUDIO_GETINFO, &sunAudioHeader);
				currentPos = sunAudioHeader.record.samples;
				}

				lastPos += framesToRead;
	*/

	// now read the data from the device record buffer

	for (count = 0; count < buffersToRead; count++) {
	    // This is a blocking call on Solaris unless the device is set to a non-blocking mode.
	    // It returns the number of bytes read or an error code.
	    // $$kk: 10.13.98: this is not blocking for me, even when i don't open the device
	    // with O_NONBLOCK....  what is the difference between O_NONBLOCK and O_NDELAY?
	    currentBytesRead = read(g_captureSound, pFillBuffer, bytesToReadPerBuffer);
	    // guard against read() failure: a -1 return would otherwise move
	    // pFillBuffer backwards and corrupt totalBytesRead.
	    // (same handling as the Linux capture thread)
	    if (currentBytesRead > 0) {
		pFillBuffer += currentBytesRead;
		totalBytesRead += currentBytesRead;
	    }
	}

	// callback to deliver the captured data

	if (totalBytesRead > 0) {
	    // now callback with captured data
	    (*g_captureDoneProc)(context, DATA_READY_CAPTURE, &g_captureBufferBlock, &totalBytesRead);
	} else {
	    // nothing captured: yield so we don't spin
	    HAE_SleepFrameThread(context, HAE_SOLARIS_SOUND_CAPTURE_PERIOD);		// in ms
	}
    }
    g_activeWaveInThread = FALSE;
}
Exemple #7
0
// $$kk: 08.12.98 merge: changed this method to do convert to unsigned data if required by audio hardware 
//
// Playback frame thread (Solaris /dev/audio).  Each iteration:
//   1. renders HAE_SOLARIS_FRAMES_PER_BLOCK frames of mixed audio into
//      g_audioBufferBlock via HAE_BuildMixerSlice,
//   2. converts 8-bit samples to unsigned if the hardware requires it,
//   3. writes the whole block to g_waveDevice (retrying partial writes),
//   4. sleeps until the device's played-sample count catches up (paces output).
// Exits when g_shutDownDoubleBuffer is set; clears g_activeDoubleBuffer on
// exit so HAE_ReleaseAudioCard can tell the thread is gone.
// NOTE(review): 'context' is forwarded to HAE_BuildMixerSlice /
// HAE_SleepFrameThread -- presumably the JNI environment pointer when run on
// a Java thread (as documented on the capture thread); confirm with callers.
void PV_AudioWaveOutFrameThread(void* context) {
    audio_info_t sunAudioHeader;
    char *pFillBuffer;
    INT32 count, currentPos, lastPos, sampleFrameSize;
    UINT32 startTime, stopTime, fillTime;  // NOTE(review): never used in this function
    int i;
    int rc;
    int bytesWritten;
    int bytesToWrite;


    ioctl(g_waveDevice, AUDIO_GETINFO, &sunAudioHeader);

    // calculate sample size for convertion of bytes to sample frames
    sampleFrameSize = 1;
		
    if (g_bitSize == 16) {
	sampleFrameSize *= 2;
    }

    if (g_channels == 2) {
	sampleFrameSize *= 2;
    }


    // start lastPos two block-groups behind the device's current sample count
    // so the first pacing loop below does not block
    lastPos = sunAudioHeader.play.samples - ((g_audioByteBufferSize * HAE_SOLARIS_FRAMES_PER_BLOCK * 2) / sampleFrameSize);

    if (g_audioBufferBlock) {
	while ( (g_activeDoubleBuffer) && (g_shutDownDoubleBuffer == FALSE) ) {

	    /* put sync count and XMicroseconds into relation */
	    /* could be improved by using actual device sample count */
	    g_checkpointMicros = XMicroseconds();
	    g_checkpointSyncCount = GM_GetSyncTimeStamp();

	    // Generate HAE_SOLARIS_FRAMES_PER_BLOCK frames of audio
	    pFillBuffer = (char *)g_audioBufferBlock;	
	    for (count = 0; count < HAE_SOLARIS_FRAMES_PER_BLOCK; count++) {
				// Generate one frame audio
		HAE_BuildMixerSlice(context, pFillBuffer, g_audioByteBufferSize,
				    g_audioFramesToGenerate);

		pFillBuffer += g_audioByteBufferSize;

		if (g_shutDownDoubleBuffer) {
		    break;	// time to quit
		}
	    }

	    // $$kk
	    // for some solaris drivers, we must supply unsigned data when rendering 8 bit data
	    // (flips the sign bit of every byte in the rendered block)
	    if (g_convertUnsigned && (g_bitSize == 8)) {
		pFillBuffer = (char *)g_audioBufferBlock;	
		for (i = 0; i < (g_audioByteBufferSize * HAE_SOLARIS_FRAMES_PER_BLOCK); i++) {
		    *pFillBuffer = (*pFillBuffer >= 0) ? (0x80 | *pFillBuffer) : (0x7F & *pFillBuffer);
		    pFillBuffer++;
		}
	    }
	
	    // $$jb: Changing the write() loop to handle cases when the 
	    // device is unavailable, or we can't write our entire buffer
	    bytesWritten = 0;
	    bytesToWrite = (g_audioByteBufferSize * HAE_SOLARIS_FRAMES_PER_BLOCK);
	    while( bytesToWrite > 0 )  {
		//$$fb don't write when it's time to quit.
		if( g_shutDownDoubleBuffer) {
		    break;
		}
		rc = write(g_waveDevice, ((char *)g_audioBufferBlock+bytesWritten), (size_t)bytesToWrite);
		if ( rc > 0 ) {
#ifdef USE_RAWDATA_CHECK
		    // debug tap: mirror what we send to the device into a raw file
		    if (debugrawfile) {
			HAE_WriteFile(debugrawfile, ((char *)g_audioBufferBlock+bytesWritten), rc);
		    }
#endif
		    bytesWritten += rc;
		    bytesToWrite -= rc;
		} else {
                                // $$jb:  This happens when the device buffers cannot
                                // be written to.  Make sure we're not shutting down and 
                                // sleep a bit so that we don't completely hog the CPU
		    if( g_shutDownDoubleBuffer == FALSE ) {
			HAE_SleepFrameThread(context, HAE_SOLARIS_SOUND_PERIOD);
		    } else {
			break;
		    }
		} 
	    }


	    // O.k. We're done for now.
	    // Let the rest of the system know we're done ....

	    ioctl(g_waveDevice, AUDIO_GETINFO, &sunAudioHeader);
	    currentPos = sunAudioHeader.play.samples;

	    // $$jb: We have successfully written all our bytes.  
	    // If we encountered a problem while writing, play.error will be 1.
	    // This should be reset.
	    if( sunAudioHeader.play.error != 0 ) {
		AUDIO_INITINFO(&sunAudioHeader);
		sunAudioHeader.play.error = 0;
		ioctl(g_waveDevice, AUDIO_SETINFO, &sunAudioHeader);
	    }

			
	    // $$kk: 03.21.00: make sure we sleep at least once so that other threads can run.
	    // this is part of the fix for bug #4318062: "MixerSourceLine.drain hangs after
	    // repeated use."
	    //while ((currentPos < lastPos) && (g_shutDownDoubleBuffer == FALSE))
	    do {
		HAE_SleepFrameThread(context, HAE_SOLARIS_SOUND_PERIOD);		// in ms
				
		ioctl(g_waveDevice, AUDIO_GETINFO, &sunAudioHeader);
		currentPos = sunAudioHeader.play.samples;

                                // $$jb: Removing the bit of code that breaks out
                                // of this timing loop on sunAudioHeader.play.error != 0.
	    }
	    while ((currentPos < lastPos) &&
		   (lastPos - currentPos < (1 << 28)) && /* see note A */
		   (g_shutDownDoubleBuffer == FALSE));

	    // Note A: $$ay: Additional safeguard for wraparound of sample
	    // ------  count from 1 << 32 - 1.  Make sure the difference is
	    //         not a huge value
			
	    // advance the pacing target by one rendered block-group (in frames)
	    lastPos += (g_audioByteBufferSize * HAE_SOLARIS_FRAMES_PER_BLOCK) / sampleFrameSize;
	    // ... and reschedule ourselves.	
	}

	// tell HAE_ReleaseAudioCard we are gone
	g_activeDoubleBuffer = FALSE;
    }
}
Exemple #8
0
// this is run by a java thread; the context needs to be the JNI environment
// pointer valid for the thread.
//
// Capture frame thread (Windows waveIn).  Prepares HAE_WAVEIN_NUM_BUFFERS
// wave headers over g_audioBufferBlock[], queues them to the device, and then
// cycles through them in ring order: wait for WHDR_DONE, deliver the data via
// g_captureDoneProc, re-queue the buffer.  The outer loop exists to support
// flush(): when g_flushMode becomes FLUSHMODE_FLUSHED the inner loop bails
// out so all buffers can be re-added and the device restarted.
// On shutdown: resets the device, delivers any still-completed buffers,
// unprepares all headers, and clears g_activeWaveInThread.
void PV_AudioWaveInFrameThread(void* context) {
    WAVEHDR	waveHeader[HAE_WAVEIN_NUM_BUFFERS];
    long	count, framesToRead, bytesToRead, error;
    long	waveHeaderCount = 0;	// current index in the array of waveheaders
    LPWAVEHDR	pCurrentWaveHdr;
    DWORD dwBytesRecorded;
    LPSTR lpData;

    TRACE0("> PV_AudioWaveInFrameThread\n");

    g_activeWaveInThread = TRUE;
    
    // NOTE(review): bytesToRead/framesToRead are set but not otherwise used here
    bytesToRead = g_audioBytesPerBuffer;
    framesToRead = g_audioFramesPerBuffer;

    memset(&waveHeader, 0, sizeof(WAVEHDR) * HAE_WAVEIN_NUM_BUFFERS);
    
    // set up all the capture buffers
    for (count = 0; count < HAE_WAVEIN_NUM_BUFFERS; count++) {
	    waveHeader[count].lpData = (char *)g_audioBufferBlock[count];
	    waveHeader[count].dwBufferLength = g_audioBytesPerBuffer;
	    waveHeader[count].dwFlags 		= 0;
	    waveHeader[count].dwLoops 		= 0;
	    error = waveInPrepareHeader(g_captureSound, &waveHeader[count], (INT32)sizeof(WAVEHDR));
    }
    
    /* loop for flushes */
    while (g_captureShutdown == FALSE) {
	
	// add all the capture buffers
	for (count = 0; count < HAE_WAVEIN_NUM_BUFFERS; count++) {
	    waveInAddBuffer(g_captureSound, &waveHeader[count], sizeof(WAVEHDR));
	}
	// after a flush, restart the device (if it had been started before)
	if (g_flushMode == FLUSHMODE_FLUSHED) {
	    if (g_waveInStarted) {
		waveInStart(g_captureSound);
	    }
	    g_flushMode = FLUSHMODE_NONE;
	}


	// now run this loop to do the capture.
	// we wait for enough samples to be captured to fill one capture buffer,
	// callback with the captured data, and put the buffer back in the queue.

	waveHeaderCount = 0; // which buffer we're processing
	while (g_captureShutdown == FALSE) {
	    TRACE0("  PV_AudioWaveInFrameThread: in loop\n");
	    // wait for the device to record enough data to fill our capture buffer

	    // this is the data buffer for the current capture buffer
	    pCurrentWaveHdr = &waveHeader[waveHeaderCount];

	    // sleep until the current buffer is marked done by the driver;
	    // also park here while a flush is in progress
	    while ((g_flushMode == FLUSHMODE_FLUSHING)
	           || ((!(pCurrentWaveHdr->dwFlags & WHDR_DONE)) 
	                 && (g_captureShutdown == FALSE)) ) {
		//printf("  PV_AudioWaveInFrameThread: sleep\n");
		HAE_SleepFrameThread(context, HAE_WAVEIN_SOUND_PERIOD);		// in ms
	    }
	    if (g_flushMode == FLUSHMODE_FLUSHED) {
		/* discard all buffers by bailing out to
		 * the outer loop in order to 
		 * - re-add all buffers 
		 * - and restart the device
		 */
		break;
	    }

	    // then process the captured data
	    if (g_captureShutdown == FALSE
	        && pCurrentWaveHdr->dwFlags & WHDR_DONE) {
		dwBytesRecorded = pCurrentWaveHdr->dwBytesRecorded;
		lpData = pCurrentWaveHdr->lpData;

		// callback with the captured data
		//printf("  PV_AudioWaveInFrameThread: callback\n");
		(*g_captureDoneProc)(context, DATA_READY_CAPTURE, &lpData, (void *)&dwBytesRecorded);

		// add the buffer back into the queue
		//printf("  PV_AudioWaveInFrameThread: in addBuffer\n");
		error = waveInAddBuffer(g_captureSound, pCurrentWaveHdr, sizeof(WAVEHDR));
		// increment to the next wavehdr (ring order)
		waveHeaderCount++;
		if (waveHeaderCount == HAE_WAVEIN_NUM_BUFFERS) {
		    waveHeaderCount = 0;
		}
	    }
	} // while (inner loop)
    } // while (outer loop to support flush())
	

    //printf("  PV_AudioWaveInFrameThread: reset\n");
    waveInReset(g_captureSound); // stop all audio before unpreparing headers
    
    /* send all pending captured buffers to the app */
    // note: starts at waveHeaderCount (the next unprocessed buffer in the
    // ring), not at 0, and stops at the first buffer that is not done
    count = 0;
    for (count = 0; count < HAE_WAVEIN_NUM_BUFFERS; count++) {
	pCurrentWaveHdr = &waveHeader[waveHeaderCount];
	if (pCurrentWaveHdr->dwFlags & WHDR_DONE) {
	    dwBytesRecorded = pCurrentWaveHdr->dwBytesRecorded;
	    lpData = pCurrentWaveHdr->lpData;
	    //printf("  PV_AudioWaveInFrameThread: callback\n");
	    (*g_captureDoneProc)(context, DATA_READY_CAPTURE, &lpData, (void *)&dwBytesRecorded);
	} else {
	    break;
	}
	waveHeaderCount++;
	if (waveHeaderCount == HAE_WAVEIN_NUM_BUFFERS) {
	    waveHeaderCount = 0;
	}
    }

    // unprepare headers
    for (count = 0; count < HAE_WAVEIN_NUM_BUFFERS; count++) {
	waveInUnprepareHeader(g_captureSound, &waveHeader[count], (INT32)sizeof(WAVEHDR));
    }
    // do this here, when we can't call it anymore.
    g_captureDoneProc = NULL;
    TRACE0("< PV_AudioWaveInFrameThread\n");
    g_activeWaveInThread = FALSE;
}
Exemple #9
0
// $$kk: 08.12.98 merge: changed this method to do convert to unsigned data if required by audio hardware
//
// Playback frame thread (Linux/OSS).  Mirrors the Solaris version but paces
// itself with SNDCTL_DSP_GETOPTR, whose count_info.bytes counter is in BYTES
// (not samples) -- see $$ay note below.  Each iteration renders
// HAE_LINUX_FRAMES_PER_BLOCK frames into g_audioBufferBlock, optionally
// converts 8-bit data to unsigned, writes the block to g_waveDevice
// (retrying partial writes), then sleeps until the device byte count catches
// up.  Exits when g_shutDownDoubleBuffer is set and clears
// g_activeDoubleBuffer so HAE_ReleaseAudioCard can tell the thread is gone.
void PV_AudioWaveOutFrameThread(void* context)
{
    char			*pFillBuffer;
    long			count, currentPos, lastPos, sampleFrameSize;
    int				i;
    int rc;
    int bytesWritten;
    int bytesToWrite;
    //int avail;
    count_info audio_info;

    ioctl(g_waveDevice, SNDCTL_DSP_GETOPTR, &audio_info);

    // calculate sample size for convertion of bytes to sample frames
    // NOTE(review): computed but unused on Linux since positions are in bytes
    sampleFrameSize = 1;

    if (g_bitSize == 16) {
        sampleFrameSize *= 2;
    }

    if (g_channels == 2) {
        sampleFrameSize *= 2;
    }

    // $$ay - sample count is in bytes for linux and not in samples
    // start two block-groups behind so the first pacing loop does not block
    lastPos = audio_info.bytes - ((g_audioByteBufferSize * HAE_LINUX_FRAMES_PER_BLOCK * 2));

    if (g_audioBufferBlock) {
        while ((g_activeDoubleBuffer) && (g_shutDownDoubleBuffer == FALSE)) {

            /* put sync count and XMicroseconds into relation */
            /* could be improved by using actual device sample count */
            g_checkpointMicros = XMicroseconds();
            g_checkpointSyncCount = GM_GetSyncTimeStamp();

            // Generate HAE_LINUX_FRAMES_PER_BLOCK frames of audio
            pFillBuffer = (char *)g_audioBufferBlock;
            for (count = 0; count < HAE_LINUX_FRAMES_PER_BLOCK; count++) {
                // Generate one frame audio
                HAE_BuildMixerSlice(context, pFillBuffer,
                                    g_audioByteBufferSize,
                                    g_audioFramesToGenerate);
                pFillBuffer += g_audioByteBufferSize;
                if (g_shutDownDoubleBuffer) {
                    break;	// time to quit
                }
            }

            // $$kk
            // for some solaris drivers, we must supply unsigned data when rendering 8 bit data
            // (flips the sign bit of every byte in the rendered block)
            if (g_convertUnsigned && (g_bitSize == 8)) {
                pFillBuffer = (char *)g_audioBufferBlock;
                for (i = 0; i < (g_audioByteBufferSize * HAE_LINUX_FRAMES_PER_BLOCK); i++) {
                    *pFillBuffer = (*pFillBuffer >= 0) ? (0x80 | *pFillBuffer) : (0x7F & *pFillBuffer);
                    pFillBuffer++;
                }
            }

            // $$jb: Changing the write() loop to handle cases when the
            // device is unavailable, or we can't write our entire buffer
            bytesWritten = 0;
            bytesToWrite = (g_audioByteBufferSize * HAE_LINUX_FRAMES_PER_BLOCK);

            while ( bytesToWrite > 0 ) {
#ifdef TODO
                // dead experimental code kept for reference (never compiled)
                // $$ay:  AARGH!!! Linux forces read to be non-blocking when opened for DUPLEX
                if (!g_openForCapture && g_supportsDuplex) {
                    int k, avail;

                    ioctl(g_waveDevice, SNDCTL_DSP_GETBLKSIZE, &avail);
                    k = read(g_waveDevice, dummyBuffer, avail);
                    //printf("AvailToRead = %d, Read = %d\n", avail, k);
                    //k = read(g_waveDevice, dummyBuffer, sizeof(dummyBuffer));
                    //printf("Read = %d\n", k);
                }
                ioctl(g_waveDevice, SNDCTL_DSP_GETBLKSIZE, &avail);
                if (bytesToWrite > avail)
                    rc = write(g_waveDevice, ((char *)g_audioBufferBlock+bytesWritten), avail);
                else
                    rc = write(g_waveDevice, ((char *)g_audioBufferBlock+bytesWritten), bytesToWrite);
                //printf("Wrote %d bytes\n", rc);
#endif

                //$$fb don't write when it's time to quit.
                if( g_shutDownDoubleBuffer) {
                    break;
                }
                rc = write(g_waveDevice, ((char *)g_audioBufferBlock+bytesWritten), bytesToWrite);
                if ( rc > 0 ) {
                    bytesWritten += rc;
                    bytesToWrite -= rc;
                } else {
                    // $$jb:  This happens when the device buffers cannot
                    // be written to.  Make sure we're not shutting down and
                    // sleep a bit so that we don't completely hog the CPU
                    if( g_shutDownDoubleBuffer == FALSE ) {
                        HAE_SleepFrameThread(context, HAE_LINUX_SOUND_PERIOD);
                    } else {
                        break;
                    }
                }
            }


            // O.k. We're done for now.
            // Let the rest of the system know we're done ....

            ioctl(g_waveDevice, SNDCTL_DSP_GETOPTR, &audio_info);
            currentPos = audio_info.bytes;
            // $$jb: We have successfully written all our bytes.
            // If we encountered a problem while writing, play.error will be 1.
            // This should be reset.

#ifdef TODO
            // dead code: references sunAudioHeader, which does not exist here
            // (carried over from the Solaris version; never compiled)
            if ( sunAudioHeader.play.error != 0 ) {
                AUDIO_INITINFO(&sunAudioHeader);
                sunAudioHeader.play.error = 0;
                ioctl(g_waveDevice, AUDIO_SETINFO, &sunAudioHeader);
            }
#endif

            // pace: wait until the device has played past our last target
            while ((currentPos < lastPos) && (g_shutDownDoubleBuffer == FALSE))	{
                HAE_SleepFrameThread(context, HAE_LINUX_SOUND_PERIOD);		// in ms

                ioctl(g_waveDevice, SNDCTL_DSP_GETOPTR, &audio_info);
                currentPos = audio_info.bytes;

                // $$jb: Removing the bit of code that breaks out
                // of this timing loop on sunAudioHeader.play.error != 0.
            }

            // advance the pacing target by one rendered block-group (in bytes)
            lastPos += (g_audioByteBufferSize * HAE_LINUX_FRAMES_PER_BLOCK);
            // ... and reschedule ourselves.
        }
        TRACE0("g_activeDoubleBuffer = FALSE;\n");
        g_activeDoubleBuffer = FALSE;
    }
}
Exemple #10
0
// This proc drives audio capture.  This proc both feeds buffers to the device, into
// which data is captured, and makes callbacks to deliver the captured data.
// It may be run in a separate thread.  In the case of Solaris, a separate thread is
// required  because read() is (supposed to be) a blocking call.
// When used with a Java thread, the context needs to be the  JNI environment pointer
// valid for the thread running the proc.
//
// Linux/OSS variant: polls SNDCTL_DSP_GETISPACE and only read()s when a full
// chunk is available, so reads never block; cooperates with flush() via
// g_flushMode.  Exits when g_captureShutdown is set and clears
// g_activeWaveInThread so HAE_StopAudioCapture can join.
void PV_AudioWaveInFrameThread(void* context) {

    uint_t			deviceByteBufferSize;
    char			*pFillBuffer;
    audio_buf_info info;
    int				currentBytesRead;
    int				totalBytesRead;
    long			buffersToRead;			// number of buffers to read from the device per callback
    long			bytesToReadPerBuffer;	// number of bytes to read from the device per buffer
    int firstTime = TRUE;

    //fprintf(stderr, "> PV_AudioWaveInFrameThread\n");

    g_activeWaveInThread = TRUE;

    // size our reads from the device's fragment (block) size
    ioctl(g_waveDevice, SNDCTL_DSP_GETBLKSIZE, &deviceByteBufferSize);

    buffersToRead = g_captureByteBufferSize / deviceByteBufferSize;

    if (buffersToRead == 0) {
	// capture buffer smaller than a device fragment: read half of it
	buffersToRead = 1;
	bytesToReadPerBuffer = g_captureByteBufferSize / 2;
    } else {
	bytesToReadPerBuffer = deviceByteBufferSize;
    }

    // flush anything we might have accumulated in the capture queue before starting the capture thread
    // $$ay:
    HAE_FlushAudioCapture();
    g_flushMode = FLUSHMODE_NONE;

    while (!g_captureShutdown) {
	pFillBuffer = (char *)g_captureBufferBlock;
	totalBytesRead = 0;
	currentBytesRead = 0;

	// now read the data from the device record buffer
	while (!g_captureShutdown && totalBytesRead < buffersToRead * bytesToReadPerBuffer) {
	    info.bytes = 0;
	    if (ioctl(g_waveDevice, SNDCTL_DSP_GETISPACE, &info) >= 0) {
		//printf("  dev/dsp has %d bytes available\n", (int) info.bytes);
	    	if (g_captureShutdown) {
		    /* read the remaining data */
		    // clamp so we never read past the end of the capture buffer
		    bytesToReadPerBuffer = info.bytes;
		    if (bytesToReadPerBuffer > g_captureByteBufferSize - totalBytesRead) {
		    	bytesToReadPerBuffer = g_captureByteBufferSize - totalBytesRead;
		    }
		}
		if (firstTime) {
		    /* need to trigger start of device with first read? */
		    // pretend enough data is available so the first read happens
		    info.bytes = bytesToReadPerBuffer;
		    firstTime = FALSE;
		}
		if (info.bytes >= bytesToReadPerBuffer
		    && g_flushMode == FLUSHMODE_NONE) {
		    // It returns the number of bytes read or an error code.
		    currentBytesRead = read(g_waveDevice, pFillBuffer, bytesToReadPerBuffer);
		    //printf("  read %d bytes\n", currentBytesRead);
		    // only advance on a successful read
		    if (currentBytesRead > 0) {
			pFillBuffer += currentBytesRead;
			totalBytesRead += currentBytesRead;
		    }
		} else {
		    if (g_flushMode == FLUSHMODE_FLUSHED) {
			break;
		    }
		    // not enough data yet (or a flush is pending): yield
		    HAE_SleepFrameThread(context, HAE_LINUX_SOUND_CAPTURE_PERIOD);
		}
	    } else if (!g_captureShutdown) {
		/* what to do here ? */
		// ioctl failed: back off rather than spin
		HAE_SleepFrameThread(context, HAE_LINUX_SOUND_CAPTURE_PERIOD);
	    }
	}
	if (g_flushMode == FLUSHMODE_FLUSHED) {
	    /* prevent callback */
	    // flushed data is discarded, not delivered
	    g_flushMode = FLUSHMODE_NONE;
	    //printf("capture frame thread: discarding %d bytes in response to flush\n", totalBytesRead);
	} else if (totalBytesRead > 0) {
	    // callback to deliver the captured data
	    (*g_captureDoneProc)(context, DATA_READY_CAPTURE, &g_captureBufferBlock, &totalBytesRead);
	}
    } // while
    //fprintf(stderr, "< PV_AudioWaveInFrameThread\n");
    g_activeWaveInThread = FALSE;
}