void primeBuffer() { OSStatus status; for (int i = 0; i < kNumberBuffers; ++i) { // 2 status = AudioQueueAllocateBuffer ( // 3 aqData.mQueue, // 4 aqData.bufferByteSize, // 5 &aqData.mBuffers[i] // 6 ); checkStatus(status); HandleOutputBuffer ( // 7 &aqData, // 8 aqData.mQueue, // 9 aqData.mBuffers[i] // 10 ); } #if 1 status = AudioQueuePrime ( aqData.mQueue, kNumberBuffers, NULL ); checkStatus(status); #endif }
// Primes every allocated queue buffer by handing it to the output callback,
// which fills it with audio data and enqueues it for playback.
void AudioOutputDeviceCoreAudio::FillBuffers() {
    for (int idx = 0; idx < uiBufferNumber; ++idx) {
        HandleOutputBuffer(&aqPlayerState,
                           aqPlayerState.mQueue,
                           aqPlayerState.mBuffers[idx]);
    }
}
void CAudioQueueManager::setupQueue() { OSStatus res = AudioQueueNewOutput(&_dataFormat, HandleOutputBuffer, this, CFRunLoopGetCurrent(), kCFRunLoopCommonModes, 0, &_queue); for (int i = 0; i < kNumberBuffers; i++) { res = AudioQueueAllocateBuffer(_queue, _bytesPerFrame, &_buffers[i]); HandleOutputBuffer(this, _queue, _buffers[i]); } if (_autoStart) { _isRunning = true; res = AudioQueueStart(_queue, NULL); } _isInitialized = true; }
void GbApuEmulator::beginApuPlayback() { // Reset the APU and Buffer // gbAPU->reset(false,0); gbAPU->reset(); blipBuffer->clear(true); // Prime the playback buffer for (int i = 0; i < NUM_BUFFERS; ++i) { HandleOutputBuffer(gbAPUState, gbAPUState->queue, gbAPUState->buffers[i]); } AudioQueuePrime(gbAPUState->queue, 0, NULL); gbAPUState->isRunning = true; AudioQueueStart(gbAPUState->queue, NULL); }
void playFile(const char* filePath) { CFURLRef audioFileURL = CFURLCreateFromFileSystemRepresentation(NULL, (UInt8*) filePath, strlen (filePath), false); OSStatus result = AudioFileOpenURL(audioFileURL, fsRdPerm, 0, &aqData.mAudioFile); CFRelease (audioFileURL); UInt32 dataFormatSize = sizeof (aqData.mDataFormat); AudioFileGetProperty(aqData.mAudioFile, kAudioFilePropertyDataFormat, &dataFormatSize, &aqData.mDataFormat); AudioQueueNewOutput(&aqData.mDataFormat, HandleOutputBuffer, &aqData, CFRunLoopGetCurrent(), kCFRunLoopCommonModes, 0, &aqData.mQueue); UInt32 maxPacketSize; UInt32 propertySize = sizeof (maxPacketSize); AudioFileGetProperty(aqData.mAudioFile, kAudioFilePropertyPacketSizeUpperBound, &propertySize, &maxPacketSize); DeriveBufferSize(&aqData.mDataFormat, maxPacketSize, 0.5, &aqData.bufferByteSize, &aqData.mNumPacketsToRead); bool isFormatVBR = (aqData.mDataFormat.mBytesPerPacket == 0 || aqData.mDataFormat.mFramesPerPacket == 0); if (isFormatVBR) { // LOG("%s\n","VBR"); aqData.mPacketDescs = (AudioStreamPacketDescription*) malloc (aqData.mNumPacketsToRead * sizeof (AudioStreamPacketDescription)); } else { aqData.mPacketDescs = NULL; } UInt32 cookieSize = sizeof (UInt32); bool couldNotGetProperty = AudioFileGetPropertyInfo (aqData.mAudioFile, kAudioFilePropertyMagicCookieData, &cookieSize, NULL); if (!couldNotGetProperty && cookieSize) { char* magicCookie = (char *) malloc (cookieSize); AudioFileGetProperty (aqData.mAudioFile, kAudioFilePropertyMagicCookieData, &cookieSize, magicCookie); AudioQueueSetProperty (aqData.mQueue, kAudioQueueProperty_MagicCookie, magicCookie, cookieSize); free (magicCookie); } aqData.mCurrentPacket = 0; aqData.mIsRunning = true; //LOG("%d\n", aqData.mNumPacketsToRead); for (int i = 0; i < kNumberBuffers; ++i) { AudioQueueAllocateBuffer (aqData.mQueue, aqData.bufferByteSize, &aqData.mBuffers[i]); HandleOutputBuffer (&aqData, aqData.mQueue, aqData.mBuffers[i]); } Float32 gain = 1.0; // Optionally, allow user to override gain 
setting here AudioQueueSetParameter (aqData.mQueue, kAudioQueueParam_Volume, gain); //LOG("%s\n","Starting play"); // IMPORTANT NOTE : This value must be set // Before the call to HandleOutputBuffer //a qData.mIsRunning = true; AudioQueueStart (aqData.mQueue, NULL); }