// ---------------------------------------------------------- OSStatus PullCallback(void *inRefCon, AudioUnitRenderActionFlags *ioActionFlags, const AudioTimeStamp *inTimeStamp, UInt32 inBusNumber, UInt32 inNumberFrames, AudioBufferList *ioData) // ---------------------------------------------------------- { InputContext * ctx = static_cast<InputContext *>(inRefCon); size_t buffersToCopy = std::min<size_t>(ioData->mNumberBuffers, ctx->circularBuffers.size()); for(int i = 0; i < buffersToCopy; i++) { int32_t circBufferSize; Float32 * circBufferTail = (Float32 *) TPCircularBufferTail(&ctx->circularBuffers[i], &circBufferSize); bool circBufferHasEnoughSamples = circBufferSize / sizeof(Float32) >= inNumberFrames ? true : false; if(!circBufferHasEnoughSamples) { // clear buffer, so bytes that don't get written are silence instead of noise memset(ioData->mBuffers[i].mData, 0, ioData->mBuffers[i].mDataByteSize); } size_t bytesToConsume = min(ioData->mBuffers[i].mDataByteSize, (UInt32)circBufferSize); memcpy(ioData->mBuffers[i].mData, circBufferTail, bytesToConsume); TPCircularBufferConsume(&ctx->circularBuffers[i], bytesToConsume); } return noErr; }
// Internal helper: walk the chain of ABL blocks at the tail of the buffer and
// report how many frames of audio are queued.
//
// outTimestamp (optional) receives the timestamp of the first (oldest) block.
// contiguousToleranceSampleTime is the maximum allowed gap, in sample frames,
// between consecutive blocks for them to be counted as one contiguous run;
// pass UINT32_MAX to count every queued block regardless of gaps.
// Returns the queued frame count (byteCount / mBytesPerFrame), or 0 if empty.
static UInt32 _TPCircularBufferPeek(TPCircularBuffer *buffer, AudioTimeStamp *outTimestamp, const AudioStreamBasicDescription *audioFormat, UInt32 contiguousToleranceSampleTime) {
    int32_t availableBytes;
    TPCircularBufferABLBlockHeader *block = (TPCircularBufferABLBlockHeader*)TPCircularBufferTail(buffer, &availableBytes);
    if ( !block ) return 0;
    assert(!((unsigned long)block & 0xF) /* Beware unaligned accesses */);
    if ( outTimestamp ) {
        memcpy(outTimestamp, &block->timestamp, sizeof(AudioTimeStamp));
    }
    void *end = (char*)block + availableBytes;
    UInt32 byteCount = 0;
    while ( 1 ) {
        // Only buffer 0 is summed — presumably all buffers of a block carry
        // the same byte count (non-interleaved channels); TODO confirm.
        byteCount += block->bufferList.mBuffers[0].mDataByteSize;
        TPCircularBufferABLBlockHeader *nextBlock = (TPCircularBufferABLBlockHeader*)((char*)block + block->totalLength);
        // Stop at the end of queued data, or when the next block's sample
        // time is not contiguous (within tolerance) with the expected end of
        // this block. NOTE(review): the comparison truncates the Float64
        // sample-time difference through (long)/labs — confirm sample times
        // in use stay within long range.
        if ( (void*)nextBlock >= end || (contiguousToleranceSampleTime != UINT32_MAX && labs((long)(nextBlock->timestamp.mSampleTime - (block->timestamp.mSampleTime + (block->bufferList.mBuffers[0].mDataByteSize / audioFormat->mBytesPerFrame)))) > contiguousToleranceSampleTime) ) {
            break;
        }
        assert(!((unsigned long)nextBlock & 0xF) /* Beware unaligned accesses */);
        block = nextBlock;
    }
    return byteCount / audioFormat->mBytesPerFrame;
}
// Consume only the first framesToConsume frames of the tail buffer list,
// adjusting the block's buffer pointers/sizes and timestamp in place.
// If the request exactly covers the first buffer, the whole block is released
// through the normal consume path instead.
void TPCircularBufferConsumeNextBufferListPartial(TPCircularBuffer *buffer, int framesToConsume, AudioStreamBasicDescription *audioFormat) {
    assert(framesToConsume >= 0);
    int32_t dontcare;
    TPCircularBufferABLBlockHeader *block = (TPCircularBufferABLBlockHeader*)TPCircularBufferTail(buffer, &dontcare);
    if ( !block ) return;
    assert(!((unsigned long)block & 0xF)); // Beware unaligned accesses
    int bytesToConsume = framesToConsume * audioFormat->mBytesPerFrame;
    // Exact match for the first buffer: release the entire block.
    if ( bytesToConsume == block->bufferList.mBuffers[0].mDataByteSize ) {
        TPCircularBufferConsumeNextBufferList(buffer);
        return;
    }
    // Advance every buffer's data pointer past the consumed bytes. Only the
    // block header is mutated here; the underlying circular-buffer bytes are
    // not consumed until the whole block is.
    // NOTE(review): an over-large framesToConsume is only caught by the
    // assert below (debug builds) — confirm callers clamp the request.
    for ( int i=0; i<block->bufferList.mNumberBuffers; i++ ) {
        assert(bytesToConsume <= block->bufferList.mBuffers[i].mDataByteSize && (char*)block->bufferList.mBuffers[i].mData + bytesToConsume <= (char*)block+block->totalLength);
        block->bufferList.mBuffers[i].mData = (char*)block->bufferList.mBuffers[i].mData + bytesToConsume;
        block->bufferList.mBuffers[i].mDataByteSize -= bytesToConsume;
    }
    // Keep the block's timestamp in sync with the frames removed.
    if ( block->timestamp.mFlags & kAudioTimeStampSampleTimeValid ) {
        block->timestamp.mSampleTime += framesToConsume;
    }
    if ( block->timestamp.mFlags & kAudioTimeStampHostTimeValid ) {
        // Lazily initialize the seconds -> host-ticks conversion factor.
        if ( !__secondsToHostTicks ) {
            mach_timebase_info_data_t tinfo;
            mach_timebase_info(&tinfo);
            __secondsToHostTicks = 1.0 / (((double)tinfo.numer / tinfo.denom) * 1.0e-9);
        }
        block->timestamp.mHostTime += ((double)framesToConsume / audioFormat->mSampleRate) * __secondsToHostTicks;
    }
}
/* Drain or drop the circular buffer.
 * wait == true : poll until the render thread has consumed everything queued,
 *                sleeping roughly half of the queued duration between polls.
 * wait == false: discard any queued data immediately. */
void ca_Flush(audio_output_t *p_aout, bool wait)
{
    struct aout_sys_common *p_sys = (struct aout_sys_common *) p_aout->sys;

    if (!wait)
    {
        /* flush circular buffer if data is left */
        TPCircularBufferClear(&p_sys->circular_buffer);
        return;
    }

    for (;;)
    {
        int32_t i_queued_bytes;
        if (TPCircularBufferTail(&p_sys->circular_buffer, &i_queued_bytes) == NULL)
            break;

        /* Calculate the duration of the circular buffer, in order to wait
         * for the render thread to play it all */
        const mtime_t i_duration_us =
            FramesToUs(p_sys, BytesToFrames(p_sys, i_queued_bytes)) + 10000;
        msleep(i_duration_us / 2);
    }
}
/* Called from render callbacks. No lock, wait, and IO here */
void ca_Render(audio_output_t *p_aout, uint8_t *p_output, size_t i_requested)
{
    struct aout_sys_common *p_sys = (struct aout_sys_common *) p_aout->sys;

    /* While paused, emit pure silence. */
    if (atomic_load_explicit(&p_sys->b_paused, memory_order_relaxed))
    {
        memset(p_output, 0, i_requested);
        return;
    }

    /* Pull as much audio as is both queued and requested. */
    int32_t i_available;
    void *p_tail = TPCircularBufferTail(&p_sys->circular_buffer, &i_available);
    if (i_available < 0)
        i_available = 0;

    size_t i_copied = (size_t) i_available < i_requested
                    ? (size_t) i_available : i_requested;
    if (i_copied > 0)
    {
        memcpy(p_output, p_tail, i_copied);
        TPCircularBufferConsume(&p_sys->circular_buffer, i_copied);
    }

    /* Underrun: account for the shortfall and pad with silence. */
    if (i_copied < i_requested)
    {
        atomic_fetch_add(&p_sys->i_underrun_size, i_requested - i_copied);
        memset(&p_output[i_copied], 0, i_requested - i_copied);
    }
}
/* Report the current output delay: the time needed to play what is queued in
 * the circular buffer plus the device latency. Always returns 0 (success). */
int ca_TimeGet(audio_output_t *p_aout, mtime_t *delay)
{
    struct aout_sys_common *p_sys = (struct aout_sys_common *) p_aout->sys;

    int32_t i_queued_bytes;
    TPCircularBufferTail(&p_sys->circular_buffer, &i_queued_bytes);

    *delay = FramesToUs(p_sys, BytesToFrames(p_sys, i_queued_bytes))
           + p_sys->i_dev_latency_us;
    return 0;
}
/* Render callback: feeds the AudioUnit output buffers from the plugin's
 * circular buffer. On underrun the output is zero-filled (silence) and
 * playback is stopped. */
static OSStatus rdpsnd_ios_render_cb(
    void *inRefCon,
    AudioUnitRenderActionFlags __unused *ioActionFlags,
    const AudioTimeStamp __unused *inTimeStamp,
    UInt32 inBusNumber,
    UInt32 __unused inNumberFrames,
    AudioBufferList *ioData
)
{
    unsigned int i;

    /* Only bus 0 carries our audio. */
    if (inBusNumber != 0)
    {
        return noErr;
    }

    rdpsndIOSPlugin *p = THIS(inRefCon);

    for (i = 0; i < ioData->mNumberBuffers; i++)
    {
        AudioBuffer *target_buffer = &ioData->mBuffers[i];

        int32_t available_bytes = 0;
        const void *buffer = TPCircularBufferTail(&p->buffer, &available_bytes);
        if (buffer != NULL && available_bytes > 0)
        {
            const int bytes_to_copy = MIN((int32_t)target_buffer->mDataByteSize, available_bytes);

            memcpy(target_buffer->mData, buffer, bytes_to_copy);
            target_buffer->mDataByteSize = bytes_to_copy;

            TPCircularBufferConsume(&p->buffer, bytes_to_copy);
        }
        else
        {
            /* Underrun: zero the buffer so the hardware plays silence instead
             * of whatever stale bytes are left in it. */
            memset(target_buffer->mData, 0, target_buffer->mDataByteSize);
            target_buffer->mDataByteSize = 0;

            /* NOTE(review): stopping the audio unit from inside the render
             * callback is not realtime-safe; consider signalling a worker
             * thread instead. Kept for behavioral compatibility. */
            AudioOutputUnitStop(p->audio_unit);
            p->is_playing = 0;
        }
    }
    return noErr;
}
/* Consume the first framesToConsume frames of the tail buffer list, then
 * reposition the block header forward (16-byte aligned) so the consumed bytes
 * can actually be released back to the circular buffer. If the request covers
 * the whole first buffer, the entire block is released via the normal path. */
void TPCircularBufferConsumeNextBufferListPartial(TPCircularBuffer *buffer, int framesToConsume, const AudioStreamBasicDescription *audioFormat) {
    assert(framesToConsume >= 0);

    int32_t dontcare;
    TPCircularBufferABLBlockHeader *block = (TPCircularBufferABLBlockHeader*)TPCircularBufferTail(buffer, &dontcare);
    if ( !block ) return;
    assert(!((unsigned long)block & 0xF)); // Beware unaligned accesses

    // Clamp to what the first buffer actually holds, then recompute the frame
    // count from the clamped byte count so the timestamp advance below always
    // matches the audio actually consumed.
    int bytesToConsume = (int)min(framesToConsume * audioFormat->mBytesPerFrame, block->bufferList.mBuffers[0].mDataByteSize);
    int framesConsumed = bytesToConsume / audioFormat->mBytesPerFrame;

    // Exact match (including the clamped over-consume case): release the
    // whole block through the normal consume path.
    if ( bytesToConsume == block->bufferList.mBuffers[0].mDataByteSize ) {
        TPCircularBufferConsumeNextBufferList(buffer);
        return;
    }

    // Advance every buffer's data pointer past the consumed bytes.
    for ( int i=0; i<block->bufferList.mNumberBuffers; i++ ) {
        assert(bytesToConsume <= block->bufferList.mBuffers[i].mDataByteSize);
        block->bufferList.mBuffers[i].mData = (char*)block->bufferList.mBuffers[i].mData + bytesToConsume;
        block->bufferList.mBuffers[i].mDataByteSize -= bytesToConsume;
    }

    // Keep the block's timestamp in sync with the frames removed.
    if ( block->timestamp.mFlags & kAudioTimeStampSampleTimeValid ) {
        block->timestamp.mSampleTime += framesConsumed;
    }
    if ( block->timestamp.mFlags & kAudioTimeStampHostTimeValid ) {
        // Lazily initialize the seconds -> host-ticks conversion factor.
        if ( !__secondsToHostTicks ) {
            mach_timebase_info_data_t tinfo;
            mach_timebase_info(&tinfo);
            __secondsToHostTicks = 1.0 / (((double)tinfo.numer / tinfo.denom) * 1.0e-9);
        }
        block->timestamp.mHostTime += ((double)framesConsumed / audioFormat->mSampleRate) * __secondsToHostTicks;
    }

    // Reposition block forward, just before the audio data, ensuring 16-byte alignment
    TPCircularBufferABLBlockHeader *newBlock = (TPCircularBufferABLBlockHeader*)(((unsigned long)block + bytesToConsume) & ~0xFul);
    memmove(newBlock, block, sizeof(TPCircularBufferABLBlockHeader) + (block->bufferList.mNumberBuffers-1)*sizeof(AudioBuffer));

    // Compute the freed span with a real pointer difference. The previous
    // form, (int32_t)newBlock - (int32_t)block, truncated each 64-bit pointer
    // to 32 bits: if the two addresses straddle a 4 GiB boundary the
    // subtraction wraps and yields a wrong (even negative) byte count.
    int32_t bytesFreed = (int32_t)((char*)newBlock - (char*)block);
    newBlock->totalLength -= bytesFreed;
    TPCircularBufferConsume(buffer, bytesFreed);
}
/* Given one buffer list queued in the buffer, return the buffer list that
 * follows it (optionally copying out its timestamp), or NULL if the given
 * list is the last one queued. */
AudioBufferList *TPCircularBufferNextBufferListAfter(TPCircularBuffer *buffer, AudioBufferList *bufferList, AudioTimeStamp *outTimestamp) {
    int32_t availableBytes;
    void *tail = TPCircularBufferTail(buffer, &availableBytes);
    void *end = (char*)tail + availableBytes;

    /* The supplied list must live inside the queued region. */
    assert((void*)bufferList > (void*)tail && (void*)bufferList < end);

    /* Step back from the embedded buffer list to its enclosing block header. */
    TPCircularBufferABLBlockHeader *currentBlock =
        (TPCircularBufferABLBlockHeader*)((char*)bufferList - offsetof(TPCircularBufferABLBlockHeader, bufferList));
    assert(!((unsigned long)currentBlock & 0xF) /* Beware unaligned accesses */);

    TPCircularBufferABLBlockHeader *followingBlock =
        (TPCircularBufferABLBlockHeader*)((char*)currentBlock + currentBlock->totalLength);
    if ( (void*)followingBlock >= end ) {
        return NULL;
    }

    assert(!((unsigned long)followingBlock & 0xF) /* Beware unaligned accesses */);

    if ( outTimestamp ) {
        memcpy(outTimestamp, &followingBlock->timestamp, sizeof(AudioTimeStamp));
    }

    return &followingBlock->bufferList;
}
/* Count the frames queued in the buffer. Each queued entry is laid out as an
 * AudioTimeStamp, a UInt32 byte length, then that many bytes of
 * AudioBufferList data. Optionally reports the oldest entry's timestamp. */
UInt32 TPCircularBufferPeek(TPCircularBuffer *buffer, AudioTimeStamp *outTimestamp, AudioStreamBasicDescription *audioFormat) {
    int32_t availableBytes;
    AudioTimeStamp *entry = TPCircularBufferTail(buffer, &availableBytes);

    if ( entry && outTimestamp ) {
        *outTimestamp = *entry;
    }
    if ( !entry ) {
        return 0;
    }

    void *end = (char*)entry + availableBytes;
    UInt32 frameCount = 0;

    /* Walk every queued entry, summing the frames in its first buffer. */
    while ( (void*)entry < end ) {
        UInt32 *entryLength = (UInt32*)(entry+1);
        AudioBufferList *entryList = (AudioBufferList*)(entryLength+1);
        frameCount += entryList->mBuffers[0].mDataByteSize / audioFormat->mBytesPerFrame;
        /* Advance past the length word and the entry's payload. */
        entry = (AudioTimeStamp*)((char*)(entryLength+1) + *entryLength);
    }

    return frameCount;
}
// Like a contiguous peek, but treats sample times as wrapping at wrapPoint
// (pass 0 to disable wrapping). Counts the frames in the run of queued blocks
// whose timestamps are contiguous within contiguousToleranceSampleTime frames
// (UINT32_MAX disables the contiguity check entirely).
// outTimestamp (optional) receives the oldest block's timestamp.
UInt32 TPCircularBufferPeekContiguousWrapped(TPCircularBuffer *buffer, AudioTimeStamp *outTimestamp, const AudioStreamBasicDescription *audioFormat, UInt32 contiguousToleranceSampleTime, UInt32 wrapPoint) {
    int32_t availableBytes;
    TPCircularBufferABLBlockHeader *block = (TPCircularBufferABLBlockHeader*)TPCircularBufferTail(buffer, &availableBytes);
    if ( !block ) return 0;
    assert(!((unsigned long)block & 0xF) /* Beware unaligned accesses */);
    if ( outTimestamp ) {
        memcpy(outTimestamp, &block->timestamp, sizeof(AudioTimeStamp));
    }
    void *end = (char*)block + availableBytes;
    UInt32 byteCount = 0;
    while ( 1 ) {
        // Only buffer 0 is summed — presumably all buffers of a block carry
        // the same byte count (non-interleaved channels); TODO confirm.
        byteCount += block->bufferList.mBuffers[0].mDataByteSize;
        TPCircularBufferABLBlockHeader *nextBlock = (TPCircularBufferABLBlockHeader*)((char*)block + block->totalLength);
        if ( (void*)nextBlock >= end ) {
            break;
        }
        if ( contiguousToleranceSampleTime != UINT32_MAX ) {
            // Expected start time of the next block: end of this block,
            // folded back into [0, wrapPoint) when wrapping is enabled.
            UInt32 frames = block->bufferList.mBuffers[0].mDataByteSize / audioFormat->mBytesPerFrame;
            Float64 nextTime = block->timestamp.mSampleTime + frames;
            if ( wrapPoint && nextTime > wrapPoint ) nextTime = fmod(nextTime, wrapPoint);
            Float64 diff = fabs(nextBlock->timestamp.mSampleTime - nextTime);
            // Accept either a direct match or a match across the wrap
            // boundary (diff close to wrapPoint) within tolerance.
            if ( diff > contiguousToleranceSampleTime && (!wrapPoint || fabs(diff-wrapPoint) > contiguousToleranceSampleTime) ) {
                break;
            }
        }
        assert(!((unsigned long)nextBlock & 0xF) /* Beware unaligned accesses */);
        block = nextBlock;
    }
    return byteCount / audioFormat->mBytesPerFrame;
}
/* Discard everything currently queued in the circular buffer. */
void TPCircularBufferClear(TPCircularBuffer *buffer) {
    int32_t fillCount;
    void *tail = TPCircularBufferTail(buffer, &fillCount);
    if ( tail != NULL ) {
        TPCircularBufferConsume(buffer, fillCount);
    }
}
//This callback is used to feed the AU buffers static OSStatus rdpsnd_ios_render_cb( void *inRefCon, AudioUnitRenderActionFlags *ioActionFlags, const AudioTimeStamp *inTimeStamp, UInt32 inBusNumber, UInt32 inNumberFrames, AudioBufferList *ioData ) { unsigned int i; if (inBusNumber != 0) { return noErr; } rdpsndIOSPlugin *p = THIS(inRefCon); //printf("Playing %d frames... ", (unsigned int)inNumberFrames); //pthread_mutex_lock(&p->bMutex); for (i = 0; i < ioData->mNumberBuffers; i++) { //printf("buf%d ", i); /*printf("buf size = %d (%lums) ", (unsigned int)ioData->mBuffers[i].mDataByteSize, (ioData->mBuffers[i].mDataByteSize * 1000) / p->bpsAvg); */ AudioBuffer* target_buffer = &ioData->mBuffers[i]; int32_t available_bytes = 0; const void *buffer = TPCircularBufferTail(&p->buffer, &available_bytes); if (buffer != NULL && available_bytes > 0) { const int bytes_to_copy = MIN((int32_t)target_buffer->mDataByteSize, available_bytes); memcpy(target_buffer->mData, buffer, bytes_to_copy); target_buffer->mDataByteSize = bytes_to_copy; TPCircularBufferConsume(&p->buffer, bytes_to_copy); p->frameCnt += inNumberFrames; } else { *ioActionFlags = *ioActionFlags | kAudioUnitRenderAction_OutputIsSilence; //FIXME: force sending of any remaining items in queue if (Queue_Count(p->waveQ) > 0) { p->frameCnt += 1000000; } //in case we didnt get a post render callback first (observed) rdpsnd_count_frames(p); target_buffer->mDataByteSize = 0; AudioOutputUnitStop(p->audio_unit); //p->is_playing = 0; rdpsnd_set_isPlaying(p, FALSE); printf("Buffer is empty with frameCnt:%d(uderrun)\n", p->frameCnt); } } //pthread_mutex_unlock(&p->bMutex); return noErr; }