//Allocate Audio Buffer List(s) to hold the data from input.
OSStatus CAPlayThrough::SetupBuffers()
{
	OSStatus err = noErr;
	UInt32 bufferSizeFrames, bufferSizeBytes, propsize;

	CAStreamBasicDescription asbd, asbd_dev1_in, asbd_dev2_out;
	Float64 rate = 0;
	
	//Get the size of the IO buffer(s)
	UInt32 propertySize = sizeof(bufferSizeFrames);
	err = AudioUnitGetProperty(mInputUnit, kAudioDevicePropertyBufferFrameSize, kAudioUnitScope_Global, 0, &bufferSizeFrames, &propertySize);
	checkErr(err);
	bufferSizeBytes = bufferSizeFrames * sizeof(Float32);
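
	// AUHAL convention: element 1 is the input side of the unit, element 0 the
	// output side. For element 1 the Input scope carries the device format and
	// the Output scope the client format; for element 0 it is the reverse.
	// The three queries below rely on this.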
		
	//Get the input device's stream format (hardware side of the input element)
	propertySize = sizeof(asbd_dev1_in);
	err = AudioUnitGetProperty(mInputUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 1, &asbd_dev1_in, &propertySize);
	checkErr(err);
	//printf("=====Input DEVICE stream format\n" );	
	//asbd_dev1_in.Print();
	
	//Get the client-side stream format of the input element
	propertySize = sizeof(asbd);
	err = AudioUnitGetProperty(mInputUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &asbd, &propertySize);
	checkErr(err);
	//printf("=====current Input (Client) stream format\n");	
	//asbd.Print();	

	//Get the output device's stream format
	propertySize = sizeof(asbd_dev2_out);
	err = AudioUnitGetProperty(mOutputUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 0, &asbd_dev2_out, &propertySize);
	checkErr(err);
	//printf("=====Output (Device) stream format\n");	
	//asbd_dev2_out.Print();
	
	//////////////////////////////////////
	//Set the format of all the AUs to the input/output devices' channel count.
	//For this simple case, use the smaller of the input device's channel
	//count and the output device's.
	//////////////////////////////////////
	asbd.mChannelsPerFrame = (asbd_dev1_in.mChannelsPerFrame < asbd_dev2_out.mChannelsPerFrame) ? asbd_dev1_in.mChannelsPerFrame : asbd_dev2_out.mChannelsPerFrame;
	//printf("Info: Input Device channel count=%ld\t Output Device channel count=%ld\n",asbd_dev1_in.mChannelsPerFrame,asbd_dev2_out.mChannelsPerFrame);
	//printf("Info: CAPlayThrough will use %ld channels\n",asbd.mChannelsPerFrame);

	
	// We must get the sample rate of the input device and set it on the client-side stream format of AUHAL
	propertySize = sizeof(Float64);
	err = AudioDeviceGetProperty(mInputDevice.mID, 0, 1, kAudioDevicePropertyNominalSampleRate, &propertySize, &rate);
	checkErr(err);
	asbd.mSampleRate = rate;
	propertySize = sizeof(asbd);
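
	// Everything upstream of the varispeed runs at the input device's rate;
	// the varispeed then converts to the output device's rate set further below.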
	
	//Set the new formats to the AUs...
	err = AudioUnitSetProperty(mInputUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &asbd, propertySize);
	checkErr(err);	
	err = AudioUnitSetProperty(mVarispeedUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &asbd, propertySize);
	checkErr(err);
	
	//Set the correct sample rate for the output device, but keep the channel count the same
	propertySize = sizeof(Float64);
	err = AudioDeviceGetProperty(mOutputDevice.mID, 0, 0, kAudioDevicePropertyNominalSampleRate, &propertySize, &rate);
	checkErr(err);
	asbd.mSampleRate = rate;
	propertySize = sizeof(asbd);
	//Set the new audio stream formats for the rest of the AUs...
	err = AudioUnitSetProperty(mVarispeedUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 0, &asbd, propertySize);
	checkErr(err);	
	
	//Give the mixer the same stream format on both its input and output
	err = AudioUnitSetProperty(mMixerUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &asbd, propertySize);
	checkErr(err);	
	err = AudioUnitSetProperty(mMixerUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 0, &asbd, propertySize);
	checkErr(err);	
	
	err = AudioUnitSetProperty(mOutputUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &asbd, propertySize);
	checkErr(err);
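
	// The format chain is now: input AUHAL (client side) -> varispeed -> mixer -> output AUHAL.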

	// turn metering ON for the mixer
	UInt32 data = 1;
	err = AudioUnitSetProperty(mMixerUnit, kAudioUnitProperty_MeteringMode, kAudioUnitScope_Global, 0, &data, sizeof(data));
	checkErr(err);
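	// With metering enabled, levels can later be read back with
	// AudioUnitGetParameter (e.g. kStereoMixerParam_PostAveragePower,
	// assuming mMixerUnit is the stereo mixer).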

	//calculate the size of an AudioBufferList large enough for one buffer per channel
	propsize = offsetof(AudioBufferList, mBuffers[0]) + (sizeof(AudioBuffer) * asbd.mChannelsPerFrame);

	//malloc buffer lists
	mInputBuffer = (AudioBufferList *)malloc(propsize);
	mInputBuffer->mNumberBuffers = asbd.mChannelsPerFrame;
	
	//pre-malloc buffers for AudioBufferLists
	for (UInt32 i = 0; i < mInputBuffer->mNumberBuffers; i++) {
		mInputBuffer->mBuffers[i].mNumberChannels = 1;
		mInputBuffer->mBuffers[i].mDataByteSize = bufferSizeBytes;
		mInputBuffer->mBuffers[i].mData = malloc(bufferSizeBytes);
	}
	
	//Allocate the ring buffer that will hold data between the two audio devices
	mBuffer = new CARingBuffer();
	mBuffer->Allocate(asbd.mChannelsPerFrame, asbd.mBytesPerFrame, bufferSizeFrames * 20);
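	// A capacity of 20 I/O buffers gives the ring buffer slack to absorb
	// scheduling jitter between the two devices' independent IOProcs.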
	
    return err;
}
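
// checkErr() is the sample's error-handling macro (its definition is not part
// of this excerpt). A minimal sketch of the assumed behavior: log the OSStatus
// with file and line, then bail out of the calling function:
//
//    #define checkErr(err) \
//        if (err) { \
//            fprintf(stderr, "CAPlayThrough Error: %ld -> %s: %d\n", \
//                    (long)(err), __FILE__, __LINE__); \
//            return err; \
//        }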
//Allocate Audio Buffer List(s) to hold the data from input.
//(Revised version of SetupBuffers(): it also matches the varispeed's buffer
//size to the input device's, and feeds the output unit directly, without the mixer.)
OSStatus CAPlayThrough::SetupBuffers()
{
	OSStatus err = noErr;
	UInt32 bufferSizeFrames, bufferSizeBytes, propsize;

	CAStreamBasicDescription asbd, asbd_dev1_in, asbd_dev2_out;
	Float64 rate = 0;
	
	//Get the size of the IO buffer(s)
	UInt32 propertySize = sizeof(bufferSizeFrames);
	err = AudioUnitGetProperty(mInputUnit, kAudioDevicePropertyBufferFrameSize, kAudioUnitScope_Global, 0, &bufferSizeFrames, &propertySize);
	checkErr(err);
	bufferSizeBytes = bufferSizeFrames * sizeof(Float32);
	CAPT_DEBUG( "Input device buffer size is %ld frames.\n", bufferSizeFrames );
    
	UInt32 outBufferSizeFrames;
	propertySize = sizeof(outBufferSizeFrames);
	err = AudioUnitGetProperty(mOutputUnit, kAudioDevicePropertyBufferFrameSize, kAudioUnitScope_Global, 0, &outBufferSizeFrames, &propertySize);
	checkErr(err);
	CAPT_DEBUG( "Output device buffer size is %ld frames.\n", outBufferSizeFrames );

	//Force the varispeed to process in chunks of the input device's buffer size
	outBufferSizeFrames = bufferSizeFrames;
	propertySize = sizeof(outBufferSizeFrames);
	err = AudioUnitSetProperty(mVarispeedUnit, kAudioDevicePropertyBufferFrameSize, kAudioUnitScope_Global, 0, &outBufferSizeFrames, propertySize);
	CAPT_DEBUG( "Varispeed buffer size is %ld frames.\n", outBufferSizeFrames );
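
	// Matching the varispeed's slice size to the input device's buffer size
	// presumably keeps the amounts written to and read from the ring buffer
	// comparable, so neither side races too far ahead of the other.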
		
	//Get the input device's stream format (hardware side of the input element)
	propertySize = sizeof(asbd_dev1_in);
	err = AudioUnitGetProperty(mInputUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 1, &asbd_dev1_in, &propertySize);
	checkErr(err);
//	CAPT_DEBUG("=====Input DEVICE stream format\n" );	
//	asbd_dev1_in.Print();
	
	//Get the client-side stream format of the input element
	propertySize = sizeof(asbd);
	err = AudioUnitGetProperty(mInputUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &asbd, &propertySize);
	checkErr(err);
//	CAPT_DEBUG("=====current Input (Client) stream format\n");	
//	asbd.Print();	

	//Get the output device's stream format
	propertySize = sizeof(asbd_dev2_out);
	err = AudioUnitGetProperty(mOutputUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 0, &asbd_dev2_out, &propertySize);
	checkErr(err);
//	CAPT_DEBUG("=====Output (Device) stream format\n");	
//	asbd_dev2_out.Print();
	
	//////////////////////////////////////
	//Set the format of all the AUs to the input/output devices' channel count.
	//For this simple case, use the smaller of the input device's channel
	//count and the output device's.
	//////////////////////////////////////
	asbd.mChannelsPerFrame = (asbd_dev1_in.mChannelsPerFrame < asbd_dev2_out.mChannelsPerFrame) ? asbd_dev1_in.mChannelsPerFrame : asbd_dev2_out.mChannelsPerFrame;
	CAPT_DEBUG("Info: Input Device channel count=%ld\t Output Device channel count=%ld\n",asbd_dev1_in.mChannelsPerFrame,asbd_dev2_out.mChannelsPerFrame);
	CAPT_DEBUG("Info: CAPlayThrough will use %ld channels\n",asbd.mChannelsPerFrame);

	
	// We must get the sample rate of the input device and set it on the client-side stream format of AUHAL
	propertySize = sizeof(Float64);
	err = AudioDeviceGetProperty(mInputDevice.mID, 0, 1, kAudioDevicePropertyNominalSampleRate, &propertySize, &rate);
	checkErr(err);
	asbd.mSampleRate = rate;
	propertySize = sizeof(asbd);
	
	//Set the new formats to the AUs...
	err = AudioUnitSetProperty(mInputUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 1, &asbd, propertySize);
	checkErr(err);	
	err = AudioUnitSetProperty(mVarispeedUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &asbd, propertySize);
	checkErr(err);
	
	//Set the correct sample rate for the output device, but keep the channel count the same
	propertySize = sizeof(Float64);
	err = AudioDeviceGetProperty(mOutputDevice.mID, 0, 0, kAudioDevicePropertyNominalSampleRate, &propertySize, &rate);
	checkErr(err);
	asbd.mSampleRate = rate;
	propertySize = sizeof(asbd);
	//Set the new audio stream formats for the rest of the AUs...
	err = AudioUnitSetProperty(mVarispeedUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Output, 0, &asbd, propertySize);
	checkErr(err);	
	err = AudioUnitSetProperty(mOutputUnit, kAudioUnitProperty_StreamFormat, kAudioUnitScope_Input, 0, &asbd, propertySize);
	checkErr(err);
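
	// The format chain is now: input AUHAL (client side) -> varispeed -> output AUHAL.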

	//calculate the size of an AudioBufferList large enough for one buffer per channel
	propsize = offsetof(AudioBufferList, mBuffers[0]) + (sizeof(AudioBuffer) * asbd.mChannelsPerFrame);

	//malloc buffer lists
	mInputBuffer = (AudioBufferList *)malloc(propsize);
	mInputBuffer->mNumberBuffers = asbd.mChannelsPerFrame;
	
	//pre-malloc buffers for AudioBufferLists
	for (UInt32 i = 0; i < mInputBuffer->mNumberBuffers; i++) {
		mInputBuffer->mBuffers[i].mNumberChannels = 1;
		mInputBuffer->mBuffers[i].mDataByteSize = bufferSizeBytes;
		mInputBuffer->mBuffers[i].mData = malloc(bufferSizeBytes);
	}
	
	//Allocate the ring buffer that will hold data between the two audio devices
	mBuffer = new CARingBuffer();
	mBuffer->Allocate(asbd.mChannelsPerFrame, asbd.mBytesPerFrame, bufferSizeFrames * 20);

// Some test code to run the ring buffer through its paces...
//
//    CARingBuffer::SampleTime readTime = 0;
//    CARingBuffer::SampleTime writeTime = 0;
//    for ( int i = 0; i < 32768; i++ ) {
//        printf( "run buf %d\n", i );
//        UInt32 writeLength = 512;
//        UInt32 readLength = 256;//(i%2==0) ? 440 : 441;
//        OSStatus err = mBuffer->Store( mInputBuffer, writeLength, writeTime );
//        if( err != kCARingBufferError_OK )
//        {
//            printf( "HIT ERR IN Store()\n" );
//        }
//        writeTime += writeLength;
//        
//        err = mBuffer->Fetch( mInputBuffer, readLength, readTime );
//        if(err != kCARingBufferError_OK)
//        {
//            printf( "HIT ERR IN Fetch()\n" );
//            MakeBufferSilent (mInputBuffer);
//            SInt64 bufferStartTime, bufferEndTime;
//            mBuffer->GetTimeBounds(bufferStartTime, bufferEndTime);
//        }        
//        readTime += readLength;
//    }    
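//
// MakeBufferSilent() (used in the Fetch() error path above) is assumed to be
// the sample's helper that zeroes every buffer in an AudioBufferList; a
// minimal sketch:
//
//    inline void MakeBufferSilent(AudioBufferList *ioData)
//    {
//        for (UInt32 i = 0; i < ioData->mNumberBuffers; i++)
//            memset(ioData->mBuffers[i].mData, 0, ioData->mBuffers[i].mDataByteSize);
//    }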
	
    return err;
}