Example #1
//--------------------------------------------------------------------------------
OSStatus	BS2B::Initialize()
{
	OSStatus status = AUEffectBase::Initialize();
    
	if (status == noErr)
	{
		// reset the effect's DSP state now that base-class initialization has succeeded
		const AudioUnitElement elem = 0;
		Reset(kAudioUnitScope_Global, elem);
        
		if ( GetStreamFormat(kAudioUnitScope_Input, elem).mChannelsPerFrame != GetStreamFormat(kAudioUnitScope_Output, elem).mChannelsPerFrame )
		{
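			// the input and output channel counts differ, so in-place rendering is impossible; turn it off and notify the host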
			if ( ProcessesInPlace() )
			{
				SetProcessesInPlace(false);
				PropertyChanged(kAudioUnitProperty_InPlaceProcessing, kAudioUnitScope_Global, elem);
			}
		}
	}
    
	return status;
}
//--------------------------------------------------------------------------------
OSStatus	Stereo::Initialize()
{
	OSStatus status = AUEffectBase::Initialize();

	if (status == noErr)
	{
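		// allocate a delay buffer large enough to hold kBufferSize_Seconds of audio at the current sample rate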
		bufsize = (long) (kBufferSize_Seconds * GetSampleRate());
		buffer = (float*) malloc(bufsize * sizeof(float));

		const AudioUnitElement elem = 0;
		Reset(kAudioUnitScope_Global, elem);

		if ( GetStreamFormat(kAudioUnitScope_Input, elem).mChannelsPerFrame != GetStreamFormat(kAudioUnitScope_Output, elem).mChannelsPerFrame )
		{
			if ( ProcessesInPlace() )
			{
				SetProcessesInPlace(false);
				PropertyChanged(kAudioUnitProperty_InPlaceProcessing, kAudioUnitScope_Global, elem);
			}
		}
	}

	return status;
}
Example #3
OSStatus 	AUEffectBase::Render(	AudioUnitRenderActionFlags &ioActionFlags,
											const AudioTimeStamp &		inTimeStamp,
											UInt32						nFrames)
{
	if (!HasInput(0))
		return kAudioUnitErr_NoConnection;

	OSStatus result = noErr;

	result = mMainInput->PullInput(ioActionFlags, inTimeStamp, 0 /* element */, nFrames);

	if (result == noErr)
	{
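		// when rendering in place, point the output element at the input's buffer list rather than allocating a separate output buffer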
		if(ProcessesInPlace() && mMainOutput->WillAllocateBuffer())
		{
			mMainOutput->SetBufferList(mMainInput->GetBufferList() );
		}

		if (ShouldBypassEffect())
		{
			// leave silence bit alone

			if(!ProcessesInPlace() )
			{
				mMainInput->CopyBufferContentsTo (mMainOutput->GetBufferList());
			}
		}
		else
		{
			if(mParamList.size() == 0 )
			{
				// this will read/write silence bit
				result = ProcessBufferLists(ioActionFlags, mMainInput->GetBufferList(), mMainOutput->GetBufferList(), nFrames);
			}
			else
			{
				// deal with scheduled parameters...

				AudioBufferList &inputBufferList = mMainInput->GetBufferList();
				AudioBufferList &outputBufferList = mMainOutput->GetBufferList();

				ScheduledProcessParams processParams;
				processParams.actionFlags = &ioActionFlags;
				processParams.inputBufferList = &inputBufferList;
				processParams.outputBufferList = &outputBufferList;

				// divide up the buffer into slices according to scheduled params then
				// do the DSP for each slice (ProcessScheduledSlice() called for each slice)
				result = ProcessForScheduledParams(	mParamList,
													nFrames,
													&processParams );


				// fixup the buffer pointers to how they were before we started
				UInt32 channelSize = nFrames * mBytesPerFrame;
				for(unsigned int i = 0; i < inputBufferList.mNumberBuffers; i++ ) {
					UInt32 size = inputBufferList.mBuffers[i].mNumberChannels * channelSize;
					inputBufferList.mBuffers[i].mData = (char *)inputBufferList.mBuffers[i].mData - size;
					inputBufferList.mBuffers[i].mDataByteSize = size;
				}

				for(unsigned int i = 0; i < outputBufferList.mNumberBuffers; i++ ) {
					UInt32 size = outputBufferList.mBuffers[i].mNumberChannels * channelSize;
					outputBufferList.mBuffers[i].mData = (char *)outputBufferList.mBuffers[i].mData - size;
					outputBufferList.mBuffers[i].mDataByteSize = size;
				}
			}
		}

		if ( (ioActionFlags & kAudioUnitRenderAction_OutputIsSilence) && !ProcessesInPlace() )
		{
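			// the pulled input was silent, so explicitly clear the separately allocated output buffer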
			AUBufferList::ZeroBuffer(mMainOutput->GetBufferList() );
		}
	}

	return result;
}
OSStatus Talkbox::Render(AudioUnitRenderActionFlags & ioActionFlags, const AudioTimeStamp & inTimeStamp, UInt32 inFramesToProcess)
{
	if (! HasInput(0) )
	{
//fprintf(stderr, "no input bus 0 connection\n");
		return kAudioUnitErr_NoConnection;
	}

	OSStatus status = noErr;
	AUOutputElement * theOutput = GetOutput(0);	// throws if error

	AUInputElement * theInput = GetInput(0);
	status = theInput->PullInput(ioActionFlags, inTimeStamp, 0 /* element */, inFramesToProcess);
	
	if (status == noErr)
	{
		if ( ProcessesInPlace() )
		{
			theOutput->SetBufferList( theInput->GetBufferList() );
		}

		AUInputElement * theInput2 = NULL;
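		// fetch the optional second input bus (element 1), if anything is connected to it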
		try
		{
			if ( HasInput(1) )
				theInput2 = GetInput(1);
		}
		catch (...) { theInput2 = NULL; }
		if (theInput2 != NULL)
		{
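			// remember whether the main input was silent; a silent second input should not mark the whole output as silent when the main input carried signal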
			bool mainInputSilentFlag = (ioActionFlags & kAudioUnitRenderAction_OutputIsSilence) ? true : false;
			status = theInput2->PullInput(ioActionFlags, inTimeStamp, 1 /* element */, inFramesToProcess);
//if (status != noErr) fprintf(stderr, "PullInput(bus 1) error %ld\n", (long)status);
			if ( !mainInputSilentFlag && (ioActionFlags & kAudioUnitRenderAction_OutputIsSilence) )
				ioActionFlags &= ~kAudioUnitRenderAction_OutputIsSilence;
		}
		else
		{
			status = kAudioUnitErr_NoConnection;
//fprintf(stderr, "could not access input bus 1 connection\n");
		}

		if ( ShouldBypassEffect() || (status != noErr) )
		{
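			// bypassed (or the second input was unavailable): pass the main input through unchanged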
			status = noErr;
			// leave silence bit alone
			if (! ProcessesInPlace() )
			{
				theInput->CopyBufferContentsTo( theOutput->GetBufferList() );
			}
		}
		else
		{
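			// run each channel through its DSP kernel, feeding it both inputs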
			for (UInt32 i=0; i < numAllocatedChannels; i++)
			{
				dspKernels[i]->Process(theInput->GetChannelData(i), 
										theInput2->GetChannelData(i), 
										theOutput->GetChannelData(i), 
										inFramesToProcess);
			}
		}
	}

	return status;
}