Example #1
OSStatus	CAAUProcessor::PostProcess (AudioBufferList 	*ioData, 
										UInt32 				&ioNumFrames, 
										bool				&outIsSilence,
										bool 				&outDone)
{
	if (IsOfflineAU() || !IsOfflineContext()) 
		return kAudioUnitErr_CannotDoInCurrentContext;
	
	outDone = false;
	
		// we have fewer samples remaining than we've been asked to process
	if (mTailSamplesRemaining <= SInt32(ioNumFrames)) {
		outDone = true;
		ioNumFrames = mTailSamplesRemaining > 0 ? mTailSamplesRemaining : 0;
		SetBufferListToNumFrames (*ioData, ioNumFrames);
		if (ioNumFrames == 0)
			return noErr;
	}
	
	AudioUnitRenderActionFlags renderFlags = 0;
	OSStatus result;
	ca_require_noerr (result = mUnit.Render (&renderFlags, &mRenderTimeStamp, 0, ioNumFrames, ioData), home);
	mRenderTimeStamp.mSampleTime += ioNumFrames;
	mTailSamplesRemaining -= ioNumFrames;
	outIsSilence = (renderFlags & kAudioUnitRenderAction_OutputIsSilence);
			
	if (outDone) {
		ca_require_noerr (result = SetInputCallback (mUnit, mUserCallback), home);
		mUnit.GlobalReset (); //flush this out, as we're done with this phase
	}
home:
	return result;
}		
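
A minimal usage sketch for the tail phase above, assuming a CAAUProcessor that has already been initialised for an offline-context run of a regular (non-offline) AU and fully rendered through its input, and reusing the AUOutputBL helper and MaxFramesPerRender() accessor that appear in the later examples; WriteFrames() is a hypothetical sink for the produced audio, not part of the API.

// Sketch only: drain the processor's tail after the main render phase is done.
static OSStatus DrainTail (CAAUProcessor &proc, const CAStreamBasicDescription &outputFormat)
{
	UInt32 maxFrames = proc.MaxFramesPerRender();		// see Example #9
	AUOutputBL outputList (outputFormat, maxFrames);	// as in Example #5
	bool done = false;

	while (!done) {
		UInt32 numFrames = maxFrames;
		bool isSilence = false;
		outputList.Prepare ();

		OSStatus result = proc.PostProcess (outputList.ABL(), numFrames, isSilence, done);
		if (result)
			return result;
		if (numFrames > 0)
			WriteFrames (outputList.ABL(), numFrames);	// hypothetical consumer
	}
	return noErr;
}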
Example #2
void	PrintMatrixMixerVolumes (FILE* file, AudioUnit au)
{
	UInt32 dims[2];
	UInt32 theSize =  sizeof(UInt32) * 2;
	Float32 *theVols = NULL;
	OSStatus result;
	
	// this call will fail if the unit is NOT initialized, as that would present an incomplete state
	ca_require_noerr (result = AudioUnitGetProperty (au, kAudioUnitProperty_MatrixDimensions,	
							kAudioUnitScope_Global, 0, dims, &theSize), home);

	theSize = ((dims[0] + 1) * (dims[1] + 1)) * sizeof(Float32);
	
	theVols	= static_cast<Float32*> (malloc (theSize));
	
	ca_require_noerr (result = AudioUnitGetProperty (au, kAudioUnitProperty_MatrixLevels,	
							kAudioUnitScope_Global, 0, theVols, &theSize), home);

home:
	if (result) {
		if (theVols)
			free(theVols);
		return;
	}
	
	theSize /= sizeof(Float32);
	
	unsigned int inputs = dims[0];
	unsigned int outputs = dims[1];

	fprintf (file, "\tInput Channels = %d, Output Channels = %d\n", (int)dims[0], (int)dims[1]);
	PrintBuses (file, "Input", au, kAudioUnitScope_Input);
	PrintBuses (file, "Output", au, kAudioUnitScope_Output);
	fprintf (file, "\tGlobal Volume: %.3f\n", theVols [theSize - 1]);
	for (unsigned int i = 0; i < (inputs + 1); ++i) {
		if (i < inputs) {
			fprintf (file, "\t%.3f   ", theVols[(i + 1) * (outputs + 1) - 1]);
			
			for (unsigned int j = 0; j < outputs; ++j)
				fprintf (file, "(%.3f) ", theVols[(i * (outputs  + 1)) + j]);
		} else {
			fprintf (file, "\t        ");
			for (unsigned int j = 0; j < outputs; ++j)
				fprintf (file, " %.3f  ", theVols[(i * (outputs + 1)) + j]);
		}
		fprintf (file, "\n");
	}

#if 0
	for (unsigned int i = 0; i < theSize; ++i)
		printf ("%f, ", theVols[i]);
#endif
	free(theVols);
}
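
A sketch of how PrintMatrixMixerVolumes might be exercised against Apple's matrix mixer; as the comment above notes, the unit has to be initialised before kAudioUnitProperty_MatrixDimensions is readable. Only standard AudioToolbox component calls are used here.

#include <AudioToolbox/AudioToolbox.h>
#include <cstdio>

// Sketch: open Apple's matrix mixer, initialise it, and dump its gain matrix.
static void DumpMatrixMixer ()
{
	AudioComponentDescription desc = { kAudioUnitType_Mixer, kAudioUnitSubType_MatrixMixer,
										kAudioUnitManufacturer_Apple, 0, 0 };

	AudioComponent comp = AudioComponentFindNext (NULL, &desc);
	if (comp == NULL) return;

	AudioUnit au = NULL;
	if (AudioComponentInstanceNew (comp, &au) != noErr) return;

	if (AudioUnitInitialize (au) == noErr) {	// required before reading the matrix dimensions
		PrintMatrixMixerVolumes (stdout, au);
		AudioUnitUninitialize (au);
	}
	AudioComponentInstanceDispose (au);
}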
Example #3
OSStatus ComponentBase_GetComponentDescription (const AudioComponentInstance & inInstance, AudioComponentDescription & outDesc)
{
		// we prefer to use the new API. If it is not available, however, we have to fall back to the Component Manager one.
	typedef AudioComponent (*AudioComponentInstanceGetComponentProc) (AudioComponentInstance);
	static AudioComponentInstanceGetComponentProc aciGCProc = NULL;
	
	typedef OSStatus (*AudioComponentGetDescriptionProc)(AudioComponent, AudioComponentDescription *);
	static AudioComponentGetDescriptionProc acGDProc = NULL;
	
	typedef OSErr (*GetComponentInfoProc)(Component, ComponentDescription *, void*, void*, void*);
	static GetComponentInfoProc gciProc = NULL;

	static int doneInit = 0;
	if (doneInit == 0) {
		doneInit = 1;
		bool loadCMgr = true;
		
		void* theImage = dlopen("/System/Library/Frameworks/AudioUnit.framework/AudioUnit", RTLD_LAZY);
		if (theImage != NULL)
		{
			//	we assume that all routine names passed here have a leading underscore which gets shaved
			//	off when passed to dlsym
			aciGCProc = (AudioComponentInstanceGetComponentProc)dlsym (theImage, "AudioComponentInstanceGetComponent");
			if (aciGCProc) {
				acGDProc = (AudioComponentGetDescriptionProc)dlsym (theImage, "AudioComponentGetDescription");
				if (acGDProc)
					loadCMgr = false;
			}
		}
		if (loadCMgr) {
			theImage = dlopen("/System/Library/Frameworks/CoreServices.framework/CoreServices", RTLD_LAZY);
			if (theImage != NULL)
			{	
				gciProc = (GetComponentInfoProc)dlsym (theImage, "GetComponentInfo");
			}
		}
	}
	
	OSStatus result = noErr;
	if (acGDProc && aciGCProc) {
		AudioComponent comp = (*aciGCProc)(inInstance);
		XAssert (comp);
		if (comp) {
			ca_require_noerr(result = (*acGDProc)(comp, &outDesc), home);
		} else
			ca_require_noerr(result = -1, home);
	
	} else if (gciProc) {
		// in this case we know that inInstance is directly castable to a Component
		ca_require_noerr(result = (*gciProc)((Component)inInstance, (ComponentDescription*)&outDesc, NULL, NULL, NULL), home);
	}
home:
	return result;
}
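
One way the resolved description might be consumed: a small helper that prints the type, subtype, and manufacturer as four-character codes. Only the call to ComponentBase_GetComponentDescription comes from the example above; the FourCC unpacking assumes the usual big-endian OSType layout.

// Sketch: print an opened instance's description as four-char codes ('aumx', 'appl', ...).
static void PrintFourCC (FILE *file, const char *label, OSType code)
{
	char s[5] = { char((code >> 24) & 0xFF), char((code >> 16) & 0xFF),
				  char((code >> 8) & 0xFF),  char(code & 0xFF), 0 };
	fprintf (file, "%s'%s'  ", label, s);
}

static void PrintComponentDescription (FILE *file, AudioComponentInstance inInstance)
{
	AudioComponentDescription desc;
	if (ComponentBase_GetComponentDescription (inInstance, desc) == noErr) {
		PrintFourCC (file, "type=", desc.componentType);
		PrintFourCC (file, "subtype=", desc.componentSubType);
		PrintFourCC (file, "manu=", desc.componentManufacturer);
		fprintf (file, "\n");
	}
}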
Example #4
OSStatus		CAAUProcessor::Reinitialize (UInt32 inNewMaxFrames)
{
	OSStatus result;
	CAStreamBasicDescription inputDesc, outputDesc;
	
	ca_require_noerr (result = mUnit.GetFormat (kAudioUnitScope_Input, 0, inputDesc), home);
	ca_require_noerr (result = mUnit.GetFormat (kAudioUnitScope_Output, 0, outputDesc), home);
	
	ca_require_noerr (result = DoInitialisation (inputDesc, outputDesc, mNumInputSamples, inNewMaxFrames), home);
	
home:
	return result;
}
Example #5
OSStatus 	Preroll (CAAudioUnit & inAU, UInt32 inFrameSize)
{
	CAStreamBasicDescription desc;
	OSStatus result = inAU.GetFormat (kAudioUnitScope_Input, 0, desc);
	bool hasInput = false;
			// the AU has an input scope, so install a pull callback for the preroll
	if (result == noErr) 
	{
		sRenderCallback.inputProc = PrerollRenderProc;
		sRenderCallback.inputProcRefCon = 0;
		
		result = inAU.SetProperty (kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Input, 
								0, &sRenderCallback, sizeof(sRenderCallback));
		if (result) return result;
		hasInput = true;
	}
	
	AudioUnitRenderActionFlags flags = 0;
	AudioTimeStamp time;
	memset (&time, 0, sizeof(time));
	time.mFlags = kAudioTimeStampSampleTimeValid;

	CAStreamBasicDescription outputFormat;
	ca_require_noerr (result = inAU.GetFormat (kAudioUnitScope_Output, 0, outputFormat), home);
	{
		AUOutputBL list (outputFormat, inFrameSize);
		list.Prepare ();
		
		ca_require_noerr (result = inAU.Render (&flags, &time, 0, inFrameSize, list.ABL()), home);
	}

home:
	if (hasInput) {
		// remove our installed callback
		sRenderCallback.inputProc = 0;
		sRenderCallback.inputProcRefCon = 0;
		
		inAU.SetProperty (kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Input, 
								0, &sRenderCallback, sizeof(sRenderCallback));
	}
	return result;
}
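
Preroll() relies on a file-static sRenderCallback and a PrerollRenderProc that are not part of the listing. A minimal sketch of what they might look like, assuming the preroll only needs to feed silence to the unit's input (the callback signature is the standard AURenderCallback):

// Sketch of the pieces Preroll() assumes: a static callback struct and an input
// render proc that supplies silent buffers while the unit is being prerolled.
static AURenderCallbackStruct sRenderCallback;

static OSStatus PrerollRenderProc (void						*inRefCon,
									AudioUnitRenderActionFlags	*ioActionFlags,
									const AudioTimeStamp		*inTimeStamp,
									UInt32						inBusNumber,
									UInt32						inNumberFrames,
									AudioBufferList				*ioData)
{
	// zero every buffer and flag the result as silence
	for (UInt32 i = 0; i < ioData->mNumberBuffers; ++i)
		memset (ioData->mBuffers[i].mData, 0, ioData->mBuffers[i].mDataByteSize);
	if (ioActionFlags)
		*ioActionFlags |= kAudioUnitRenderAction_OutputIsSilence;
	return noErr;
}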
Example #6
AudioComponentDescription ComponentBase::GetComponentDescription() const
{
	AudioComponentDescription desc;
	OSStatus result;
	
#if CA_DO_NOT_USE_AUDIO_COMPONENT
	ca_require_noerr (result = ComponentBase_GetComponentDescription (mComponentInstance, desc), home);
#else
	AudioComponent comp = AudioComponentInstanceGetComponent(mComponentInstance);
	XAssert (comp);
	if (comp) {
		ca_require_noerr(result = AudioComponentGetDescription(comp, &desc), home);
	} else
		ca_require_noerr(result = -1, home);
#endif

home:
	if (result)
		memset (&desc, 0, sizeof(AudioComponentDescription));
	return desc;
}
Example #7
OSStatus 	CAAUProcessor::OfflineAUPreflight (UInt32 inNumFrames, bool &outIsDone)
{
	if (!IsOfflineAU())
		return -50/*paramErr*/;
	if (mNumInputSamples == 0)
		return -50/*paramErr*/;

	UInt32 preflightRequirements;
	UInt32 size = sizeof(preflightRequirements);
	OSStatus result;
	ca_require_noerr (result = mUnit.GetProperty (kAudioUnitOfflineProperty_PreflightRequirements,
												kAudioUnitScope_Global, 0,
												&preflightRequirements, &size), home);
												
		// 0 indicates none, otherwise optional or required -> we do it for either
	if (preflightRequirements) 
	{
		AudioUnitRenderActionFlags renderFlags = kAudioOfflineUnitRenderAction_Preflight;
		mPreflightABL->Prepare();
		ca_require_noerr (result = mUnit.Render (&renderFlags, &mRenderTimeStamp, 0, inNumFrames, mPreflightABL->ABL()), home);
		mRenderTimeStamp.mSampleTime += inNumFrames;
		
		if (renderFlags & kAudioOfflineUnitRenderAction_Complete) {
			outIsDone = true;
			mRenderTimeStamp.mSampleTime = 0;
			mPreflightDone = true;
			mLastPercentReported = 0;
		}
	}
	else
	{
		outIsDone = true;
		mRenderTimeStamp.mSampleTime = 0;
		mPreflightDone = true;
		mLastPercentReported = 0;
	}
	
home:
	return result;
}
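
A sketch of driving the preflight above to completion, assuming an already-initialised CAAUProcessor whose offline input size has been set (see Example #10) and whose MaxFramesPerRender() accessor is available as in Example #9:

// Sketch: run the offline AU's preflight phase until it reports completion.
static OSStatus RunOfflinePreflight (CAAUProcessor &proc)
{
	const UInt32 framesPerCall = proc.MaxFramesPerRender();
	bool done = false;
	OSStatus result = noErr;

	while (!done) {
		result = proc.OfflineAUPreflight (framesPerCall, done);
		if (result)
			break;		// paramErr (-50) or a render error from the code above
	}
	return result;
}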
Example #8
OSStatus PrintBuses (FILE* file, const char* str, AudioUnit au, AudioUnitScope inScope)
{
	OSStatus result;
	UInt32 busCount;
	UInt32 theSize = sizeof(busCount);

	ca_require_noerr (result = AudioUnitGetProperty (au, kAudioUnitProperty_ElementCount,	
							inScope, 0, &busCount, &theSize), home);
		
	fprintf (file, "\t%s Elements:\n\t\t", str);
	for (UInt32 i = 0; i < busCount; ++i) {
		Float32 val;
		ca_require_noerr (result = AudioUnitGetParameter (au, kMatrixMixerParam_Enable, inScope, i, &val), home);
		UInt32 numChans;
		ca_require_noerr (result = NumberChannels (au, inScope, i, numChans), home);
		char frameCharStart = (val != 0 ? '[' : '{');
		char frameCharEnd = (val != 0 ? ']' : '}');
		fprintf (file, "%d:%c%d, %c%c  ", (int)i, frameCharStart, (int)numChans, (val != 0 ? 'T' : 'F'), frameCharEnd);
	}
	fprintf (file, "\n");
home:
	return result;
}
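
PrintBuses depends on a NumberChannels() helper that is not shown. One plausible implementation reads the channel count out of the element's stream format:

// Sketch of the NumberChannels() helper used above: report how many channels a
// given scope/element is configured for, via its stream format property.
static OSStatus NumberChannels (AudioUnit au, AudioUnitScope inScope,
								AudioUnitElement inElement, UInt32 &outChans)
{
	AudioStreamBasicDescription fmt;
	UInt32 size = sizeof(fmt);
	OSStatus result = AudioUnitGetProperty (au, kAudioUnitProperty_StreamFormat,
								inScope, inElement, &fmt, &size);
	if (result == noErr)
		outChans = fmt.mChannelsPerFrame;
	return result;
}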
Example #9
OSStatus	CAAUProcessor::Preflight (bool inProcessPreceedingTail)
{
		//we're preflighting again, so reset ourselves
	if (mPreflightDone) {
		mPreflightDone = false;
		// the time stamp we use with the AU Render - only sample count is valid
		memset (&mRenderTimeStamp, 0, sizeof(mRenderTimeStamp));
		mRenderTimeStamp.mFlags = kAudioTimeStampSampleTimeValid;
		mUnit.GlobalReset();
	}

	Float64 sampleRate;
	OSStatus result = mUnit.GetSampleRate (kAudioUnitScope_Output, 0, sampleRate);
	CalculateRemainderSamples (sampleRate);

	UInt32 numFrames = MaxFramesPerRender();
	if (numFrames == 0)
		return kAudioUnitErr_InvalidProperty;
	
	if (!IsOfflineAU()) 
	{
		if ((IsOfflineContext() == false && inProcessPreceedingTail) || IsOfflineContext())
		{
			// re-establish the user's input callback
			ca_require_noerr (result = SetInputCallback (mUnit, mUserCallback), home);

			// Consume the number of input samples indicated by the AU's latency or tail
			// based on whether the AU is being used in an offline context or not.
			UInt32 latSamps = IsOfflineContext() ? mLatencySamples : mTailSamples;	
			
			while (latSamps > 0)
			{
				if (latSamps < numFrames)
					numFrames = latSamps;
					
				// process the samples (the unit's input callback will read the samples
				// from the file and convert them to float for processing)
				AudioUnitRenderActionFlags renderFlags = 0;
				mPreflightABL->Prepare();
				ca_require_noerr (result = mUnit.Render (&renderFlags, &mRenderTimeStamp, 0, numFrames, mPreflightABL->ABL()), home);
		
				mRenderTimeStamp.mSampleTime += numFrames;
				latSamps -= numFrames;
			}
			if (IsOfflineContext())
				mRenderTimeStamp.mSampleTime = mLatencySamples;
		}
		else
		{
			// processing real-time but not processing the preceding tail, so we should preroll the AU
			ca_require_noerr (result = Preroll(mUnit, numFrames), home);
			
			// re-establish the user's input callback
			ca_require_noerr (result = SetInputCallback (mUnit, mUserCallback), home);
			
			mRenderTimeStamp.mSampleTime = 0;
		}
	}
#if !TARGET_OS_IPHONE
	else
	{
			// re-establish the user's input callback
		ca_require_noerr (result = SetInputCallback (mUnit, mUserCallback), home);
		
		UInt32 preflightRequirements;
		UInt32 size = sizeof(preflightRequirements);
		ca_require_noerr (result = mUnit.GetProperty (kAudioUnitOfflineProperty_PreflightRequirements,
												kAudioUnitScope_Global, 0,
												&preflightRequirements, &size), home);
												
			// 0 indicates none, otherwise optional or required -> we do it for either
		if (preflightRequirements) 
		{
			for (;;) {
				// here we need to do the preflight loop - we don't expect any data back, but have to 
				// give the offline unit all of its input data to allow it to prepare its processing
				AudioUnitRenderActionFlags renderFlags = kAudioOfflineUnitRenderAction_Preflight;
				mPreflightABL->Prepare();
				ca_require_noerr (result = mUnit.Render (&renderFlags, &mRenderTimeStamp, 0, numFrames, mPreflightABL->ABL()), home);
				mRenderTimeStamp.mSampleTime += numFrames;
		
				if (renderFlags & kAudioOfflineUnitRenderAction_Complete)
					break;
			}
		}
		// the time stamp we use with the AU Render - only sample count is valid
		mRenderTimeStamp.mSampleTime = 0;
	}
#endif

	if (result == noErr) {
		mPreflightDone = true;
	}
	
home:
	return result;
}
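
Preflight() (and most of the other CAAUProcessor examples) re-installs the caller's render callback through a SetInputCallback() helper that is not part of the listing. A sketch of what it might look like, mirroring the kAudioUnitProperty_SetRenderCallback usage from Example #5:

// Sketch: attach the user's render callback to input element 0 of the unit.
static OSStatus SetInputCallback (CAAudioUnit &inUnit, AURenderCallbackStruct &inInputCallback)
{
	return inUnit.SetProperty (kAudioUnitProperty_SetRenderCallback,
								kAudioUnitScope_Input, 0,
								&inInputCallback, sizeof(inInputCallback));
}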
Example #10
OSStatus		CAAUProcessor::DoInitialisation (const CAStreamBasicDescription 	&inInputFormat,
												const CAStreamBasicDescription 		&inOutputFormat,
												UInt64								inNumInputSamples,
												UInt32 								inMaxFrames)
{
	OSStatus result;
	
	if (inNumInputSamples == 0 && IsOfflineAU())
		return kAudioUnitErr_InvalidOfflineRender;
		
	mNumInputSamples = inNumInputSamples;
	
		// first check that we can do this number of channels
	if (mUnit.CanDo (inInputFormat.NumberChannels(), inOutputFormat.NumberChannels()) == false)
		ca_require_noerr (result = kAudioUnitErr_FailedInitialization, home);
	
	// just uninitialise the AU as a matter of course
	ca_require_noerr (result = mUnit.Uninitialize(), home);

	ca_require_noerr (result = mUnit.SetFormat (kAudioUnitScope_Input, 0, inInputFormat), home); 
	ca_require_noerr (result = mUnit.SetFormat (kAudioUnitScope_Output, 0, inOutputFormat), home); 
	ca_require_noerr (result = SetMaxFramesPerRender (inMaxFrames), home);
	
#if !TARGET_OS_IPHONE
		// if we're any AU but an offline AU, we should tell it whether we're processing offline
	if (!IsOfflineAU()) {
		UInt32 isOffline = (IsOfflineContext() ? 1 : 0);
			// we don't care whether this succeeds or fails, as many AUs ignore this property,
			// but for the ones that do care it's important that they are told their render context
		mUnit.SetProperty (kAudioUnitProperty_OfflineRender, kAudioUnitScope_Global, 0, &isOffline, sizeof(isOffline));
	} else {
			// tell the offline unit how many input samples we wish to process...
		mUnit.SetProperty (kAudioUnitOfflineProperty_InputSize,
												kAudioUnitScope_Global, 0,
												&mNumInputSamples, sizeof(mNumInputSamples));
	}
#endif
	
	ca_require_noerr (result = mUnit.Initialize(), home);

	ca_require_noerr (result = SetInputCallback (mUnit, mUserCallback), home);
	
	// finally reset our time stamp
	// the time stamp we use with the AU Render - only sample count is valid
	memset (&mRenderTimeStamp, 0, sizeof(mRenderTimeStamp));
	mRenderTimeStamp.mFlags = kAudioTimeStampSampleTimeValid;

	// now, if we're NOT an offline AU, preflighting is not required
	// if we are an offline AU, we should preflight; an offline AU will tell us when its preflighting is done
	mPreflightDone = false;

	if (mPreflightABL) {
		delete mPreflightABL;
		mPreflightABL = NULL;
	}
	
	mPreflightABL = new AUOutputBL (inOutputFormat);

	mLastPercentReported = 0;
	
home:
	return result;
}
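
A sketch of setting a processor up for a non-offline run, assuming CAStreamBasicDescription can be filled through its AudioStreamBasicDescription base; the stereo non-interleaved Float32 format and the 512-frame maximum are arbitrary choices for illustration, not requirements of the API.

// Sketch: describe a non-interleaved stereo Float32 stream and initialise the processor with it.
static OSStatus SetUpProcessor (CAAUProcessor &proc, Float64 sampleRate)
{
	CAStreamBasicDescription fmt;
	memset (&fmt, 0, sizeof(fmt));
	fmt.mSampleRate			= sampleRate;
	fmt.mFormatID			= kAudioFormatLinearPCM;
	fmt.mFormatFlags		= kAudioFormatFlagsNativeFloatPacked | kAudioFormatFlagIsNonInterleaved;
	fmt.mFramesPerPacket	= 1;
	fmt.mChannelsPerFrame	= 2;
	fmt.mBitsPerChannel		= 32;
	fmt.mBytesPerPacket		= fmt.mBytesPerFrame = sizeof(Float32);		// per channel when non-interleaved

	// zero input samples => not an offline render; 512 is the max frames per render slice
	return proc.DoInitialisation (fmt, fmt, 0, 512);
}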