void CCoreAudioUnit::Close() { if (!m_Initialized && !m_audioUnit) return; if (m_renderProc) SetInputSource(NULL); Stop(); if (m_busNumber != INVALID_BUS) { OSStatus ret = AUGraphDisconnectNodeInput(m_audioGraph, m_audioNode, m_busNumber); if (ret) { CLog::Log(LOGERROR, "CCoreAudioUnit::Close: Unable to disconnect AudioUnit. Error = %s", GetError(ret).c_str()); } ret = AUGraphRemoveNode(m_audioGraph, m_audioNode); if (ret) { CLog::Log(LOGERROR, "CCoreAudioUnit::Close: Unable to disconnect AudioUnit. Error = %s", GetError(ret).c_str()); } } AUGraphUpdate(m_audioGraph, NULL); m_Initialized = false; m_audioUnit = NULL; m_audioNode = NULL; m_pSource = NULL; }
// Destructor: detach this node from its owning AUGraph.
// The OSStatus result of AUGraphRemoveNode is intentionally discarded —
// destruction is best-effort cleanup with nowhere to report failure.
AudioUnitNode::~AudioUnitNode()
{
    AUGraph owningGraph = getAUGraph();
    AUNode thisNode = getAUNode();
    AUGraphRemoveNode(owningGraph, thisNode);
}
// Detaches this sound source's processing chain from the device's AUGraph.
// Order matters: (1) clear the render callback so Core Audio stops pulling,
// (2) disconnect every node input, (3) disconnect from the 3D or matrix
// mixer and free the mixer input, (4) remove the nodes, (5) commit with
// AUGraphUpdate, and only then (6) dispose the converter and reset state.
// Always returns true; individual call failures are reported via SAFE_CA_CALL.
bool FCoreAudioSoundSource::DetachFromAUGraph()
{
	// Step 1: install a NULL render callback on the source unit so the
	// graph no longer pulls audio from this source.
	AURenderCallbackStruct Input;
	Input.inputProc = NULL;
	Input.inputProcRefCon = NULL;
	SAFE_CA_CALL( AudioUnitSetProperty( SourceUnit, kAudioUnitProperty_SetRenderCallback, kAudioUnitScope_Input, 0, &Input, sizeof( Input ) ) );

	// Step 2: disconnect input 0 of each optional effect node that exists.
	// A node value of 0 means that stage was never created for this source.
	if( StreamSplitterNode )
	{
		SAFE_CA_CALL( AUGraphDisconnectNodeInput( AudioDevice->GetAudioUnitGraph(), StreamSplitterNode, 0 ) );
	}
	if( ReverbNode )
	{
		SAFE_CA_CALL( AUGraphDisconnectNodeInput( AudioDevice->GetAudioUnitGraph(), ReverbNode, 0 ) );
	}
	if( RadioNode )
	{
		SAFE_CA_CALL( AUGraphDisconnectNodeInput( AudioDevice->GetAudioUnitGraph(), RadioNode, 0 ) );
	}
	if( EQNode )
	{
		SAFE_CA_CALL( AUGraphDisconnectNodeInput( AudioDevice->GetAudioUnitGraph(), EQNode, 0 ) );
	}
	if( StreamMergerNode )
	{
		SAFE_CA_CALL( AUGraphDisconnectNodeInput( AudioDevice->GetAudioUnitGraph(), StreamMergerNode, 0 ) );
	}

	// Step 3: disconnect from the device mixer and return our mixer input
	// slot to the free pool. Mono/stereo sources (< 3 channels) go through
	// the 3D mixer; multichannel sources go through the matrix mixer.
	// NOTE(review): Buffer is dereferenced without a null check here —
	// presumably guaranteed non-null while AudioChannel is set; verify.
	if( AudioChannel )
	{
		if( Buffer->NumChannels < 3 )
		{
			SAFE_CA_CALL( AUGraphDisconnectNodeInput( AudioDevice->GetAudioUnitGraph(), AudioDevice->GetMixer3DNode(), MixerInputNumber ) );
			AudioDevice->SetFreeMixer3DInput( MixerInputNumber );
		}
		else
		{
			SAFE_CA_CALL( AUGraphDisconnectNodeInput( AudioDevice->GetAudioUnitGraph(), AudioDevice->GetMatrixMixerNode(), MixerInputNumber ) );
			AudioDevice->SetFreeMatrixMixerInput( MixerInputNumber );
		}
	}

	// Step 4: remove the per-source nodes from the graph, in reverse order
	// of the chain (merger back toward the source).
	if( StreamMergerNode )
	{
		SAFE_CA_CALL( AUGraphRemoveNode( AudioDevice->GetAudioUnitGraph(), StreamMergerNode ) );
	}
	if( EQNode )
	{
		SAFE_CA_CALL( AUGraphRemoveNode( AudioDevice->GetAudioUnitGraph(), EQNode ) );
	}
	if( RadioNode )
	{
		SAFE_CA_CALL( AUGraphRemoveNode( AudioDevice->GetAudioUnitGraph(), RadioNode ) );
	}
	if( ReverbNode )
	{
		SAFE_CA_CALL( AUGraphRemoveNode( AudioDevice->GetAudioUnitGraph(), ReverbNode ) );
	}
	if( StreamSplitterNode )
	{
		SAFE_CA_CALL( AUGraphRemoveNode( AudioDevice->GetAudioUnitGraph(), StreamSplitterNode ) );
	}
	// NOTE(review): the SourceNode removal is guarded by AudioChannel rather
	// than by SourceNode itself — looks intentional (a source node only
	// exists when a channel was claimed), but confirm against the attach path.
	if( AudioChannel )
	{
		SAFE_CA_CALL( AUGraphRemoveNode( AudioDevice->GetAudioUnitGraph(), SourceNode ) );
	}

	// Step 5: commit all disconnects/removals to the running graph.
	SAFE_CA_CALL( AUGraphUpdate( AudioDevice->GetAudioUnitGraph(), NULL ) );

	// Step 6: release the converter and reset every per-source member so
	// this object can be re-attached later.
	AudioConverterDispose( CoreAudioConverter );
	CoreAudioConverter = NULL;
	StreamMergerNode = 0;
	StreamMergerUnit = NULL;
	EQNode = 0;
	EQUnit = NULL;
	RadioNode = 0;
	RadioUnit = NULL;
	ReverbNode = 0;
	ReverbUnit = NULL;
	StreamSplitterNode = 0;
	StreamSplitterUnit = NULL;
	SourceNode = 0;
	SourceUnit = NULL;
	MixerInputNumber = -1;
	// Release this source's claim on the global channel slot.
	GAudioChannels[AudioChannel] = NULL;
	AudioChannel = 0;
	return true;
}
// Configures an AUGraph for playback either to a live device or offline.
// Walks every node: finds the output unit, sets device buffer size /
// sample rate (live) or swaps the device output for a generic output
// running at the requested rate (offline), then pushes the sample rate
// and max-frames-per-slice settings to all non-output nodes.
// Errors jump to `home` via FailIf and return the failing OSStatus.
OSStatus SetUpGraph (AUGraph &inGraph, UInt32 numFrames, Float64 &sampleRate, bool isOffline)
{
	OSStatus result = noErr;
	AudioUnit outputUnit = 0;
	AUNode outputNode;

	// the frame size is the I/O size to the device
	// the device is going to run at a sample rate it is set at
	// so, when we set this, we also have to set the max frames for the graph nodes
	UInt32 nodeCount;
	FailIf ((result = AUGraphGetNodeCount (inGraph, &nodeCount)), home, "AUGraphGetNodeCount");

	for (int i = 0; i < (int)nodeCount; ++i)
	{
		AUNode node;
		FailIf ((result = AUGraphGetIndNode(inGraph, i, &node)), home, "AUGraphGetIndNode");

		AudioComponentDescription desc;
		AudioUnit unit;
		FailIf ((result = AUGraphNodeInfo(inGraph, node, &desc, &unit)), home, "AUGraphNodeInfo");

		if (desc.componentType == kAudioUnitType_Output)
		{
			// Only handle the first output unit we encounter.
			if (outputUnit == 0)
			{
				outputUnit = unit;
				FailIf ((result = AUGraphNodeInfo(inGraph, node, 0, &outputUnit)), home, "AUGraphNodeInfo");

				if (!isOffline)
				{
					// these two properties are only applicable if it's a device we're playing to
					FailIf ((result = AudioUnitSetProperty (outputUnit, kAudioDevicePropertyBufferFrameSize, kAudioUnitScope_Output, 0, &numFrames, sizeof(numFrames))), home, "AudioUnitSetProperty: kAudioDevicePropertyBufferFrameSize");

					// NOTE: "OverlaodListenerProc" is the listener's actual
					// (misspelled) identifier declared elsewhere — do not "fix" it here.
					FailIf ((result = AudioUnitAddPropertyListener (outputUnit, kAudioDeviceProcessorOverload, OverlaodListenerProc, 0)), home, "AudioUnitAddPropertyListener: kAudioDeviceProcessorOverload");

					// if we're rendering to the device, then we render at its sample rate
					UInt32 theSize;
					theSize = sizeof(sampleRate);
					FailIf ((result = AudioUnitGetProperty (outputUnit, kAudioUnitProperty_SampleRate, kAudioUnitScope_Output, 0, &sampleRate, &theSize)), home, "AudioUnitGetProperty: kAudioUnitProperty_SampleRate");
				}
				else
				{
					// remove device output node and add generic output
					FailIf ((result = AUGraphRemoveNode (inGraph, node)), home, "AUGraphRemoveNode");
					desc.componentSubType = kAudioUnitSubType_GenericOutput;
					FailIf ((result = AUGraphAddNode (inGraph, &desc, &node)), home, "AUGraphAddNode");
					FailIf ((result = AUGraphNodeInfo(inGraph, node, NULL, &unit)), home, "AUGraphNodeInfo");
					outputUnit = unit;
					outputNode = node;

					// we render the output offline at the desired sample rate
					FailIf ((result = AudioUnitSetProperty (outputUnit, kAudioUnitProperty_SampleRate, kAudioUnitScope_Output, 0, &sampleRate, sizeof(sampleRate))), home, "AudioUnitSetProperty: kAudioUnitProperty_SampleRate");
				}
				// Restart the scan from the beginning: the node list may have
				// changed (offline swap) and the remaining nodes must be
				// configured with outputUnit now known. ++i makes this 0.
				i = -1;
			}
		}
		else
		{
			// we only have to do this on the output side
			// as the graph's connection mgmt will propagate this down.
			if (outputUnit)
			{
				// reconnect up to the output unit if we're offline
				if (isOffline && desc.componentType != kAudioUnitType_MusicDevice)
				{
					FailIf ((result = AUGraphConnectNodeInput (inGraph, node, 0, outputNode, 0)), home, "AUGraphConnectNodeInput");
				}
				FailIf ((result = AudioUnitSetProperty (unit, kAudioUnitProperty_SampleRate, kAudioUnitScope_Output, 0, &sampleRate, sizeof(sampleRate))), home, "AudioUnitSetProperty: kAudioUnitProperty_SampleRate");
			}
		}
		// Every node — output or not — gets the max frames per slice.
		FailIf ((result = AudioUnitSetProperty (unit, kAudioUnitProperty_MaximumFramesPerSlice, kAudioUnitScope_Global, 0, &numFrames, sizeof(numFrames))), home, "AudioUnitSetProperty: kAudioUnitProperty_MaximumFramesPerSlice");
	}

home:
	return result;
}
// Destructor: best-effort removal of this processing node from its graph;
// the OSStatus from AUGraphRemoveNode is deliberately ignored.
virtual ~AudioSampleProcessingNode()
{
    AUGraph graph = getAUGraph();
    AUNode node = getAUNode();
    AUGraphRemoveNode(graph, node);
}
// JNI bridge for com.apple.audio.toolbox.AUGraph.AUGraphRemoveNode:
// converts the Java int handles back to native AUGraph/AUNode values,
// forwards the call, and returns the resulting OSStatus as a jint.
JNIEXPORT jint JNICALL Java_com_apple_audio_toolbox_AUGraph_AUGraphRemoveNode
	(JNIEnv *, jclass, jint inGraph, jint inNode)
{
	AUGraph nativeGraph = (AUGraph)inGraph;
	AUNode nativeNode = (AUNode)inNode;
	OSStatus status = AUGraphRemoveNode(nativeGraph, nativeNode);
	return (jint)status;
}