// Sets up |output| iff buffers are set in event handlers.
void DispatchAudioProcessEvent(ScriptProcessorNode* aNode, AudioChunk* aOutput)
{
  AudioContext* context = aNode->Context();
  if (!context) {
    return;
  }

  AutoJSAPI jsapi;
  if (NS_WARN_IF(!jsapi.Init(aNode->GetOwner()))) {
    return;
  }
  JSContext* cx = jsapi.cx();
  uint32_t inputChannelCount = aNode->ChannelCount();

  // Create the input buffer
  RefPtr<AudioBuffer> inputBuffer;
  if (mInputBuffer) {
    ErrorResult rv;
    inputBuffer = AudioBuffer::Create(context->GetOwner(), inputChannelCount,
                                      aNode->BufferSize(),
                                      context->SampleRate(),
                                      mInputBuffer.forget(), rv);
    if (rv.Failed()) {
      rv.SuppressException();
      return;
    }
  }

  // Ask content to produce data in the output buffer
  // Note that we always avoid creating the output buffer here, and we try to
  // avoid creating the input buffer as well.  The AudioProcessingEvent class
  // knows how to lazily create them if needed once the script tries to access
  // them.  Otherwise, we may be able to get away without creating them!
  RefPtr<AudioProcessingEvent> event =
    new AudioProcessingEvent(aNode, nullptr, nullptr);
  event->InitEvent(inputBuffer, inputChannelCount, mPlaybackTime);
  aNode->DispatchTrustedEvent(event);

  // Steal the output buffers if they have been set.
  // Don't create a buffer if it hasn't been used to return output;
  // FinishProducingOutputBuffer() will optimize output = null.
  // GetThreadSharedChannelsForRate() may also return null after OOM.
  if (event->HasOutputBuffer()) {
    ErrorResult rv;
    AudioBuffer* buffer = event->GetOutputBuffer(rv);
    // HasOutputBuffer() returning true means that GetOutputBuffer()
    // will not fail.
    MOZ_ASSERT(!rv.Failed());
    *aOutput = buffer->GetThreadSharedChannelsForRate(cx);
    MOZ_ASSERT(aOutput->IsNull() ||
               aOutput->mBufferFormat == AUDIO_FORMAT_FLOAT32,
               "AudioBuffers initialized from JS have float data");
  }
}
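// A minimal, self-contained sketch (not the actual AudioProcessingEvent code)
// of the lazy-creation pattern the comments above describe: the output buffer
// is only allocated the first time script asks for it, so HasOutputBuffer()
// stays false, and the dispatcher skips producing output, when no handler ever
// touches it.  The names SketchBuffer and SketchProcessingEvent are
// hypothetical and exist only for this illustration.
#include <cstddef>
#include <memory>
#include <vector>

struct SketchBuffer {
  explicit SketchBuffer(size_t aLength) : mSamples(aLength, 0.0f) {}
  std::vector<float> mSamples;
};

class SketchProcessingEvent {
 public:
  explicit SketchProcessingEvent(size_t aBufferSize) : mBufferSize(aBufferSize) {}

  // Allocate lazily: nothing is created until script accesses the buffer.
  SketchBuffer* GetOutputBuffer() {
    if (!mOutputBuffer) {
      mOutputBuffer = std::make_unique<SketchBuffer>(mBufferSize);
    }
    return mOutputBuffer.get();
  }

  // True only if some handler actually asked for the output buffer, which is
  // what lets the dispatcher "steal" the data afterwards or skip output
  // entirely when the buffer was never touched.
  bool HasOutputBuffer() const { return mOutputBuffer != nullptr; }

 private:
  size_t mBufferSize;
  std::unique_ptr<SketchBuffer> mOutputBuffer;
};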
/* static */ already_AddRefed<AudioNodeExternalInputStream>
AudioNodeExternalInputStream::Create(MediaStreamGraph* aGraph,
                                     AudioNodeEngine* aEngine)
{
  AudioContext* ctx = aEngine->NodeMainThread()->Context();
  MOZ_ASSERT(NS_IsMainThread());
  MOZ_ASSERT(aGraph->GraphRate() == ctx->SampleRate());

  RefPtr<AudioNodeExternalInputStream> stream =
    new AudioNodeExternalInputStream(aEngine, aGraph->GraphRate());
  // If the AudioContext is suspended, the new stream starts out suspended too.
  stream->mSuspendedCount += ctx->ShouldSuspendNewStream();
  aGraph->AddStream(stream);
  return stream.forget();
}
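// Hypothetical call site, a sketch only: Create() returns already_AddRefed, so
// the caller adopts the reference straight into a RefPtr with no extra
// AddRef/Release pair.  aGraph and aEngine are assumed to be valid and this is
// assumed to run on the main thread, as the assertions above require.
static RefPtr<AudioNodeExternalInputStream>
AdoptExternalInputStreamSketch(MediaStreamGraph* aGraph,
                               AudioNodeEngine* aEngine)
{
  RefPtr<AudioNodeExternalInputStream> stream =
    AudioNodeExternalInputStream::Create(aGraph, aEngine);
  return stream;
}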
/* static */ already_AddRefed<AudioBuffer>
AudioBuffer::Constructor(const GlobalObject& aGlobal,
                         AudioContext& aAudioContext,
                         const AudioBufferOptions& aOptions,
                         ErrorResult& aRv)
{
  // A buffer must have at least one channel.
  if (!aOptions.mNumberOfChannels) {
    aRv.Throw(NS_ERROR_DOM_INDEX_SIZE_ERR);
    return nullptr;
  }

  // Default to the context's sample rate when none was passed.
  float sampleRate = aOptions.mSampleRate.WasPassed()
                       ? aOptions.mSampleRate.Value()
                       : aAudioContext.SampleRate();

  return Create(&aAudioContext, aOptions.mNumberOfChannels, aOptions.mLength,
                sampleRate, aRv);
}
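// Self-contained sketch of the validation and defaulting rules enforced above;
// BufferOptionsSketch and ValidateBufferOptionsSketch are hypothetical helpers,
// not Gecko code.  A zero channel count is rejected (the constructor throws
// NS_ERROR_DOM_INDEX_SIZE_ERR), and an omitted sample rate falls back to the
// AudioContext's rate.
#include <cstdint>
#include <optional>
#include <stdexcept>

struct BufferOptionsSketch {
  uint32_t numberOfChannels = 1;
  uint32_t length = 0;
  std::optional<float> sampleRate;  // Unset means "use the context's rate".
};

// Returns the sample rate to use for the new buffer, rejecting invalid options.
float ValidateBufferOptionsSketch(const BufferOptionsSketch& aOptions,
                                  float aContextSampleRate)
{
  if (aOptions.numberOfChannels == 0) {
    throw std::invalid_argument("numberOfChannels must be at least 1");
  }
  return aOptions.sampleRate.value_or(aContextSampleRate);
}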