void ConvolverNode::SetBuffer(JSContext* aCx, AudioBuffer* aBuffer, ErrorResult& aRv) { if (aBuffer) { switch (aBuffer->NumberOfChannels()) { case 1: case 2: case 4: // Supported number of channels break; default: aRv.Throw(NS_ERROR_DOM_SYNTAX_ERR); return; } } mBuffer = aBuffer; // Send the buffer to the stream AudioNodeStream* ns = static_cast<AudioNodeStream*>(mStream.get()); MOZ_ASSERT(ns, "Why don't we have a stream here?"); if (mBuffer) { uint32_t length = mBuffer->Length(); nsRefPtr<ThreadSharedFloatArrayBufferList> data = mBuffer->GetThreadSharedChannelsForRate(aCx); if (data && length < WEBAUDIO_BLOCK_SIZE) { // For very small impulse response buffers, we need to pad the // buffer with 0 to make sure that the Reverb implementation // has enough data to compute FFTs from. length = WEBAUDIO_BLOCK_SIZE; nsRefPtr<ThreadSharedFloatArrayBufferList> paddedBuffer = new ThreadSharedFloatArrayBufferList(data->GetChannels()); float* channelData = (float*) malloc(sizeof(float) * length * data->GetChannels()); for (uint32_t i = 0; i < data->GetChannels(); ++i) { PodCopy(channelData + length * i, data->GetData(i), mBuffer->Length()); PodZero(channelData + length * i + mBuffer->Length(), WEBAUDIO_BLOCK_SIZE - mBuffer->Length()); paddedBuffer->SetData(i, (i == 0) ? channelData : nullptr, free, channelData); } data = paddedBuffer; } SendInt32ParameterToStream(ConvolverNodeEngine::BUFFER_LENGTH, length); SendDoubleParameterToStream(ConvolverNodeEngine::SAMPLE_RATE, mBuffer->SampleRate()); ns->SetBuffer(data.forget()); } else { ns->SetBuffer(nullptr); } }
void OscillatorNode::SendPeriodicWaveToStream() { NS_ASSERTION(mType == OscillatorType::Custom, "Sending custom waveform to engine thread with non-custom type"); AudioNodeStream* ns = static_cast<AudioNodeStream*>(mStream.get()); MOZ_ASSERT(ns, "Missing node stream."); MOZ_ASSERT(mPeriodicWave, "Send called without PeriodicWave object."); SendInt32ParameterToStream(OscillatorNodeEngine::PERIODICWAVE, mPeriodicWave->DataLength()); nsRefPtr<ThreadSharedFloatArrayBufferList> data = mPeriodicWave->GetThreadSharedBuffer(); ns->SetBuffer(data.forget()); }
void
AudioBufferSourceNode::SendBufferParameterToStream(JSContext* aCx)
{
  // Forward the node's current buffer (or its absence) to the stream.
  AudioNodeStream* stream = mStream;
  if (!stream) {
    return;
  }

  if (!mBuffer) {
    // No buffer: clear the stream's end position and buffer, and let the
    // node go inactive.
    stream->SetInt32Parameter(BUFFEREND, 0);
    stream->SetBuffer(nullptr);
    MarkInactive();
    return;
  }

  RefPtr<ThreadSharedFloatArrayBufferList> channels =
    mBuffer->GetThreadSharedChannelsForRate(aCx);
  stream->SetBuffer(channels.forget());
  if (mStartCalled) {
    // start() already ran; resend the playback window for the new buffer.
    SendOffsetAndDurationParametersToStream(stream);
  }
}