void StarSeemTest::star_external_ffem() {
    QFETCH(QString, data);
    AudioBuffer c;
    QVERIFY2(c.read(data), ("Failed reading test wave; " + data).toUtf8().data());

    /*=== Analysis by WORLD ===*/
    int timeLength = GetSamplesForDIO(c.format().sampleRate(), c.length(), msFramePeriod);
    int fftLength = GetFFTSizeForStar(c.format().sampleRate());
    double *f0 = new double[timeLength];
    double *t = new double[timeLength];
    double **specgram = new double*[timeLength];
    specgram[0] = new double[timeLength * (fftLength / 2 + 1)];
    for(int i = 1; i < timeLength; i++) {
        specgram[i] = specgram[0] + i * (fftLength / 2 + 1);
    }
    Dio(c.data()[0], c.length(), c.format().sampleRate(), msFramePeriod, t, f0);
    Star(c.data()[0], c.length(), c.format().sampleRate(), msFramePeriod, f0, timeLength, specgram);
    /*=== End of analysis by WORLD ===*/

    /*=== Analysis by StarSeem ===*/
    Envelope *e = new Envelope;
    QVERIFY2(DioFfem().estimate(e, c.data()[0], c.length(), c.format().sampleRate(), msFramePeriod), "Failed dio FFEM");
    // Verify that the Envelope is correct.
    for(int i = 0; i < e->size(); i++) {
        QVERIFY2(e->value(i) == f0[i], "Error; invalid f0 envelope");
    }
    Specgram *testset = new Specgram;
    QVERIFY2(StarSeem(e).estimate(testset, c.data()[0], c.length(), fftLength, c.format().sampleRate(), msFramePeriod), "Failed STAR SEEM");
    /*=== End of analysis by StarSeem ===*/

    // Start the comparison.
    for(int t = 0; t < timeLength; t++) {
        for(int f = 0; f <= fftLength / 2; f++) {
            if(testset->value(t, f) != specgram[t][f]) {
                QString s;
                s = "t = " + QString::number(t) + ", f = " + QString::number(f);
                s += " actual: " + QString::number(testset->value(t, f)) + ", expected: " + QString::number(specgram[t][f]);
                // The underlying WORLD code is the same, so the values must match exactly.
                QFAIL(("Error; " + s).toUtf8().data());
            }
        }
    }

    delete[] specgram[0];
    delete[] specgram;
    delete[] t;
    delete[] f0;
}
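// The test above is data-driven (it fetches its input with QFETCH(QString, data)), so QtTest
// expects a matching *_data() slot that populates the "data" column. A minimal sketch of that
// slot is shown below; the row label and the WAV path are hypothetical placeholders, and only
// the QTest::addColumn / QTest::newRow calls are the standard QtTest mechanism.
void StarSeemTest::star_external_ffem_data() {
    QTest::addColumn<QString>("data");                               // consumed by QFETCH(QString, data) above
    QTest::newRow("sample wave") << QString("fixtures/sample.wav");  // hypothetical test fixture path
}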
void JavaScriptAudioNode::process(size_t framesToProcess)
{
    // Discussion about inputs and outputs:
    // As in other AudioNodes, JavaScriptAudioNode uses an AudioBus for its input and output (see inputBus and outputBus below).
    // Additionally, there is a double-buffering for input and output which is exposed directly to JavaScript (see inputBuffer and outputBuffer below).
    // This node is the producer for inputBuffer and the consumer for outputBuffer.
    // The JavaScript code is the consumer of inputBuffer and the producer for outputBuffer.

    // Get input and output busses.
    AudioBus* inputBus = this->input(0)->bus();
    AudioBus* outputBus = this->output(0)->bus();

    // Get input and output buffers. We double-buffer both the input and output sides.
    unsigned doubleBufferIndex = this->doubleBufferIndex();
    bool isDoubleBufferIndexGood = doubleBufferIndex < 2 && doubleBufferIndex < m_inputBuffers.size() && doubleBufferIndex < m_outputBuffers.size();
    ASSERT(isDoubleBufferIndexGood);
    if (!isDoubleBufferIndexGood)
        return;

    AudioBuffer* inputBuffer = m_inputBuffers[doubleBufferIndex].get();
    AudioBuffer* outputBuffer = m_outputBuffers[doubleBufferIndex].get();

    // Check the consistency of input and output buffers.
    unsigned numberOfInputChannels = m_internalInputBus.numberOfChannels();
    bool buffersAreGood = outputBuffer && bufferSize() == outputBuffer->length() && m_bufferReadWriteIndex + framesToProcess <= bufferSize();

    // If the number of input channels is zero, it's ok to have inputBuffer = 0.
    if (m_internalInputBus.numberOfChannels())
        buffersAreGood = buffersAreGood && inputBuffer && bufferSize() == inputBuffer->length();

    ASSERT(buffersAreGood);
    if (!buffersAreGood)
        return;

    // We assume that bufferSize() is evenly divisible by framesToProcess - should always be true, but we should still check.
    bool isFramesToProcessGood = framesToProcess && bufferSize() >= framesToProcess && !(bufferSize() % framesToProcess);
    ASSERT(isFramesToProcessGood);
    if (!isFramesToProcessGood)
        return;

    unsigned numberOfOutputChannels = outputBus->numberOfChannels();

    bool channelsAreGood = (numberOfInputChannels == m_numberOfInputChannels) && (numberOfOutputChannels == m_numberOfOutputChannels);
    ASSERT(channelsAreGood);
    if (!channelsAreGood)
        return;

    for (unsigned i = 0; i < numberOfInputChannels; i++)
        m_internalInputBus.setChannelMemory(i, inputBuffer->getChannelData(i)->data() + m_bufferReadWriteIndex, framesToProcess);

    if (numberOfInputChannels)
        m_internalInputBus.copyFrom(*inputBus);

    // Copy from the output buffer to the output.
    for (unsigned i = 0; i < numberOfOutputChannels; ++i)
        memcpy(outputBus->channel(i)->mutableData(), outputBuffer->getChannelData(i)->data() + m_bufferReadWriteIndex, sizeof(float) * framesToProcess);

    // Update the buffering index.
    m_bufferReadWriteIndex = (m_bufferReadWriteIndex + framesToProcess) % bufferSize();

    // m_bufferReadWriteIndex will wrap back around to 0 when the current input and output buffers are full.
    // When this happens, fire an event and swap buffers.
    if (!m_bufferReadWriteIndex) {
        // Avoid building up requests on the main thread to fire process events when they're not being handled.
        // This could be a problem if the main thread is very busy doing other things and is being held up handling previous requests.
        if (m_isRequestOutstanding) {
            // We're late in handling the previous request. The main thread must be very busy.
            // The best we can do is clear out the buffer ourself here.
            outputBuffer->zero();
        } else {
            // Reference ourself so we don't accidentally get deleted before fireProcessEvent() gets called.
            ref();

            // Fire the event on the main thread, not this one (which is the realtime audio thread).
            m_doubleBufferIndexForEvent = m_doubleBufferIndex;
            m_isRequestOutstanding = true;
            callOnMainThread(fireProcessEventDispatch, this);
        }

        swapBuffers();
    }
}
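// process() above finishes a full buffer by calling swapBuffers(). A minimal sketch of what
// that helper plausibly does, assuming the index simply alternates between the two buffer
// pairs so the audio thread and the JavaScript callback never touch the same AudioBuffer at
// once; the real definition lives elsewhere in the class and may differ.
void JavaScriptAudioNode::swapBuffers()
{
    m_doubleBufferIndex = 1 - m_doubleBufferIndex;
}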
void ScriptProcessorHandler::process(size_t framesToProcess)
{
    // Discussion about inputs and outputs:
    // As in other AudioNodes, ScriptProcessorNode uses an AudioBus for its input and output (see inputBus and outputBus below).
    // Additionally, there is a double-buffering for input and output which is exposed directly to JavaScript (see inputBuffer and outputBuffer below).
    // This node is the producer for inputBuffer and the consumer for outputBuffer.
    // The JavaScript code is the consumer of inputBuffer and the producer for outputBuffer.

    // Get input and output busses.
    AudioBus* inputBus = input(0).bus();
    AudioBus* outputBus = output(0).bus();

    // Get input and output buffers. We double-buffer both the input and output sides.
    unsigned doubleBufferIndex = this->doubleBufferIndex();
    bool isDoubleBufferIndexGood = doubleBufferIndex < 2 && doubleBufferIndex < m_inputBuffers.size() && doubleBufferIndex < m_outputBuffers.size();
    ASSERT(isDoubleBufferIndexGood);
    if (!isDoubleBufferIndexGood)
        return;

    AudioBuffer* inputBuffer = m_inputBuffers[doubleBufferIndex].get();
    AudioBuffer* outputBuffer = m_outputBuffers[doubleBufferIndex].get();

    // Check the consistency of input and output buffers.
    unsigned numberOfInputChannels = m_internalInputBus->numberOfChannels();
    bool buffersAreGood = outputBuffer && bufferSize() == outputBuffer->length() && m_bufferReadWriteIndex + framesToProcess <= bufferSize();

    // If the number of input channels is zero, it's ok to have inputBuffer = 0.
    if (m_internalInputBus->numberOfChannels())
        buffersAreGood = buffersAreGood && inputBuffer && bufferSize() == inputBuffer->length();

    ASSERT(buffersAreGood);
    if (!buffersAreGood)
        return;

    // We assume that bufferSize() is evenly divisible by framesToProcess - should always be true, but we should still check.
    bool isFramesToProcessGood = framesToProcess && bufferSize() >= framesToProcess && !(bufferSize() % framesToProcess);
    ASSERT(isFramesToProcessGood);
    if (!isFramesToProcessGood)
        return;

    unsigned numberOfOutputChannels = outputBus->numberOfChannels();

    bool channelsAreGood = (numberOfInputChannels == m_numberOfInputChannels) && (numberOfOutputChannels == m_numberOfOutputChannels);
    ASSERT(channelsAreGood);
    if (!channelsAreGood)
        return;

    for (unsigned i = 0; i < numberOfInputChannels; ++i)
        m_internalInputBus->setChannelMemory(i, inputBuffer->getChannelData(i)->data() + m_bufferReadWriteIndex, framesToProcess);

    if (numberOfInputChannels)
        m_internalInputBus->copyFrom(*inputBus);

    // Copy from the output buffer to the output.
    for (unsigned i = 0; i < numberOfOutputChannels; ++i)
        memcpy(outputBus->channel(i)->mutableData(), outputBuffer->getChannelData(i)->data() + m_bufferReadWriteIndex, sizeof(float) * framesToProcess);

    // Update the buffering index.
    m_bufferReadWriteIndex = (m_bufferReadWriteIndex + framesToProcess) % bufferSize();

    // m_bufferReadWriteIndex will wrap back around to 0 when the current input and output buffers are full.
    // When this happens, fire an event and swap buffers.
    if (!m_bufferReadWriteIndex) {
        // Avoid building up requests on the main thread to fire process events when they're not being handled.
        // This could be a problem if the main thread is very busy doing other things and is being held up handling previous requests.
        // The audio thread can't block on this lock, so we call tryLock() instead.
        MutexTryLocker tryLocker(m_processEventLock);
        if (!tryLocker.locked()) {
            // We're late in handling the previous request. The main thread must be very busy.
            // The best we can do is clear out the buffer ourself here.
            outputBuffer->zero();
        } else if (context()->executionContext()) {
            // Fire the event on the main thread, not this one (which is the realtime audio thread).
            m_doubleBufferIndexForEvent = m_doubleBufferIndex;
            context()->executionContext()->postTask(BLINK_FROM_HERE, createCrossThreadTask(&ScriptProcessorHandler::fireProcessEvent, PassRefPtr<ScriptProcessorHandler>(this)));
        }

        swapBuffers();
    }
}
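// Standalone illustration (not Blink code) of the non-blocking handoff used above: the
// real-time audio thread only try-locks the shared mutex and, if the consumer still holds
// it, drops the work and outputs silence instead of waiting, because a real-time callback
// must never block. The names here are illustrative; only the std::mutex API is standard.
#include <cstddef>
#include <mutex>

std::mutex g_processEventLock;  // stands in for m_processEventLock

void realtimeThreadTick(float* output, std::size_t frames)
{
    std::unique_lock<std::mutex> lock(g_processEventLock, std::try_to_lock);
    if (!lock.owns_lock()) {
        // The previous request is still being handled: the best we can do is output silence.
        for (std::size_t i = 0; i < frames; ++i)
            output[i] = 0.0f;
        return;
    }
    // Lock acquired: it is now safe to publish the next buffer and post the process event.
}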
void JavaScriptAudioNode::process(size_t framesToProcess)
{
    // Discussion about inputs and outputs:
    // As in other AudioNodes, JavaScriptAudioNode uses an AudioBus for its input and output (see inputBus and outputBus below).
    // Additionally, there is a double-buffering for input and output which is exposed directly to JavaScript (see inputBuffer and outputBuffer below).
    // This node is the producer for inputBuffer and the consumer for outputBuffer.
    // The JavaScript code is the consumer of inputBuffer and the producer for outputBuffer.

    // Get input and output busses.
    AudioBus* inputBus = this->input(0)->bus();
    AudioBus* outputBus = this->output(0)->bus();

    // Get input and output buffers. We double-buffer both the input and output sides.
    unsigned doubleBufferIndex = this->doubleBufferIndex();
    bool isDoubleBufferIndexGood = doubleBufferIndex < 2 && doubleBufferIndex < m_inputBuffers.size() && doubleBufferIndex < m_outputBuffers.size();
    ASSERT(isDoubleBufferIndexGood);
    if (!isDoubleBufferIndexGood)
        return;

    AudioBuffer* inputBuffer = m_inputBuffers[doubleBufferIndex].get();
    AudioBuffer* outputBuffer = m_outputBuffers[doubleBufferIndex].get();

    // Check the consistency of input and output buffers.
    bool buffersAreGood = inputBuffer && outputBuffer && bufferSize() == inputBuffer->length() && bufferSize() == outputBuffer->length()
        && m_bufferReadWriteIndex + framesToProcess <= bufferSize();
    ASSERT(buffersAreGood);
    if (!buffersAreGood)
        return;

    // We assume that bufferSize() is evenly divisible by framesToProcess - should always be true, but we should still check.
    bool isFramesToProcessGood = framesToProcess && bufferSize() >= framesToProcess && !(bufferSize() % framesToProcess);
    ASSERT(isFramesToProcessGood);
    if (!isFramesToProcessGood)
        return;

    unsigned numberOfInputChannels = inputBus->numberOfChannels();

    bool channelsAreGood = (numberOfInputChannels == 1 || numberOfInputChannels == 2) && outputBus->numberOfChannels() == 2;
    ASSERT(channelsAreGood);
    if (!channelsAreGood)
        return;

    const float* sourceL = inputBus->channel(0)->data();
    const float* sourceR = numberOfInputChannels > 1 ? inputBus->channel(1)->data() : 0;
    float* destinationL = outputBus->channel(0)->mutableData();
    float* destinationR = outputBus->channel(1)->mutableData();

    // Copy from the input to the input buffer. See "buffersAreGood" check above for safety.
    size_t bytesToCopy = sizeof(float) * framesToProcess;
    memcpy(inputBuffer->getChannelData(0)->data() + m_bufferReadWriteIndex, sourceL, bytesToCopy);

    if (numberOfInputChannels == 2)
        memcpy(inputBuffer->getChannelData(1)->data() + m_bufferReadWriteIndex, sourceR, bytesToCopy);
    else if (numberOfInputChannels == 1) {
        // If the input is mono, then also copy the mono input to the right channel of the AudioBuffer which the AudioProcessingEvent uses.
        // FIXME: it is likely the audio API will evolve to present an AudioBuffer with the same number of channels as our input.
        memcpy(inputBuffer->getChannelData(1)->data() + m_bufferReadWriteIndex, sourceL, bytesToCopy);
    }

    // Copy from the output buffer to the output. See "buffersAreGood" check above for safety.
    memcpy(destinationL, outputBuffer->getChannelData(0)->data() + m_bufferReadWriteIndex, bytesToCopy);
    memcpy(destinationR, outputBuffer->getChannelData(1)->data() + m_bufferReadWriteIndex, bytesToCopy);

    // Update the buffering index.
    m_bufferReadWriteIndex = (m_bufferReadWriteIndex + framesToProcess) % bufferSize();

    // m_bufferReadWriteIndex will wrap back around to 0 when the current input and output buffers are full.
    // When this happens, fire an event and swap buffers.
    if (!m_bufferReadWriteIndex) {
        // Avoid building up requests on the main thread to fire process events when they're not being handled.
        // This could be a problem if the main thread is very busy doing other things and is being held up handling previous requests.
        if (m_isRequestOutstanding) {
            // We're late in handling the previous request. The main thread must be very busy.
            // The best we can do is clear out the buffer ourself here.
            outputBuffer->zero();
        } else {
            // Reference ourself so we don't accidentally get deleted before fireProcessEvent() gets called.
            ref();

            // Fire the event on the main thread, not this one (which is the realtime audio thread).
            m_doubleBufferIndexForEvent = m_doubleBufferIndex;
            m_isRequestOutstanding = true;
            callOnMainThread(fireProcessEventDispatch, this);
        }

        swapBuffers();
    }
}
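// The callOnMainThread(fireProcessEventDispatch, this) call above implies a static trampoline
// that runs on the main thread and releases the reference taken with ref(). A plausible sketch
// follows, assuming fireProcessEvent() is the member that actually dispatches the
// AudioProcessingEvent; the real definition may differ in detail.
void JavaScriptAudioNode::fireProcessEventDispatch(void* userData)
{
    JavaScriptAudioNode* jsAudioNode = static_cast<JavaScriptAudioNode*>(userData);
    ASSERT(jsAudioNode);
    if (!jsAudioNode)
        return;

    jsAudioNode->fireProcessEvent();

    // Balance the ref() taken on the audio thread before the event was posted.
    jsAudioNode->deref();
}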
void ScriptProcessorHandler::process(size_t framesToProcess) {
  // Discussion about inputs and outputs:
  // As in other AudioNodes, ScriptProcessorNode uses an AudioBus for its input
  // and output (see inputBus and outputBus below). Additionally, there is a
  // double-buffering for input and output which is exposed directly to
  // JavaScript (see inputBuffer and outputBuffer below). This node is the
  // producer for inputBuffer and the consumer for outputBuffer. The JavaScript
  // code is the consumer of inputBuffer and the producer for outputBuffer.

  // Get input and output busses.
  AudioBus* inputBus = input(0).bus();
  AudioBus* outputBus = output(0).bus();

  // Get input and output buffers. We double-buffer both the input and output
  // sides.
  unsigned doubleBufferIndex = this->doubleBufferIndex();
  bool isDoubleBufferIndexGood = doubleBufferIndex < 2 &&
                                 doubleBufferIndex < m_inputBuffers.size() &&
                                 doubleBufferIndex < m_outputBuffers.size();
  DCHECK(isDoubleBufferIndexGood);
  if (!isDoubleBufferIndexGood)
    return;

  AudioBuffer* inputBuffer = m_inputBuffers[doubleBufferIndex].get();
  AudioBuffer* outputBuffer = m_outputBuffers[doubleBufferIndex].get();

  // Check the consistency of input and output buffers.
  unsigned numberOfInputChannels = m_internalInputBus->numberOfChannels();
  bool buffersAreGood = outputBuffer && bufferSize() == outputBuffer->length() &&
                        m_bufferReadWriteIndex + framesToProcess <= bufferSize();

  // If the number of input channels is zero, it's ok to have inputBuffer = 0.
  if (m_internalInputBus->numberOfChannels())
    buffersAreGood =
        buffersAreGood && inputBuffer && bufferSize() == inputBuffer->length();

  DCHECK(buffersAreGood);
  if (!buffersAreGood)
    return;

  // We assume that bufferSize() is evenly divisible by framesToProcess - should
  // always be true, but we should still check.
  bool isFramesToProcessGood = framesToProcess &&
                               bufferSize() >= framesToProcess &&
                               !(bufferSize() % framesToProcess);
  DCHECK(isFramesToProcessGood);
  if (!isFramesToProcessGood)
    return;

  unsigned numberOfOutputChannels = outputBus->numberOfChannels();

  bool channelsAreGood = (numberOfInputChannels == m_numberOfInputChannels) &&
                         (numberOfOutputChannels == m_numberOfOutputChannels);
  DCHECK(channelsAreGood);
  if (!channelsAreGood)
    return;

  for (unsigned i = 0; i < numberOfInputChannels; ++i)
    m_internalInputBus->setChannelMemory(
        i, inputBuffer->getChannelData(i)->data() + m_bufferReadWriteIndex,
        framesToProcess);

  if (numberOfInputChannels)
    m_internalInputBus->copyFrom(*inputBus);

  // Copy from the output buffer to the output.
  for (unsigned i = 0; i < numberOfOutputChannels; ++i)
    memcpy(outputBus->channel(i)->mutableData(),
           outputBuffer->getChannelData(i)->data() + m_bufferReadWriteIndex,
           sizeof(float) * framesToProcess);

  // Update the buffering index.
  m_bufferReadWriteIndex =
      (m_bufferReadWriteIndex + framesToProcess) % bufferSize();

  // m_bufferReadWriteIndex will wrap back around to 0 when the current input
  // and output buffers are full.
  // When this happens, fire an event and swap buffers.
  if (!m_bufferReadWriteIndex) {
    // Avoid building up requests on the main thread to fire process events when
    // they're not being handled. This could be a problem if the main thread is
    // very busy doing other things and is being held up handling previous
    // requests. The audio thread can't block on this lock, so we call
    // tryLock() instead.
    MutexTryLocker tryLocker(m_processEventLock);
    if (!tryLocker.locked()) {
      // We're late in handling the previous request. The main thread must be
      // very busy. The best we can do is clear out the buffer ourself here.
      outputBuffer->zero();
    } else if (context()->getExecutionContext()) {
      // With the realtime context, execute the script code asynchronously
      // and do not wait.
      if (context()->hasRealtimeConstraint()) {
        // Fire the event on the main thread with the appropriate buffer
        // index.
        context()->getExecutionContext()->postTask(
            BLINK_FROM_HERE,
            createCrossThreadTask(&ScriptProcessorHandler::fireProcessEvent,
                                  crossThreadUnretained(this),
                                  m_doubleBufferIndex));
      } else {
        // If this node is in the offline audio context, use the
        // waitable event to synchronize to the offline rendering thread.
        std::unique_ptr<WaitableEvent> waitableEvent =
            wrapUnique(new WaitableEvent());

        context()->getExecutionContext()->postTask(
            BLINK_FROM_HERE,
            createCrossThreadTask(
                &ScriptProcessorHandler::fireProcessEventForOfflineAudioContext,
                crossThreadUnretained(this), m_doubleBufferIndex,
                crossThreadUnretained(waitableEvent.get())));

        // Okay to block the offline audio rendering thread since it is
        // not the actual audio device thread.
        waitableEvent->wait();
      }
    }

    swapBuffers();
  }
}
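// Standalone illustration (not Blink code) of the offline-context synchronization above: the
// offline rendering thread posts the script callback to another thread and then blocks until
// that work signals completion. std::promise/std::future stands in for Blink's WaitableEvent
// and std::thread stands in for postTask; all names here are illustrative only.
#include <future>
#include <thread>

void renderQuantumOffline() {
  std::promise<void> done;  // plays the role of the WaitableEvent
  std::future<void> doneSignal = done.get_future();

  std::thread scriptTask([&done]() {
    // ...run the script processing callback and fill the output buffer here...
    done.set_value();  // signal: buffers are ready
  });

  // Blocking is acceptable here because this is the offline rendering thread,
  // not a real-time audio device callback.
  doneSignal.wait();
  scriptTask.join();
}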