Example no. 1
//==============================================================================
void MainContentComponent::handleIncomingMidiMessage (MidiInput* /*source*/, const MidiMessage &message)
{
    // This is called on the MIDI thread

    if (message.isNoteOnOrOff())
        postMessage (new MidiCallbackMessage (message));
}
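The MidiCallbackMessage class and the receiving callback are not shown above. A minimal sketch of how they might look (class layout and behaviour assumed, and assuming MainContentComponent also inherits MessageListener): the object carries a copy of the MidiMessage from the MIDI thread to the message thread, where GUI state may be touched safely.

struct MidiCallbackMessage : public Message
{
    // Carries a copy of the incoming MidiMessage across threads; postMessage() takes ownership.
    MidiCallbackMessage (const MidiMessage& m) : message (m) {}
    MidiMessage message;
};

void MainContentComponent::handleMessage (const Message& msg)
{
    // Runs on the message thread, so it is safe to update components here.
    const MidiMessage& midi = static_cast<const MidiCallbackMessage&> (msg).message;
    ignoreUnused (midi); // e.g. append a description of `midi` to a log display (assumed behaviour)
}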
Example no. 2
bool MidiControlAction::processIncomingMessage(const MidiMessage& message)
{
    if (isLearning)
    {
        isLearning = false;
        messageTemplate = message;
        return true;
    }
    else if (message.isForChannel(messageTemplate.getChannel()))
    {
        if (messageTemplate.isNoteOnOrOff() && message.isNoteOnOrOff() && message.getNoteNumber() == messageTemplate.getNoteNumber())
        {
            // toggle!
            if (message.isNoteOn())   // the note number was already matched above
            {
                getMappedComponent()->setValue(getMappedComponent()->getValue() >= 0.5f ? 0.0f : 1.0f);
                getMappedComponent()->triggerAsyncUpdate();
            }
            return true;
        }
        else if (messageTemplate.isController() && message.isController() && messageTemplate.getControllerNumber() == message.getControllerNumber())
        {
            //setValue(message.getControllerValue() / 127.0f);
            getMappedComponent()->setValue(message.getControllerValue() / 127.0f);
            getMappedComponent()->triggerAsyncUpdate();
            return true;
        }
    }
    return false;
}
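A sketch of how such an action might be driven (the dispatch function below is an assumption, not part of the example): each incoming message is offered to the mapped actions in turn until one reports that it consumed it.

void dispatchToMappedActions (const MidiMessage& message, OwnedArray<MidiControlAction>& actions)
{
    for (auto* action : actions)
        if (action->processIncomingMessage (message)) // returns true once the message has been handled
            break;
}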
Example no. 3
const String getMidiValue(const MidiMessage &m)
{
	if (m.isAftertouch())
	{
		return (String::formatted (" Val:[%4d]", m.getAfterTouchValue()));
	}

	if (m.isController())
	{
		return (String::formatted (" Val:[%4d]", m.getControllerValue()));
	}

	if (m.isNoteOnOrOff())
	{
		return (String::formatted (" Val:[%4d]", m.getVelocity()));
	}

	if (m.isPitchWheel())
	{
		return (String::formatted (" Val:[%4d]", m.getPitchWheelValue()));
	}

	if (m.isChannelPressure())
	{
		return (String::formatted (" Val:[%4d]", m.getChannelPressureValue()));
	}

	return (" Val:[----]");
}
void CtrlrPanelMIDIInputThread::handleMIDIFromDevice (const MidiMessage &message)
{
	const ScopedWriteLock sl (lock);

	if (owner.getMidiOptionBool(panelMidiRealtimeIgnore) && message.getRawDataSize() <= 1)
		return;

	if (message.isNoteOnOrOff() || message.isMidiClock())
	{
		deviceInputBuffer.addEvent (message, deviceInputBuffer.getNumEvents()+1);
	}
	else
	{
		deviceInputBuffer.addEvent (message, deviceInputBuffer.getNumEvents()+2);
	}

	notify();
}
Example no. 5
void VoicerUGenInternal::handleIncomingMidiMessage (MidiInput* /*source*/, const MidiMessage& message) throw()
{
	if(message.isForChannel(midiChannel_) == false)	return;
	
	if(message.isNoteOnOrOff())
	{
		const ScopedLock sl(lock);
		sendMidiNote(message.getChannel(), message.getNoteNumber(), message.getVelocity());
	}
	else if(message.isController())
	{
		if((message.getControllerNumber() == 123) && (message.getControllerValue() == 0))
		{
			const ScopedLock sl(lock);
			initEvents();
		}
			
		getController(message.getControllerNumber()) = 
					 (message.getControllerValue() * (1.f / 127.f));
	}
	else if(message.isPitchWheel())
	{
		getPitchWheel() = (jlimit(-8191, 8191, message.getPitchWheelValue() - 8192) * (1.f / 8191.f));
	}
	else if(message.isChannelPressure())
	{
		getChannelPressure() = (message.getChannelPressureValue() * (1.f / 127.f));
	}
	else if(message.isAftertouch())
	{
		getKeyPressure(message.getNoteNumber()) = 
					  (message.getAfterTouchValue() * (1.f / 127.f));
	}
	else if(message.isProgramChange())
	{
		getProgram() = (message.getProgramChangeNumber());
	}
	else if(message.isAllNotesOff())
	{
		const ScopedLock sl(lock);
		initEvents();
	}
}
Example no. 6
const String getMidiNumber(const MidiMessage &m)
{
	if (m.isAftertouch())
	{
		return (String::formatted (" No:[%4d]", m.getNoteNumber()));
	}

	if (m.isController())
	{
		return (String::formatted (" No:[%4d]", m.getControllerNumber()));
	}

	if (m.isNoteOnOrOff())
	{
		return (String::formatted (" No:[%4d]", m.getNoteNumber()));
	}

	if (m.isProgramChange())
	{
		return (String::formatted (" No:[%4d]", m.getProgramChangeNumber()));
	}
	return (" No:[----]");
}
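getMidiValue() (Example no. 3) and getMidiNumber() above are natural building blocks for a one-line monitor entry; one possible combination (the wrapper function itself is an assumption):

String describeMidiMessage (const MidiMessage& m)
{
	// Channel first, then the number and value fields formatted by the two helpers above.
	return String::formatted ("Ch:[%2d]", m.getChannel()) + getMidiNumber (m) + getMidiValue (m);
}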
//==============================================================================
void MidiMonitorEditor::timerCallback ()
{
    MidiBuffer tmpBuffer;
    int hours, minutes, seconds, frames;
    MidiMessage::SmpteTimecodeType timeCode;
    
    MidiMessageCollector* collector = owner->getMessageCollector ();
    collector->removeNextBlockOfMessages (tmpBuffer, 1024);
    
    if (! tmpBuffer.isEmpty())
	{
        String midiLine;

        int samplePos = 0;
        MidiMessage msg (0xf4, 0.0);
        MidiBuffer::Iterator eventIterator (tmpBuffer);

        while (eventIterator.getNextEvent (msg, samplePos))
        {
           midiLine.printf (T("[CH: %d] "), msg.getChannel());

           if (msg.isNoteOnOrOff ())
           {
                midiLine += MidiMessage::getMidiNoteName (msg.getNoteNumber(),
                                                          true, true, 0);
                midiLine += " ";
                midiLine += String ((int) msg.getVelocity ());

                if (msg.isNoteOn())
                {
                    midiLine += " ON";
                }
                else
                {
                    midiLine += " OFF";
                }
           }
           else if (msg.isAllNotesOff())
           {
                midiLine += "ALL NOTES OFF";
           }
           else if (msg.isAllSoundOff())
           {
                midiLine += "ALL SOUND OFF";
           }
           else if (msg.isPitchWheel())
           {
                midiLine += "PITCHWEEL: ";
                midiLine += String (msg.getPitchWheelValue());
           }
           else if (msg.isAftertouch())
           {
                midiLine += "AFTERTOUCH: ";
                midiLine += String (msg.getAfterTouchValue());
           }
           else if (msg.isChannelPressure())
           {
                midiLine += "CHANNELPRESSURE: ";
                midiLine += String (msg.getChannelPressureValue());
           }
           else if (msg.isSysEx())
           {
                midiLine += "SYSEX: ";
                midiLine += String (msg.getSysExDataSize());
                midiLine += " bytes";
           }
           else if (msg.isProgramChange())
           {
                midiLine += "PROGRAM CHANGE: ";
                midiLine += String (msg.getProgramChangeNumber());
                midiLine += " (";
                midiLine += MidiMessage::getGMInstrumentName (msg.getProgramChangeNumber());
                midiLine += ")";
           }
           else if (msg.isController())
           {
                midiLine += "CC: #";
                midiLine += String (msg.getControllerNumber());
                midiLine += " (";
                midiLine += MidiMessage::getControllerName (msg.getControllerNumber());
                midiLine += ") = ";
                midiLine += String (msg.getControllerValue());
           }
            else if (msg.isTimeSignatureMetaEvent ())
            {
                int newNumerator, newDenominator;
                msg.getTimeSignatureInfo (newNumerator, newDenominator);

                midiLine += "TIME SIGNATURE: ";
                midiLine += String (newNumerator);
                midiLine += " / ";
                midiLine += String (newDenominator);
            }
            else if (msg.isTempoMetaEvent ())
            {
                midiLine += "TEMPO: ";
                midiLine += String (msg.getTempoSecondsPerQuarterNote ());
                //midiLine += " ";
                //midiLine += String (msg.getTempoMetaEventTickLength (ticksPerQuarterNote));
            }
            else if (msg.isMidiMachineControlMessage())
            {
                midiLine += "MIDI CONTROL: ";
                
                switch (msg.getMidiMachineControlCommand())
                {
                    case MidiMessage::mmc_stop:             midiLine += "stop"; break;
                    case MidiMessage::mmc_play:             midiLine += "play"; break;
                    case MidiMessage::mmc_deferredplay:     midiLine += "deferredplay"; break;
                    case MidiMessage::mmc_fastforward:      midiLine += "fastforward"; break;
                    case MidiMessage::mmc_rewind:           midiLine += "rewind"; break;
                    case MidiMessage::mmc_recordStart:      midiLine += "recordStart"; break;
                    case MidiMessage::mmc_recordStop:       midiLine += "recordStop"; break;
                    case MidiMessage::mmc_pause:            midiLine += "pause"; break;
                }
            }
            else if (msg.isMidiStart ())
            {
                midiLine += "MIDI START: ";
            }
            else if (msg.isMidiContinue ())
            {
                midiLine += "MIDI CONTINUE: ";
            }
            else if (msg.isMidiStop ())
            {
                midiLine += "MIDI STOP: ";
            }
            else if (msg.isSongPositionPointer ())
            {
                midiLine += "SONG POSITION: ";
                midiLine += String (msg.getSongPositionPointerMidiBeat ());
            }
            else if (msg.isQuarterFrame ())
            {
                midiLine += "QUARTER FRAME: ";
                midiLine += String (msg.getQuarterFrameSequenceNumber ());
                midiLine += " ";
                midiLine += String (msg.getQuarterFrameValue ());
            }
            else if (msg.isFullFrame ())
            {
                midiLine += "FULL FRAME: ";

                msg.getFullFrameParameters (hours, minutes, seconds, frames, timeCode);

                midiLine += String (hours);
                midiLine += ":";
                midiLine += String (minutes);
                midiLine += ":";
                midiLine += String (seconds);
                midiLine += ":";
                midiLine += String (frames);

                midiLine += " timecode: ";
                switch (timeCode) {
                    case MidiMessage::fps24:      midiLine += "fps24"; break;
                    case MidiMessage::fps25:      midiLine += "fps25"; break;
                    case MidiMessage::fps30drop:  midiLine += "fps30drop"; break;
                    case MidiMessage::fps30:      midiLine += "fps30"; break;
                }
            }
            else if (msg.isMidiMachineControlGoto (hours, minutes, seconds, frames))
            {
                midiLine += "MIDI CONTROL GOTO: ";
                midiLine += String (hours);
                midiLine += ":";
                midiLine += String (minutes);
                midiLine += ":";
                midiLine += String (seconds);
                midiLine += ":";
                midiLine += String (frames);
            }

            midiOutputEditor->insertTextAtCursor (midiLine + T("\n"));
        }

	}

}
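For reference, the producer side that feeds the MidiMessageCollector polled by timerCallback() might look like this (assumed wiring, not shown in the example); a collector can also be registered directly as the MidiInputCallback of a MidiInput.

void prepareAndFeedCollector (MidiMessageCollector& collector)
{
    collector.reset (44100.0); // call once at start-up with the audio sample rate
    collector.addMessageToQueue (MidiMessage::noteOn (1, 60, (uint8) 100)); // e.g. from a MIDI input callback
}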
//==============================================================================
void MidiTransform::processEvents (MidiBuffer& midiMessages, const int blockSize)
{
    int timeStamp;
    MidiMessage message (0xf4, 0.0);
    MidiBuffer::Iterator it (midiMessages);

    MidiBuffer midiOutput;

    switch (command)
    {
    case MidiTransform::KeepEvents:
            break;
    case MidiTransform::DiscardEvents:
        {
            midiMessages.clear ();
            break;
        }
    case MidiTransform::RemapChannel:
        {
            while (it.getNextEvent (message, timeStamp))
            {
                message.setChannel (channelNumber);
                midiOutput.addEvent (message, timeStamp);
            }
            midiMessages = midiOutput;
            break;
        }
    case MidiTransform::ScaleNotes:
        {
            while (it.getNextEvent (message, timeStamp))
            {
                if (message.isNoteOnOrOff ())
                {
                    message.setNoteNumber (roundFloatToInt (message.getNoteNumber () * noteScale));
                    midiOutput.addEvent (message, timeStamp);
                }
            }
            midiMessages = midiOutput;
            break;
        }
    case MidiTransform::InvertNotes:
        {
            while (it.getNextEvent (message, timeStamp))
            {
                if (message.isNoteOnOrOff ())
                {
                    message.setNoteNumber (127 - message.getNoteNumber ());
                    midiOutput.addEvent (message, timeStamp);
                }
            }
            midiMessages = midiOutput;
            break;
        }
    case MidiTransform::TransposeNotes:
        {
            while (it.getNextEvent (message, timeStamp))
            {
                if (message.isNoteOnOrOff ())
                {
                    message.setNoteNumber (jmax (0, jmin (127, message.getNoteNumber () - noteTranspose)));
                    midiOutput.addEvent (message, timeStamp);
                }
            }
            midiMessages = midiOutput;
            break;
        }
    case MidiTransform::ScaleVelocity:
        {
            while (it.getNextEvent (message, timeStamp))
            {
                if (message.isNoteOn ())
                {
                    message.setVelocity ((message.getVelocity () / 127.0f) * velocityScale);
                    midiOutput.addEvent (message, timeStamp);
                }
            }
            midiMessages = midiOutput;
            break;
        }
    case MidiTransform::InvertVelocity:
        {
            while (it.getNextEvent (message, timeStamp))
            {
                if (message.isNoteOn ())
                {
                    message.setVelocity ((127 - message.getVelocity ()) / 127.0f); // setVelocity expects a value in the 0..1 range
                    midiOutput.addEvent (message, timeStamp);
                }
            }
            midiMessages = midiOutput;
            break;
        }
    case MidiTransform::TransposeVelocity:
        {
            while (it.getNextEvent (message, timeStamp))
            {
                if (message.isNoteOn ())
                {
                    message.setVelocity (jmax (0, jmin (127, message.getVelocity () - velocityTranspose)) / 127.0f); // normalise to the 0..1 range expected by setVelocity
                    midiOutput.addEvent (message, timeStamp);
                }
            }
            midiMessages = midiOutput;
            break;
        }
    case MidiTransform::TriggerCC:
        {
            break;
        }
    case MidiTransform::TriggerNote:
        {
            break;
        }
    }
}
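A hypothetical call site (the processor class and its midiTransform member are assumptions): the transform is applied in place to the block's MIDI buffer before the audio is rendered.

void MyProcessor::processBlock (AudioSampleBuffer& audio, MidiBuffer& midi)
{
    midiTransform.processEvents (midi, audio.getNumSamples());
    // ... render audio from the (possibly rewritten) MIDI buffer
}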
Example no. 9
void InstanceProcessor::processBlock(AudioSampleBuffer& buffer, MidiBuffer& midiMessages)
{
    for(int i = getTotalNumInputChannels(); i < getTotalNumOutputChannels(); ++i)
    {
        buffer.clear(i, 0, buffer.getNumSamples());
    }
    bool infos = false;
    
    AudioPlayHead* playhead = getPlayHead();
    if(playhead && m_patch_tie)
    {
        infos = playhead->getCurrentPosition(m_playinfos);
    }
    lock();
    {
        m_midi.clear();
        if(infos)
        {
            m_playing_list.setFloat(0, m_playinfos.isPlaying);
            m_playing_list.setFloat(1, m_playinfos.timeInSeconds);
            sendMessageAnything(m_patch_tie, s_playing, m_playing_list);
            m_measure_list.setFloat(0, m_playinfos.bpm);
            m_measure_list.setFloat(1, m_playinfos.timeSigNumerator);
            m_measure_list.setFloat(2, m_playinfos.timeSigDenominator);
            m_measure_list.setFloat(3, m_playinfos.ppqPosition);
            m_measure_list.setFloat(4, m_playinfos.ppqPositionOfLastBarStart);
            sendMessageAnything(m_patch_tie, s_measure, m_measure_list);
        }
        for(size_t i = 0; i < m_parameters.size() && m_parameters[i].isValid(); ++i)
        {
            sendMessageFloat(m_parameters[i].getTie(), m_parameters[i].getValueNonNormalized());
        }
        
        MidiMessage message;
        MidiBuffer::Iterator it(midiMessages);
        int position = midiMessages.getFirstEventTime();
        while(it.getNextEvent(message, position))
        {
            if(message.isNoteOnOrOff())
            {
                sendMidiNote(message.getChannel(), message.getNoteNumber(), message.getVelocity());
            }
            else if(message.isController())
            {
                sendMidiControlChange(message.getChannel(), message.getControllerNumber(), message.getControllerValue());
            }
            else if(message.isPitchWheel())
            {
                sendMidiPitchBend(message.getChannel(), message.getPitchWheelValue());
            }
            else if(message.isChannelPressure())
            {
                sendMidiAfterTouch(message.getChannel(), message.getChannelPressureValue());
            }
            else if(message.isAftertouch())
            {
                sendMidiPolyAfterTouch(message.getChannel(), message.getNoteNumber(), message.getAfterTouchValue());
            }
            else if(message.isProgramChange())
            {
                sendMidiProgramChange(message.getChannel(), message.getProgramChangeNumber());
            }
        }
    }
    midiMessages.clear();
    performDsp(buffer.getNumSamples(),
               getTotalNumInputChannels(), buffer.getArrayOfReadPointers(),
               getTotalNumOutputChannels(), buffer.getArrayOfWritePointers());
    midiMessages.swapWith(m_midi);
    unlock();
}
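Note that MidiBuffer::Iterator, used here and in the previous examples, is deprecated in recent JUCE versions; the same traversal can be written with range-based iteration (JUCE 6 and later), sketched below.

void iterateMidiBuffer (const MidiBuffer& midiMessages)
{
    for (const auto metadata : midiMessages)
    {
        const MidiMessage message = metadata.getMessage();
        const int samplePosition = metadata.samplePosition;
        ignoreUnused (message, samplePosition); // dispatch exactly as in the while (it.getNextEvent (...)) loops above
    }
}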
Example no. 10
//==============================================================================
int main (int argc, char* argv[])
{
	if (argc != 3) {
		cout << "Usage: <prog> <midi input file> <wav output file>" << endl;
		return 0;
	}
	File inMidiFile = File(argv[1]);
	File outWavFile = File(argv[2]);

	//File inMidiFile = File("C:\\Users\\GeorgeKrueger\\Documents\\GitHub\\pymusic\\out.mid");
	//File outWavFile = File("C:\\Users\\GeorgeKrueger\\Documents\\GitHub\\pymusic\\out.wav");

	FileInputStream fileStream(inMidiFile);
	juce::MidiFile midiFile;
	midiFile.readFrom(fileStream);
	int numTracks = midiFile.getNumTracks();
	midiFile.convertTimestampTicksToSeconds();
	std::cout << "Opened midi file: " << inMidiFile.getFileName() << " Tracks: " << numTracks << std::endl;;

	playHead.posInfo.bpm = 120;
	playHead.posInfo.isPlaying = true;
	playHead.posInfo.timeInSamples = 0;
	playHead.posInfo.timeInSeconds = 0;
	playHead.posInfo.timeSigNumerator = 4;
	playHead.posInfo.timeSigDenominator = 4;

	for (int i = 0; i < numTracks; ++i)
	{
		const juce::MidiMessageSequence* msgSeq = midiFile.getTrack(i);

		double trackLengthSeconds = 0;
		String plugFile = "";
		int program = 0;
		for (int j = 0; j < msgSeq->getNumEvents(); ++j)
		{
			juce::MidiMessageSequence::MidiEventHolder* midiEventHolder = msgSeq->getEventPointer(j);
			juce::MidiMessage midiMsg = midiEventHolder->message;
			if (midiMsg.isMetaEvent() && midiMsg.getMetaEventType() == 0x04) {
				// Instrument meta event
				int instrLength = midiMsg.getMetaEventLength();
				const juce::uint8* instrChars = midiMsg.getMetaEventData();
				String instrName((char*)instrChars, instrLength);
				plugFile = instrName;
			}
			if (midiMsg.isMetaEvent() && midiMsg.isEndOfTrackMetaEvent()) {
				//int oetDataLength = midiMsg.getMetaEventLength();
				//const uint8* oetData = midiMsg.getMetaEventData();
				//std::cout << "Found end of track event data size: " << oetDataLength << " data: " << oetData << std::endl;
				trackLengthSeconds = midiMsg.getTimeStamp();
				std::cout << "Track length in seconds: " << trackLengthSeconds << std::endl;
			}
		}

		if (trackLengthSeconds == 0) {
			std::cerr << "Skipping track " << i << " since it has zero length" << std::endl;
			continue;
		}

		if (plugFile.isEmpty()) {
			plugFile = "C:\\VST\\helm.dll";
			std::cout << "No plug found for track. Defaulting to: " << plugFile << std::endl;
			//std::cerr << "Skipping track " << i << ". No instrument found." << std::endl;
			//continue;
		}
		else {
			std::cout << "Found plugin file '" << plugFile << "' from track " << i << std::endl;
		}

		OwnedArray<PluginDescription> results;
		VSTPluginFormat vstFormat;
		vstFormat.findAllTypesForFile(results, plugFile);
		if (results.size() > 0) {
			std::cout << "Found " << results.size() << " plugin(s) in file '" << plugFile << "'" << std::endl;

			int blockSize = 1024;
			double sampleRate = 44100;
			int totalSizeInSamples = ((static_cast<int>(sampleRate * trackLengthSeconds) / blockSize) + 1) * blockSize;
			cout << "Total samples to render " << totalSizeInSamples << endl;
			juce::AudioPluginInstance* plugInst = vstFormat.createInstanceFromDescription(*results[0], sampleRate, blockSize);
			if (!plugInst) {
				cout << "Failed to load plugin " << plugFile << endl;
				continue;
			}

			AudioProcessorGraph* graph = new AudioProcessorGraph();
			graph->setPlayConfigDetails(0, 2, sampleRate, blockSize);
			graph->setPlayHead(&playHead);
			graph->addNode(plugInst, 1000);

			int AUDIO_IN_ID = 101;
			int AUDIO_OUT_ID = 102;
			int MIDI_IN_ID = 103;
			juce::AudioPluginInstance* audioInNode = new AudioGraphIOProcessor(AudioGraphIOProcessor::audioInputNode);
			juce::AudioPluginInstance* audioOutNode = new AudioGraphIOProcessor(AudioGraphIOProcessor::audioOutputNode);
			juce::AudioPluginInstance* midiInNode = new AudioGraphIOProcessor(AudioGraphIOProcessor::midiInputNode);
			graph->addNode(audioInNode, AUDIO_IN_ID);
			graph->addNode(audioOutNode, AUDIO_OUT_ID);
			graph->addNode(midiInNode, MIDI_IN_ID);

			graph->addConnection(AUDIO_IN_ID, 0, 1000, 0);
			graph->addConnection(AUDIO_IN_ID, 1, 1000, 1);
			graph->addConnection(MIDI_IN_ID, AudioProcessorGraph::midiChannelIndex, 1000, AudioProcessorGraph::midiChannelIndex);
			graph->addConnection(1000, 0, AUDIO_OUT_ID, 0);
			graph->addConnection(1000, 1, AUDIO_OUT_ID, 1);

			plugInst->setCurrentProgram(program);

			int numInputChannels = plugInst->getTotalNumInputChannels();
			int numOutputChannels = plugInst->getTotalNumOutputChannels();
			cout << "----- Plugin Information -----" << endl;
			cout << "Input channels : " << numInputChannels << endl;
			cout << "Output channels : " << numOutputChannels << endl;
			cout << "Num Programs: " << plugInst->getNumPrograms() << endl;
			cout << "Current program: " << plugInst->getCurrentProgram() << endl;

			int numParams = plugInst->getNumParameters();
			cout << "Num Parameters: " << numParams << endl;
			for (int p = 0; p < numParams; ++p)
			{
				std::cout << "Param " << p << ": " << plugInst->getParameterName(p);
				if (!plugInst->getParameterLabel(p).isEmpty()) {
					cout << "(" << plugInst->getParameterLabel(p) << ")";
				}
				cout << " = " << plugInst->getParameter(p) << endl;
			}
			cout << "-----------------------------" << endl;

			int maxChannels = std::max(numInputChannels, numOutputChannels);
			AudioBuffer<float> entireAudioBuffer(maxChannels, totalSizeInSamples);
			entireAudioBuffer.clear();
			unsigned int midiSeqPos = 0;

			graph->releaseResources();
			graph->prepareToPlay(sampleRate, blockSize);

			cout << "Num midi events: " << msgSeq->getNumEvents() << endl;

			// Render the audio in blocks
			for (int t = 0; t < totalSizeInSamples; t += blockSize)
			{
				//cout << "processing block " << t << " to " << t + blockSize << endl;
				MidiBuffer midiBuffer;
				for (int j = midiSeqPos; j < msgSeq->getNumEvents(); ++j)
				{
					MidiMessageSequence::MidiEventHolder* midiEventHolder = msgSeq->getEventPointer(j);
					MidiMessage midiMsg = midiEventHolder->message;
					int samplePos = static_cast<int>(midiMsg.getTimeStamp() * sampleRate);
					if (samplePos >= t && samplePos < t + blockSize) {
						if (midiMsg.isNoteOnOrOff()) {
							if (midiMsg.isNoteOn()) {
								cout << "note on event (" << midiMsg.getNoteNumber() << ") at " << samplePos << "(" << midiMsg.getTimeStamp() << "s) bufferpos=" << (samplePos - t) << endl;
							}
							else if (midiMsg.isNoteOff()) {
								cout << "note off event (" << midiMsg.getNoteNumber() << ") at " << samplePos << "(" << midiMsg.getTimeStamp() << "s) bufferpos=" << (samplePos - t) << endl;
							}
							midiBuffer.addEvent(midiMsg, samplePos - t);
						}
						else if (midiMsg.isProgramChange()) {
							program = midiMsg.getProgramChangeNumber();
							plugInst->setCurrentProgram(program);
						}
						midiSeqPos++;
					}
					else {
						break;
					}
				}

				playHead.posInfo.timeInSamples = t;
				playHead.posInfo.timeInSeconds = t / sampleRate;

				AudioBuffer<float> blockAudioBuffer(entireAudioBuffer.getNumChannels(), blockSize);
				blockAudioBuffer.clear();
				graph->processBlock(blockAudioBuffer, midiBuffer);

				for (int ch = 0; ch < entireAudioBuffer.getNumChannels(); ++ch) {
					entireAudioBuffer.addFrom(ch, t, blockAudioBuffer, ch, 0, blockSize);
				}
			}

			if (outWavFile.exists()) {
				outWavFile.deleteFile();
			}
			FileOutputStream* fileOutputStream = outWavFile.createOutputStream();
			WavAudioFormat wavFormat;
			StringPairArray metadataValues;
			juce::AudioFormatWriter* wavFormatWriter = wavFormat.createWriterFor(
				fileOutputStream, sampleRate, 2, 16, metadataValues, 0);
			bool writeAudioDataRet = wavFormatWriter->writeFromAudioSampleBuffer(entireAudioBuffer, 0, entireAudioBuffer.getNumSamples());
			wavFormatWriter->flush();

			cout << "Done writing to output file " << outWavFile.getFileName() << " . Write return value: "
				<< (int)writeAudioDataRet << endl;

			delete wavFormatWriter;
		}
		else {
			cerr << "Could not find plugin from file " << plugFile << endl;
		}
	}

    return 0;
}