//==============================================================================
void MidiManipulator::processEvents (MidiBuffer& midiMessages, const int blockSize)
{
    MidiBuffer midiOutput;

    if (! midiMessages.isEmpty ())
    {
        int timeStamp = 0;
        MidiMessage message (0xf4, 0.0);
        MidiBuffer::Iterator it (midiMessages);

        if (filter)
        {
            while (it.getNextEvent (message, timeStamp))
            {
                if (filter->filterEvent (message))
                    midiOutput.addEvent (message, timeStamp);
            }
        }
        else
        {
            midiOutput = midiMessages;
        }

        midiMessages.clear ();
    }

    if (transform)
    {
        transform->processEvents (midiOutput, blockSize);
    }

    midiMessages = midiOutput;
}
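// Note: recent JUCE versions deprecate MidiBuffer::Iterator in favour of
// range-based iteration. A minimal sketch of the filtering loop written that
// way (assumes the same "filter" member; the nullptr check folds in the
// pass-through branch):
for (const auto metadata : midiMessages)
{
    const MidiMessage message = metadata.getMessage();

    if (filter == nullptr || filter->filterEvent (message))
        midiOutput.addEvent (message, metadata.samplePosition);
}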
Example #2
void SamplerProcessor::processBlock (AudioSampleBuffer& buffer, MidiBuffer& midiMessages)
{
	midiBuffer.clear();
	tfMidiBuffer.clear();
	midiBuffer = midiMessages;
	tfMidiBuffer = midiMessages;
	//buffer.clear();
	//MidiBuffer incomingMidi;
	//midiCollector.removeNextBlockOfMessages(midiMessages, buffer.getNumSamples());
	if (midiCallback != nullptr){
		if (!midiMessages.isEmpty()){
			midiCallback->handleMidiBuffer(midiMessages);
			tf_selector->setMidiBuffer(tfMidiBuffer);
		}
	}
	int x = midiBuffer.getNumEvents();
	synth.renderNextBlock(buffer, midiBuffer, 0, buffer.getNumSamples());

	// Track the peak sample value of channel 0 for simple output metering.
	peak = 0.0;
	const float* channelData = buffer.getReadPointer(0);
	for (int i = 0; i < buffer.getNumSamples(); i++){
		if (channelData[i] > peak){
			peak = channelData[i];
		}
	}
}
Example #3
void VoicerUGenInternal::sendMidiBuffer(MidiBuffer const& midiMessagesToAdd) throw()
{	
	if(midiMessagesToAdd.isEmpty() == false)
	{
		const ScopedLock sl(lock);
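		// MidiBuffer::addEvents (source, startSample, numSamples, sampleDeltaToAdd):
		// a negative numSamples means "take every event from startSample onwards",
		// so this appends the whole incoming buffer without shifting timestamps.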
		midiMessages.addEvents(midiMessagesToAdd, 0, -1, 0);
	}
}
Example #4
void MIDIDelay::processMIDIBuffer (MidiBuffer& inputMIDIBuffer)
{
    MidiBuffer::Iterator inputMIDIBufferIterator (inputMIDIBuffer);

    MidiMessage currentMidiMessage;
    int midiMessageSamplePosition = 0;

    if (! inputMIDIBuffer.isEmpty())
    {
        inputMIDIBufferIterator.getNextEvent (currentMidiMessage, midiMessageSamplePosition);
        bool midiBufferIsNotEmpty = true;

        for (int sampleIndex = 0; sampleIndex < mBlockSize; ++sampleIndex)
        {
            // Go through every MIDI message this sample.
            while (sampleIndex == midiMessageSamplePosition
                && midiBufferIsNotEmpty)
            {
                double delayedSamplePosition = midiMessageSamplePosition + mDelayTime[sampleIndex];
                MidiMessage delayedMIDIMessage (currentMidiMessage, delayedSamplePosition);

                mDelayedMIDIBuffer.push_back (delayedMIDIMessage);

                midiBufferIsNotEmpty = inputMIDIBufferIterator.getNextEvent (currentMidiMessage, midiMessageSamplePosition);
            }
        }
    }

    if (! mDelayedMIDIBuffer.empty())
    {
        for (int index = 0; index < mDelayedMIDIBuffer.size(); ++index)
        {
            if (mDelayedMIDIBuffer[index].getTimeStamp() < mBlockSize)
            {
                mReplacementBuffer.addEvent (mDelayedMIDIBuffer[index],
                                             int (mDelayedMIDIBuffer[index].getTimeStamp()));

                mDelayedMIDIBuffer.erase (mDelayedMIDIBuffer.begin() + index);
                --index;
            }
            else if (mDelayedMIDIBuffer[index].getTimeStamp() >= mBlockSize)
            {
                double newTimeStamp = mDelayedMIDIBuffer[index].getTimeStamp() - mBlockSize;
                mDelayedMIDIBuffer[index].setTimeStamp (newTimeStamp);
            }
        }
    }

    inputMIDIBuffer.swapWith (mReplacementBuffer);
    mReplacementBuffer.clear();
}
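// The member variables this example relies on are not shown here; a minimal
// sketch of what the code above assumes (only the names come from the snippet,
// the exact declarations are an assumption):
int mBlockSize = 0;                             // samples per processed block
std::vector<double> mDelayTime;                 // per-sample delay time, in samples
std::vector<MidiMessage> mDelayedMIDIBuffer;    // delayed events waiting to be emitted
MidiBuffer mReplacementBuffer;                  // rebuilt output for the current block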
void YSE::SYNTH::implementationObject::process(YSE::SOUND_STATUS & intent) {
  // let juce take care of the midi parsing and buffer generation
  synthBuffer.clear();
  MidiBuffer incomingMidi;

  // get messages from midi files
  for (auto i = midiFiles.begin(); i != midiFiles.end(); i++) {
    (*i)->getMessages(incomingMidi);
  }

  // get messages from midi input
  midiCollector.removeNextBlockOfMessages(incomingMidi, STANDARD_BUFFERSIZE);
  
  // sync message queue
  keyboardState.processNextMidiBuffer(incomingMidi, 0, STANDARD_BUFFERSIZE, true);
  synthesizer.renderNextBlock(synthBuffer, incomingMidi, 0, STANDARD_BUFFERSIZE);

  // alter events if there's a callback function provided
  if (onNoteEvent != nullptr && !incomingMidi.isEmpty()) {
    MidiBuffer::Iterator iter(incomingMidi);
    MidiMessage m(0xf0);
    int sample;
    while (iter.getNextEvent(m, sample)) {
      if (m.isNoteOnOrOff()) {
        float pitch = m.getNoteNumber();
        float velocity = m.getFloatVelocity();
        onNoteEvent(m.isNoteOn(), &pitch, &velocity);
        m.setNoteNumber(pitch);
        m.setVelocity(velocity);
      }
    }
  }

  // now copy over the buffer generated by juce to our own format
  // TODO: is there a way to avoid this copy?
  float * out = buffer[0].getPtr();
  const float * in = synthBuffer.getReadPointer(0);

  int l = STANDARD_BUFFERSIZE;
  for (; l > 7; l -= 8, out += 8, in += 8) {
    out[0] = in[0]; out[1] = in[1]; out[2] = in[2]; out[3] = in[3];
    out[4] = in[4]; out[5] = in[5]; out[6] = in[6]; out[7] = in[7];
  }

  while (l--) *out++ = *in++;
}
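// The manually unrolled copy above can also be written with JUCE's vector
// helper; a one-line sketch using the same "out"/"in" pointers and block size:
FloatVectorOperations::copy (out, in, STANDARD_BUFFERSIZE);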
Example #6
void Pfm2AudioProcessor::handleIncomingMidiBuffer(MidiBuffer &buffer, int numberOfSamples) {
    if (!buffer.isEmpty()) {
        MidiBuffer newBuffer;
        MidiMessage midiMessage;
        int samplePosition;
        MidiBuffer::Iterator midiIterator(buffer);
        while (midiIterator.getNextEvent(midiMessage, samplePosition)) {
            bool copyMessageInNewBuffer = true;

            if (midiMessage.isController() && midiMessage.getChannel() == currentMidiChannel) {
                switch (midiMessage.getControllerNumber()) {
                case 99:
                    currentNrpn.paramMSB = midiMessage.getControllerValue();
                    copyMessageInNewBuffer = false;
                    break;
                case 98:
                    currentNrpn.paramLSB = midiMessage.getControllerValue();
                    copyMessageInNewBuffer = false;
                    break;
                case 6:
                    currentNrpn.valueMSB = midiMessage.getControllerValue();
                    copyMessageInNewBuffer = false;
                    break;
                case 38:
                {
                    currentNrpn.valueLSB = midiMessage.getControllerValue();
                    copyMessageInNewBuffer = false;
                    int param = (int)(currentNrpn.paramMSB << 7) + currentNrpn.paramLSB;
                    int value = (int)(currentNrpn.valueMSB << 7) + currentNrpn.valueLSB;

                    const MessageManagerLock mmLock;
                    handleIncomingNrpn(param, value);
                    break;
                }
                }
            }
            if (copyMessageInNewBuffer) {
                newBuffer.addEvent(midiMessage, samplePosition);
            }
        }
        buffer.swapWith(newBuffer);
    }
}
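// The CC 99/98/6/38 sequence decoded above is a standard NRPN. For reference, a
// sender could build the matching sequence like this (hypothetical helper; the
// channel and 14-bit param/value arguments are assumptions):
void appendNrpn (MidiBuffer& out, int channel, int param, int value, int samplePos)
{
    out.addEvent (MidiMessage::controllerEvent (channel, 99, (param >> 7) & 0x7f), samplePos); // NRPN param MSB
    out.addEvent (MidiMessage::controllerEvent (channel, 98, param & 0x7f),         samplePos); // NRPN param LSB
    out.addEvent (MidiMessage::controllerEvent (channel, 6,  (value >> 7) & 0x7f),  samplePos); // data entry MSB
    out.addEvent (MidiMessage::controllerEvent (channel, 38, value & 0x7f),         samplePos); // data entry LSB
}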
//==============================================================================
void MidiMonitorEditor::timerCallback ()
{
    MidiBuffer tmpBuffer;
    int hours, minutes, seconds, frames;
    MidiMessage::SmpteTimecodeType timeCode;
    
    MidiMessageCollector* collector = owner->getMessageCollector ();
    collector->removeNextBlockOfMessages (tmpBuffer, 1024);
    
    if (! tmpBuffer.isEmpty())
    {
        String midiLine;

        int samplePos = 0;
        MidiMessage msg (0xf4, 0.0);
        MidiBuffer::Iterator eventIterator (tmpBuffer);

        while (eventIterator.getNextEvent (msg, samplePos))
        {
           midiLine.printf (T("[CH: %d] "), msg.getChannel());

           if (msg.isNoteOnOrOff ())
           {
                midiLine += MidiMessage::getMidiNoteName (msg.getNoteNumber(),
                                                          true, true, 0);
                midiLine += " ";
                midiLine += String ((int) msg.getVelocity ());

                if (msg.isNoteOn())
                {
                    midiLine += " ON";
                }
                else
                {
                    midiLine += " OFF";
                }
           }
           else if (msg.isAllNotesOff())
           {
                midiLine += "ALL NOTES OFF";
           }
           else if (msg.isAllSoundOff())
           {
                midiLine += "ALL SOUND OFF";
           }
           else if (msg.isPitchWheel())
           {
                midiLine += "PITCHWEEL: ";
                midiLine += String (msg.getPitchWheelValue());
           }
           else if (msg.isAftertouch())
           {
                midiLine += "AFTERTOUCH: ";
                midiLine += String (msg.getAfterTouchValue());
           }
           else if (msg.isChannelPressure())
           {
                midiLine += "CHANNELPRESSURE: ";
                midiLine += String (msg.getChannelPressureValue());
           }
           else if (msg.isSysEx())
           {
                midiLine += "SYSEX: ";
                midiLine += String (msg.getSysExDataSize());
                midiLine += " bytes";
           }
           else if (msg.isProgramChange())
           {
                midiLine += "PROGRAM CHANGE: ";
                midiLine += String (msg.getProgramChangeNumber());
                midiLine += " (";
                midiLine += MidiMessage::getGMInstrumentName (msg.getProgramChangeNumber());
                midiLine += ")";
           }
           else if (msg.isController())
           {
                midiLine += "CC: #";
                midiLine += String (msg.getControllerNumber());
                midiLine += " (";
                midiLine += MidiMessage::getControllerName (msg.getControllerNumber());
                midiLine += ") = ";
                midiLine += String (msg.getControllerValue());
           }
            else if (msg.isTimeSignatureMetaEvent ())
            {
                int newNumerator, newDenominator;
                msg.getTimeSignatureInfo (newNumerator, newDenominator);

                midiLine += "TIME SIGNATURE: ";
                midiLine += String (newNumerator);
                midiLine += " / ";
                midiLine += String (newDenominator);
            }
            else if (msg.isTempoMetaEvent ())
            {
                midiLine += "TEMPO: ";
                midiLine += String (msg.getTempoSecondsPerQuarterNote ());
                //midiLine += " ";
                //midiLine += String (msg.getTempoMetaEventTickLength (ticksPerQuarterNote));
            }
            else if (msg.isMidiMachineControlMessage())
            {
                midiLine += "MIDI CONTROL: ";
                
                switch (msg.getMidiMachineControlCommand())
                {
                    case MidiMessage::mmc_stop:             midiLine += "stop"; break;
                    case MidiMessage::mmc_play:             midiLine += "play"; break;
                    case MidiMessage::mmc_deferredplay:     midiLine += "deferredplay"; break;
                    case MidiMessage::mmc_fastforward:      midiLine += "fastforward"; break;
                    case MidiMessage::mmc_rewind:           midiLine += "rewind"; break;
                    case MidiMessage::mmc_recordStart:      midiLine += "recordStart"; break;
                    case MidiMessage::mmc_recordStop:       midiLine += "recordStop"; break;
                    case MidiMessage::mmc_pause:            midiLine += "pause"; break;
                }
            }
            else if (msg.isMidiStart ())
            {
                midiLine += "MIDI START: ";
            }
            else if (msg.isMidiContinue ())
            {
                midiLine += "MIDI CONTINUE: ";
            }
            else if (msg.isMidiStop ())
            {
                midiLine += "MIDI STOP: ";
            }
            else if (msg.isSongPositionPointer ())
            {
                midiLine += "SONG POSITION: ";
                midiLine += String (msg.getSongPositionPointerMidiBeat ());
            }
            else if (msg.isQuarterFrame ())
            {
                midiLine += "QUARTER FRAME: ";
                midiLine += String (msg.getQuarterFrameSequenceNumber ());
                midiLine += " ";
                midiLine += String (msg.getQuarterFrameValue ());
            }
            else if (msg.isFullFrame ())
            {
                midiLine += "FULL FRAME: ";

                msg.getFullFrameParameters (hours, minutes, seconds, frames, timeCode);

                midiLine += String (hours);
                midiLine += ":";
                midiLine += String (minutes);
                midiLine += ":";
                midiLine += String (seconds);
                midiLine += ":";
                midiLine += String (frames);

                midiLine += " timecode: ";
                switch (timeCode) {
                    case MidiMessage::fps24:      midiLine += "fps24"; break;
                    case MidiMessage::fps25:      midiLine += "fps25"; break;
                    case MidiMessage::fps30drop:  midiLine += "fps30drop"; break;
                    case MidiMessage::fps30:      midiLine += "fps30"; break;
                }
            }
            else if (msg.isMidiMachineControlGoto (hours, minutes, seconds, frames))
            {
                midiLine += "MIDI CONTROL GOTO: ";
                midiLine += String (hours);
                midiLine += ":";
                midiLine += String (minutes);
                midiLine += ":";
                midiLine += String (seconds);
                midiLine += ":";
                midiLine += String (frames);
            }

            midiOutputEditor->insertTextAtCursor (midiLine + T("\n"));
        }

    }

}
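// This editor targets a legacy JUCE API (String::printf, the T() macro). In
// current JUCE, most of the per-message formatting above can be replaced with
// MidiMessage::getDescription(); a minimal sketch of the same loop body:
for (const auto metadata : tmpBuffer)
{
    const MidiMessage msg = metadata.getMessage();
    midiOutputEditor->insertTextAtCursor ("[CH: " + String (msg.getChannel()) + "] "
                                          + msg.getDescription() + "\n");
}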
void PolyWavegeneratorProcessor::processBlock(AudioSampleBuffer& buffer, MidiBuffer& midiBuffer)
{
  // clear all the midibuffers of the voices because they still contain the events from the last process Block
  for(int index = 0; index < voices.size(); index++)
  {
    voices[index]->clearMidiBuffer();
  }
  
  // MIDI and voice handling. Not correct yet: repeated note-ons for the same
  // note within one buffer (and similar cases) still need to be handled.
  if(takesMidi && !midiBuffer.isEmpty())
  {
    MidiMessage message1;                       // no need to heap-allocate (the original leaked)
    MidiBuffer::Iterator iterator(midiBuffer);  // a stack iterator is sufficient here
    int sampleInBuffer = 0;
    while(iterator.getNextEvent(message1, sampleInBuffer))
    {
      if(message1.isNoteOn())
      {
        // always take the first one and move it to the back => the oldest voice will be "overwritten"
        voices[0]->setIsOn(true);
        voices[0]->setMidiNote(message1.getNoteNumber());
        voices[0]->addMidiEvent(message1, sampleInBuffer);
        
        voices.move(0, voices.size()-1);
      }
      else if(message1.isNoteOff())
      {
        for(int index = 0; index < voices.size(); index++)
        {
          if(voices[index]->getMidiNote() == message1.getNoteNumber())
          {
            ScopedPointer<Voice> tempVoice = voices[index];
            
            tempVoice->setIsOn(false);
            tempVoice->addMidiEvent(message1, sampleInBuffer);
            tempVoice->setMidiNote(-1); // this should be removed but just in case for now
            
            break;
          }
        }
      }
    }
  }
  
  
  
  // Audio Handling of the voices
  AudioBuffer<float> outBuffer = getBusBuffer(buffer, false, 0);
  
  int numActive = 0; // eventually this could be a member variable
  for(int index = 0; index < voices.size(); index++)
  {
    if(voices[index]->getIsOn())
    {
      numActive++;
      
      voices[index]->clearAudioBuffer();
      voices[index]->fillBufferEnvelope();
      voices[index]->fillBufferAudio();
      
      outBuffer.addFrom(0, 0, voices[index]->getOutBuffer(), 0, 0, outBuffer.getNumSamples());
    }
  }
  
  if (numActive > 0)
    outBuffer.applyGain(1.0f / numActive); // guard against division by zero when no voice is active
  
  
}
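// A minimal sketch of the Voice interface this example relies on (the real
// class is not shown; these signatures are assumptions inferred from the calls
// above):
class Voice
{
public:
    void clearMidiBuffer();
    void clearAudioBuffer();
    void setIsOn (bool shouldBeOn);
    bool getIsOn() const;
    void setMidiNote (int noteNumber);
    int  getMidiNote() const;
    void addMidiEvent (const MidiMessage& message, int samplePosition);
    void fillBufferEnvelope();
    void fillBufferAudio();
    const AudioBuffer<float>& getOutBuffer() const;
};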
Example #9
void MidiControllerAutomationHandler::handleParameterData(MidiBuffer &b)
{
	const bool bufferEmpty = b.isEmpty();
	const bool noCCsUsed = !anyUsed && !unlearnedData.used;

	if (bufferEmpty || noCCsUsed) return;

	tempBuffer.clear();

	MidiBuffer::Iterator mb(b);
	MidiMessage m;

	int samplePos;

	while (mb.getNextEvent(m, samplePos))
	{
		bool consumed = false;

		if (m.isController())
		{
			const int number = m.getControllerNumber();

			if (isLearningActive())
			{
				setUnlearndedMidiControlNumber(number, sendNotification);
			}

			for (auto& a : automationData[number])
			{
				if (a.used)
				{
					jassert(a.processor.get() != nullptr);

					auto normalizedValue = (double)m.getControllerValue() / 127.0;

					if (a.inverted) normalizedValue = 1.0 - normalizedValue;

					const double value = a.parameterRange.convertFrom0to1(normalizedValue);

					const float snappedValue = (float)a.parameterRange.snapToLegalValue(value);

					if (a.macroIndex != -1)
					{
						a.processor->getMainController()->getMacroManager().getMacroChain()->setMacroControl(a.macroIndex, (float)m.getControllerValue(), sendNotification);
					}
					else
					{
						if (a.lastValue != snappedValue)
						{
							a.processor->setAttribute(a.attribute, snappedValue, sendNotification);
							a.lastValue = snappedValue;
						}
					}

					consumed = true;
				}
			}
		}

		if (!consumed) tempBuffer.addEvent(m, samplePos);
	}

	b.clear();
	b.addEvents(tempBuffer, 0, -1, 0);
}
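// Because tempBuffer is rebuilt from scratch on every call, the final
// clear()/addEvents() pair could equivalently be replaced by a swap, which
// avoids re-copying the surviving events (a sketch, not the original code):
b.swapWith (tempBuffer);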