//==============================================================================
void MidiManipulator::processEvents (MidiBuffer& midiMessages, const int blockSize)
{
    MidiBuffer midiOutput;

    if (! midiMessages.isEmpty ())
    {
        int timeStamp;
        MidiMessage message (0xf4, 0.0);
        MidiBuffer::Iterator it (midiMessages);

        if (filter)
        {
            while (it.getNextEvent (message, timeStamp))
            {
                if (filter->filterEvent (message))
                    midiOutput.addEvent (message, timeStamp);
            }
        }
        else
        {
            midiOutput = midiMessages;
        }

        midiMessages.clear ();
    }

    if (transform)
    {
        transform->processEvents (midiOutput, blockSize);
    }

    midiMessages = midiOutput;
}
void Synthesiser::renderNextBlock (AudioSampleBuffer& outputBuffer, const MidiBuffer& midiData,
                                   int startSample, int numSamples)
{
    // must set the sample rate before using this!
    jassert (sampleRate != 0);

    const ScopedLock sl (lock);

    MidiBuffer::Iterator midiIterator (midiData);
    midiIterator.setNextSamplePosition (startSample);
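    // (0xf4 is an unused MIDI status byte - the message below is just a placeholder
    //  that the iterator overwrites on each call to getNextEvent.)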
    MidiMessage m (0xf4, 0.0);

    while (numSamples > 0)
    {
        int midiEventPos;
        const bool useEvent = midiIterator.getNextEvent (m, midiEventPos)
                                && midiEventPos < startSample + numSamples;

        const int numThisTime = useEvent ? midiEventPos - startSample
                                         : numSamples;

        if (numThisTime > 0)
            renderVoices (outputBuffer, startSample, numThisTime);

        if (useEvent)
            handleMidiEvent (m);

        startSample += numThisTime;
        numSamples -= numThisTime;
    }
}
void DssiPluginMidiManager::convertMidiMessages (MidiBuffer& midiMessages,
                                                 const int blockSamples)
{
    const uint8* data;
    int numBytesOfMidiData,
        samplePosition;
    MidiBuffer::Iterator it (midiMessages);

    currentMidiCount = 0;

    while (it.getNextEvent (data,
                            numBytesOfMidiData,
                            samplePosition))
    {
        if (numBytesOfMidiData > maxEventSize)
        {
            maxEventSize = numBytesOfMidiData;
            snd_midi_event_free (midiParser);
            snd_midi_event_new (maxEventSize, &midiParser);
        }

        snd_seq_event_t* event = & midiEventsBuffer [currentMidiCount];
        snd_seq_ev_clear (event);

        snd_midi_event_encode (midiParser,
                               data,
                               numBytesOfMidiData,
                               event);

        if (++currentMidiCount >= 2048)
            break;
    }

    snd_midi_event_reset_encode (midiParser);
}
Example #4
int GenericProcessor::checkForEvents(MidiBuffer& midiMessages)
{

	if (midiMessages.getNumEvents() > 0) 
	{
			
		int m = midiMessages.getNumEvents();
		//std::cout << m << " events received by node " << getNodeId() << std::endl;

		MidiBuffer::Iterator i (midiMessages);
		MidiMessage message(0xf4);

		int samplePosition = 0;
		i.setNextSamplePosition(samplePosition);

		while (i.getNextEvent (message, samplePosition)) {
			
			const uint8* dataptr = message.getRawData();

			handleEvent(*dataptr, message, samplePosition);

		}

	}

	return -1;

}
void MidiKeyboardState::processNextMidiBuffer (MidiBuffer& buffer,
                                               const int startSample,
                                               const int numSamples,
                                               const bool injectIndirectEvents)
{
    MidiBuffer::Iterator i (buffer);
    MidiMessage message;
    int time;

    const ScopedLock sl (lock);

    while (i.getNextEvent (message, time))
        processNextMidiEvent (message);

    if (injectIndirectEvents)
    {
        MidiBuffer::Iterator i2 (eventsToAdd);
        const int firstEventToAdd = eventsToAdd.getFirstEventTime();
        const double scaleFactor = numSamples / (double) (eventsToAdd.getLastEventTime() + 1 - firstEventToAdd);

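        // map the injected events' timestamps proportionally onto [0, numSamples)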
        while (i2.getNextEvent (message, time))
        {
            const int pos = jlimit (0, numSamples - 1, roundToInt ((time - firstEventToAdd) * scaleFactor));
            buffer.addEvent (message, startSample + pos);
        }
    }

    eventsToAdd.clear();
}
Example #6
int GenericProcessor::getNumSamples(MidiBuffer& events) {

	int numRead = 0;

	if (events.getNumEvents() > 0) 
	{
			
		int m = events.getNumEvents();

		//std::cout << getName() << " received " << m << " events." << std::endl;

		MidiBuffer::Iterator i (events);
		MidiMessage message(0xf4);

		int samplePosition = -5;

		while (i.getNextEvent (message, samplePosition)) {
			
			const uint8* dataptr = message.getRawData();

			if (*dataptr == BUFFER_SIZE)
			{
				numRead = (int) message.getTimeStamp();
			}
		}
	}

	return numRead;
}
Example #7
void Synthesiser::processNextBlock (AudioBuffer<floatType>& outputAudio,
                                    const MidiBuffer& midiData,
                                    int startSample,
                                    int numSamples)
{
    // must set the sample rate before using this!
    jassert (sampleRate != 0);
    const int targetChannels = outputAudio.getNumChannels();

    MidiBuffer::Iterator midiIterator (midiData);
    midiIterator.setNextSamplePosition (startSample);

    bool firstEvent = true;
    int midiEventPos;
    MidiMessage m;

    const ScopedLock sl (lock);

    while (numSamples > 0)
    {
        if (! midiIterator.getNextEvent (m, midiEventPos))
        {
            if (targetChannels > 0)
                renderVoices (outputAudio, startSample, numSamples);

            return;
        }

        const int samplesToNextMidiMessage = midiEventPos - startSample;

        if (samplesToNextMidiMessage >= numSamples)
        {
            if (targetChannels > 0)
                renderVoices (outputAudio, startSample, numSamples);

            handleMidiEvent (m);
            break;
        }

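        // if the next event is closer than the minimum sub-block size, handle it now
        // rather than rendering an unusably small chunk of audio in between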
        if (samplesToNextMidiMessage < ((firstEvent && ! subBlockSubdivisionIsStrict) ? 1 : minimumSubBlockSize))
        {
            handleMidiEvent (m);
            continue;
        }

        firstEvent = false;

        if (targetChannels > 0)
            renderVoices (outputAudio, startSample, samplesToNextMidiMessage);

        handleMidiEvent (m);
        startSample += samplesToNextMidiMessage;
        numSamples  -= samplesToNextMidiMessage;
    }

    while (midiIterator.getNextEvent (m, midiEventPos))
        handleMidiEvent (m);
}
Example #8
void MidiOutput::sendBlockOfMessagesNow (const MidiBuffer& buffer)
{
    MidiBuffer::Iterator i (buffer);
    MidiMessage message;
    int samplePosition; // Note: not actually used, so no need to initialise.

    while (i.getNextEvent (message, samplePosition))
        sendMessageNow (message);
}
Example #9
//==============================================================================
void MPEZoneLayout::processNextMidiBuffer (const MidiBuffer& buffer)
{
    MidiBuffer::Iterator iter (buffer);
    MidiMessage message;
    int samplePosition; // not actually used, so no need to initialise.

    while (iter.getNextEvent (message, samplePosition))
        processNextMidiEvent (message);
}
Example #10
bool Gsp1101::sendCabData(UserCab const& userCab, unsigned char permFlag)
{
    std::auto_ptr<MidiBuffer> midiBuffer = userCab.createOutMidiBuffer(permFlag == 0x02);

    // F0 00 00 10 00 5F 01 (procedure 73) 00 30 01 (cab index) (permanence flag) (checksum) F7
    unsigned char cabEnd[]= { 0xf0, 0x00, 0x00, 0x10, 0x00, 0x5f, 0x01, 0x73, 0x00, 0x30, 0x01, 0x00, 0x00, 0x00, 0xf7 };

    cabEnd[sizeof(cabEnd) - 4] = static_cast<unsigned char>(userCab.getSlot() - 1);
    cabEnd[sizeof(cabEnd) - 3] = permFlag;
    unsigned char checksum = 0x00;
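    // checksum = XOR of every byte between the 0xF0 status byte and the checksum slot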
    for (size_t i = 0; i < sizeof(cabEnd) - 3; ++i)
    {
        checksum ^= cabEnd[i + 1];
    }
    cabEnd[sizeof(cabEnd) - 2] = checksum;
    MidiMessage midiMessage (cabEnd, sizeof(cabEnd));
    midiBuffer->addEvent (midiMessage, midiBuffer->getNumEvents() + 1);

    if (openMidi())
    {
        //Logger::outputDebugString("midiBuffer size = " + String(midiBuffer->getNumEvents()));
        MidiBuffer::Iterator i (*midiBuffer);
        const uint8* data;
        int len, time, count = midiBuffer->getNumEvents();
        bool rcvOK = false;
        while (i.getNextEvent (data, len, time) && deviceReady_M)
        {
            lastMidiInput_M.setSize(0);
            midiOutput_M->sendMessageNow(MidiMessage(data, len));

            unsigned char const testOK[]       = { 0x00, 0x00, 0x10, 0x00, 0x5f, 0x01, 0x7e, 0x00, 0x7a, 0x4a, };
            unsigned char const testOKcabEnd[] = { 0x00, 0x00, 0x10, 0x00, 0x5f, 0x01, 0x7e, 0x00, 0x73, 0x43, };

            if (--count > 0)
            {
                Logger::outputDebugString("\nMidiMessage to send:" + asHex(data, 16, true) + " ...");
                rcvOK = waitForMidiInput(testOK);
            }
            else
            {
                Logger::outputDebugString("\nMidiMessage to send:" + asHex(data, sizeof(cabEnd), true));
                rcvOK = waitForMidiInput(testOKcabEnd);
            }
            if (!rcvOK)
            {
                break;
            }
        }
        deviceReady_M = true;
        return rcvOK;
    }
    else
    {
        AlertWindow::showMessageBox (AlertWindow::WarningIcon, "Warning", "MIDI Device Not Found");
        return false;
    }
}
Example #11
void MPESynthesiserBase::renderNextBlock (AudioBuffer<floatType>& outputAudio,
                                          const MidiBuffer& inputMidi,
                                          int startSample,
                                          int numSamples)
{
    // you must set the sample rate before using this!
    jassert (sampleRate != 0);

    MidiBuffer::Iterator midiIterator (inputMidi);
    midiIterator.setNextSamplePosition (startSample);

    bool firstEvent = true;
    int midiEventPos;
    MidiMessage m;

    const ScopedLock sl (noteStateLock);

    while (numSamples > 0)
    {
        if (! midiIterator.getNextEvent (m, midiEventPos))
        {
            renderNextSubBlock (outputAudio, startSample, numSamples);
            return;
        }

        const int samplesToNextMidiMessage = midiEventPos - startSample;

        if (samplesToNextMidiMessage >= numSamples)
        {
            renderNextSubBlock (outputAudio, startSample, numSamples);
            handleMidiEvent (m);
            break;
        }

        if (samplesToNextMidiMessage < ((firstEvent && ! subBlockSubdivisionIsStrict) ? 1 : minimumSubBlockSize))
        {
            handleMidiEvent (m);
            continue;
        }

        firstEvent = false;

        renderNextSubBlock (outputAudio, startSample, samplesToNextMidiMessage);
        handleMidiEvent (m);
        startSample += samplesToNextMidiMessage;
        numSamples  -= samplesToNextMidiMessage;
    }

    while (midiIterator.getNextEvent (m, midiEventPos))
        handleMidiEvent (m);
}
Example #12
void MIDIDelay::processMIDIBuffer (MidiBuffer& inputMIDIBuffer)
{
    MidiBuffer::Iterator inputMIDIBufferIterator (inputMIDIBuffer);

    MidiMessage currentMidiMessage;
    int midiMessageSamplePosition = 0;

    if (! inputMIDIBuffer.isEmpty())
    {
        inputMIDIBufferIterator.getNextEvent (currentMidiMessage, midiMessageSamplePosition);
        bool midiBufferIsNotEmpty = true;

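        // walk the block one sample at a time, re-stamping each message with its delayed position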
        for (int sampleIndex = 0; sampleIndex < mBlockSize; ++sampleIndex)
        {
            // Go through every MIDI message this sample.
            while (sampleIndex == midiMessageSamplePosition
                && midiBufferIsNotEmpty)
            {
                double delayedSamplePosition = midiMessageSamplePosition + mDelayTime[sampleIndex];
                MidiMessage delayedMIDIMessage (currentMidiMessage, delayedSamplePosition);

                mDelayedMIDIBuffer.push_back (delayedMIDIMessage);

                midiBufferIsNotEmpty = inputMIDIBufferIterator.getNextEvent (currentMidiMessage, midiMessageSamplePosition);
            }
        }
    }

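    // emit the delayed messages that fall inside this block; later ones are kept, with
    // their timestamps pulled back by one block so they line up with the next call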
    if (! mDelayedMIDIBuffer.empty())
    {
        for (int index = 0; index < mDelayedMIDIBuffer.size(); ++index)
        {
            if (mDelayedMIDIBuffer[index].getTimeStamp() < mBlockSize)
            {
                mReplacementBuffer.addEvent (mDelayedMIDIBuffer[index],
                                             int (mDelayedMIDIBuffer[index].getTimeStamp()));

                mDelayedMIDIBuffer.erase (mDelayedMIDIBuffer.begin() + index);
                --index;
            }
            else if (mDelayedMIDIBuffer[index].getTimeStamp() >= mBlockSize)
            {
                double newTimeStamp = mDelayedMIDIBuffer[index].getTimeStamp() - mBlockSize;
                mDelayedMIDIBuffer[index].setTimeStamp (newTimeStamp);
            }
        }
    }

    inputMIDIBuffer.swapWith (mReplacementBuffer);
    mReplacementBuffer.clear();
}
void MidiMessageCollector::removeNextBlockOfMessages (MidiBuffer& destBuffer,
                                                      const int numSamples)
{
   #if JUCE_DEBUG
    jassert (hasCalledReset); // you need to call reset() to set the correct sample rate before using this object
   #endif

    jassert (numSamples > 0);

    auto timeNow = Time::getMillisecondCounterHiRes();
    auto msElapsed = timeNow - lastCallbackTime;

    const ScopedLock sl (midiCallbackLock);
    lastCallbackTime = timeNow;

    if (! incomingMessages.isEmpty())
    {
        int numSourceSamples = jmax (1, roundToInt (msElapsed * 0.001 * sampleRate));
        int startSample = 0;
        int scale = 1 << 16;

        const uint8* midiData;
        int numBytes, samplePosition;

        MidiBuffer::Iterator iter (incomingMessages);

        if (numSourceSamples > numSamples)
        {
            // if our list of events is longer than the buffer we're being
            // asked for, scale them down to squeeze them all in..
            const int maxBlockLengthToUse = numSamples << 5;

            if (numSourceSamples > maxBlockLengthToUse)
            {
                startSample = numSourceSamples - maxBlockLengthToUse;
                numSourceSamples = maxBlockLengthToUse;
                iter.setNextSamplePosition (startSample);
            }

            scale = (numSamples << 10) / numSourceSamples;

            while (iter.getNextEvent (midiData, numBytes, samplePosition))
            {
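                // remap the source position into the destination block (10-bit fixed point)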
                samplePosition = ((samplePosition - startSample) * scale) >> 10;

                destBuffer.addEvent (midiData, numBytes,
                                     jlimit (0, numSamples - 1, samplePosition));
            }
        }
        else
        {
            // NOTE: the original excerpt is truncated here; the lines below are a
            // minimal reconstruction that simply copies the remaining events across.
            while (iter.getNextEvent (midiData, numBytes, samplePosition))
                destBuffer.addEvent (midiData, numBytes,
                                     jlimit (0, numSamples - 1, samplePosition));
        }

        incomingMessages.clear();
    }
}
Example #14
void Synthesiser::renderNextBlock (AudioSampleBuffer& outputBuffer, const MidiBuffer& midiData,
                                   int startSample, int numSamples)
{
    // must set the sample rate before using this!
    jassert (sampleRate != 0);

    MidiBuffer::Iterator midiIterator (midiData);
    midiIterator.setNextSamplePosition (startSample);

    int midiEventPos;
    MidiMessage m;

    const ScopedLock sl (lock);

    while (numSamples > 0)
    {
        if (! midiIterator.getNextEvent (m, midiEventPos))
        {
            renderVoices (outputBuffer, startSample, numSamples);
            return;
        }

        const int samplesToNextMidiMessage = midiEventPos - startSample;

        if (samplesToNextMidiMessage >= numSamples)
        {
            renderVoices (outputBuffer, startSample, numSamples);
            handleMidiEvent (m);
            break;
        }

        if (samplesToNextMidiMessage < minimumSubBlockSize)
        {
            handleMidiEvent (m);
            continue;
        }

        renderVoices (outputBuffer, startSample, samplesToNextMidiMessage);
        handleMidiEvent (m);
        startSample += samplesToNextMidiMessage;
        numSamples  -= samplesToNextMidiMessage;
    }

    while (midiIterator.getNextEvent (m, midiEventPos))
        handleMidiEvent (m);
}
Example #15
void MidiOutput::sendBlockOfMessages (const MidiBuffer& buffer,
                                      const double millisecondCounterToStartAt,
                                      double samplesPerSecondForBuffer)
{
    // You've got to call startBackgroundThread() for this to actually work..
    jassert (isThreadRunning());

    // this needs to be a value in the future - RTFM for this method!
    jassert (millisecondCounterToStartAt > 0);

    const double timeScaleFactor = 1000.0 / samplesPerSecondForBuffer;

    MidiBuffer::Iterator i (buffer);

    const uint8* data;
    int len, time;

    while (i.getNextEvent (data, len, time))
    {
        const double eventTime = millisecondCounterToStartAt + timeScaleFactor * time;

        PendingMessage* const m = new PendingMessage (data, len, eventTime);

        const ScopedLock sl (lock);

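        // insert into the pending-message list, which is kept sorted by event time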
        if (firstMessage == nullptr || firstMessage->message.getTimeStamp() > eventTime)
        {
            m->next = firstMessage;
            firstMessage = m;
        }
        else
        {
            PendingMessage* mm = firstMessage;

            while (mm->next != nullptr && mm->next->message.getTimeStamp() <= eventTime)
                mm = mm->next;

            m->next = mm->next;
            mm->next = m;
        }
    }

    notify();
}
Example #16
    //==============================================================================
    void extractRawBinaryData (const MidiBuffer& midiBuffer, uint8* bufferToCopyTo, std::size_t maxBytes)
    {
        std::size_t pos = 0;
        MidiBuffer::Iterator iter (midiBuffer);
        MidiMessage midiMessage;
        int samplePosition; // Note: not actually used, so no need to initialise.

        while (iter.getNextEvent (midiMessage, samplePosition))
        {
            const uint8* data = midiMessage.getRawData();
            std::size_t dataSize = (std::size_t) midiMessage.getRawDataSize();

            if (pos + dataSize > maxBytes)
                return;

            std::memcpy (bufferToCopyTo + pos, data, dataSize);
            pos += dataSize;
        }
    }
Example #17
void LyrebirdAudioProcessor::processBlock (AudioSampleBuffer& buffer, MidiBuffer& midiMessages)
{
    
    // Do Midi things
    buffer.clear();
    
    int time;
    MidiMessage m;
    
    for (MidiBuffer::Iterator i (midiMessages); i.getNextEvent (m, time);)
    {
        if (m.isNoteOn())
        {
            sawGenerator->setWavelength(currentSampleRate, MidiMessage::getMidiNoteInHertz(m.getNoteNumber()));
        }
        else if (m.isNoteOff())
        {
            sawGenerator->setWavelength(currentSampleRate, 0);
        }
    }
    
    
    // In case we have more outputs than inputs, this code clears any output
    // channels that didn't contain input data, (because these aren't
    // guaranteed to be empty - they may contain garbage).
    // I've added this to avoid people getting screaming feedback
    // when they first compile the plugin, but obviously you don't need to keep
    // this code if your algorithm already fills all the output channels.
    for (int i = getNumInputChannels(); i < getNumOutputChannels(); ++i)
        buffer.clear (i, 0, buffer.getNumSamples());


    float* leftData = buffer.getWritePointer (0);
    float* rightData = buffer.getWritePointer(1);
    for (int sample = 0; sample < buffer.getNumSamples(); sample++)
    {
        leftData[sample] = sawGenerator->getCurrentAmplitude();
        rightData[sample] = sawGenerator->getCurrentAmplitude();
        sawGenerator->incrementSaw();
    }
}
Example #18
void EventNode::process(AudioSampleBuffer &buffer, 
                            MidiBuffer &midiMessages,
                            int& nSamples)
{
	accumulator++;

	if (!isSource) {
		
		if (midiMessages.getNumEvents() > 0) {
			
			std::cout << "Events received by node " << getNodeId() << std::endl;

			 MidiBuffer::Iterator i (midiMessages);
			 MidiMessage message(0xf4);

			 int samplePosition = 0;
			 i.setNextSamplePosition(samplePosition);

			 while (i.getNextEvent (message, samplePosition)) {
				
					//message.getChannel();

					//MidiMessage msgCopy = MidiMessage(message);
					int numbytes = message.getRawDataSize();
					const uint8* dataptr = message.getRawData();

					

					std::cout << " Bytes received: " << numbytes << std::endl;
					std::cout << " Message timestamp = " << message.getTimeStamp() << std::endl;

					//std::cout << sizeof(int) << " " << sizeof(uint16) << std::endl;
 
 					std::cout << "   ";
					for (int n = 0; n < numbytes; n++) {
						std::cout << String(*dataptr++) << " ";
					}
					
					std::cout << std::endl << std::endl;
				 	//std::cout << "  Event on channel " << message.getRawData() << std::endl; //<< message.getRawDataSize() << std::endl;

			}


			// accumulator = 0;
		}//MidiBuffer::Iterator = midiMessages.

		//midiMessages.clear();

	} else {

		if (accumulator > 20) {

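			// build three dummy 95-byte events and add them at sample positions 5, 10 and 15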
			uint8 data[95];

			for (int n = 0; n < sizeof(data); n++) {
				data[n] = 1;
			}

			//MidiMessage event = MidiMessage::noteOn(2,1,10.0f);
			MidiMessage event = MidiMessage(data, 	// spike data (float)
											sizeof(data), 	// number of bytes to use
											1000.0 	// timestamp (64-bit)
											);
			
			//event.setChannel(1);

			midiMessages.addEvent(data, sizeof(data), 5);
			//midiMessages.addEvent(event, 1);

			for (int n = 0; n < sizeof(data); n++) {
				data[n] = 2;
			}

			midiMessages.addEvent(data, sizeof(data), 10);

			for (int n = 0; n < sizeof(data); n++) {
				data[n] = 3;
			}

			midiMessages.addEvent(data, sizeof(data), 15);

			//midiMessages.addEvent(event, 5);

			//std::cout << "Midi buffer contains " << midiMessages.getNumEvents() << " events." << std::endl;

			accumulator = 0;
		}
		

	}
}
Example #19
void SpikeDisplayCanvas::processSpikeEvents()
{


	if (spikeBuffer->getNumEvents() > 0) 
	{
		
		//int m = spikeBuffer->getNumEvents();

		//std::cout << "Received " << m << " events." << std::endl;
			
		//std::cout << m << " events received by node " << getNodeId() << std::endl;
		MidiBuffer::Iterator i (*spikeBuffer);
		MidiMessage message(0xf4);

		int samplePosition = 0;

		i.setNextSamplePosition(samplePosition);
		
		//int eventCount = 0;
		
		while (i.getNextEvent (message, samplePosition)) {
			//eventCount++;
			 uint8_t* dataptr = message.getRawData();
			 int bufferSize = message.getRawDataSize();
			// int nSamples = (bufferSize-4)/2;

			SpikeObject newSpike;
			SpikeObject simSpike;

			unpackSpike(&newSpike, dataptr, bufferSize);

			//

			int chan = newSpike.source;

			generateSimulatedSpike(&simSpike, 0, 0);


			for (int i = 0; i < newSpike.nChannels * newSpike.nSamples; i++)
			{
                    simSpike.data[i] = newSpike.data[i%80] + 5000;// * 3 - 10000;
			}

			simSpike.nSamples = 40;

			

			// std::cout << "Received spike on electrode " << chan << std::endl;

			// std::cout << "Spike has " << newSpike.nChannels << " channels and " <<
			//              newSpike.nSamples << " samples." << std::endl;

			// std::cout << "Data: ";

			// for (int n = 0; n < newSpike.nSamples; n++)
			// {
			// 	std::cout << newSpike.data[n] << " ";
			// }

			//	std::cout << std::endl;

			plots[chan]->processSpikeObject(simSpike);

		}

	}

	spikeBuffer->clear();

}
//==============================================================================
void MidiMonitorEditor::timerCallback ()
{
    MidiBuffer tmpBuffer;
    int hours, minutes, seconds, frames;
    MidiMessage::SmpteTimecodeType timeCode;
    
    MidiMessageCollector* collector = owner->getMessageCollector ();
    collector->removeNextBlockOfMessages (tmpBuffer, 1024);
    
    if (! tmpBuffer.isEmpty())
	{
        String midiLine;

        int samplePos = 0;
        MidiMessage msg (0xf4, 0.0);
        MidiBuffer::Iterator eventIterator (tmpBuffer);

        while (eventIterator.getNextEvent (msg, samplePos))
        {
           midiLine.printf (T("[CH: %d] "), msg.getChannel());

           if (msg.isNoteOnOrOff ())
           {
                midiLine += MidiMessage::getMidiNoteName (msg.getNoteNumber(),
                                                          true, true, 0);
                midiLine += " ";
                midiLine += String ((int) msg.getVelocity ());

                if (msg.isNoteOn())
                {
                    midiLine += " ON";
                }
                else
                {
                    midiLine += " OFF";
                }
           }
           else if (msg.isAllNotesOff())
           {
                midiLine += "ALL NOTES OFF";
           }
           else if (msg.isAllSoundOff())
           {
                midiLine += "ALL SOUND OFF";
           }
           else if (msg.isPitchWheel())
           {
                midiLine += "PITCHWEEL: ";
                midiLine += String (msg.getPitchWheelValue());
           }
           else if (msg.isAftertouch())
           {
                midiLine += "AFTERTOUCH: ";
                midiLine += String (msg.getAfterTouchValue());
           }
           else if (msg.isChannelPressure())
           {
                midiLine += "CHANNELPRESSURE: ";
                midiLine += String (msg.getChannelPressureValue());
           }
           else if (msg.isSysEx())
           {
                midiLine += "SYSEX: ";
                midiLine += String (msg.getSysExDataSize());
                midiLine += " bytes";
           }
           else if (msg.isProgramChange())
           {
                midiLine += "PROGRAM CHANGE: ";
                midiLine += String (msg.getProgramChangeNumber());
                midiLine += " (";
                midiLine += MidiMessage::getGMInstrumentName (msg.getProgramChangeNumber());
                midiLine += ")";
           }
           else if (msg.isController())
           {
                midiLine += "CC: #";
                midiLine += String (msg.getControllerNumber());
                midiLine += " (";
                midiLine += MidiMessage::getControllerName (msg.getControllerNumber());
                midiLine += ") = ";
                midiLine += String (msg.getControllerValue());
           }
            else if (msg.isTimeSignatureMetaEvent ())
            {
                int newNumerator, newDenominator;
                msg.getTimeSignatureInfo (newNumerator, newDenominator);

                midiLine += "TIME SIGNATURE: ";
                midiLine += String (newNumerator);
                midiLine += " / ";
                midiLine += String (newDenominator);
            }
            else if (msg.isTempoMetaEvent ())
            {
                midiLine += "TEMPO: ";
                midiLine += String (msg.getTempoSecondsPerQuarterNote ());
                //midiLine += " ";
                //midiLine += String (msg.getTempoMetaEventTickLength (ticksPerQuarterNote));
            }
            else if (msg.isMidiMachineControlMessage())
            {
                midiLine += "MIDI CONTROL: ";
                
                switch (msg.getMidiMachineControlCommand())
                {
                    case MidiMessage::mmc_stop:             midiLine += "stop"; break;
                    case MidiMessage::mmc_play:             midiLine += "play"; break;
                    case MidiMessage::mmc_deferredplay:     midiLine += "deferredplay"; break;
                    case MidiMessage::mmc_fastforward:      midiLine += "fastforward"; break;
                    case MidiMessage::mmc_rewind:           midiLine += "rewind"; break;
                    case MidiMessage::mmc_recordStart:      midiLine += "recordStart"; break;
                    case MidiMessage::mmc_recordStop:       midiLine += "recordStop"; break;
                    case MidiMessage::mmc_pause:            midiLine += "pause"; break;
                }
            }
            else if (msg.isMidiStart ())
            {
                midiLine += "MIDI START: ";
            }
            else if (msg.isMidiContinue ())
            {
                midiLine += "MIDI CONTINUE: ";
            }
            else if (msg.isMidiStop ())
            {
                midiLine += "MIDI STOP: ";
            }
            else if (msg.isSongPositionPointer ())
            {
                midiLine += "SONG POSITION: ";
                midiLine += String (msg.getSongPositionPointerMidiBeat ());
            }
            else if (msg.isQuarterFrame ())
            {
                midiLine += "QUARTER FRAME: ";
                midiLine += String (msg.getQuarterFrameSequenceNumber ());
                midiLine += " ";
                midiLine += String (msg.getQuarterFrameValue ());
            }
            else if (msg.isFullFrame ())
            {
                midiLine += "FULL FRAME: ";

                msg.getFullFrameParameters (hours, minutes, seconds, frames, timeCode);

                midiLine += String (hours);
                midiLine += ":";
                midiLine += String (minutes);
                midiLine += ":";
                midiLine += String (seconds);
                midiLine += ":";
                midiLine += String (frames);

                midiLine += " timecode: ";
                switch (timeCode) {
                    case MidiMessage::fps24:      midiLine += "fps24"; break;
                    case MidiMessage::fps25:      midiLine += "fps25"; break;
                    case MidiMessage::fps30drop:  midiLine += "fps30drop"; break;
                    case MidiMessage::fps30:      midiLine += "fps30"; break;
                }
            }
            else if (msg.isMidiMachineControlGoto (hours, minutes, seconds, frames))
            {
                midiLine += "MIDI CONTROL GOTO: ";
                midiLine += String (hours);
                midiLine += ":";
                midiLine += String (minutes);
                midiLine += ":";
                midiLine += String (seconds);
                midiLine += ":";
                midiLine += String (frames);
            }

            midiOutputEditor->insertTextAtCursor (midiLine + T("\n"));
        }

	}

}
//==============================================================================
void MidiTransform::processEvents (MidiBuffer& midiMessages, const int blockSize)
{
    int timeStamp;
    MidiMessage message (0xf4, 0.0);
    MidiBuffer::Iterator it (midiMessages);

    MidiBuffer midiOutput;

    switch (command)
    {
    case MidiTransform::KeepEvents:
            break;
    case MidiTransform::DiscardEvents:
        {
            midiMessages.clear ();
            break;
        }
    case MidiTransform::RemapChannel:
        {
            while (it.getNextEvent (message, timeStamp))
            {
                message.setChannel (channelNumber);
                midiOutput.addEvent (message, timeStamp);
            }
            midiMessages = midiOutput;
            break;
        }
    case MidiTransform::ScaleNotes:
        {
            while (it.getNextEvent (message, timeStamp))
            {
                if (message.isNoteOnOrOff ())
                {
                    message.setNoteNumber (roundFloatToInt (message.getNoteNumber () * noteScale));
                    midiOutput.addEvent (message, timeStamp);
                }
            }
            midiMessages = midiOutput;
            break;
        }
    case MidiTransform::InvertNotes:
        {
            while (it.getNextEvent (message, timeStamp))
            {
                if (message.isNoteOnOrOff ())
                {
                    message.setNoteNumber (127 - message.getNoteNumber ());
                    midiOutput.addEvent (message, timeStamp);
                }
            }
            midiMessages = midiOutput;
            break;
        }
    case MidiTransform::TransposeNotes:
        {
            while (it.getNextEvent (message, timeStamp))
            {
                if (message.isNoteOnOrOff ())
                {
                    message.setNoteNumber (jmax (0, jmin (127, message.getNoteNumber () - noteTranspose)));
                    midiOutput.addEvent (message, timeStamp);
                }
            }
            midiMessages = midiOutput;
            break;
        }
    case MidiTransform::ScaleVelocity:
        {
            while (it.getNextEvent (message, timeStamp))
            {
                if (message.isNoteOn ())
                {
                    message.setVelocity ((message.getVelocity () / 127.0f) * velocityScale);
                    midiOutput.addEvent (message, timeStamp);
                }
            }
            midiMessages = midiOutput;
            break;
        }
    case MidiTransform::InvertVelocity:
        {
            while (it.getNextEvent (message, timeStamp))
            {
                if (message.isNoteOn ())
                {
                    message.setVelocity ((127 - message.getVelocity ()) / 127.0f);
                    midiOutput.addEvent (message, timeStamp);
                }
            }
            midiMessages = midiOutput;
            break;
        }
    case MidiTransform::TransposeVelocity:
        {
            while (it.getNextEvent (message, timeStamp))
            {
                if (message.isNoteOn ())
                {
                    message.setVelocity (jmax (0, jmin (127, message.getVelocity () - velocityTranspose)) / 127.0f);
                    midiOutput.addEvent (message, timeStamp);
                }
            }
            midiMessages = midiOutput;
            break;
        }
    case MidiTransform::TriggerCC:
        {
            break;
        }
    case MidiTransform::TriggerNote:
        {
            break;
        }
    }
}
Example #22
void SpikeViewer::renderOpenGL()
{
		
	 if (eventBuffer->getNumEvents() > 0) {

	   	glRasterPos2f(0.1,0.1);

	   	//const char* str = "i";
	  // 	void* font = GLUT_BITMAP_8_BY_13;

	  // 	glutBitmapCharacter(font,54);
	   //	drawBorder();

		//std::cout << "Events received by Spike Viewer." << std::endl;

		MidiBuffer::Iterator i (*eventBuffer);
		MidiMessage message(0xf4);

		int samplePosition = 0;
		i.setNextSamplePosition(samplePosition);

		//Array<int> peaks;


		clearWaveforms();

		while (i.getNextEvent (message, samplePosition)) {

			int numbytes = message.getRawDataSize();
			int numSamples = (numbytes-2)/2;
			uint8* dataptr = message.getRawData();

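			// the first two bytes of the event encode the channel number (big-endian)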
			int chan = (*dataptr<<8) + *(dataptr+1);
			int electrode = config->getSource(0)->getElectrodeNumberForChannel(chan);

			//std::cout << chan << "::" << electrode << std::endl;

 			dataptr += 2;

			//glViewport(0,0,getWidth()/2,getHeight());

			if (electrode == 0)
			{
			//for (int n = 0; n < 4; n++) {
				setViewportForWaveN(chan);
				float peak = drawWaveform(dataptr, numSamples);

				peaks.set(chan,peak*1.25);
				//peaks.set(chan,peak);
				
			}

			if (peaks.size() == 4)
			{
				drawProjections();
				peaks.clear();
			}

			//std::cout << " Bytes received: " << numbytes << std::endl;
			//std::cout << " Message timestamp = " << message.getTimeStamp() << std::endl;
			//std::cout << " Message channel: " << chan << std::endl;

 			//std::cout << "   ";
			
 			//AudioDataConverters::convertInt16BEToFloat ( dataptr, // source
    		//			spikeData, // dest
    		//			numSamples, // numSamples
    		//			2 ); // destBytesPerSample = 2

			//for (int n = 0; n < numSamples; n++) {
			//	std::cout << String(spikeData[n]) << " ";
			//}
					
			//std::cout << std::endl << std::endl;
		}

		// for (int ch = 0; ch < 4; ch++)
		// {
			
		// }

		//eventBuffer->clear();

	}

	//glOrtho(0, 0.5, 0.5, 0, 0, 1);
	glFlush();

}
Example #23
void MiditoOscAudioProcessor::processBlock (AudioSampleBuffer& buffer, MidiBuffer& midiMessages)
{
    static float cv[8], shiftcv[8];
    static bool _calibMode;
    
    MidiBuffer processedMidi;
    MidiMessage m;
    int time;
    
    char oscBuffer[IP_MTU_SIZE];
    osc::OutboundPacketStream p(oscBuffer, IP_MTU_SIZE);
    
    if (calibMode) // Calibration Mode A440Hz(MIDI number 69)
    {
        p << osc::BeginBundleImmediate
        << osc::BeginMessage( "/fader1" )
        << calibMap[69] << osc::EndMessage
        << osc::BeginMessage( "/fader2" )
        << calibMap[69] << osc::EndMessage
        << osc::BeginMessage( "/fader3" )
        << calibMap[69] << osc::EndMessage
        << osc::BeginMessage( "/fader4" )
        << calibMap[69] << osc::EndMessage
        << osc::BeginMessage( "/fader5" )
        << calibMap[69] << osc::EndMessage
        << osc::BeginMessage( "/fader6" )
        << calibMap[69] << osc::EndMessage
        << osc::BeginMessage( "/fader7" )
        << calibMap[69] << osc::EndMessage
        << osc::BeginMessage( "/fader8" )
        << calibMap[69] << osc::EndMessage
        << osc::BeginMessage( "/gate1" )
        << 1 << osc::EndMessage
        << osc::BeginMessage( "/gate2" )
        << 1 << osc::EndMessage
        << osc::EndBundle;
        
        sendOSCData(p);
        
        _calibMode = true;
        
        return;
        
    } else {
        
        if (_calibMode)
        {
            p << osc::BeginBundleImmediate
            << osc::BeginMessage( "/gate1" )
            << 0 << osc::EndMessage
            << osc::BeginMessage( "/gate2" )
            << 0 << osc::EndMessage
            << osc::EndBundle;
            
            sendOSCData(p);
            
            _calibMode = false;
            
        }
    }
    
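    // translate each incoming MIDI message into OSC fader/gate messages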
    for (MidiBuffer::Iterator i (midiMessages); i.getNextEvent (m, time);)
    {
        p.Clear();
        usleep(30);
        
        if (m.isNoteOn())
        {
            if (monoMode) // mono Mode
            {
                uint32_t midiCh = m.getChannel();
                
                if (midiCh == 0 || midiCh > 7)
                {
                    midiCh = 1;
                }
                
                cv[midiCh - 1] = calibMap[m.getNoteNumber()];
                
                switch (midiCh)
                {
                    case 1:
                        p << osc::BeginMessage("/fader1")
                        << cv[0] << osc::EndMessage;
                        break;
                        
                    case 2:
                        p << osc::BeginMessage("/fader2")
                        << cv[1] << osc::EndMessage;
                        break;
                        
                    case 3:
                        p << osc::BeginMessage("/fader3")
                        << cv[2] << osc::EndMessage;
                        break;
                        
                    case 4:
                        p << osc::BeginMessage("/fader4")
                        << cv[3] << osc::EndMessage;
                        break;
                        
                    case 5:
                        p << osc::BeginMessage("/fader5")
                        << cv[4] << osc::EndMessage;
                        break;
                        
                    case 6:
                        p << osc::BeginMessage("/fader6")
                        << cv[5] << osc::EndMessage;
                        break;
                        
                    case 7:
                        p << osc::BeginMessage("/fader7")
                        << cv[6] << osc::EndMessage;
                        break;
                        
                    case 8:
                        p << osc::BeginMessage("/fader8")
                        << cv[7] << osc::EndMessage;
                        break;
                        
                    default:
                        break;
                }
                
                sendOSCData(p);
                
            } else if (shiftMode) { // shift Mode
                
                cv[0] = calibMap[m.getNoteNumber()];
                
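                // shift the previously held CVs down one fader slot; the new note goes to fader1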
                for (int i = 7; i > 0; i--)
                {
                    shiftcv[i] = shiftcv[i-1];
                }
                
                p << osc::BeginBundleImmediate
                << osc::BeginMessage( "/fader1" )
                << cv[0] << osc::EndMessage
                << osc::BeginMessage( "/fader2" )
                << shiftcv[1] << osc::EndMessage
                << osc::BeginMessage( "/fader3" )
                << shiftcv[2] << osc::EndMessage
                << osc::BeginMessage( "/fader4" )
                << shiftcv[3] << osc::EndMessage
                << osc::BeginMessage( "/fader5" )
                << shiftcv[4] << osc::EndMessage
                << osc::BeginMessage( "/fader6" )
                << shiftcv[5] << osc::EndMessage
                << osc::BeginMessage( "/fader7" )
                << shiftcv[6] << osc::EndMessage
                << osc::BeginMessage( "/fader8" )
                << shiftcv[7] << osc::EndMessage
                << osc::BeginMessage( "/gate1" )
                << 1 << osc::EndMessage
                << osc::BeginMessage( "/gate2" )
                << 1 << osc::EndMessage
                << osc::EndBundle;
                
                sendOSCData(p);
                
                shiftcv[0] = cv[0];
                
            } else { // poly Mode
                
                cv[ch] = calibMap[m.getNoteNumber()];
                
                if (currentMaxPoly == 1)
                {
                    cv[1] = cv[0];
                }
                
                p << osc::BeginBundleImmediate
                << osc::BeginMessage( "/fader1" )
                << cv[0] << osc::EndMessage
                << osc::BeginMessage( "/fader2" )
                << cv[1] << osc::EndMessage
                << osc::BeginMessage( "/fader3" )
                << cv[2] << osc::EndMessage
                << osc::BeginMessage( "/fader4" )
                << cv[3] << osc::EndMessage
                << osc::BeginMessage( "/fader5" )
                << cv[4] << osc::EndMessage
                << osc::BeginMessage( "/fader6" )
                << cv[5] << osc::EndMessage
                << osc::BeginMessage( "/fader7" )
                << m.getFloatVelocity() << osc::EndMessage
                << osc::BeginMessage( "/gate1" )
                << 1 << osc::EndMessage
                << osc::BeginMessage( "/gate2" )
                << 1 << osc::EndMessage
                << osc::EndBundle;
                
                sendOSCData(p);
                
                ch++;
                gateCount++;
                
                if (ch >= currentMaxPoly)
                {
                    ch = 0;
                }
                
            }
            
        } else if (m.isNoteOff()) {
            
            if (monoMode)
            {
                switch (m.getChannel())
                {
                    case 1:
                        p << osc::BeginMessage( "/gate1" )
                        << 0 << osc::EndMessage;
                        break;
                        
                    case 2:
                        p << osc::BeginMessage( "/gate2" )
                        << 0 << osc::EndMessage;
                        break;
                        
                    case 3:
                        p << osc::BeginMessage( "/gate3" )
                        << 0 << osc::EndMessage;
                        break;
                        
                    case 4:
                        p << osc::BeginMessage( "/gate4" )
                        << 0 << osc::EndMessage;
                        break;
                        
                    default:
                        break;
                }
                
                sendOSCData(p);
                
            } else if (shiftMode) {
                
                p << osc::BeginBundleImmediate
                << osc::BeginMessage( "/gate1" )
                << 0 << osc::EndMessage
                << osc::BeginMessage( "/gate2" )
                << 0 << osc::EndMessage
                << osc::EndBundle;
                
                sendOSCData(p);
                
            } else {
                
                gateCount --;
                
                if (gateCount <= 0)
                {
                    p << osc::BeginBundleImmediate
                    << osc::BeginMessage( "/gate1" )
                    << 0 << osc::EndMessage
                    << osc::BeginMessage( "/gate2" )
                    << 0 << osc::EndMessage
                    << osc::EndBundle;
                    
                    sendOSCData(p);
                    
                    gateCount = 0;
                }
                
                ch--;
                
                if (ch == -1)
                {
                    ch = 0;
                }
                
            }
            
        } else if (m.isControllerOfType(1)) { // Modulation Wheel
            
            float modulation = m.getControllerValue();
            
            if (!monoMode && !shiftMode)
            {
                p << osc::BeginMessage("/fader8")
                << (modulation / 127) << osc::EndMessage;
                
                sendOSCData(p);
            }
            
        }
        
        processedMidi.addEvent (m, time);
    }
    
    midiMessages.swapWith (processedMidi);
    
    buffer.clear();
    
    for (int channel = 0; channel < getNumInputChannels(); ++channel)
    {
        float* channelData = 0;
    }
}
Example #24
void JenSx1000AudioProcessor::processBlock (AudioSampleBuffer& buffer, MidiBuffer& midiMessages)
{
    int time;
    MidiMessage m;
    
    for (MidiBuffer::Iterator i (midiMessages); i.getNextEvent (m, time);)
    {
        if (m.isNoteOn())
        {
            int lastMidiNote = m.getNoteNumber();
            DBG("MIDI note triggered : " << lastMidiNote << "\n");
            heldNotes.insert(lastMidiNote);
        }
        else if (m.isNoteOff())
        {
            int releasedNote = m.getNoteNumber();
            DBG("MIDI note released : " << releasedNote << "\n");
            heldNotes.erase(releasedNote);
        }
        else if (m.isAftertouch())
        {
        }
        else if (m.isPitchWheel())
        {
        }
    }
    
    int highestHeldNote;
    
    if (heldNotes.empty()){
        if (currentNote > 0){
            DBG("Note released");
            ampEnvelope.release();
            vcf.release();
            currentNote = -1000;
            nextNote = -2000;
        }
    } else {
        highestHeldNote = *heldNotes.rbegin();
        if (nextNote != highestHeldNote){
            nextNote = highestHeldNote;
            noClick.start();
        }
    }
    
    
    std::vector<float*> ChannelData;
    
    for (int i = 0; i < getNumOutputChannels(); i++){
        ChannelData.push_back(buffer.getWritePointer(i));
    }
    
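    // per-sample synthesis: the LFO drives vibrato, filter and PWM modulation, and the
    // oscillator + noise mix runs through the VCF, amp envelope and anti-click fade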
    for (int sample = 0; sample < buffer.getNumSamples(); ++sample){
        
        if (!heldNotes.empty() && currentNote != nextNote && noClick.fadingIn()){
            currentNote = nextNote;
            freqControl.setNote(currentNote);
            ampEnvelope.begin();
            vcf.begin();
        }
        
        float nextLFOSample = lfo.getNextSample();
        freqControl.setNextVibratoOscSample(nextLFOSample);
        vcf.setNextLFOSample(nextLFOSample);
        oscillator.setNextPWMSample(nextLFOSample);
        float nextNoClickSample = noClick.getNextSample();
        
        oscillator.updateFrequency(freqControl.getNextFrequency());
        
        float nextOscSample = oscillator.getNextSample();
        float nextNoiseSample = noise.getNextSample();
        float nextAmpSample = ampEnvelope.getNextSample();
        
        
        float nextSample = (vcf.processNextSample((nextOscSample * vcoLevel) + (nextNoiseSample * noiseLevel)) * nextAmpSample) * ampLevel *nextNoClickSample;
        
        for (float* channel : ChannelData){
            channel[sample] = nextSample;
        }
    }
}
Example #25
void MLPluginProcessor::convertMIDIToEvents (MidiBuffer& midiMessages, MLControlEventVector& events)
{
    int c = 0;
    int size = events.size();
    
	MidiBuffer::Iterator i (midiMessages);
    juce::MidiMessage message (0xf4, 0.0);
    MLControlEvent::EventType type = MLControlEvent::eNull;
    int chan = 0;
    int id = 0;
    int time = 0;
    float v1 = 0.f;
    float v2 = 0.f;
		
    while (i.getNextEvent(message, time)) // writes to time
	{
        chan = message.getChannel();
		if (message.isNoteOn())
		{
            type = MLControlEvent::eNoteOn;
			v1 = message.getNoteNumber();
			v2 = message.getVelocity() / 127.f;
            id = (int)v1;
		}
		else if(message.isNoteOff())
		{
            type = MLControlEvent::eNoteOff;
			v1 = message.getNoteNumber();
			v2 = message.getVelocity() / 127.f;
            id = (int)v1;
		}
		else if (message.isController())
		{
            type = MLControlEvent::eController;
			v1 = message.getControllerNumber();
			v2 = message.getControllerValue() / 127.f;
		}
		else if (message.isPitchWheel())
		{
            type = MLControlEvent::ePitchWheel;
			v1 = message.getPitchWheelValue();
		}
		else if (message.isAftertouch())
		{
            type = MLControlEvent::eNotePressure;
			v1 = message.getNoteNumber();
			v2 = message.getAfterTouchValue() / 127.f;
            id = (int)v1;
		}
		else if (message.isChannelPressure())
		{
            type = MLControlEvent::eChannelPressure;
			v1 = message.getChannelPressureValue() / 127.f;
		}
		else if (message.isSustainPedalOn())
		{			
            type = MLControlEvent::eSustainPedal;
			v1 = 1.f;
		}
		else if (message.isSustainPedalOff())
		{
            type = MLControlEvent::eSustainPedal;
			v1 = 0.f;
		}
		else if (message.isProgramChange())
		{
			int pgm = message.getProgramChangeNumber();
//debug() << "program change " << pgm << "\n";
			if(pgm == kMLPluginMIDIPrograms)	
			{
				// load most recent saved program
				returnToLatestStateLoaded();
			}
			else
			{		
				pgm = clamp(pgm, 0, kMLPluginMIDIPrograms - 1);			
				setStateFromMIDIProgram(pgm);
			}
            type = MLControlEvent::eProgramChange;
            id = chan;
            v1 = (float)pgm;
		}
        else if (!message.isMidiClock())
		// TEST
		{
			int msgSize = message.getRawDataSize();
			const uint8* msgData = message.getRawData();
			debug() << "@" << std::hex << (void*)this << ": " << msgSize << "bytes uncaught MIDI [" ;
			
			for(int b=0; b<message.getRawDataSize(); ++b)
			{
				debug() << std::hex << (unsigned int)(msgData[b]) << " ";
			}	
			debug() << std::dec << "]\n";
		}
        if(c < size - 1)
        {
            events[c++] = MLControlEvent(type, chan, id, time, v1, v2);
        }
	}
    
    // null-terminate new event list
    events[c] = kMLNullControlEvent;
}