示例#1
0
int GenericProcessor::checkForEvents(MidiBuffer& midiMessages)
{
	// Dispatch every event in the incoming buffer to handleEvent().
	// The first raw byte of each message is used as the event-type code.
	// Always returns -1 (no meaningful count is produced here).
	if (midiMessages.getNumEvents() > 0)
	{
		MidiBuffer::Iterator i (midiMessages);
		MidiMessage message(0xf4);

		int samplePosition = 0;
		i.setNextSamplePosition(samplePosition);

		while (i.getNextEvent (message, samplePosition)) {

			uint8* dataptr = message.getRawData();

			// First byte of the raw data encodes the event type.
			handleEvent(*dataptr, message, samplePosition);
		}
	}

	return -1;
}
void SoftSynthAudioProcessor::processBlock (AudioSampleBuffer& buffer, MidiBuffer& midiMessages)
{
    // Feed note on/off events to the synth, rebuild the MIDI stream,
    // then overwrite the audio buffer with the synth's mono output.
    MidiBuffer processedMidi;
    int time;
    MidiMessage m;

    for (MidiBuffer::Iterator i(midiMessages); i.getNextEvent(m, time);) {
        if (m.isNoteOn()) {
            // Re-create the message so only channel/note/velocity survive.
            m = MidiMessage::noteOn(m.getChannel(), m.getNoteNumber(), m.getVelocity());
            synth.keyPressed(m.getNoteNumber(), m.getVelocity());
        } else if (m.isNoteOff()) {
            m = MidiMessage::noteOff(m.getChannel(), m.getNoteNumber(), m.getVelocity());
            synth.keyReleased(m.getNoteNumber());
        }

        processedMidi.addEvent(m, time);
    }

    auto synthBuffer = synth.getNextBuffer(buffer.getNumSamples());

    // Copy the mono synth output to every available output channel.
    // BUG FIX: the original indexed channels 0 and 1 unconditionally,
    // which is undefined behaviour when the buffer has fewer channels.
    for (int channel = 0; channel < buffer.getNumChannels(); ++channel) {
        float* out = buffer.getWritePointer(channel);
        for (int i = 0; i < buffer.getNumSamples(); ++i) {
            out[i] = synthBuffer[i];
        }
    }

    midiMessages.swapWith(processedMidi);
}
//==============================================================================
void MidiManipulator::processEvents (MidiBuffer& midiMessages, const int blockSize)
{
    // Run the incoming events through the optional filter, then through
    // the optional transform, replacing the buffer's contents in place.
    MidiBuffer midiOutput;

    if (! midiMessages.isEmpty ())
    {
        if (filter)
        {
            // Keep only the events the filter accepts.
            MidiBuffer::Iterator it (midiMessages);
            MidiMessage message (0xf4, 0.0);
            int timeStamp;

            while (it.getNextEvent (message, timeStamp))
                if (filter->filterEvent (message))
                    midiOutput.addEvent (message, timeStamp);
        }
        else
        {
            // No filter installed: pass everything through untouched.
            midiOutput = midiMessages;
        }

        midiMessages.clear ();
    }

    if (transform)
        transform->processEvents (midiOutput, blockSize);

    midiMessages = midiOutput;
}
示例#4
0
// Read at most 'nbytes' bytes from our MidiBuffer and store in 'buf'. Returns the 
// actual number of bytes read. Incomplete messages (such as a note on status without 
// the data bytes) should not be returned.
int midiReadCallback(CSOUND *csound, void *userData, unsigned char *buf, int nbytes)
{
	t_csound *x = (t_csound *) csoundGetHostData(csound);
	MidiBuffer *mb = &x->cso->m_midiBuffer;
	Sequencer *seq = &x->cso->m_sequencer;
	int bytesLeft = nbytes, bytesRead = 0, msg_size = 0;

	while(bytesLeft)
	{
		// Add a complete midi message to Csound's midi buffer.
		msg_size = mb->DequeueCompleteMessage(&buf[bytesRead], bytesLeft);
		if(0 == msg_size) break;
		else
		{
			if(seq->Recording()) seq->AddMIDIEvent(&buf[bytesRead], msg_size, true);
			if(0xB0 == (buf[bytesRead] & 0xf0))
			{
				// Keep track of CC values in the sequencer.
				// BUG FIX: the channel was previously masked from a local
				// variable initialized to 0 ('b'), so every CC was recorded
				// on channel 0. Mask the actual status byte instead.
				byte chan = buf[bytesRead] & 0x0f;
				byte ctrl = buf[bytesRead + 1];
				byte val = buf[bytesRead + 2];
				seq->UpdateCtrlMatrix(chan, ctrl, val);
			}
			bytesRead += msg_size;
			bytesLeft -= msg_size;
		}
	}
	return bytesRead;
}
//------------------------------------------------------------------------------
void MidiInterceptor::processBlock(AudioSampleBuffer &buffer,
								   MidiBuffer &midiMessages)
{
	// Forward every incoming MIDI event to the midiManager, stamped with
	// its absolute time in seconds since playback started, then consume
	// the buffer (nothing is passed downstream).
	int samplePos;
	double seconds;
	MidiMessage tempMess(0xf0);
	MidiBuffer::Iterator it(midiMessages);
	const double sampleRate = getSampleRate();

	jassert(sampleRate > 0.0);

	if(midiManager)
	{
		// (Removed a dead "numMess = numMess" debug-breakpoint leftover.)
		while(it.getNextEvent(tempMess, samplePos))
		{
			seconds = (double)(samplesSinceStart+samplePos)/sampleRate;
			midiManager->midiCcReceived(tempMess, seconds);
		}
	}

	samplesSinceStart += buffer.getNumSamples();

	midiMessages.clear();
}
示例#6
0
int GenericProcessor::getNumSamples(MidiBuffer& events) {

	// Scan the event buffer for a BUFFER_SIZE event and return the sample
	// count stored in its timestamp; returns 0 if no such event exists.
	int numRead = 0;

	if (events.getNumEvents() > 0)
	{
		MidiBuffer::Iterator i (events);
		MidiMessage message(0xf4);

		// Overwritten by getNextEvent on each iteration.
		int samplePosition = 0;

		while (i.getNextEvent (message, samplePosition)) {

			uint8* dataptr = message.getRawData();

			// First raw byte identifies the event type.
			if (*dataptr == BUFFER_SIZE)
			{
				numRead = message.getTimeStamp();
			}
		}
	}

	return numRead;
}
//==============================================================================
void BeatboxVoxAudioProcessor::processBlock(AudioSampleBuffer& buffer, MidiBuffer& midiMessages)
{
	// Classifies the incoming (vocal) audio each block and replaces it with
	// synthesized drum / test-tone output triggered via internally
	// generated MIDI. The input signal itself is never passed through.
	// NOTE(review): the four locals below are computed but only
	// 'numSamples' is used in this function — confirm the rest are needed.
	const auto totalNumInputChannels = getTotalNumInputChannels();
	const auto totalNumOutputChannels = getTotalNumOutputChannels();
	const auto sampleRate = getSampleRate();
	const auto numSamples = buffer.getNumSamples();

	//Reset the noise synth if triggered
	midiMessages.addEvent(MidiMessage::noteOff(1, noiseNoteNumber), 0);

	// Feed channel 0 of the input into the onset detector / classifier.
	classifier.processAudioBuffer(buffer.getReadPointer(0), numSamples);

	
	//This is used for configuring the onset detector settings from the GUI
	if (classifier.noteOnsetDetected())
	{
		if (usingOSDTestSound.load())
		{
			triggerOSDTestSound(midiMessages);
		}
		else if (classifier.getNumBuffersDelayed() > 0)
		{
			triggerNoise(midiMessages);
		}
	}

	// Trigger the drum voice matching the classified sound, if any.
	const auto sound = classifier.classify();

	switch (sound)
	{
		case soundLabel::KickDrum:
			triggerKickDrum(midiMessages);
			break;
		case soundLabel::SnareDrum:
			triggerSnareDrum(midiMessages);
			break;
		case soundLabel::HiHat:
			triggerHiHat(midiMessages);
			break;
		default: break;
	}


	/** Now classification complete clear the input buffer/signal. 
	 *	We only want synth response output, no a blend of input vocal
	 *	signal + synth output.
	 **/
	buffer.clear();

	if (usingOSDTestSound.load())
		osdTestSynth.renderNextBlock(buffer, midiMessages, 0, buffer.getNumSamples());
	else
		drumSynth.renderNextBlock(buffer, midiMessages, 0, buffer.getNumSamples());

	//Not outputting midi so clear after rendering synths
	midiMessages.clear();
}
示例#8
0
void MidiSelectProcessor::processBlock (AudioSampleBuffer &buffer, MidiBuffer &midiMessages)
{
    // Audio passes through untouched; only the MIDI stream is rewritten.
    // Note-ons are remapped to the next selected note (or dropped), and
    // each note-off is rewritten to match what its note-on became.
    MidiBuffer inputMessages(midiMessages);
    midiMessages.clear();

    MidiBuffer::Iterator it(inputMessages);
    MidiMessage message(0xf0);
    int samplePosition;

    while (it.getNextEvent(message, samplePosition)) {
        if (message.isNoteOn()) {
            if (findNextNote()) {
                // Remember the mapping so the matching note-off can be
                // remapped identically later.
                transformations[message.getNoteNumber()] = currentNote;
                message.setNoteNumber(currentNote);
                midiMessages.addEvent(message, samplePosition);
            }
            else {
                // No target note available: drop this note-on and mark it
                // so the upcoming note-off is dropped as well.
                transformations[message.getNoteNumber()] = -1;
            }
        }
        else if (message.isNoteOff()) {
            auto found = transformations.find(message.getNoteNumber());

            if (found != transformations.end()) {
                const int mappedNote = found->second;
                transformations.erase(found);

                // -1 means the note-on was discarded; discard this too.
                if (mappedNote != -1) {
                    message.setNoteNumber(mappedNote);
                    midiMessages.addEvent(message, samplePosition);
                }
            }
            // Note-offs we have no recollection of are silently dropped.
        }
        else {
            // All other event types pass straight through.
            midiMessages.addEvent(message, samplePosition);
        }
    }
}
示例#9
0
MidiBuffer MPEMessages::setZoneLayout (const MPEZoneLayout& layout)
{
    // Start from a clean slate, then re-register every zone in the layout.
    MidiBuffer buffer;
    buffer.addEvents (clearAllZones(), 0, -1, 0);

    const int numZones = layout.getNumZones();

    for (int zoneIndex = 0; zoneIndex < numZones; ++zoneIndex)
        buffer.addEvents (addZone (*layout.getZoneByIndex (zoneIndex)), 0, -1, 0);

    return buffer;
}
示例#10
0
MidiBuffer MPEMessages::addZone (MPEZone zone)
{
    // Emit the RPN sequence that registers the zone's note channels,
    // followed by its per-note and master pitchbend-range messages.
    MidiBuffer buffer;

    buffer.addEvents (MidiRPNGenerator::generate (zone.getFirstNoteChannel(),
                                                  zoneLayoutMessagesRpnNumber,
                                                  zone.getNumNoteChannels(),
                                                  false, false),
                      0, -1, 0);

    buffer.addEvents (perNotePitchbendRange (zone), 0, -1, 0);
    buffer.addEvents (masterPitchbendRange (zone), 0, -1, 0);

    return buffer;
}
示例#11
0
void GenericSynth::processBlock (AudioSampleBuffer& buffer,
                                 MidiBuffer& midiMessages)
{
    // Start from silence; the synth renders its voices into the buffer.
    buffer.clear();

    const int numEvents = midiMessages.getNumEvents();

    if (numEvents)
    {
        DBG("events "<<numEvents);
    }

    synth.renderNextBlock (buffer, midiMessages, 0, buffer.getNumSamples());
}
示例#12
0
// Copies up to min(_size, targetBuffer.size()) MidiData elements from this
// buffer's jack buffer into the target's. Returns false if this buffer is
// not valid, true otherwise.
// NOTE(review): 'targetBuffer' is taken BY VALUE. The writes below go through
// the copy's _jackBuffer pointer, so this only works if the copy shares the
// same underlying jack buffer as the caller's object — confirm, otherwise
// the copied data is lost when the parameter is destroyed.
bool MidiBuffer::copyTo(MidiBuffer targetBuffer) const {
    if(!isValid()) {
        return false;
    }

    // Clamp to the smaller of the two buffers to avoid overrunning either.
    int size = _size < targetBuffer.size() ? _size : targetBuffer.size();
    for(int i = 0; i < size; i++) {
        ((MidiData*)targetBuffer._jackBuffer)[i] = ((MidiData*)_jackBuffer)[i];
    }

    return true;
}
示例#13
0
// Delays every incoming MIDI message by mDelayTime[sample] samples.
// Messages whose delayed position falls inside the current block are moved
// into mReplacementBuffer; the rest stay queued in mDelayedMIDIBuffer with
// their timestamps rebased for the next block. Finally the input buffer is
// swapped with the replacement buffer.
void MIDIDelay::processMIDIBuffer (MidiBuffer& inputMIDIBuffer)
{
    MidiBuffer::Iterator inputMIDIBufferIterator (inputMIDIBuffer);

    MidiMessage currentMidiMessage;
    int midiMessageSamplePosition = 0;

    if (! inputMIDIBuffer.isEmpty())
    {
        // Prime the loop with the first event (the buffer is non-empty).
        inputMIDIBufferIterator.getNextEvent (currentMidiMessage, midiMessageSamplePosition);
        bool midiBufferIsNotEmpty = true;

        for (int sampleIndex = 0; sampleIndex < mBlockSize; ++sampleIndex)
        {
            // Go through every MIDI message this sample.
            while (sampleIndex == midiMessageSamplePosition
                && midiBufferIsNotEmpty)
            {
                // New position = original position + per-sample delay.
                double delayedSamplePosition = midiMessageSamplePosition + mDelayTime[sampleIndex];
                MidiMessage delayedMIDIMessage (currentMidiMessage, delayedSamplePosition);

                mDelayedMIDIBuffer.push_back (delayedMIDIMessage);

                midiBufferIsNotEmpty = inputMIDIBufferIterator.getNextEvent (currentMidiMessage, midiMessageSamplePosition);
            }
        }
    }

    if (! mDelayedMIDIBuffer.empty())
    {
        for (int index = 0; index < mDelayedMIDIBuffer.size(); ++index)
        {
            if (mDelayedMIDIBuffer[index].getTimeStamp() < mBlockSize)
            {
                // Due this block: emit it and remove it from the queue.
                mReplacementBuffer.addEvent (mDelayedMIDIBuffer[index],
                                             int (mDelayedMIDIBuffer[index].getTimeStamp()));

                // erase() shifts the remaining elements down, so step the
                // index back to avoid skipping the next message.
                mDelayedMIDIBuffer.erase (mDelayedMIDIBuffer.begin() + index);
                --index;
            }
            else if (mDelayedMIDIBuffer[index].getTimeStamp() >= mBlockSize)
            {
                // Not due yet: rebase its timestamp for the next block.
                double newTimeStamp = mDelayedMIDIBuffer[index].getTimeStamp() - mBlockSize;
                mDelayedMIDIBuffer[index].setTimeStamp (newTimeStamp);
            }
        }
    }

    inputMIDIBuffer.swapWith (mReplacementBuffer);
    mReplacementBuffer.clear();
}
示例#14
0
File: EventNode.cpp  Project: AGenews/GUI
void EventNode::process(AudioSampleBuffer& buffer,
                        MidiBuffer& events,
                        int& nSamples)
{
    // Emit TTL events at the rate (Hz) given by the node's first parameter.
    events.clear();

    Parameter& rateParam = parameters.getReference(0);

    for (int sampleNum = 0; sampleNum < buffer.getNumSamples(); ++sampleNum)
    {
        accumulator += 1.0f;

        // Fire once enough samples have accumulated for one period.
        if (accumulator > getSampleRate() / (float) rateParam[0])
        {
            std::cout << "Adding message." << std::endl;

            addEvent(events,    // MidiBuffer
                     TTL,       // eventType
                     sampleNum, // sampleNum
                     1,         // eventID
                     1          // eventChannel
                    );

            accumulator = 0;
        }
    }
}
示例#15
0
void GenericProcessor::addEvent(MidiBuffer& eventBuffer,
							    uint8 type,
							    int sampleNum,
							    uint8 eventId,
							    uint8 eventChannel,
							    uint8 numBytes,
							    uint8* eventData)
{
	// Packs an event into the processor's wire format and appends it:
	// [type][nodeId][eventId][eventChannel][payload (numBytes)...]
	const int totalBytes = 4 + numBytes;
	uint8* data = new uint8[totalBytes];

	data[0] = type;         // event type
	data[1] = nodeId;       // processor ID automatically added
	data[2] = eventId;      // event ID
	data[3] = eventChannel; // event channel
	memcpy(&data[4], eventData, numBytes);

	// BUG FIX: the original passed sizeof(data) — the size of the POINTER
	// (4 or 8), not the packet length — truncating or padding every event.
	eventBuffer.addEvent(data,        // event data
	                     totalBytes,  // total bytes
	                     sampleNum);  // sample index

	// BUG FIX: memory from new[] must be released with delete[], not delete.
	delete[] data;
}
示例#16
0
void addCtrlrMidiMessageToBuffer (MidiBuffer &bufferToAddTo, CtrlrMidiMessage &m)
{
	// Append every raw message contained in the CtrlrMidiMessage to the
	// buffer, all at sample position 1.
	const int numMessages = m.getNumMessages();

	for (int messageIndex = 0; messageIndex < numMessages; ++messageIndex)
		bufferToAddTo.addEvent (m.getReference (messageIndex).m, 1);
}
// Feeds every event already in the buffer through the keyboard state, then
// (optionally) merges in events queued from other threads (eventsToAdd),
// rescaling their timestamps to fit this block before clearing the queue.
void MidiKeyboardState::processNextMidiBuffer (MidiBuffer& buffer,
                                               const int startSample,
                                               const int numSamples,
                                               const bool injectIndirectEvents)
{
    MidiBuffer::Iterator i (buffer);
    MidiMessage message;
    int time;

    // Hold the lock so concurrent noteOn()/noteOff() calls can't mutate
    // eventsToAdd while we read and clear it.
    const ScopedLock sl (lock);

    while (i.getNextEvent (message, time))
        processNextMidiEvent (message);

    if (injectIndirectEvents)
    {
        MidiBuffer::Iterator i2 (eventsToAdd);
        const int firstEventToAdd = eventsToAdd.getFirstEventTime();
        // Rescale the queued events' time range onto [0, numSamples).
        // NOTE(review): assumes getLastEventTime() >= firstEventToAdd so the
        // denominator stays positive — appears to hold for a MidiBuffer.
        const double scaleFactor = numSamples / (double) (eventsToAdd.getLastEventTime() + 1 - firstEventToAdd);

        while (i2.getNextEvent (message, time))
        {
            // Clamp the rescaled position into this block, offset by startSample.
            const int pos = jlimit (0, numSamples - 1, roundToInt ((time - firstEventToAdd) * scaleFactor));
            buffer.addEvent (message, startSample + pos);
        }
    }

    eventsToAdd.clear();
}
示例#18
0
File: EventNode.cpp  Project: Labmind/GUI
void EventNode::process(AudioSampleBuffer &buffer, 
                            MidiBuffer &midiMessages,
                            int& nSamples)
{
	// Emit TTL events at the rate (Hz) given by the node's first parameter.
	midiMessages.clear();

	Parameter& rateParam = parameters.getReference(0);

	for (int sampleNum = 0; sampleNum < buffer.getNumSamples(); ++sampleNum)
	{
		accumulator += 1.0f;

		// Fire once enough samples have accumulated for one period.
		if (accumulator > getSampleRate() / (float) rateParam[0])
		{
			std::cout << "Adding message." << std::endl;
			addEvent(midiMessages, TTL, sampleNum);
			accumulator = 0;
		}
	}
}
示例#19
0
void SamplerProcessor::processBlock (AudioSampleBuffer& buffer, MidiBuffer& midiMessages)
{
	// Keep private copies of the incoming MIDI for the synth and for the
	// transfer-function selector, notify the optional MIDI callback, then
	// render the synth and measure the positive peak of channel 0.
	midiBuffer.clear();
	tfMidiBuffer.clear();
	midiBuffer = midiMessages;
	tfMidiBuffer = midiMessages;

	if (midiCallback != nullptr){
		if (!midiMessages.isEmpty()){
			midiCallback->handleMidiBuffer(midiMessages);
			tf_selector->setMidiBuffer(tfMidiBuffer);
		}
	}

	// (Removed an unused local event count that was never read.)
	synth.renderNextBlock(buffer, midiBuffer, 0, buffer.getNumSamples());

	// Positive peak of channel 0, used for metering.
	peak = 0.0;
	const float* samples = buffer.getReadPointer(0);
	for (int i = 0; i < buffer.getNumSamples(); i++){
		if (samples[i] > peak){
			peak = samples[i];
		}
	}
}
void
CfpluginAudioProcessor::MidiPanic(MidiBuffer& midiMessages)
{
	// Queue "all notes off" and "all sound off" on every MIDI channel
	// (1-16) at the start of the buffer.
	for (int channel = 1; channel <= 16; ++channel) {
		midiMessages.addEvent(MidiMessage::allNotesOff(channel), 0);
		midiMessages.addEvent(MidiMessage::allSoundOff(channel), 0);
	}
}
示例#21
0
void VoicerUGenInternal::sendMidiBuffer(MidiBuffer const& midiMessagesToAdd) throw()
{	
	// Merge the incoming events into our buffer under the lock; skip the
	// lock entirely when there is nothing to add.
	if (! midiMessagesToAdd.isEmpty())
	{
		const ScopedLock sl(lock);
		midiMessages.addEvents(midiMessagesToAdd, 0, -1, 0);
	}
}
// Renders one block of the synth: gathers MIDI from files and live input,
// runs it through the keyboard state and synthesizer, fires the optional
// note callback, then copies the rendered audio into our own buffer format.
void YSE::SYNTH::implementationObject::process(YSE::SOUND_STATUS & intent) {
  // let juce take care of the midi parsing and buffer generation
  synthBuffer.clear();
  MidiBuffer incomingMidi;

  // get messages from midi files
  for (auto i = midiFiles.begin(); i != midiFiles.end(); i++) {
    (*i)->getMessages(incomingMidi);
  }

  // get messages from midi input
  midiCollector.removeNextBlockOfMessages(incomingMidi, STANDARD_BUFFERSIZE);
  
  // sync message queue
  keyboardState.processNextMidiBuffer(incomingMidi, 0, STANDARD_BUFFERSIZE, true);
  synthesizer.renderNextBlock(synthBuffer, incomingMidi, 0, STANDARD_BUFFERSIZE);

  // alter events if there's a callback function provided
  // NOTE(review): this runs AFTER renderNextBlock, so the callback's edits
  // affect only downstream consumers of incomingMidi, not this block's
  // audio — confirm that is intentional.
  if (onNoteEvent != nullptr && !incomingMidi.isEmpty()) {
    MidiBuffer::Iterator iter(incomingMidi);
    MidiMessage m(0xf0);
    int sample;
    while (iter.getNextEvent(m, sample)) {
      if (m.isNoteOnOrOff()) {
        float pitch = m.getNoteNumber();
        float velocity = m.getFloatVelocity();
        onNoteEvent(m.isNoteOn(), &pitch, &velocity);
        // NOTE(review): pitch is a float here but setNoteNumber takes an
        // int note number — fractional pitches from the callback truncate.
        m.setNoteNumber(pitch);
        m.setVelocity(velocity);
      }
    }
  }

  // now copy over the buffer generated by juce to our own format
  // TODO: is there a way to avoid this copy?
  float * out = buffer[0].getPtr();
  const float * in = synthBuffer.getReadPointer(0);

  // Unrolled copy: eight samples per iteration, remainder handled below.
  int l = STANDARD_BUFFERSIZE;
  for (; l > 7; l -= 8, out += 8, in += 8) {
    out[0] = in[0]; out[1] = in[1]; out[2] = in[2]; out[3] = in[3];
    out[4] = in[4]; out[5] = in[5]; out[6] = in[6]; out[7] = in[7];
  }

  while (l--) *out++ = *in++;
}
示例#23
0
void MLPluginProcessor::processBlock (AudioSampleBuffer& buffer, MidiBuffer& midiMessages)
{
	// Pulls transport info from the host, wires the JUCE buffer pointers
	// into the DSP engine, converts incoming MIDI to engine control events,
	// and runs one block. Outputs silence while disabled or suspended.
 	if (mEngine.isEnabled() && !isSuspended())
	{
		unsigned samples = buffer.getNumSamples();
		
		// get current time from host.
		// should refer to the start of the current block.
		AudioPlayHead::CurrentPositionInfo newTime;
		if (getPlayHead() != 0 && getPlayHead()->getCurrentPosition (newTime))
		{
			lastPosInfo = newTime;
		}
		else
		{
			lastPosInfo.resetToDefault();
		}

		// set host phasor; bpm of 0 signals "not playing" to the engine.
		double bpm = lastPosInfo.isPlaying ? lastPosInfo.bpm : 0.;
		double ppqPosition = lastPosInfo.ppqPosition;
		double secsPosition = lastPosInfo.timeInSeconds;
		int64 samplesPosition = lastPosInfo.timeInSamples;
		bool isPlaying = lastPosInfo.isPlaying;

		// (Removed a dead `if(0)` TEST block that printed transport info.)

		// set Engine I/O.  done here each time because JUCE may change pointers on us.  possibly.
		MLDSPEngine::ClientIOMap ioMap;
		for (int i=0; i<getNumInputChannels(); ++i)
		{
			ioMap.inputs[i] = buffer.getReadPointer(i);
		}		
		for (int i=0; i<getNumOutputChannels(); ++i)
		{
			ioMap.outputs[i] = buffer.getWritePointer(i);
		}
		mEngine.setIOBuffers(ioMap);
        
        if(acceptsMidi())
        {
            convertMIDIToEvents(midiMessages, mControlEvents);
            midiMessages.clear(); // otherwise messages will be passed back to the host
        }
        mEngine.processBlock(samples, mControlEvents, samplesPosition, secsPosition, ppqPosition, bpm, isPlaying);
    }
	else
	{
		buffer.clear();
	}
}
示例#24
0
/**
* Send midi note off message into buffer at given sample position.
*/
void StepSequencer::sendMidiNoteOffMessage(MidiBuffer& midiMessages, int sample)
{
    // Only emit a note-off if the last played step is currently enabled;
    // either way the sequencer note is no longer considered playing.
    const int lastStep = static_cast<int>(params.seqLastPlayedStep.get());

    if (currStepOnOff[lastStep]->getStep() == eOnOffToggle::eOn)
        midiMessages.addEvent(MidiMessage::noteOff(1, lastPlayedNote), sample);

    seqNoteIsPlaying = false;
}
示例#25
0
void MidiplugAudioProcessor::processBlock (AudioSampleBuffer& buffer, MidiBuffer& midiMessages)
{
    // Silence any output channels that have no matching input.
    const int numSamples = buffer.getNumSamples();

    for (int channel = getNumInputChannels(); channel < getNumOutputChannels(); ++channel)
        buffer.clear (channel, 0, numSamples);

    // Discard incoming MIDI and emit the queued internal messages instead.
    midiMessages.clear();
    _midiMessages.swapWith(midiMessages);
}
示例#26
0
void MetronomeProcessor::processBlock (AudioSampleBuffer& buffer, MidiBuffer& midiMessages)
{
    // Translates the metronome's special 2-byte song-position messages
    // (0xf2 00 = "bip", 0xf2 01 = "bop") into note-on events and renders
    // them through the internal synth.
    const int numSamples = buffer.getNumSamples();

	// convert special metronome bip and bop (0xf2 00 and 0xf2 01) into note on/off messages

	MidiBuffer metronomeMidiBuffer;
	MidiBuffer::Iterator iter(midiMessages);
	MidiMessage message;
	int pos;
	while (iter.getNextEvent(message,pos))
	{
		if (message.getRawDataSize()==2 && message.isSongPositionPointer())
		{
			char raw[4] = {0,0,0,0};

			char* data = (char*)message.getRawData();
			if (data[1]==0)
			{
				// bip: high click (note 80)
				MidiMessage b = MidiMessage::noteOn(1,80,64.0f);
				// BUG FIX: a note-on message is 3 bytes; copying 4 read one
				// byte past the end of the raw data.
				memcpy(raw,b.getRawData(),3);
			}
			else
			{
				// bop: low click (note 70)
				MidiMessage b = MidiMessage::noteOn(1,70,64.0f);
				memcpy(raw,b.getRawData(),3);
			}

			if (raw[0])
			{
				MidiMessage m(raw[0],raw[1],raw[2]);
				metronomeMidiBuffer.addEvent(m,pos);
			}
		}
	}

    // and now get the synth to process these midi events and generate its output.
    synth.renderNextBlock (buffer, metronomeMidiBuffer, 0, numSamples);
}
示例#27
0
// Advances the pattern sequencer in sync with the host transport: on every
// beat change, all active patterns step forward and their queued MIDI is
// flushed into the output buffer. When no playhead is available, transport
// state falls back to 4/4 at 120 bpm.
void DemoJuceFilter::processBlock (AudioSampleBuffer& buffer,
                                   MidiBuffer& midiMessages)
{
	if (isSyncedToHost)
	{
		AudioPlayHead::CurrentPositionInfo pos;

		if (getPlayHead() != 0 && getPlayHead()->getCurrentPosition (pos))
		{
			// Only react when the transport info actually changed.
	        if (memcmp (&pos, &lastPosInfo, sizeof (pos)) != 0)
			{
	            lastPosInfo = pos;

				// Position within the bar, expressed in quarter-beats.
				const int ppqPerBar		= (pos.timeSigNumerator * 4 / pos.timeSigDenominator);
				const double beats		= (fmod (pos.ppqPosition, ppqPerBar) / ppqPerBar) * pos.timeSigNumerator;
				const double position	= beats*4;
				const int beat			= (int)position;
				
				currentBpm	= (int)pos.bpm;

				// _p holds the previous beat; step only on a beat change.
				if (_p != beat)
				{
					for (int x=0; x<64; x++)
					{
						if (activePatterns[x])
						{
							patterns[x]->forward(beat+1);
						}
					}

					currentBeat = currentPatternPtr->getCurrentPosition();

					// Wrap the displayed beat into a 16-step range.
					if (currentBeat > 16)
						currentBeat = currentBeat - 16;

					/* process midi events to their devices */
					midiMessages.addEvents (midiManager.getVstMidiEvents(),0,-1,0);

					/* clean the buffers */
					midiManager.clear();

					sendChangeMessage (this);
				}
				
				_p = beat;
			}
		}
		else
		{
			// No playhead: reset to a sane default transport state.
	        zeromem (&lastPosInfo, sizeof (lastPosInfo));
			lastPosInfo.timeSigNumerator = 4;
	        lastPosInfo.timeSigDenominator = 4;
			lastPosInfo.bpm = 120;
	   }	
	}
}
// Filters NRPN controller messages (CC 99/98 = parameter MSB/LSB,
// CC 6/38 = value MSB/LSB) addressed to our MIDI channel out of the buffer,
// accumulating them in currentNrpn. When the value LSB (CC 38) completes a
// full NRPN, handleIncomingNrpn() is invoked on the message thread. All
// other events are passed through unchanged.
void Pfm2AudioProcessor::handleIncomingMidiBuffer(MidiBuffer &buffer, int numberOfSamples) {
    if (!buffer.isEmpty()) {
        MidiBuffer newBuffer;
        MidiMessage midiMessage;
        int samplePosition;
        MidiBuffer::Iterator midiIterator(buffer);
        while (midiIterator.getNextEvent(midiMessage, samplePosition)) {
            bool copyMessageInNewBuffer = true;

            if (midiMessage.isController() && midiMessage.getChannel() == currentMidiChannel) {
                switch (midiMessage.getControllerNumber()) {
                case 99:
                    // NRPN parameter number MSB — consumed, not forwarded.
                    currentNrpn.paramMSB = midiMessage.getControllerValue();
                    copyMessageInNewBuffer = false;
                    break;
                case 98:
                    // NRPN parameter number LSB.
                    currentNrpn.paramLSB = midiMessage.getControllerValue();
                    copyMessageInNewBuffer = false;
                    break;
                case 6:
                    // NRPN data entry MSB.
                    currentNrpn.valueMSB = midiMessage.getControllerValue();
                    copyMessageInNewBuffer = false;
                    break;
                case 38:
                {
                    // NRPN data entry LSB completes the message: assemble
                    // the 14-bit parameter number and value.
                    currentNrpn.valueLSB = midiMessage.getControllerValue();
                    copyMessageInNewBuffer = false;
                    int param = (int)(currentNrpn.paramMSB << 7) + currentNrpn.paramLSB;
                    int value = (int)(currentNrpn.valueMSB << 7) + currentNrpn.valueLSB;

                    // Lock the message thread before touching UI/model state.
                    const MessageManagerLock mmLock;
                    handleIncomingNrpn(param, value);
                    break;
                }
                }
            }
            if (copyMessageInNewBuffer) {
                newBuffer.addEvent(midiMessage, samplePosition);
            }
        }
        // Replace the original contents with the filtered stream.
        buffer.swapWith(newBuffer);
    }
}
void CtrlrPanelMIDIInputThread::handleMIDIFromHost(MidiBuffer &buffer)
{
	{
		// addEvents takes a SAMPLE range, not an event count, so the span
		// must extend past the last event's timestamp to include it.
		const ScopedWriteLock sl(lock);
		hostInputBuffer.addEvents (buffer, 0, buffer.getLastEventTime() + 1, 1);
	}

	// Wake the input thread to process the newly queued events.
	notify();
}
示例#30
0
File: CpuRam.cpp  Project: Amcut/pizmidi
void CpuRam::processBlock (AudioSampleBuffer& buffer,
                                   MidiBuffer& midiMessages)
{
    // Silence any output channels beyond the available inputs, and
    // consume all incoming MIDI.
    const int numOutputs = getNumOutputChannels();

    for (int channel = getNumInputChannels(); channel < numOutputs; ++channel)
        buffer.clear (channel, 0, buffer.getNumSamples());

    midiMessages.clear();
}