//==============================================================================
void MidiManipulator::processEvents (MidiBuffer& midiMessages, const int blockSize)
{
    MidiBuffer midiOutput;

    if (! midiMessages.isEmpty ())
    {
        int timeStamp;
        MidiMessage message (0xf4, 0.0);
        MidiBuffer::Iterator it (midiMessages);

        if (filter)
        {
            while (it.getNextEvent (message, timeStamp))
            {
                if (filter->filterEvent (message))
                    midiOutput.addEvent (message, timeStamp);
            }
        }
        else
        {
            midiOutput = midiMessages;
        }

        midiMessages.clear ();
    }

    if (transform)
    {
        transform->processEvents (midiOutput, blockSize);
    }

    midiMessages = midiOutput;
}
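Most of these snippets iterate events with MidiBuffer::Iterator, which newer JUCE versions (6 and later) deprecate in favour of range-based iteration over MidiMessageMetadata. A minimal sketch of the same filter loop written that way, assuming a hypothetical keepEvent() predicate in place of filter->filterEvent():

// Sketch only (JUCE 6+): range-based iteration over a juce::MidiBuffer.
// keepEvent() is a hypothetical predicate standing in for filter->filterEvent().
juce::MidiBuffer filtered;

for (const auto metadata : midiMessages)
{
    const auto msg = metadata.getMessage();

    if (keepEvent (msg))
        filtered.addEvent (msg, metadata.samplePosition);
}

midiMessages.swapWith (filtered);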
Example 2
void EventNode::process(AudioSampleBuffer &buffer, 
                            MidiBuffer &midiMessages,
                            int& nSamples)
{
	midiMessages.clear();
    
    //std::cout << "Adding message." << std::endl;
    
    Parameter& p1 =  parameters.getReference(0);
    
    //std::cout << (float) p1[0] << std::endl;

	for (int i = 0; i < buffer.getNumSamples(); i++)
	{
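		// accumulate one count per sample; once the count exceeds
		// sampleRate / p1[0], emit a TTL event (roughly p1[0] events per second)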
		accumulator += 1.0f;

		if (accumulator > getSampleRate() / (float) p1[0])
		{
			std::cout << "Adding message." << std::endl;
			addEvent(midiMessages, TTL, i);
			accumulator = 0;
		}

	}	

}
Example 3
void EventNode::process(AudioSampleBuffer& buffer,
                        MidiBuffer& events,
                        int& nSamples)
{
    events.clear();

    //std::cout << "Adding message." << std::endl;

    Parameter& p1 =  parameters.getReference(0);

    //std::cout << (float) p1[0] << std::endl;

    for (int i = 0; i < buffer.getNumSamples(); i++)
    {
        accumulator += 1.0f;

        if (accumulator > getSampleRate() / (float) p1[0])
        {
            std::cout << "Adding message." << std::endl;
            addEvent(events, // MidiBuffer
                     TTL,    // eventType
                     i,      // sampleNum
                     1,	     // eventID
                     1		 // eventChannel
                    );

            accumulator = 0;
        }

    }

}
Example 4
//------------------------------------------------------------------------------
void MidiInterceptor::processBlock(AudioSampleBuffer &buffer,
								   MidiBuffer &midiMessages)
{
	int samplePos;
	double seconds;
	MidiMessage tempMess(0xf0);
	MidiBuffer::Iterator it(midiMessages);
	const double sampleRate = getSampleRate();

	jassert(sampleRate > 0.0);

	if(midiManager)
	{
		int numMess = midiMessages.getNumEvents();

		if(numMess > 0)
			numMess = numMess; // no-op; presumably left in as a handy debugger breakpoint

		while(it.getNextEvent(tempMess, samplePos))
		{
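			// convert the event's sample offset within this block to an absolute time in seconds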
			seconds = (double)(samplesSinceStart+samplePos)/sampleRate;
			midiManager->midiCcReceived(tempMess, seconds);
		}
	}

	samplesSinceStart += buffer.getNumSamples();

	midiMessages.clear();
}
Example 5
void MLPluginProcessor::processBlock (AudioSampleBuffer& buffer, MidiBuffer& midiMessages)
{
 	if (mEngine.isEnabled() && !isSuspended())
	{
		unsigned samples = buffer.getNumSamples();
		
		// get current time from host.
		// should refer to the start of the current block.
		AudioPlayHead::CurrentPositionInfo newTime;
		if (getPlayHead() != 0 && getPlayHead()->getCurrentPosition (newTime))
		{
			lastPosInfo = newTime;
		}
		else
		{
			lastPosInfo.resetToDefault();
		}

		// set host phasor 
		double bpm = lastPosInfo.isPlaying ? lastPosInfo.bpm : 0.;
		double ppqPosition = lastPosInfo.ppqPosition;
		double secsPosition = lastPosInfo.timeInSeconds;
		int64 samplesPosition = lastPosInfo.timeInSamples;
		bool isPlaying = lastPosInfo.isPlaying;
		
		// TEST
		if(0)
		if(lastPosInfo.isPlaying)
		{
			debug() << "bpm:" << lastPosInfo.bpm 
			<< " ppq:" << std::setprecision(5) << ppqPosition << std::setprecision(2) 
			<< " secs:" << secsPosition << "\n";
		}
			
		// set Engine I/O.  done here each time because JUCE may change pointers on us.  possibly.
		MLDSPEngine::ClientIOMap ioMap;
		for (int i=0; i<getNumInputChannels(); ++i)
		{
			ioMap.inputs[i] = buffer.getReadPointer(i);
		}		
		for (int i=0; i<getNumOutputChannels(); ++i)
		{
			ioMap.outputs[i] = buffer.getWritePointer(i);
		}
		mEngine.setIOBuffers(ioMap);
        
        if(acceptsMidi())
        {
            convertMIDIToEvents(midiMessages, mControlEvents);
            midiMessages.clear(); // otherwise messages will be passed back to the host
        }
        mEngine.processBlock(samples, mControlEvents, samplesPosition, secsPosition, ppqPosition, bpm, isPlaying);
    }
	else
	{
		buffer.clear();
	}
}
Example 6
//==============================================================================
void BeatboxVoxAudioProcessor::processBlock(AudioSampleBuffer& buffer, MidiBuffer& midiMessages)
{
	const auto totalNumInputChannels = getTotalNumInputChannels();
	const auto totalNumOutputChannels = getTotalNumOutputChannels();
	const auto sampleRate = getSampleRate();
	const auto numSamples = buffer.getNumSamples();

	//Reset the noise synth if triggered
	midiMessages.addEvent(MidiMessage::noteOff(1, noiseNoteNumber), 0);

	classifier.processAudioBuffer(buffer.getReadPointer(0), numSamples);

	
	//This is used for configuring the onset detector settings from the GUI
	if (classifier.noteOnsetDetected())
	{
		if (usingOSDTestSound.load())
		{
			triggerOSDTestSound(midiMessages);
		}
		else if (classifier.getNumBuffersDelayed() > 0)
		{
			triggerNoise(midiMessages);
		}
	}

	const auto sound = classifier.classify();

	switch (sound)
	{
		case soundLabel::KickDrum:
			triggerKickDrum(midiMessages);
			break;
		case soundLabel::SnareDrum:
			triggerSnareDrum(midiMessages);
			break;
		case soundLabel::HiHat:
			triggerHiHat(midiMessages);
			break;
		default: break;
	}


	/** Now that classification is complete, clear the input buffer/signal.
	 *  We only want the synth response as output, not a blend of the input
	 *  vocal signal and the synth output.
	 **/
	buffer.clear();

	if (usingOSDTestSound.load())
		osdTestSynth.renderNextBlock(buffer, midiMessages, 0, buffer.getNumSamples());
	else
		drumSynth.renderNextBlock(buffer, midiMessages, 0, buffer.getNumSamples());

	//Not outputting midi so clear after rendering synths
	midiMessages.clear();
}
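The triggerKickDrum / triggerSnareDrum / triggerHiHat helpers are not shown in this snippet. A hypothetical sketch of what such a helper might look like (kickNoteNumber is an assumed member and the velocity is arbitrary), just to illustrate how the drum synth is driven through the same MidiBuffer:

// Hypothetical helper, not part of the original source: queue a note-on for the
// drum synth at the start of the block. kickNoteNumber is an assumed member.
void BeatboxVoxAudioProcessor::triggerKickDrum (MidiBuffer& midiMessages) const
{
    midiMessages.addEvent (MidiMessage::noteOn (1, kickNoteNumber, (uint8) 100), 0);
}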
Example 7
void MidiplugAudioProcessor::processBlock (AudioSampleBuffer& buffer, MidiBuffer& midiMessages)
{
    // flush audio outputs
    for (int i = getNumInputChannels(); i < getNumOutputChannels(); ++i)
        buffer.clear (i, 0, buffer.getNumSamples());

    midiMessages.clear();
    _midiMessages.swapWith(midiMessages);
}
Example 8
void CpuRam::processBlock (AudioSampleBuffer& buffer,
                                   MidiBuffer& midiMessages)
{
    for (int i = getNumInputChannels(); i < getNumOutputChannels(); ++i)
    {
        buffer.clear (i, 0, buffer.getNumSamples());
    }

	midiMessages.clear();
}
Example 9
    void perform (AudioBuffer<FloatType>& buffer, MidiBuffer& midiMessages, AudioPlayHead* audioPlayHead)
    {
        auto numSamples = buffer.getNumSamples();
        auto maxSamples = renderingBuffer.getNumSamples();

        if (numSamples > maxSamples)
        {
            // being asked to render more samples than our buffers have, so slice things up...
            tempMIDI.clear();
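            // copy the events that land in the second slice, shifting their timestamps
            // back by maxSamples so they become relative to that slice's start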
            tempMIDI.addEvents (midiMessages, maxSamples, numSamples, -maxSamples);

            {
                AudioBuffer<FloatType> startAudio (buffer.getArrayOfWritePointers(), buffer.getNumChannels(), maxSamples);
                midiMessages.clear (maxSamples, numSamples);
                perform (startAudio, midiMessages, audioPlayHead);
            }

            AudioBuffer<FloatType> endAudio (buffer.getArrayOfWritePointers(), buffer.getNumChannels(), maxSamples, numSamples - maxSamples);
            perform (endAudio, tempMIDI, audioPlayHead);
            return;
        }

        currentAudioInputBuffer = &buffer;
        currentAudioOutputBuffer.setSize (jmax (1, buffer.getNumChannels()), numSamples);
        currentAudioOutputBuffer.clear();
        currentMidiInputBuffer = &midiMessages;
        currentMidiOutputBuffer.clear();

        {
            const Context context { renderingBuffer.getArrayOfWritePointers(), midiBuffers.begin(), audioPlayHead, numSamples };

            for (auto* op : renderOps)
                op->perform (context);
        }

        for (int i = 0; i < buffer.getNumChannels(); ++i)
            buffer.copyFrom (i, 0, currentAudioOutputBuffer, i, 0, numSamples);

        midiMessages.clear();
        midiMessages.addEvents (currentMidiOutputBuffer, 0, buffer.getNumSamples(), 0);
        currentAudioInputBuffer = nullptr;
    }
Example 10
void MidiSelectProcessor::processBlock (AudioSampleBuffer &buffer, MidiBuffer &midiMessages)
{
    // Do nothing to buffer; just let it pass through

    MidiBuffer inputMessages(midiMessages);
    midiMessages.clear();
    
    MidiBuffer::Iterator it(inputMessages);
    while (true) {
        MidiMessage message(0xf0);
        int samplePosition;
        if (!it.getNextEvent(message, samplePosition)) {
            break;
        }
        
        if (message.isNoteOn()) {
            if (findNextNote()) {
            	// We know it's safe to add to the list if findNextNote() returns true
                transformations[message.getNoteNumber()] = currentNote;
                message.setNoteNumber(currentNote);
                midiMessages.addEvent(message, samplePosition);
            }
            else {
                // This will just get skipped, but we must make a note of that so we can
                // also skip the upcoming note-off event
                transformations[message.getNoteNumber()] = -1;
            }
        }
        
        else if (message.isNoteOff()) {
            auto transformIt = transformations.find(message.getNoteNumber());
            if (transformIt == transformations.end()) {
                // I have no recollection of this note
                continue;
            }
            
            if (transformIt->second == -1) {
                // We discarded the note on, discard the note off too
                transformations.erase(transformIt);
                continue;
            }
            
            // Okay, make the note off match the note on, then add
            message.setNoteNumber(transformIt->second);
            midiMessages.addEvent(message, samplePosition);
            transformations.erase(transformIt);
        }
        
        else {
            // We don't mess with other events (yet), so pass on through
            midiMessages.addEvent(message, samplePosition);
        }
    }
}
Example 11
  void getNextAudioBlock (AudioSourceChannelInfo const& bufferToFill)
  {
    int const numSamples = bufferToFill.numSamples;

    // the synth always adds its output
    //bufferToFill.clearActiveBufferRegion();

    m_midi.clear ();

    if (m_active)
    {
      double const samplesPerBeat = m_sampleRate * 60 / m_tempo;

      // Adjust phase so the beat is on or after the beginning of the output
      double beat;
      if (m_phase > 0)
        beat = 1 - m_phase;
      else
        beat = 0 - m_phase;

      // Set notes in midi buffer
      for (;;beat += 1)
      {
        // Calc beat pos
        int pos = static_cast <int> (beat * samplesPerBeat);

        if (pos < numSamples)
        {
          m_midi.addEvent (MidiMessage::noteOn (1, 84, 1.f), pos);
        }
        else
        {
          break;
        }
      }
    }

    m_synth.renderNextBlock (*bufferToFill.buffer,
                            m_midi,
                            0,
                            bufferToFill.numSamples);
  }
Example 12
void CtrlrPanelProcessor::processBlock(MidiBuffer &midiMessages, MidiBuffer &leftoverBuffer, const AudioPlayHead::CurrentPositionInfo &info)
{
	if (owner.getMidiOptionBool(panelMidiInputFromHostCompare))
	{
		owner.getMIDIInputThread().handleMIDIFromHost(midiMessages);
	}

	leftoverBuffer.clear();

    processLua(midiMessages, info);

	MidiBuffer::Iterator i(midiMessages);
	MidiMessage m;
	int time;

	while (i.getNextEvent(m,time))
	{
		_MIN("VST INPUT", m, time);
		if (owner.getMidiOptionBool(panelMidiThruH2D) == true)
		{
			if (owner.getMidiOptionBool(panelMidiThruH2DChannelize))
			{
				m.setChannel (owner.getMidiChannel(panelMidiOutputChannelDevice));
			}

			owner.sendMidi(m);
		}

		if (owner.getMidiOptionBool(panelMidiThruH2H) == true)
		{
			if (owner.getMidiOptionBool(panelMidiThruH2HChannelize))
			{
				m.setChannel (owner.getMidiChannel(panelMidiOutputChannelHost));
			}

			leftoverBuffer.addEvent (m, time);
		}
	}
}
//==============================================================================
void MidiTransform::processEvents (MidiBuffer& midiMessages, const int blockSize)
{
    int timeStamp;
    MidiMessage message (0xf4, 0.0);
    MidiBuffer::Iterator it (midiMessages);

    MidiBuffer midiOutput;

    switch (command)
    {
    case MidiTransform::KeepEvents:
            break;
    case MidiTransform::DiscardEvents:
        {
            midiMessages.clear ();
            break;
        }
    case MidiTransform::RemapChannel:
        {
            while (it.getNextEvent (message, timeStamp))
            {
                message.setChannel (channelNumber);
                midiOutput.addEvent (message, timeStamp);
            }
            midiMessages = midiOutput;
            break;
        }
    case MidiTransform::ScaleNotes:
        {
            while (it.getNextEvent (message, timeStamp))
            {
                if (message.isNoteOnOrOff ())
                {
                    message.setNoteNumber (roundFloatToInt (message.getNoteNumber () * noteScale));
                    midiOutput.addEvent (message, timeStamp);
                }
            }
            midiMessages = midiOutput;
            break;
        }
    case MidiTransform::InvertNotes:
        {
            while (it.getNextEvent (message, timeStamp))
            {
                if (message.isNoteOnOrOff ())
                {
                    message.setNoteNumber (127 - message.getNoteNumber ());
                    midiOutput.addEvent (message, timeStamp);
                }
            }
            midiMessages = midiOutput;
            break;
        }
    case MidiTransform::TransposeNotes:
        {
            while (it.getNextEvent (message, timeStamp))
            {
                if (message.isNoteOnOrOff ())
                {
                    message.setNoteNumber (jmax (0, jmin (127, message.getNoteNumber () - noteTranspose)));
                    midiOutput.addEvent (message, timeStamp);
                }
            }
            midiMessages = midiOutput;
            break;
        }
    case MidiTransform::ScaleVelocity:
        {
            while (it.getNextEvent (message, timeStamp))
            {
                if (message.isNoteOn ())
                {
                    message.setVelocity ((message.getVelocity () / 127.0f) * velocityScale);
                    midiOutput.addEvent (message, timeStamp);
                }
            }
            midiMessages = midiOutput;
            break;
        }
    case MidiTransform::InvertVelocity:
        {
            while (it.getNextEvent (message, timeStamp))
            {
                if (message.isNoteOn ())
                {
                    message.setVelocity ((127 - message.getVelocity ()) / 127.0f);
                    midiOutput.addEvent (message, timeStamp);
                }
            }
            midiMessages = midiOutput;
            break;
        }
    case MidiTransform::TransposeVelocity:
        {
            while (it.getNextEvent (message, timeStamp))
            {
                if (message.isNoteOn ())
                {
                    message.setVelocity (jmax (0, jmin (127, message.getVelocity () - velocityTranspose)) / 127.0f);
                    midiOutput.addEvent (message, timeStamp);
                }
            }
            midiMessages = midiOutput;
            break;
        }
    case MidiTransform::TriggerCC:
        {
            break;
        }
    case MidiTransform::TriggerNote:
        {
            break;
        }
    }
}
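A note on the velocity cases above: juce::MidiMessage::getVelocity() returns a 7-bit value (0..127), while setVelocity() expects a normalised float in the 0..1 range (getFloatVelocity() returns that normalised form directly). A minimal sketch of the conversion used in the InvertVelocity branch:

// Sketch only: inverting a note-on velocity with JUCE's MidiMessage API.
// getVelocity() yields 0..127; setVelocity() takes a normalised 0..1 float.
if (message.isNoteOn())
{
    message.setVelocity ((127 - message.getVelocity()) / 127.0f);

    // getFloatVelocity() reads the same value back in the 0..1 range
    const float normalised = message.getFloatVelocity();
    juce::ignoreUnused (normalised);
}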
Example 14
void MidiOutFilter::processBlock (AudioSampleBuffer& buffer,
                                   MidiBuffer& midiMessages)
{
    for (int i = 0; i < getNumOutputChannels(); ++i)
    {
        buffer.clear (i, 0, buffer.getNumSamples());
    }

    const double SR=getSampleRate();
	const double iSR=1.0/SR;
    AudioPlayHead::CurrentPositionInfo pos;
    if (getPlayHead() != 0 && getPlayHead()->getCurrentPosition (pos))
    {
        if (memcmp (&pos, &lastPosInfo, sizeof (pos)) != 0)
        {
            if(param[kMTC]>=0.5f) {
                double frameRate=24.0;
                int mtcFrameRate=0;

                const double samplesPerPpq=60.0*SR/pos.bpm;
                const double samplesPerClock = SR/(4.0*frameRate);
                const long double seconds = (long double)(pos.ppqPosition*60.0f/pos.bpm) /*+ smpteOffset*/;
                const long double absSecs = fabs (seconds);
                const bool neg  = seconds < 0.0;

                int hours, mins, secs, frames;
                if (frameRate==29.97) {
                    int64 frameNumber = int64(absSecs*29.97);
                    frameNumber +=  18*(frameNumber/17982) + 2*(((frameNumber%17982) - 2) / 1798);

                    hours  = int((((frameNumber / 30) / 60) / 60) % 24);
                    mins   = int(((frameNumber / 30) / 60) % 60);
                    secs   = int((frameNumber / 30) % 60);
                    frames = int(frameNumber % 30);
                }
                else {
                    hours  = (int) (absSecs / (60.0 * 60.0));
                    mins   = ((int) (absSecs / 60.0)) % 60;
                    secs   = ((int) absSecs) % 60;
                    frames = (int)(int64(absSecs*frameRate) % (int)frameRate);
                }
                if (pos.isPlaying)
                {
                    double i=0.0;
                    const double clockppq = fmod(absSecs*frameRate*4.0,(long double)1.0);
                    samplesToNextMTC = (int)(samplesPerClock * (clockppq+i));
                    i+=1.0;
                    if (!wasPlaying) {
                        //this is so the song position pointer will be sent before any
                        //other data at the beginning of the song
                        MidiBuffer temp = midiMessages;
                        midiMessages.clear();

                        if (samplesToNextMTC<buffer.getNumSamples()) {
                            int mtcData;
                            switch (mtcNumber)
                            {
                            case 0: mtcData=frames&0x0f; break;
                            case 1: mtcData=(frames&0xf0)>>4; break;
                            case 2: mtcData=secs&0x0f; break;
                            case 3: mtcData=(secs&0xf0)>>4; break;
                            case 4: mtcData=mins&0x0f; break;
                            case 5: mtcData=(mins&0xf0)>>4; break;
                            case 6: mtcData=hours&0x0f; break;
                            case 7: mtcData=(hours&0x10)>>4 | mtcFrameRate; break;
                            }
                            MidiMessage midiclock(0xf1,(mtcNumber<<4)|(mtcData));
                            ++mtcNumber;
                            mtcNumber&=0x07;
                            midiMessages.addEvent(midiclock,samplesToNextMTC);
                            samplesToNextMTC = (int)(samplesPerClock * (clockppq+i));
                            i+=1.0;
                            startMTCAt=-999.0;
                            sendmtc=true;
                        }

                        midiMessages.addEvents(temp,0,buffer.getNumSamples(),0);
                    }
                    if (startMTCAt >-999.0 && (int)(samplesPerPpq*(startMTCAt-pos.ppqPosition))<buffer.getNumSamples()) {
                            samplesToNextMTC = (int)(samplesPerPpq*(startMTCAt-pos.ppqPosition));
                            int mtcData;
                            switch (mtcNumber)
                            {
                            case 0: mtcData=frames&0x0f; break;
                            case 1: mtcData=(frames&0xf0)>>4; break;
                            case 2: mtcData=secs&0x0f; break;
                            case 3: mtcData=(secs&0xf0)>>4; break;
                            case 4: mtcData=mins&0x0f; break;
                            case 5: mtcData=(mins&0xf0)>>4; break;
                            case 6: mtcData=hours&0x0f; break;
                            case 7: mtcData=(hours&0x10)>>4 | mtcFrameRate; break;
                            }
                            MidiMessage midiclock(0xf1,(mtcNumber<<4)|(mtcData));
                            ++mtcNumber;
                            mtcNumber&=0x07;
                            midiMessages.addEvent(midiclock,samplesToNextMTC);
                            samplesToNextMTC = (int)(samplesPerClock * (clockppq+i));
                            i+=1.0;
                            startMTCAt=-999.0;
                            sendmtc=true;
                    }
Example 15
void InstanceProcessor::processBlock(AudioSampleBuffer& buffer, MidiBuffer& midiMessages)
{
    for(int i = getTotalNumInputChannels(); i < getTotalNumOutputChannels(); ++i)
    {
        buffer.clear(i, 0, buffer.getNumSamples());
    }
    bool infos = false;
    
    AudioPlayHead* playhead = getPlayHead();
    if(playhead && m_patch_tie)
    {
        infos = playhead->getCurrentPosition(m_playinfos);
    }
    lock();
    {
        m_midi.clear();
        if(infos)
        {
            m_playing_list.setFloat(0, m_playinfos.isPlaying);
            m_playing_list.setFloat(1, m_playinfos.timeInSeconds);
            sendMessageAnything(m_patch_tie, s_playing, m_playing_list);
            m_measure_list.setFloat(0, m_playinfos.bpm);
            m_measure_list.setFloat(1, m_playinfos.timeSigNumerator);
            m_measure_list.setFloat(2, m_playinfos.timeSigDenominator);
            m_measure_list.setFloat(3, m_playinfos.ppqPosition);
            m_measure_list.setFloat(4, m_playinfos.ppqPositionOfLastBarStart);
            sendMessageAnything(m_patch_tie, s_measure, m_measure_list);
        }
        for(size_t i = 0; i < m_parameters.size() && m_parameters[i].isValid(); ++i)
        {
            sendMessageFloat(m_parameters[i].getTie(), m_parameters[i].getValueNonNormalized());
        }
        
        MidiMessage message;
        MidiBuffer::Iterator it(midiMessages);
        int position = midiMessages.getFirstEventTime();
        while(it.getNextEvent(message, position))
        {
            if(message.isNoteOnOrOff())
            {
                sendMidiNote(message.getChannel(), message.getNoteNumber(), message.getVelocity());
            }
            else if(message.isController())
            {
                sendMidiControlChange(message.getChannel(), message.getControllerNumber(), message.getControllerValue());
            }
            else if(message.isPitchWheel())
            {
                sendMidiPitchBend(message.getChannel(), message.getPitchWheelValue());
            }
            else if(message.isChannelPressure())
            {
                sendMidiAfterTouch(message.getChannel(), message.getChannelPressureValue());
            }
            else if(message.isAftertouch())
            {
                sendMidiPolyAfterTouch(message.getChannel(), message.getNoteNumber(), message.getAfterTouchValue());
            }
            else if(message.isProgramChange())
            {
                sendMidiProgramChange(message.getChannel(), message.getProgramChangeNumber());
            }
        }
    }
    midiMessages.clear();
    performDsp(buffer.getNumSamples(),
               getTotalNumInputChannels(), buffer.getArrayOfReadPointers(),
               getTotalNumOutputChannels(), buffer.getArrayOfWritePointers());
    midiMessages.swapWith(m_midi);
    unlock();
}
Example 16
void RemoteGoatVstAudioProcessor::processBlock(AudioSampleBuffer& buffer, MidiBuffer& midiMessages)
{
	//if (!midiMessages.isEmpty())
	//{
	//	String trace;
	//	trace << "MIDI:";
	//	for (int i = 0; i < midiMessages.data.size(); ++i)
	//	{
	//		trace << String::formatted(" %02X", midiMessages.data[i]);
	//	}
	//	writeTrace(trace);
	//}

	// For each sample name, a sorted collection of note-on/note-off event
	// sample positions (the bool marks note on vs. note off).

	MidiBuffer::Iterator it(midiMessages);
	MidiMessage midiMessage;
	int samplePosition;
	while (it.getNextEvent(midiMessage, samplePosition))
	{
		// Check note number, map to sample name.
		int note = midiMessage.getNoteNumber();
		auto itt = _noteNumberSampleNameMap.find(note);
		if (itt != _noteNumberSampleNameMap.end())
		{
			String sampleName = itt->second;

			if (midiMessage.isNoteOn())
				// Save note on sample position for sample.
				noteOnSets[sampleName].insert(std::make_pair(samplePosition, true));
			else if (midiMessage.isNoteOff())
				noteOnSets[sampleName].insert(std::make_pair(samplePosition, false));
		}
	}

	midiMessages.clear();

	buffer.clear(0, 0, buffer.getNumSamples());
	buffer.clear(1, 0, buffer.getNumSamples());

	for (auto& samplePair : _samples)
	{
		Sample& sample = samplePair.second;
		auto noteOnSetsIterator = noteOnSets.find(sample.getName());
		if (noteOnSetsIterator != noteOnSets.end())
		{
			const std::set<std::pair<int, bool>>& noteOns = noteOnSetsIterator->second;
			int offset = noteOns.begin()->first;
			sample.read(buffer, 0, offset, false);
			for (auto noteOnIterator = noteOns.begin(); noteOnIterator != noteOns.end(); ++noteOnIterator)
			{
				int noteOn = noteOnIterator->first;
				bool onOrOff = noteOnIterator->second;
				writeTrace(String() << "Triggered " << sample.getName() + " (" << (int)onOrOff << ")");
				auto nextNoteOnIterator = noteOnIterator;
				++nextNoteOnIterator;
				if (nextNoteOnIterator != noteOns.end())
				{
					int nextNoteOn = nextNoteOnIterator->first;
					int diff = nextNoteOn - noteOn;
					if (onOrOff)
						sample.read(buffer, offset, diff, true);
					else
						sample.noteOff();
					offset += diff;
				}
				else
				{
					if (onOrOff)
						sample.read(buffer, offset, buffer.getNumSamples() - offset, true);
					else
						sample.noteOff();
				}
			}
		}
		else
		{
			sample.read(buffer, 0, buffer.getNumSamples(), false);
		}
	}
}
Example 17
void MidiControllerAutomationHandler::handleParameterData(MidiBuffer &b)
{
	const bool bufferEmpty = b.isEmpty();
	const bool noCCsUsed = !anyUsed && !unlearnedData.used;

	if (bufferEmpty || noCCsUsed) return;

	tempBuffer.clear();

	MidiBuffer::Iterator mb(b);
	MidiMessage m;

	int samplePos;

	while (mb.getNextEvent(m, samplePos))
	{
		bool consumed = false;

		if (m.isController())
		{
			const int number = m.getControllerNumber();

			if (isLearningActive())
			{
				setUnlearndedMidiControlNumber(number, sendNotification);
			}

			for (auto& a : automationData[number])
			{
				if (a.used)
				{
					jassert(a.processor.get() != nullptr);

					auto normalizedValue = (double)m.getControllerValue() / 127.0;

					if (a.inverted) normalizedValue = 1.0 - normalizedValue;

					const double value = a.parameterRange.convertFrom0to1(normalizedValue);

					const float snappedValue = (float)a.parameterRange.snapToLegalValue(value);

					if (a.macroIndex != -1)
					{
						a.processor->getMainController()->getMacroManager().getMacroChain()->setMacroControl(a.macroIndex, (float)m.getControllerValue(), sendNotification);
					}
					else
					{
						if (a.lastValue != snappedValue)
						{
							a.processor->setAttribute(a.attribute, snappedValue, sendNotification);
							a.lastValue = snappedValue;
						}
					}

					consumed = true;
				}
			}
		}

		if (!consumed) tempBuffer.addEvent(m, samplePos);
	}

	b.clear();
	b.addEvents(tempBuffer, 0, -1, 0);
}
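The parameterRange calls in the loop above are juce::NormalisableRange operations; a small sketch of the same CC-to-parameter mapping, with made-up range values:

// Sketch only: mapping a 7-bit CC value through a juce::NormalisableRange,
// as handleParameterData() does above. The 20..20000 range and step are arbitrary.
juce::NormalisableRange<double> range (20.0, 20000.0, 1.0);

const int ccValue = 64;                                    // hypothetical controller value
const double normalised = ccValue / 127.0;                 // 0..1, as in the loop above
const double mapped     = range.convertFrom0to1 (normalised);
const double snapped    = range.snapToLegalValue (mapped); // honours the step size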
Example 18
void LumaPlug::processBlock (AudioSampleBuffer& buffer,
                                   MidiBuffer& midiMessages)
{
	// we don't want any midi input events
	midiMessages.clear();
	
	bool isRunning = false;
	AudioPlayHead* playHead = getPlayHead();
	if (playHead)
	{
		//printf("playhead: 0x%x\n", playHead);
		AudioPlayHead::CurrentPositionInfo posInfo;
		playHead->getCurrentPosition(posInfo);
		isRunning = posInfo.isPlaying || posInfo.isRecording;
		luma_->SetBPM(posInfo.bpm);
	}
	
	if (isRunning && !luma_->IsRunning())
	{
		std::string error;
		bool setScriptOK = luma_->SetScript(scriptText_.toUTF8(), error);
		if (!setScriptOK && getActiveEditor())
		{
			((LumaPlugEditor*)getActiveEditor())->Log(error.c_str());
		}
//		else if (getActiveEditor())
//		{
//			((LumaPlugEditor*)getActiveEditor())->Log("Play");
//		}
		luma_->Start();
	}
	else if (!isRunning && luma_->IsRunning())
	{
		luma_->Stop();
//		if (getActiveEditor())
//		{
//			((LumaPlugEditor*)getActiveEditor())->Log("Stop");
//		}
	}

    if (luma_->IsRunning())
	{
		double sampleRate = getSampleRate();
		int numSamples = buffer.getNumSamples();
		float elapsed = (float(numSamples) / float(sampleRate)) * 1000.0;
		//printf("Elapsed: %f\n", elapsed);
		vector<shared_ptr<LumaEvent> > events;
		vector<float> offsets;
		luma_->Update(elapsed, events, offsets);
		
		if (events.size() > 0)
		{
			for (unsigned int i = 0; i < events.size(); i++)
			{
				//printf("New Event.  Offset = %d, OffsetSamples = %d, Type = %d\n\n", 
				//	offsets[i], midiEvent->deltaFrames, events[i]->GetType());
				
				int eventOffset = lroundf( ( float(offsets[i]) / 1000.0 ) * sampleRate );
				
				if (events[i]->GetType() == kLumaEvent_NoteOn)
				{
					NoteOnEvent* noteOn = (NoteOnEvent*)events[i].get();
					MidiMessage msg = MidiMessage::noteOn(1, noteOn->GetPitch(), (juce::uint8)noteOn->GetVelocity());
					midiMessages.addEvent(msg, eventOffset);
				}
				else if (events[i]->GetType() == kLumaEvent_NoteOff)
				{
					NoteOffEvent* noteOff = (NoteOffEvent*)events[i].get();
					MidiMessage msg = MidiMessage::noteOff(1, noteOff->GetPitch());
					midiMessages.addEvent(msg, eventOffset);
				}
				else
				{
					fprintf(stderr, "LumaVST: Unknown event type: %d\n", events[i]->GetType());
				}
			}
			
			// clear the used luma events
			events.clear();
		}
	}
	
	/*
	Simple test of sending midi from the plugin
	
	static int count = 0;
	count += buffer.getNumSamples();
	if (count >= 20000)
	{
		//MidiMessage msg = MidiMessage::noteOff(0, 60);
		//midiMessages.addEvent(msg, 0);
	
		MidiMessage msg = MidiMessage::noteOn(1, 60, (juce::uint8)100);
		midiMessages.addEvent(msg, 0);
		
		count = 0;
	}
	*/
}