static void RT_MIDI_send_msg_to_patch(struct Patch *patch, MidiMessage message, int64_t seq_time){
  if (message.isNoteOn())
    RT_PATCH_play_note(patch, message.getNoteNumber(), -1, message.getVelocity() / 127.0f, 0.0f, seq_time);

  else if (message.isNoteOff())
    RT_PATCH_stop_note(patch, message.getNoteNumber(), -1, seq_time);

  else if (message.isAftertouch())
    // polyphonic aftertouch: the pressure byte is returned by getAfterTouchValue()
    RT_PATCH_change_velocity(patch, message.getNoteNumber(), -1, message.getAfterTouchValue() / 127.0f, seq_time);

  else {
    const uint8_t *raw_data = message.getRawData();
    int len = message.getRawDataSize();

    R_ASSERT_RETURN_IF_FALSE(len>=1 && len<=3);

    uint32_t msg;

    if (len==3)
      msg = MIDI_msg_pack3(raw_data[0], raw_data[1], raw_data[2]);
    else if (len==2)
      msg = MIDI_msg_pack2(raw_data[0], raw_data[1]);
    else if (len==1)
      msg = MIDI_msg_pack1(raw_data[0]);
    else
      return;

    RT_PATCH_send_raw_midi_message(patch, msg, seq_time);
  }
}
void SoftSynthAudioProcessor::processBlock (AudioSampleBuffer& buffer, MidiBuffer& midiMessages)
{
    MidiBuffer processedMidi;
    int time;
    MidiMessage m;

    for (MidiBuffer::Iterator i(midiMessages); i.getNextEvent(m, time);)
    {
        if (m.isNoteOn())
        {
            m = MidiMessage::noteOn(m.getChannel(), m.getNoteNumber(), m.getVelocity());
            synth.keyPressed(m.getNoteNumber(), m.getVelocity());
        }
        else if (m.isNoteOff())
        {
            m = MidiMessage::noteOff(m.getChannel(), m.getNoteNumber(), m.getVelocity());
            synth.keyReleased(m.getNoteNumber());
        }

        processedMidi.addEvent(m, time);
    }

    auto synthBuffer = synth.getNextBuffer(buffer.getNumSamples());
    float* leftData  = buffer.getWritePointer(0);
    float* rightData = buffer.getWritePointer(1);

    for (int i = 0; i < buffer.getNumSamples(); ++i)
    {
        leftData[i]  = synthBuffer[i];
        rightData[i] = synthBuffer[i];
    }

    midiMessages.swapWith(processedMidi);
}
bool MidiControlAction::processIncomingMessage(const MidiMessage& message)
{
    if (isLearning)
    {
        isLearning = false;
        messageTemplate = message;
        return true;
    }
    else if (message.isForChannel(messageTemplate.getChannel()))
    {
        if (messageTemplate.isNoteOnOrOff() && message.isNoteOnOrOff()
            && message.getNoteNumber() == messageTemplate.getNoteNumber())
        {
            // toggle!
            if (message.isNoteOn() && message.getNoteNumber() == messageTemplate.getNoteNumber())
            {
                getMappedComponent()->setValue(getMappedComponent()->getValue() >= 0.5f ? 0.f : 1.f);
                getMappedComponent()->triggerAsyncUpdate();
            }
            return true;
        }
        else if (messageTemplate.isController() && message.isController()
                 && messageTemplate.getControllerNumber() == message.getControllerNumber())
        {
            //setValue(message.getControllerValue() / 127.0f);
            getMappedComponent()->setValue(message.getControllerValue() / 127.0f);
            getMappedComponent()->triggerAsyncUpdate();
            return true;
        }
    }
    return false;
}
void Synthesiser::handleMidiEvent (const MidiMessage& m)
{
    if (m.isNoteOn())
    {
        noteOn (m.getChannel(), m.getNoteNumber(), m.getFloatVelocity());
    }
    else if (m.isNoteOff())
    {
        noteOff (m.getChannel(), m.getNoteNumber(), true);
    }
    else if (m.isAllNotesOff() || m.isAllSoundOff())
    {
        allNotesOff (m.getChannel(), true);
    }
    else if (m.isPitchWheel())
    {
        const int channel = m.getChannel();
        const int wheelPos = m.getPitchWheelValue();
        lastPitchWheelValues [channel - 1] = wheelPos;

        handlePitchWheel (channel, wheelPos);
    }
    else if (m.isController())
    {
        handleController (m.getChannel(), m.getControllerNumber(), m.getControllerValue());
    }
}
void RubberbandAudioSource::processNextAudioBlock() { const int numRequired = m_stretcher->getSamplesRequired(); if ( numRequired > 0 ) { AudioSourceChannelInfo info; info.buffer = &m_inSampleBuffer; info.startSample = 0; info.numSamples = qMin( m_inSampleBuffer.getNumFrames(), numRequired ); m_source->getNextAudioBlock( info, m_midiBuffer ); if ( m_midiBuffer.isEmpty() ) { m_stretcher->process( m_inSampleBuffer.getArrayOfReadPointers(), info.numSamples, false ); } else { MidiBuffer::Iterator iterator( m_midiBuffer ); MidiMessage message; int startFrame = info.startSample; int numFramesLeftToGo = info.numSamples; while ( numFramesLeftToGo > 0 ) { int eventPos; const bool isEventValid = iterator.getNextEvent( message, eventPos ) && message.isNoteOn() && eventPos < info.numSamples; const int numFramesToProcess = isEventValid ? eventPos - startFrame : numFramesLeftToGo; if ( numFramesToProcess > 0 ) { for ( int chanNum = 0; chanNum < m_numChans; chanNum++ ) { m_inFloatBuffer[ chanNum ] = m_inSampleBuffer.getReadPointer( chanNum, startFrame ); } m_stretcher->process( m_inFloatBuffer, numFramesToProcess, false ); } if ( isEventValid ) { m_noteTimeRatio = m_noteTimeRatioTable.value( message.getNoteNumber(), 1.0 ); m_stretcher->setTimeRatio( m_globalTimeRatio * m_noteTimeRatio ); } startFrame += numFramesToProcess; numFramesLeftToGo -= numFramesToProcess; } } } else // numRequired == 0 { m_stretcher->process( m_inSampleBuffer.getArrayOfReadPointers(), 0, false ); } }
void MIDIMostRecentNoteInternal::handleIncomingMidiMessage (MidiInput* source, const MidiMessage& message) throw()
{
    if (port_ != 0 && port_ != source)
        return;

    if (message.isForChannel(midiChannel_) == false)
        return;

    if (message.isNoteOn() == false)
        return;

    setNormalisedValue(message.getNoteNumber() / 127.f);
}
const CtrlrMidiMessageType midiMessageToType (const MidiMessage &midiMessage)
{
    if (midiMessage.isController())           { return (CC); }
    else if (midiMessage.isSysEx())           { return (SysEx); }
    else if (midiMessage.isNoteOn())          { return (NoteOn); }
    else if (midiMessage.isNoteOff())         { return (NoteOff); }
    else if (midiMessage.isAftertouch())      { return (Aftertouch); }
    else if (midiMessage.isChannelPressure()) { return (ChannelPressure); }
    else if (midiMessage.isProgramChange())   { return (ProgramChange); }
    else if (midiMessage.isPitchWheel())      { return (PitchWheel); }
    else if (midiMessage.isMidiClock())       { return (MidiClock); }
    else if (midiMessage.isMidiContinue())    { return (MidiClockContinue); }
    else if (midiMessage.isMidiStart())       { return (MidiClockStart); }
    else if (midiMessage.isMidiStop())        { return (MidiClockStop); }
    else if (midiMessage.isActiveSense())     { return (ActiveSense); }

    return (SysEx);
}
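// Usage sketch (not from the Ctrlr sources): a small dispatcher built on
// midiMessageToType(). It assumes the CtrlrMidiMessageType enumerators used above
// and only logs via JUCE's DBG macro.
static void dispatchIncomingMidi (const MidiMessage& m)
{
    switch (midiMessageToType (m))
    {
        case CC:
            DBG ("CC " << m.getControllerNumber() << " = " << m.getControllerValue());
            break;
        case NoteOn:
            DBG ("Note on " << m.getNoteNumber() << " vel " << (int) m.getVelocity());
            break;
        case NoteOff:
            DBG ("Note off " << m.getNoteNumber());
            break;
        default:
            break; // SysEx, clock and the remaining types are ignored in this sketch
    }
}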
void MIDIInputDevice::handleIncomingMidiMessage(MidiInput * source, const MidiMessage & message)
{
    if (source != device)
    {
        DBG("different device");
        return;
    }

    if (message.isNoteOn())
        inputListeners.call(&MIDIInputListener::noteOnReceived, message.getChannel(), message.getNoteNumber(), message.getVelocity());
    else if (message.isNoteOff())
        inputListeners.call(&MIDIInputListener::noteOffReceived, message.getChannel(), message.getNoteNumber(), 0); //force note off to velocity 0
    else if (message.isController())
        inputListeners.call(&MIDIInputListener::controlChangeReceived, message.getChannel(), message.getControllerNumber(), message.getControllerValue());
    else if (message.isSysEx())
        inputListeners.call(&MIDIInputListener::sysExReceived, message);
}
void MidiKeyboardState::processNextMidiEvent (const MidiMessage& message)
{
    if (message.isNoteOn())
    {
        noteOnInternal (message.getChannel(), message.getNoteNumber(), message.getFloatVelocity());
    }
    else if (message.isNoteOff())
    {
        noteOffInternal (message.getChannel(), message.getNoteNumber(), message.getFloatVelocity());
    }
    else if (message.isAllNotesOff())
    {
        for (int i = 0; i < 128; ++i)
            noteOffInternal (message.getChannel(), i, 0.0f);
    }
}
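// Usage sketch (assumed wiring, not part of the snippet above): a MidiKeyboardState is
// normally fed whole blocks from the audio callback; processNextMidiBuffer() then routes
// each event through processNextMidiEvent(). MyKeyboardProcessor and its keyboardState
// member are hypothetical names.
void MyKeyboardProcessor::processBlock (AudioSampleBuffer& buffer, MidiBuffer& midiMessages)
{
    // merges on-screen keyboard events into midiMessages and updates the key states
    keyboardState.processNextMidiBuffer (midiMessages, 0, buffer.getNumSamples(), true);
    buffer.clear();
}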
static PyObject * PyMidiMessage_str(PyObject *self) { MidiMessage *m = ((PyMidiMessage*)self)->m; static char s[256]; if(m->isNoteOn()) { sprintf(s, "<NOTE ON, note: %d (%s), velocity: %d, channel: %d>", m->getNoteNumber(), m->getMidiNoteName(m->getNoteNumber(), true, true, 3), m->getVelocity(), m->getChannel()); } else if(m->isNoteOff()) { sprintf(s, "<NOTE OFF, note: %d (%s), channel: %d>", m->getNoteNumber(), m->getMidiNoteName(m->getNoteNumber(), true, true, 3), m->getChannel()); } else if(m->isProgramChange()) { sprintf(s, "<PROGRAM CHANGE: program: %d, channel: %d>", m->getProgramChangeNumber(), m->getChannel()); } else if(m->isPitchWheel()) { sprintf(s, "<PITCH WHEEL: value: %d, channel: %d>", m->getPitchWheelValue(), m->getChannel()); } else if(m->isAftertouch()) { sprintf(s, "<AFTERTOUCH: note: %d (%s) value: %d, channel: %d>", m->getNoteNumber(), m->getMidiNoteName(m->getNoteNumber(), true, true, 3), m->getAfterTouchValue(), m->getChannel()); } else if(m->isChannelPressure()) { sprintf(s, "<CHANNEL PRESSURE: pressure: %d, channel: %d>", m->getChannelPressureValue(), m->getChannel()); } else if(m->isController()) { const char *name = m->getControllerName(m->getControllerNumber()); if(strlen(name) > 0) { sprintf(s, "<CONTROLLER: %d (\"%s\"), value: %d, channel: %d>", m->getControllerNumber(), m->getControllerName(m->getControllerNumber()), m->getControllerValue(), m->getChannel()); } else { sprintf(s, "<CONTROLLER: %d, value: %d, channel: %d>", m->getControllerNumber(), m->getControllerValue(), m->getChannel()); } } else { sprintf(s, "<MidiMessage (misc type)>"); } return PK_STRING(s); }
void MidiManager::processMidiMessage(const MidiMessage& midi_message, int sample_position) { if (midi_message.isProgramChange()) { current_patch_ = midi_message.getProgramChangeNumber(); File patch = LoadSave::loadPatch(current_bank_, current_folder_, current_patch_, synth_, *gui_state_); PatchLoadedCallback* callback = new PatchLoadedCallback(listener_, patch); callback->post(); return; } if (midi_message.isNoteOn()) { engine_->noteOn(midi_message.getNoteNumber(), midi_message.getVelocity() / (mopo::MIDI_SIZE - 1.0), 0, midi_message.getChannel() - 1); } else if (midi_message.isNoteOff()) engine_->noteOff(midi_message.getNoteNumber()); else if (midi_message.isAllNotesOff()) engine_->allNotesOff(); else if (midi_message.isSustainPedalOn()) engine_->sustainOn(); else if (midi_message.isSustainPedalOff()) engine_->sustainOff(); else if (midi_message.isAftertouch()) { mopo::mopo_float note = midi_message.getNoteNumber(); mopo::mopo_float value = (1.0 * midi_message.getAfterTouchValue()) / mopo::MIDI_SIZE; engine_->setAftertouch(note, value); } else if (midi_message.isPitchWheel()) { double percent = (1.0 * midi_message.getPitchWheelValue()) / PITCH_WHEEL_RESOLUTION; double value = 2 * percent - 1.0; engine_->setPitchWheel(value, midi_message.getChannel()); } else if (midi_message.isController()) { int controller_number = midi_message.getControllerNumber(); if (controller_number == MOD_WHEEL_CONTROL_NUMBER) { double percent = (1.0 * midi_message.getControllerValue()) / MOD_WHEEL_RESOLUTION; engine_->setModWheel(percent, midi_message.getChannel()); } else if (controller_number == BANK_SELECT_NUMBER) current_bank_ = midi_message.getControllerValue(); else if (controller_number == FOLDER_SELECT_NUMBER) current_folder_ = midi_message.getControllerValue(); midiInput(midi_message.getControllerNumber(), midi_message.getControllerValue()); } }
/** This function is called whenever the Seaboard object receives MIDI data from the
    physical Seaboard. It forwards the relevant MIDI message to all of its listeners,
    which in turn override the MIDI message callbacks to implement the desired
    response behaviour. */
void Seaboard::handleIncomingMidiMessage(MidiInput *source, const MidiMessage &message)
{
    if (message.isNoteOn())
    {
        listeners.call(&Seaboard::Listener::seaboardDidGetNoteOn, message);
    }
    else if (message.isNoteOff())
    {
        listeners.call(&Seaboard::Listener::seaboardDidGetNoteOff, message);
    }
    else if (message.isAftertouch())
    {
        listeners.call(&Seaboard::Listener::seaboardDidGetAftertouch, message);
    }
    else if (message.isPitchWheel())
    {
        listeners.call(&Seaboard::Listener::seaboardDidGetPitchBend, message);
    }

    listeners.call(&Seaboard::Listener::seaboardDidGetMessage, message);
}
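// Illustrative listener for the Seaboard dispatcher above. The exact Seaboard::Listener
// interface is assumed here (virtual callbacks taking a const MidiMessage&, as implied
// by the listeners.call() lines); only note on/off are acted upon in this sketch.
class SeaboardNoteLogger : public Seaboard::Listener
{
public:
    void seaboardDidGetNoteOn (const MidiMessage& m) override   { DBG ("Seaboard note on "  << m.getNoteNumber()); }
    void seaboardDidGetNoteOff (const MidiMessage& m) override  { DBG ("Seaboard note off " << m.getNoteNumber()); }
    void seaboardDidGetAftertouch (const MidiMessage&) override {}
    void seaboardDidGetPitchBend (const MidiMessage&) override  {}
    void seaboardDidGetMessage (const MidiMessage&) override    {}
};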
void Tunefish4AudioProcessor::processEvents(MidiBuffer &midiMessages, eU32 messageOffset, eU32 frameSize)
{
    MidiBuffer::Iterator it(midiMessages);
    MidiMessage midiMessage;
    int samplePosition;

    it.setNextSamplePosition(messageOffset);

    while (it.getNextEvent(midiMessage, samplePosition))
    {
        if (samplePosition >= messageOffset + frameSize)
            break;

        if (midiMessage.isNoteOn())
        {
            eU8 velocity = midiMessage.getVelocity();
            eU8 note = midiMessage.getNoteNumber();
            eTfInstrumentNoteOn(*tf, note, velocity);
        }
        else if (midiMessage.isNoteOff())
        {
            eU8 note = midiMessage.getNoteNumber();
            eTfInstrumentNoteOff(*tf, note);
        }
        else if (midiMessage.isAllNotesOff())
        {
            eTfInstrumentAllNotesOff(*tf);
        }
        else if (midiMessage.isPitchWheel())
        {
            eS32 bend_lsb = midiMessage.getRawData()[1] & 0x7f;
            eS32 bend_msb = midiMessage.getRawData()[2] & 0x7f;

            eTfInstrumentPitchBend(*tf,
                                   ((eF32(bend_msb) / 127.0f) - 0.5f) * 2.0f,
                                   ((eF32(bend_lsb) / 127.0f) - 0.5f) * 2.0f);
        }
    }
}
void Synthesiser::handleMidiEvent (const MidiMessage& m)
{
    const int channel = m.getChannel();

    if (m.isNoteOn())
    {
        noteOn (channel, m.getNoteNumber(), m.getFloatVelocity());
    }
    else if (m.isNoteOff())
    {
        noteOff (channel, m.getNoteNumber(), m.getFloatVelocity(), true);
    }
    else if (m.isAllNotesOff() || m.isAllSoundOff())
    {
        allNotesOff (channel, true);
    }
    else if (m.isPitchWheel())
    {
        const int wheelPos = m.getPitchWheelValue();
        lastPitchWheelValues [channel - 1] = wheelPos;
        handlePitchWheel (channel, wheelPos);
    }
    else if (m.isAftertouch())
    {
        handleAftertouch (channel, m.getNoteNumber(), m.getAfterTouchValue());
    }
    else if (m.isChannelPressure())
    {
        handleChannelPressure (channel, m.getChannelPressureValue());
    }
    else if (m.isController())
    {
        handleController (channel, m.getControllerNumber(), m.getControllerValue());
    }
    else if (m.isProgramChange())
    {
        handleProgramChange (channel, m.getProgramChangeNumber());
    }
}
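// Calling-side sketch: handleMidiEvent() is normally invoked for you by
// Synthesiser::renderNextBlock(), which walks the MidiBuffer and forwards each event.
// MySynthProcessor and its `synth` member are hypothetical names.
void MySynthProcessor::processBlock (AudioSampleBuffer& buffer, MidiBuffer& midiMessages)
{
    buffer.clear();
    synth.renderNextBlock (buffer, midiMessages, 0, buffer.getNumSamples());
}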
String ZenMidiVisualiserComponent::getMidiMessageDescription(const MidiMessage& m)
{
    if (m.isNoteOn())
        return "Note on: " + S(m.getNoteNumber()) + " (" + MidiMessage::getMidiNoteName(m.getNoteNumber(), true, true, 3) + ") Vel: " + S(m.getVelocity());
    if (m.isNoteOff())
        return "Note off: " + S(m.getNoteNumber()) + " (" + MidiMessage::getMidiNoteName(m.getNoteNumber(), true, true, 3) + ")";
    if (m.isProgramChange())
        return "Program change (Number): " + String(m.getProgramChangeNumber());
    if (m.isPitchWheel())
        return "Pitch wheel: " + String(m.getPitchWheelValue());
    if (m.isAftertouch())
        return "After touch: " + MidiMessage::getMidiNoteName(m.getNoteNumber(), true, true, 3) + ": " + String(m.getAfterTouchValue());
    if (m.isChannelPressure())
        return "Channel pressure: " + String(m.getChannelPressureValue());
    if (m.isAllNotesOff())
        return "All notes off";
    if (m.isAllSoundOff())
        return "All sound off";
    if (m.isMetaEvent())
        return "Meta event";

    if (m.isController())
    {
        String temp = MidiMessage::getControllerName(m.getControllerNumber());
        String name = "Controller [" + S(m.getControllerNumber()) + "]";
        if (!temp.isEmpty())
            name += " " + temp;
        return name + ": " + String(m.getControllerValue());
    }

    return String::toHexString(m.getRawData(), m.getRawDataSize());
}
static String getMidiMessageDescription (const MidiMessage& m)
{
    if (m.isNoteOn())          return "Note on "  + MidiMessage::getMidiNoteName (m.getNoteNumber(), true, true, 3);
    if (m.isNoteOff())         return "Note off " + MidiMessage::getMidiNoteName (m.getNoteNumber(), true, true, 3);
    if (m.isProgramChange())   return "Program change " + String (m.getProgramChangeNumber());
    if (m.isPitchWheel())      return "Pitch wheel " + String (m.getPitchWheelValue());
    if (m.isAftertouch())      return "After touch " + MidiMessage::getMidiNoteName (m.getNoteNumber(), true, true, 3) + ": " + String (m.getAfterTouchValue());
    if (m.isChannelPressure()) return "Channel pressure " + String (m.getChannelPressureValue());
    if (m.isAllNotesOff())     return "All notes off";
    if (m.isAllSoundOff())     return "All sound off";
    if (m.isMetaEvent())       return "Meta event";

    if (m.isController())
    {
        String name (MidiMessage::getControllerName (m.getControllerNumber()));

        if (name.isEmpty())
            name = "[" + String (m.getControllerNumber()) + "]";

        return "Controller " + name + ": " + String (m.getControllerValue());
    }

    return String::toHexString (m.getRawData(), m.getRawDataSize());
}
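// Usage sketch (assumed wiring): a helper like getMidiMessageDescription() is handy for
// logging from a MidiInputCallback. MidiLogger is a made-up class name; the JUCE calls
// (MidiInputCallback, handleIncomingMidiMessage, DBG) are standard.
class MidiLogger : public MidiInputCallback
{
    void handleIncomingMidiMessage (MidiInput*, const MidiMessage& m) override
    {
        DBG (getMidiMessageDescription (m));
    }
};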
const String getName(const MidiMessage &m)
{
    if (m.isActiveSense())     { return (" [Active Sense]"); }
    if (m.isAftertouch())      { return (" [Aftertouch]"); }
    if (m.isAllNotesOff())     { return (" [All notes off]"); }
    if (m.isAllSoundOff())     { return (" [All sound off]"); }
    if (m.isChannelPressure()) { return (" [Channel pressure]"); }
    if (m.isController())      { return (" [Controller]"); }
    if (m.isFullFrame())       { return (" [Full frame]"); }
    if (m.isMetaEvent())       { return (" [Meta event]"); }
    if (m.isMidiClock())       { return (" [MIDI Clock]"); }
    if (m.isMidiContinue())    { return (" [MIDI Continue]"); }
    if (m.isMidiStart())       { return (" [MIDI Start]"); }
    if (m.isMidiStop())        { return (" [MIDI Stop]"); }
    if (m.isNoteOff())         { return (" [Note off]"); }
    if (m.isNoteOn())          { return (" [Note on]"); }
    if (m.isPitchWheel())      { return (" [Pitch wheel]"); }
    if (m.isProgramChange())   { return (" [Program change]"); }
    if (m.isQuarterFrame())    { return (" [Quarter frame]"); }
    if (m.isSysEx())           { return (" [System exclusive]"); }

    return (" [Yet unknown]");
}
int main(int argc, char* argv[]) { string printThis = ""; //string which will contain the content to be saved to output file cout << "Path and name of file: "; //String st = "C:\\Program Files (x86)\\Phase Shift\\music\\Paramore\\Paramore - Ignorance\\notes.mid"; //this file was throwing exceptions in Java version //C:\Users\Sinead\Documents\102. Paramore 01_P-I\Paramore - Ignorance // String st = "C:\\Users\\Sinead\\Documents\\102. Paramore 01_P-I\\Paramore - Ignorance\\notes.mid"; //hardcoded for now String st = "C:\\Users\\Sinead\\Documents\\(GHSH_MSL)_09-TIO\\09 - Take It Off\\notes.mid"; //hardcoded for now //String st = "C:\\Program Files (x86)\\Phase Shift\\music\\The Donnas\\09 - Take It Off\\notes.mid"; //hardcoded for now //C:\\Users\\Sinead\\Documents\\(GHSH_MSL)_09-TIO\\09 - Take It Off printThis += st.toStdString() + "\n"; cout << st << "\n"; //load notes.midi File file(st); FileInputStream fiStream(file); MidiFile midiFile; if (!midiFile.readFrom(fiStream)) { cout << "Error: Nothing Loaded"; return 1; } if (midiFile.getNumTracks() == 0) return 1; //set level of difficulty string level = "expert"; //hardcoded for now int lvl = 0; if (level == "easy") { lvl = 4; } else if (level == "medium") { lvl = 5; } else if (level == "hard") { lvl = 6; } else if (level == "expert") { lvl = 7; } else { cout << "invalid level"; return 1; } printThis += "Level: " + level + "\n"; //set instrument string selectInstru = "drums"; if (selectInstru != "guitar" && selectInstru != "bass" && selectInstru != "drums" && selectInstru != "vocals") { cout << "invalid instrument"; return 1; } printThis += "Instrument: " + selectInstru + "\n"; long ticks_per_beat = midiFile.getTimeFormat(); int tracks = midiFile.getNumTracks(); int useTrack = 0; //the index of the mid file which contains the notes for the instrument. Set in following for-loop string timeSig = ""; //the time signature of the song. Set in following for-loop vector<vector<string>> songSections; //timestamps and sections names of the song. 
Set in following for-loop //use this to store entries of [timestamp, song_section] which will be used to mark the verse, chorus, etc of the tab //populated in the following for-loop for (int n = 0; n < tracks; n++) { const MidiMessageSequence* seq = midiFile.getTrack(n); MidiMessageSequence::MidiEventHolder * event = seq->getEventPointer(0); //get the event 0 for each track MidiMessage m = event->message; String trackName = m.getTextFromTextMetaEvent(); //TRACK WITH INSTRUMENT NOTES if (trackName.toLowerCase().contains(String(selectInstru))) //get indexes of the tracks which contain the songs sections, and drum notes { useTrack = n; //cout << "use Track " << useTrack << "\n"; } //TIME SIGNATURE if (trackName.equalsIgnoreCase("midi_export")) //get information about the song //time signature and tempo are entries 2 and 3, where tick ==0 { for (int nEvent = 1; nEvent < seq->getNumEvents(); nEvent++) { event = seq->getEventPointer(nEvent); //get each event in track MidiMessage m = event->message; double tick = m.getTimeStamp(); if (tick == 0) { //cout << n << " " << nEvent << " " << tick << " " << m.getTextFromTextMetaEvent() << "\n"; //cout << n << " " << nEvent << " isTimeSignatureMetaEvent " << m.isTimeSignatureMetaEvent () << " getTimeSignatureInfo \n"; //<< m.getTimeSignatureInfo() << "\n"; //getTimeSignatureInfo (int &numerator, int &denominator) //cout << n << " " << nEvent << " isTempoMetaEvent " << m.isTempoMetaEvent () << " getTimeSignatureInfo \n"; //<< m.getTimeSignatureInfo() << "\n"; } } } //EVENTS else if (trackName.equalsIgnoreCase("events")) //store the song sections, and the tick values where they start { for (int nEvent = 1; nEvent < seq->getNumEvents(); nEvent++) //loop through all events for this track, in which the TextFromTextMetaEvent() are in the format: [section <song section>] //song section eg: Intro, Main Riff 1, Main Riff 2, etc { //string tick_and_event[2]; vector<string> tick_and_event; event = seq->getEventPointer(nEvent); //get each event in track MidiMessage m = event->message; //the timestamp associated which each song section double tick = m.getTimeStamp(); ostringstream strs; //convert tick to a type string, to add to the tick_and_event array strs << tick; string timestamp = strs.str(); //song section String songSection = m.getTextFromTextMetaEvent(); songSection = songSection.substring(9, songSection.length() - 1); //ie "[section Intro]" is now "Intro" tick_and_event.push_back(timestamp); tick_and_event.push_back(songSection.toStdString()); songSections.push_back(tick_and_event); //songSections.push_back(songSection.toStdString()); //cout << n << " " << nEvent << " " << timestamp << " " << songSection << "\n"; } } } if (timeSig == "") { //no time signature found. 
Assume 4/4 timeSig = "4/4"; } //create an ArrayList of all tick indexes we want in our tab vector<double> allTimestamps; const MidiMessageSequence* seq = midiFile.getTrack(useTrack); //this is the sequence which contains notes on/off for the selected instrument long lastTick = 0; for (int nEvent = 0; nEvent < seq->getNumEvents(); nEvent++) { MidiMessageSequence::MidiEventHolder * event = seq->getEventPointer(nEvent); //get each event in track MidiMessage message = event->message; //the timestamp associated which each song section const double timestamp = message.getTimeStamp(); if (message.isNoteOn() //just note on timestamps, since for drums, we don't have to worry for duration && find(allTimestamps.begin(), allTimestamps.end(), timestamp) == allTimestamps.end()) //if !allTimestamps.contains(timestamp) { int note = message.getNoteNumber(); int octave = note / 12; if (octave == lvl) { allTimestamps.push_back(timestamp); //cout << nEvent << " " << timestamp << " " << note << " " << octave << " == " << lvl << "\n"; } //max value appears to be 100 = E8 } } //allTimstamps is now all the unique time indexes of notes //TODO: NOT ALL "-" ARE BEING RECORDED!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! //create a 2d array, containging the timeTick and all notes played for that timeTick, for the whole song //int** masterList = new int[allTimestamps.size()][7] int lengthOfMasterList = allTimestamps.size() + 1; //plus one, to take into account for the individual drum part string **masterList = new string*[lengthOfMasterList]; for (int i = 0; i < allTimestamps.size() + 1; ++i) { masterList[i] = new string[7]; } //string firstColumn[] = {"0", "B |", "FT|", "T2|", "S |", "HH|", "C |"}; masterList[0][0] = "0"; masterList[0][1] = "B |"; masterList[0][2] = "FT|"; masterList[0][3] = "T2|"; masterList[0][4] = "S |"; masterList[0][5] = "HH|"; masterList[0][6] = "C |"; //addArrayToMaster(masterList, firstColumn, 0); for (int i = 0; i < allTimestamps.size(); i++)//loop through all saved tick times { string oneTick[] = { "-", "-", "-", "-", "-", "-", "-" }; ostringstream strs; //convert allTimestamps[i] to a type string strs << allTimestamps[i]; oneTick[0] = strs.str(); for (int nEvent = 0; nEvent < seq->getNumEvents(); nEvent++) //loop through all events in track { MidiMessageSequence::MidiEventHolder * event = seq->getEventPointer(nEvent); //get each event in track MidiMessage message = event->message; //the timestamp associated which each song section const double timestamp = message.getTimeStamp(); if (message.isNoteOn() //just note on timestamps, since for drums, we don't have to worry for duration && timestamp == allTimestamps[i]) //if it's the timestamp we're looking for { //use http://www.electronics.dit.ie/staff/tscarff/Music_technology/midi/midi_note_numbers_for_octaves.htm to find the note and octave from getNoteNumber int note = message.getNoteNumber(); int octave = note / 12; if (octave == lvl) { convertToNote(oneTick, note); } } else if (timestamp > allTimestamps[i]) //we've gone past that point in the song { nEvent += seq->getNumEvents(); //break; } } //cout << "add:" << oneTick[0] << oneTick[1] << oneTick[2] << oneTick[3] << oneTick[4] << oneTick[5] << oneTick[6]; addArrayToMaster(masterList, oneTick, i + 1); //i+1, since [0] is the names of the drums } //work with time sig long note_amount = atol(timeSig.substr(0, timeSig.find("/")).c_str()); //convert string to long long note_type = atol(timeSig.substr(timeSig.find("/") + 1).c_str()); //convert string to 
long. Note_type: crochet, quaver, etc //GENERATE FINAL CONTENT TO BE PRINTED //the amount of --- should be printed in reverse, ie "1---3", the "---" is determined by "3". //if time 1 is 0, 3 is ticks_per_beat, and the ticks per beat is 480, "---" is printed, then "3" //if time 1 is 0, 3 is ticks_per_beat/2, "" , "-" is printed, then "3" //if time 1 is 0, 3 is ticks_per_beat/4, "" , "" is printed, then "3" //every ticks_per_beat*note_amount there should be a bar int noteAmount = 6; //amount of notes defined (base, snare, etc). 1 is the tick time, anything more is a drum int amountOfBarsPerLine = 4; //"complete" will be the final data, which will be printed. It will be masterList, but with bars inserted in keeping with the timeSig, and with the name of the song part vector<vector<string>> completeList; //a 2D vector vector<string> tickTimesUsed; //for (int i = 0; i < lengthOfMasterList; i++) //{ // for (int j = 0; j < noteAmount; j++)//lengthOfMasterList; i++) // { // printThis += masterList[i][j]; // } // printThis += "END \n"; //} // //printThis += "\n\n"; cout << "ticks_per_beat: " << ticks_per_beat << "\n"; cout << "note_amount:" << note_amount << "\n"; cout << "#ticks between notes: " << (ticks_per_beat/note_amount) << "\n"; cout << "note_type: " << note_type << "\n"; cout << "(note_amount*note_type): " << (note_amount*note_type) << "\n"; for (int j = 1; j <= noteAmount; j++) //crash at the top, bass at the bottom, start at index 1 because index 0 contains the timestamp { //TODO fix error where events are in margin int listIndex = 0; //the index we are at in "line" long bar_index = 0; //the index we are in a certain bar. Ie, the first "-" after a bar is at index one int barCount = 0; //the amount of bars, eg "|-------|-------|----" is 3 bars, barCount = 3 vector<string> line; //this will containing bars "|", gaps between notes "-", and the markers for a note "X" or "O" vector<string> eventLine; //this will be the line containing only spaces and song section/event names string start = ""; //this will contain "HH|", "B |", etc, depending on the value of j for (int i = 0; i < lengthOfMasterList; i++)//loop through all saved tick times, for this drum { if (i > 1) //the symbols for the drum kit, and the very first note should be printed without anything else in front of them { long currentNoteTick = atol(masterList[i][0].c_str()); //the tick belonging to the current note long previousNoteTick = atol(masterList[i - 1][0].c_str()); //the tick belonging to the previous note long diff = currentNoteTick - previousNoteTick; while (diff > (ticks_per_beat / note_amount) + 5) //+5, to allow for some time differences { //NOTE line.push_back("-"); bar_index++;//update bar_index to reflect adding the "-" diff -= (ticks_per_beat / note_amount); //seems to be 17 for first bar, 16 for the rest if (j == 1) //EVENT { eventLine.push_back(" "); //have to add an additional gap to eventLine, to keep it the same length as line } if (bar_index == (note_amount*note_type)) //every (note_amount*note_type)+1 character should be a bar line { line.push_back("|"); if (j == 1) //EVENT { eventLine.push_back(" "); //have to add an additional gap to eventLine, to keep it the same length as line } bar_index = 0; //reset bar_index, as we are now in a new bar barCount++; if (barCount == amountOfBarsPerLine) //we have the amount of bars we want in a line. 
Now move onto a new line { if (j == 1) //EVENT { //NOTE //we want to start new line completeList.insert(completeList.begin() + listIndex, line); //insert the vector "line" at index "listIndex" listIndex++; line.clear(); line.push_back(start); //always have which drum it is, at the start of the line barCount = 0; //reset barCount for the current line //we want to start new line completeList.insert(completeList.begin() + listIndex, eventLine); //insert the vector "eventLine" at index "listIndex" listIndex++; eventLine.clear(); eventLine.push_back(" "); //2 gaps } else { //NOTE //we want to start new line completeList.insert(completeList.begin() + listIndex, line); //insert the vector "line" at index "listIndex" listIndex += j + 1;// + num; //this orders the notes line.clear(); line.push_back(start); //always have which drum it is, at the start of the line barCount = 0; //reset barCount for the current line } } } } if (j == 1) //EVENT { string curTick = masterList[i][0]; string s = getEventAtTick(songSections, curTick); eventLine.push_back(s); tickTimesUsed.push_back(curTick); } } else if (i == 1) //check to see where abouts in the bar the first note should be { long currentNoteTick = atol(masterList[i][0].c_str()); //the tick belonging to the current note long gapBeforeFirst = currentNoteTick % (ticks_per_beat*note_amount); while (gapBeforeFirst > 0) { if (j == 1)// && !tickTimesUsed.contains(""+currentNoteTick)) //EVENT { string curTick = masterList[i][0]; string s = getEventAtTick(songSections, curTick); eventLine.push_back(s); tickTimesUsed.push_back(curTick); } //NOTE line.push_back("-"); bar_index++;//update bar_index to reflect adding the "-" gapBeforeFirst -= (ticks_per_beat / note_amount); } } else if (i == 0)//the very first index of an array for a note, ie "B |", "HH|", etc { start += masterList[i][j]; // "B |", "HH|", etc bar_index--; //printing out the first "|" will make bar_index = 1, when we want it to be 0 } string curTick = masterList[i][0]; if (j == 1 //EVENT && find(tickTimesUsed.begin(), tickTimesUsed.end(), curTick) == tickTimesUsed.end()) //if !allTimestamps.contains(timestamp) { string s = getEventAtTick(songSections, curTick); eventLine.push_back(s); tickTimesUsed.push_back(curTick); } //NOTE line.push_back(masterList[i][j]); bar_index++; //update bar_index to reflect adding the note //if adding the note has ended the bar if (bar_index == (note_amount*note_type)) //every (note_amount*note_type)+1 character should be a bar line { line.push_back("|"); if (j == 1) //EVENT { eventLine.push_back(" "); //have to add an additional gap to eventLine, to keep it the same length as line } bar_index = 0; //reset bar_index, as we are now in a new bar barCount++; //TODO: why is this the same as a section of code above? 
if (barCount == amountOfBarsPerLine) //a new line { if (j == 1) //EVENT { //NOTE //we want to start new line completeList.insert(completeList.begin() + listIndex, line); //insert the vector "line" at index "listIndex" listIndex++; line.clear(); line.push_back(start); //always have which drum it is, at the start of the line barCount = 0; //reset barCount for the current line //we want to start new line completeList.insert(completeList.begin() + listIndex, eventLine); //insert the vector "eventLine" at index "listIndex" listIndex++; eventLine.clear(); eventLine.push_back(" "); //2 gaps } else { //NOTE //we want to start new line completeList.insert(completeList.begin() + listIndex, line); //insert the vector "line" at index "listIndex" listIndex += j + 1;// + num; //this orders the notes line.clear(); line.push_back(start); //always have which drum it is, at the start of the line barCount = 0; //reset barCount for the current line } } } if (i == lengthOfMasterList - 1) //the very last index of an array for a note. Could be a note, or a "-" { //cout << "true"; //we want to add this bar to the arrayList, because it is the end, regardless if it's a full bar completeList.insert(completeList.begin() + listIndex, line); //insert the vector "line" at index "listIndex" listIndex += j; //this orders the notes line.clear(); line.push_back(start); //always have which drum it is, at the start of the line barCount = 0; //reset barCount for the current line } } } vector<vector<string>>::iterator it1; vector<string>::iterator it2; int i = 0; for (it1 = completeList.begin(); it1 != completeList.end(); ++it1) { if (i % (noteAmount + 1) == 0)//a new section. Add a gap to make it easier to read. Plus 1, for event line { //cout << "\n"; //print a new line printThis += "\n"; } string line = ""; //reset line for (it2 = (*it1).begin(); it2 != (*it1).end(); ++it2) //create a whole line to print { line += (*it2); //a single character } std::tr1::regex rx(".*[a-z]+.*"); //the line contains a note bool containsNote = regex_match(line.begin() + 3, line.end(), rx); line.substr(3); if (line.find("O") != string::npos || line.find("X") != string::npos || containsNote) { //cout << line << "\n"; printThis += line + "\n"; } i++; } ofstream myfile; myfile.open("C:\\Users\\Sinead\\Desktop\\example.txt"); myfile << printThis; myfile.close(); for (int i = 0; i < lengthOfMasterList; ++i) { delete[] masterList[i]; } delete[] masterList; cout << "end\n"; char c; cin >> c; return 0; }
void SynthAudioProcessor::processBlock (AudioSampleBuffer& buffer, MidiBuffer& midiMessages) { const int totalNumInputChannels = getTotalNumInputChannels(); const int totalNumOutputChannels = getTotalNumOutputChannels(); // In case we have more outputs than inputs, this code clears any output // channels that didn't contain input data, (because these aren't // guaranteed to be empty - they may contain garbage). // This is here to avoid people getting screaming feedback // when they first compile a plugin, but obviously you don't need to keep // this code if your algorithm always overwrites all the output channels. MidiBuffer Midi; int time; MidiMessage m; for(MidiBuffer::Iterator i(midiMessages); i.getNextEvent(m, time);){ //handle monophonic on/off of notes if(m.isNoteOn()){ noteOn++; } if(m.isNoteOff()){ noteOn--; } if(noteOn > 0){ monoNoteOn = 1.0f; env.reset(); //handle the pitch of the note noteVal = m.getNoteNumber(); osc.setF(m.getMidiNoteInHertz(noteVal)); }else{ monoNoteOn = 0.0f; } } for (int i = totalNumInputChannels; i < totalNumOutputChannels; ++i) buffer.clear (i, 0, buffer.getNumSamples()); for (int channel = 0; channel < totalNumOutputChannels; ++channel){ //just do the synth stuff on one channel. if(channel == 0){ for(int sample = 0; sample < buffer.getNumSamples(); ++sample){ //do this stuff here. it's terribly inefficient.. freqValScaled = 20000.0f * pow(freqP->get(), 3.0f); envValScaled = 10000.0f * pow(envP->get(), 3.0f); speedValScaled = pow((1.0f - speedP->get()), 2.0f); oscValScaled = (oscP->get() - 0.5f) * 70.0f; detValScaled = (detP->get() - 0.5f) * 24.0f; filter.setFc(freqSmoothing.process(freqValScaled + (envValScaled * pow(env.process(),3.0f))) / UPSAMPLING); env.setSpeed(speedValScaled); filter.setQ(qP->get()); float frequency = noteVal + 24.0f + oscValScaled + modOsc.process(0) + (driftSmoothing.process(random.nextFloat() - 0.5f) * 20.0f); float frequency2 = exp((frequency + detValScaled + (driftSmoothing2.process(random.nextFloat() - 0.5f) * 10.0f)) / 17.31f) / UPSAMPLING; frequency = exp(frequency / 17.31f) / UPSAMPLING; osc.setF(frequency); osc2.setF(frequency2); float monoNoteOn2 = ampSmoothing.process(monoNoteOn); float data; for(int i = 0; i < UPSAMPLING; i++){ data = 20.0f * filter.process(0.1f * osc.process() + ampP->get() * 0.1f * osc2.process()); } data *= monoNoteOn2; buffer.setSample(0, sample, data); buffer.setSample(1, sample, data); } } } }
void JenSx1000AudioProcessor::processBlock (AudioSampleBuffer& buffer, MidiBuffer& midiMessages) { int time; MidiMessage m; for (MidiBuffer::Iterator i (midiMessages); i.getNextEvent (m, time);) { if (m.isNoteOn()) { int lastMidiNote = m.getNoteNumber(); DBG("MIDI note triggered : " << lastMidiNote << "\n"); heldNotes.insert(lastMidiNote); } else if (m.isNoteOff()) { int releasedNote = m.getNoteNumber(); DBG("MIDI note released : " << releasedNote << "\n"); heldNotes.erase(releasedNote); } else if (m.isAftertouch()) { } else if (m.isPitchWheel()) { } } int highestHeldNote; if (heldNotes.empty()){ if (currentNote > 0){ DBG("Note released"); ampEnvelope.release(); vcf.release(); currentNote = -1000; nextNote = -2000; } } else { highestHeldNote = *heldNotes.rbegin(); if (nextNote != highestHeldNote){ nextNote = highestHeldNote; noClick.start(); } } std::vector<float*> ChannelData; for (int i = 0; i < getNumOutputChannels(); i++){ ChannelData.push_back(buffer.getWritePointer(i)); } for (int sample = 0; sample < buffer.getNumSamples(); ++sample){ if (!heldNotes.empty() && currentNote != nextNote && noClick.fadingIn()){ currentNote = nextNote; freqControl.setNote(currentNote); ampEnvelope.begin(); vcf.begin(); } float nextLFOSample = lfo.getNextSample(); freqControl.setNextVibratoOscSample(nextLFOSample); vcf.setNextLFOSample(nextLFOSample); oscillator.setNextPWMSample(nextLFOSample); float nextNoClickSample = noClick.getNextSample(); oscillator.updateFrequency(freqControl.getNextFrequency()); float nextOscSample = oscillator.getNextSample(); float nextNoiseSample = noise.getNextSample(); float nextAmpSample = ampEnvelope.getNextSample(); float nextSample = (vcf.processNextSample((nextOscSample * vcoLevel) + (nextNoiseSample * noiseLevel)) * nextAmpSample) * ampLevel *nextNoClickSample; for (float* channel : ChannelData){ channel[sample] = nextSample; } } }
void RemoteGoatVstAudioProcessor::processBlock(AudioSampleBuffer& buffer, MidiBuffer& midiMessages) { //if (!midiMessages.isEmpty()) //{ // String trace; // trace << "MIDI:"; // for (int i = 0; i < midiMessages.data.size(); ++i) // { // trace << String::formatted(" %02X", midiMessages.data[i]); // } // writeTrace(trace); //} // For each sample name, // a sorted collection of "note on" event sample positions. std::map<String, std::set<std::pair<int, bool>>> noteOnSets; MidiBuffer::Iterator it(midiMessages); MidiMessage midiMessage; int samplePosition; while (it.getNextEvent(midiMessage, samplePosition)) { // Check note number, map to sample name. int note = midiMessage.getNoteNumber(); auto itt = _noteNumberSampleNameMap.find(note); if (itt != _noteNumberSampleNameMap.end()) { String sampleName = itt->second; if (midiMessage.isNoteOn()) // Save note on sample position for sample. noteOnSets[sampleName].insert(std::make_pair(samplePosition, true)); else if (midiMessage.isNoteOff()) noteOnSets[sampleName].insert(std::make_pair(samplePosition, false)); } } midiMessages.clear(); buffer.clear(0, 0, buffer.getNumSamples()); buffer.clear(1, 0, buffer.getNumSamples()); for (auto& samplePair : _samples) { Sample& sample = samplePair.second; auto noteOnSetsIterator = noteOnSets.find(sample.getName()); if (noteOnSetsIterator != noteOnSets.end()) { const std::set<std::pair<int, bool>>& noteOns = noteOnSetsIterator->second; int offset = noteOns.begin()->first; sample.read(buffer, 0, offset, false); for (auto noteOnIterator = noteOns.begin(); noteOnIterator != noteOns.end(); ++noteOnIterator) { int noteOn = noteOnIterator->first; bool onOrOff = noteOnIterator->second; writeTrace(String() << "Triggered " << sample.getName() + " (" << (int)onOrOff << ")"); auto nextNoteOnIterator = noteOnIterator; ++nextNoteOnIterator; if (nextNoteOnIterator != noteOns.end()) { int nextNoteOn = nextNoteOnIterator->first; int diff = nextNoteOn - noteOn; if (onOrOff) sample.read(buffer, offset, diff, true); else sample.noteOff(); offset += diff; } else { if (onOrOff) sample.read(buffer, offset, buffer.getNumSamples() - offset, true); else sample.noteOff(); } } } else { sample.read(buffer, 0, buffer.getNumSamples(), false); } } }
int main(int argc, char **argv) { if (argc < 2) { printf("Usage %s file.mid\n", argv[0]); return(-1); } const char *l_pszFileName = argv[1]; File file(l_pszFileName); const double mm_per_second = 1000.0 / 60.0; // How fast does the tape move through the music box? (1 meter per minute - by observation) const double distance_between_notes_in_mm = 2.0; // across the paper. const double min_distance_between_notes = 7.0; // Cannot have two consecutive notes appear less than this distance between each other. std::list<std::string> gcode; std::list<std::string> heeks; int id=1; typedef std::vector<std::string> Keys_t; Keys_t keys, legal_keys; Keys_t::size_type middle_c_key; // Which integer tells us it's the 'C' in the middle or the 'C' in the // octave above or below. const int middle_c_octave = 5; for (int octave=2; octave <= 8; octave++) { for (char key='A'; key<='G'; key++) { std::ostringstream l_ossKey; // l_ossKey << key << middle_c_octave - 0; l_ossKey << key << octave; keys.push_back( l_ossKey.str() ); if ((key == 'C') && (octave == middle_c_octave)) middle_c_key = keys.size()-1; } } // Setup our scale of notes that will work with the music box. It covers from 'C' to 'C' over two octaves. // Octave below middle C for (char key='C'; key<='G'; key++) { std::ostringstream l_ossKey; l_ossKey << key << middle_c_octave - 1; legal_keys.push_back( l_ossKey.str() ); } // Octave that includes middle C for (char key='A'; key<='G'; key++) { std::ostringstream l_ossKey; l_ossKey << key << middle_c_octave - 0; legal_keys.push_back( l_ossKey.str() ); } // Octave above middle C for (char key='A'; key<='C'; key++) { std::ostringstream l_ossKey; l_ossKey << key << middle_c_octave + 1; legal_keys.push_back( l_ossKey.str() ); } const double track_width = distance_between_notes_in_mm * keys.size(); const double space_between_tracks = track_width * 0.75; MidiFile midi_file; FileInputStream midi_input_stream(file); if (! 
midi_file.readFrom( midi_input_stream )) { fprintf(stderr,"Could not open '%s' for reading\n", l_pszFileName); return(-1); } midi_file.convertTimestampTicksToSeconds(); std::set<int> notes; double time_scale = 1.0; bool time_scale_changed = false; do { std::map<std::string, double> key_position; time_scale_changed = false; gcode.clear(); heeks.clear(); key_position.clear(); std::ostringstream l_ossGCode; for (int track = 0; track<midi_file.getNumTracks(); track++) { int number_of_notes_included = 0; int number_of_notes_ignored = 0; const MidiMessageSequence *pMessageSequence = midi_file.getTrack(track); double start_time = pMessageSequence->getStartTime(); double end_time = pMessageSequence->getEndTime(); double duration = end_time - start_time; if (duration <= 0.0001) continue; l_ossGCode.str(""); l_ossGCode << "(Duration of track " << track << " is " << duration << " seconds)"; gcode.push_back( l_ossGCode.str() ); printf("%s\n", l_ossGCode.str().c_str()); // printf("Duration of track %d is %lf seconds\n", track, duration); for (int event = 0; event < pMessageSequence->getNumEvents(); event++) { MidiMessageSequence::MidiEventHolder *pEvent = pMessageSequence->getEventPointer(event); MidiMessage message = pEvent->message; double time_stamp = message.getTimeStamp(); if (message.isTextMetaEvent()) { String text = message.getTextFromTextMetaEvent(); char buf[1024]; memset( buf, '\0', sizeof(buf) ); text.copyToBuffer( buf, sizeof(buf)-1 ); // printf("Track %d is %s\n", track, buf ); l_ossGCode.str(""); l_ossGCode << "(Text track " << track << " is " << buf << ")"; gcode.push_back(l_ossGCode.str()); printf("%s\n", l_ossGCode.str().c_str()); std::ostringstream l_ossHeeks; l_ossHeeks << "<Text text=\"" << buf << "\" font=\"OpenGL\" col=\"0\" m0=\"-0.0443342566\" m1=\"-0.999016753\" m2=\"0\" m3=\"" << (double) (time_stamp * mm_per_second) << "\" m4=\"0.999016753\" m5=\"-0.0443342566\" m6=\"0\" m7=\"" << (double) ((track_width + space_between_tracks) * track) << "\" m8=\"0\" m9=\"0\" ma=\"1\" mb=\"0\" id=\"" << id++ << "\" />"; heeks.push_back( l_ossHeeks.str() ); } if (message.isTrackNameEvent()) { String text = message.getTextFromTextMetaEvent(); char buf[1024]; memset( buf, '\0', sizeof(buf) ); text.copyToBuffer( buf, sizeof(buf)-1 ); printf("Track %d is %s\n", track, buf ); } if (message.isNoteOn()) { char note_name[256]; memset( note_name, '\0', sizeof(note_name) ); message.getMidiNoteName(message.getNoteNumber(), true, true, middle_c_octave).copyToBuffer( note_name, sizeof(note_name)-1 ); notes.insert( message.getNoteNumber() ); // printf("time %lf note %s\n", time_stamp, note_name ); std::string l_ssNoteName(note_name); std::string::size_type offset; bool sharp_found = false; while ((offset = l_ssNoteName.find("#")) != std::string::npos) { l_ssNoteName = l_ssNoteName.erase(offset,1); sharp_found = true; } strncpy( note_name, l_ssNoteName.c_str(), sizeof(note_name)-1 ); const int blue = 16711680; const int black = 0; const int red = 255; int colour = blue; Keys_t::iterator l_itLegalKey = std::find( legal_keys.begin(), legal_keys.end(), note_name ); if (l_itLegalKey == legal_keys.end()) { colour = red; } // Find the note name in the keys we're interested in. 
Keys_t::iterator l_itKey = std::find( keys.begin(), keys.end(), note_name ); if (l_itKey != keys.end()) { double x = time_stamp * mm_per_second * time_scale; double y = double(double(std::distance( keys.begin(), l_itKey )) - double(middle_c_key)) * distance_between_notes_in_mm; y += ((track_width + space_between_tracks) * track); if (sharp_found) { y += (distance_between_notes_in_mm / 2.0); colour = red; } // Check to see if we have two notes that are too close to each other for the mechanism to play them. if (key_position.find(note_name) == key_position.end()) { key_position[note_name] = x; } // Measure the distance between this note and the previous equivalent note. If we need to expand our // time scale to ensure consecutive notes are not too close together, do it now. if ((dist(x, key_position[note_name]) < min_distance_between_notes) && (dist(x, key_position[note_name]) > 0.0)) { // Need to scale the whole piece up. double increase_in_time_scale = double(double(min_distance_between_notes) / double(dist(x, key_position[note_name]))); if (increase_in_time_scale > 1.0) { time_scale = increase_in_time_scale * time_scale; time_scale_changed = true; } } key_position[note_name] = x; // It's a key we have to play. Generate the GCode. l_ossGCode.str(""); l_ossGCode << "G83 X " << x << " Y " << y << "\t(" << note_name << ")"; gcode.push_back( l_ossGCode.str() ); if (sharp_found) { std::ostringstream l_ossHeeks; l_ossHeeks << "<Circle col=\"" << colour << "\" r=\"" << (distance_between_notes_in_mm / 2.0) * 0.85 << "\" cx=\"" << x << "\" cy=\"" << y << "\" cz=\"0\" ax=\"0\" ay=\"0\" az=\"1\" id=\"" << id++ << "\">\n"; l_ossHeeks << " <Point col=\"" << colour << "\" x=\"" << x << "\" y=\"" << y << "\" z=\"0\" id=\"" << id++ << "\" />\n"; l_ossHeeks << "</Circle>\n"; heeks.push_back( l_ossHeeks.str() ); } else { std::ostringstream l_ossHeeks; l_ossHeeks << "<Point col=\"" << colour << "\" x=\"" << x << "\" y=\"" << y << "\" z=\"0\" id=\"" << id++ << "\" />"; heeks.push_back( l_ossHeeks.str() ); } // printf("G83 Want hole for key %s at %lf,%lf\n", note_name, x, y ); number_of_notes_included++; } else { // This key doesn't fall exactly on our scale. Ignore it. number_of_notes_ignored++; printf("Missed note %s\n", note_name); } } // End if - then } // End for l_ossGCode.str(""); l_ossGCode << "(" << (double(number_of_notes_included)/double(number_of_notes_included + number_of_notes_ignored)) * 100.0 << " % utilisation of notes)"; gcode.push_back(l_ossGCode.str()); printf("%s\n", l_ossGCode.str().c_str()); l_ossGCode.str(""); l_ossGCode << "(Of the " << (number_of_notes_included + number_of_notes_ignored) << " notes, we are using " << number_of_notes_included << " (whole notes) and ignoring " << number_of_notes_ignored << " (sharps and flats))"; gcode.push_back(l_ossGCode.str()); printf("%s\n", l_ossGCode.str().c_str()); printf("At %lf mm per second (%lf mm per minute), we will need %lf mm of paper for this tune\n", mm_per_second, mm_per_second * 60.0 * time_scale, (end_time - start_time) * mm_per_second * time_scale ); printf("We have had to scale the tune %lf times to ensure no two consecutive notes were less than %lf mm apart\n", time_scale, min_distance_between_notes); // Draw a line for each possible note. 
for (Keys_t::iterator l_itKey = keys.begin(); l_itKey != keys.end(); l_itKey++) { double y = double(double(std::distance( keys.begin(), l_itKey )) - double(middle_c_key)) * distance_between_notes_in_mm; y += ((track_width + space_between_tracks) * track); if (std::find(legal_keys.begin(), legal_keys.end(), *l_itKey) != legal_keys.end()) { std::ostringstream l_ossHeeks; l_ossHeeks.str(""); l_ossHeeks << "<Sketch title=\"Sketch\" id=\"" << id++ << "\">\n"; l_ossHeeks << "<Line col=\"0\" id=\"" << id++ << "\">\n"; l_ossHeeks << "<Point col=\"0\" x=\"" << (double) (start_time * mm_per_second * time_scale) << "\" y=\"" << y << "\" z=\"0\" id=\"" << id++ << "\" />\n"; l_ossHeeks << "<Point col=\"0\" x=\"" << (double) (end_time * mm_per_second * time_scale) << "\" y=\"" << y << "\" z=\"0\" id=\"" << id++ << "\" />\n"; l_ossHeeks << "</Line>\n"; l_ossHeeks << "</Sketch>\n"; heeks.push_back(l_ossHeeks.str()); } } // End for } } while (time_scale_changed == true); /* for (std::set<int>::const_iterator l_itNote = notes.begin(); l_itNote != notes.end(); l_itNote++) { char note_name[256]; memset( note_name, '\0', sizeof(note_name) ); MidiMessage::getMidiNoteName(*l_itNote, true, true, 5).copyToBuffer( note_name, sizeof(note_name)-1 ); printf("Note %d %s\n", *l_itNote, note_name); } */ { String gcode_file_name(l_pszFileName); gcode_file_name = gcode_file_name.dropLastCharacters(4); gcode_file_name << ".ngc"; char buf[1024]; memset( buf, '\0', sizeof(buf) ); gcode_file_name.copyToBuffer(buf,sizeof(buf)-1); FILE *fp = fopen( buf, "w+t"); if (fp == NULL) { fprintf(stderr,"Could not open %s for writing\n", buf); return(-1); } for (std::list<std::string>::const_iterator l_itLine = gcode.begin(); l_itLine != gcode.end(); l_itLine++) { fprintf(fp,"%s\n", l_itLine->c_str()); } fclose(fp); } { String gcode_file_name(l_pszFileName); gcode_file_name = gcode_file_name.dropLastCharacters(4); gcode_file_name << ".heeks"; char buf[1024]; memset( buf, '\0', sizeof(buf) ); gcode_file_name.copyToBuffer(buf,sizeof(buf)-1); FILE *fp = fopen( buf, "w+t"); if (fp == NULL) { fprintf(stderr,"Could not open %s for writing\n", buf); return(-1); } for (std::list<std::string>::const_iterator l_itLine = heeks.begin(); l_itLine != heeks.end(); l_itLine++) { fprintf(fp,"%s\n", l_itLine->c_str()); } fclose(fp); } return 0; }
void PianoRoll::mouseUp (const MouseEvent& e) { int highnote,lownote; if (lasso.getWidth()>0) { lownote = (int)((float)(getHeight()-lasso.getY())*128.f/(float)getHeight()); highnote = (int)((float)(getHeight()-(lasso.getY()+lasso.getHeight()))*128.f/(float)getHeight()); if (lownote>highnote) swapVariables(lownote,highnote); for (int index=0;index<(sequence->getNumEvents());index++) { MidiMessage m = sequence->getEventPointer(index)->message; DBG("eventtime=" + String(sequence->getEventTime(index))); if (m.isNoteOn()) {DBG("note=" + String(m.getNoteNumber()));} DBG("lassostart=" + String(pixelsToPpq((float)lasso.getX(),false))); DBG("lassoend=" + String(pixelsToPpq((float)(lasso.getX()+lasso.getWidth()),false))); if (m.isNoteOn() && sequence->getEventTime(index)>=pixelsToPpq((float)lasso.getX(),false) && sequence->getEventTime(index)<=pixelsToPpq((float)(lasso.getX()+lasso.getWidth()),false) && m.getNoteNumber()>=lownote && m.getNoteNumber()<=highnote) { addToSelection(sequence->getEventPointer(index)); } } lasso.setSize(0,0); repaint(); noteLayer->repaint(); } if (hoveringNoteIndex != No_Note) { if (hoveringNoteIndex<-2) hoveringNoteIndex+=9999; if (e.mods.isPopupMenu() && hoveringNoteIndex!=-2) { //right click, delete notes plugin->getCallbackLock().enter(); for (int i=selectedNotes.size();--i>=0;) sequence->deleteEvent(sequence->getIndexOf(selectedNotes.getUnchecked(i)),true); sequence->updateMatchedPairs(); plugin->getCallbackLock().exit(); clearSelection(); } else { if (draggingNoteTimeDelta!=0 || draggingNoteTransposition!=0) { plugin->getCallbackLock().enter(); if (draggingNoteTimeDelta!=0.0) { for (int i=0;i<selectedNotes.size();i++) { selectedNotes.getUnchecked(i)->message.addToTimeStamp(draggingNoteTimeDelta); selectedNotes.getUnchecked(i)->noteOffObject->message.addToTimeStamp(draggingNoteTimeDelta); //sequence->moveEvent(sequence->getIndexOf(selectedNotes.getUnchecked(i)),draggingNoteTimeDelta,true); } draggingNoteTimeDelta=0.0; } if (draggingNoteTransposition!=0) { for (int i=0;i<selectedNotes.size();i++) sequence->transposeEvent(sequence->getIndexOf(selectedNotes.getUnchecked(i)),draggingNoteTransposition); draggingNoteTransposition=0; } sequence->updateMatchedPairs(true); plugin->getCallbackLock().exit(); } else if (wasResizing || e.mods.isAltDown()) { //resize notes wasResizing=false; for (int i=selectedNoteLengths.size();--i>=0;) { if (selectedNotes.getUnchecked(i)!=0 && selectedNotes.getUnchecked(i)->noteOffObject!=0) { selectedNoteLengths.getReference(i).updateLength(); } else { selectedNotes.remove(i); selectedNoteLengths.remove(i); } } plugin->getCallbackLock().enter(); sequence->updateMatchedPairs(true); plugin->getCallbackLock().exit(); } } hoveringNoteIndex = No_Note; hoveringNote=0; sendChangeMessage(); noteLayer->repaint(); } }
//============================================================================== void MidiMonitorEditor::timerCallback () { MidiBuffer tmpBuffer; int hours, minutes, seconds, frames; MidiMessage::SmpteTimecodeType timeCode; MidiMessageCollector* collector = owner->getMessageCollector (); collector->removeNextBlockOfMessages (tmpBuffer, 1024); if (! tmpBuffer.isEmpty()) { String midiLine; int samplePos = 0; MidiMessage msg (0xf4, 0.0); MidiBuffer::Iterator eventIterator (tmpBuffer); while (eventIterator.getNextEvent (msg, samplePos)) { midiLine.printf (T("[CH: %d] "), msg.getChannel()); if (msg.isNoteOnOrOff ()) { midiLine += MidiMessage::getMidiNoteName (msg.getNoteNumber(), true, true, 0); midiLine += " "; midiLine += String ((int) msg.getVelocity ()); if (msg.isNoteOn()) { midiLine += " ON"; } else { midiLine += " OFF"; } } else if (msg.isAllNotesOff()) { midiLine += "ALL NOTES OFF"; } else if (msg.isAllSoundOff()) { midiLine += "ALL SOUND OFF"; } else if (msg.isPitchWheel()) { midiLine += "PITCHWEEL: "; midiLine += String (msg.getPitchWheelValue()); } else if (msg.isAftertouch()) { midiLine += "AFTERTOUCH: "; midiLine += String (msg.getAfterTouchValue()); } else if (msg.isChannelPressure()) { midiLine += "CHANNELPRESSURE: "; midiLine += String (msg.getChannelPressureValue()); } else if (msg.isSysEx()) { midiLine += "SYSEX: "; midiLine += String (msg.getSysExDataSize()); midiLine += " bytes"; } else if (msg.isProgramChange()) { midiLine += "PROGRAM CHANGE: "; midiLine += String (msg.getProgramChangeNumber()); midiLine += " ("; midiLine += MidiMessage::getGMInstrumentName (msg.getProgramChangeNumber()); midiLine += ")"; } else if (msg.isController()) { midiLine += "CC: #"; midiLine += String (msg.getControllerNumber()); midiLine += " ("; midiLine += MidiMessage::getControllerName (msg.getControllerNumber()); midiLine += ") = "; midiLine += String (msg.getControllerValue()); } else if (msg.isTimeSignatureMetaEvent ()) { int newNumerator, newDenominator; msg.getTimeSignatureInfo (newNumerator, newDenominator); midiLine += "TIME SIGNATURE: "; midiLine += String (newNumerator); midiLine += " / "; midiLine += String (newDenominator); } else if (msg.isTempoMetaEvent ()) { midiLine += "TEMPO: "; midiLine += String (msg.getTempoSecondsPerQuarterNote ()); //midiLine += " "; //midiLine += String (msg.getTempoMetaEventTickLength (ticksPerQuarterNote)); } else if (msg.isMidiMachineControlMessage()) { midiLine += "MIDI CONTROL: "; switch (msg.getMidiMachineControlCommand()) { case MidiMessage::mmc_stop: midiLine += "stop"; break; case MidiMessage::mmc_play: midiLine += "play"; break; case MidiMessage::mmc_deferredplay: midiLine += "deferredplay"; break; case MidiMessage::mmc_fastforward: midiLine += "fastforward"; break; case MidiMessage::mmc_rewind: midiLine += "rewind"; break; case MidiMessage::mmc_recordStart: midiLine += "recordStart"; break; case MidiMessage::mmc_recordStop: midiLine += "recordStop"; break; case MidiMessage::mmc_pause: midiLine += "pause"; break; } } else if (msg.isMidiStart ()) { midiLine += "MIDI START: "; } else if (msg.isMidiContinue ()) { midiLine += "MIDI CONTINUE: "; } else if (msg.isMidiStop ()) { midiLine += "MIDI STOP: "; } else if (msg.isSongPositionPointer ()) { midiLine += "SONG POSITION: "; midiLine += String (msg.getSongPositionPointerMidiBeat ()); } else if (msg.isQuarterFrame ()) { midiLine += "QUARTER FRAME: "; midiLine += String (msg.getQuarterFrameSequenceNumber ()); midiLine += " "; midiLine += String (msg.getQuarterFrameValue ()); } else if (msg.isFullFrame 
()) { midiLine += "FULL FRAME: "; msg.getFullFrameParameters (hours, minutes, seconds, frames, timeCode); midiLine += String (hours); midiLine += ":"; midiLine += String (minutes); midiLine += ":"; midiLine += String (seconds); midiLine += ":"; midiLine += String (frames); midiLine += " timecode: "; switch (timeCode) { case MidiMessage::fps24: midiLine += "fps24"; break; case MidiMessage::fps25: midiLine += "fps25"; break; case MidiMessage::fps30drop: midiLine += "fps30drop"; break; case MidiMessage::fps30: midiLine += "fps30"; break; } } else if (msg.isMidiMachineControlGoto (hours, minutes, seconds, frames)) { midiLine += "MIDI CONTROL GOTO: "; midiLine += String (hours); midiLine += ":"; midiLine += String (minutes); midiLine += ":"; midiLine += String (seconds); midiLine += ":"; midiLine += String (frames); } midiOutputEditor->insertTextAtCursor (midiLine + T("\n")); } } }
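//==============================================================================
// A minimal sketch, not part of the monitor above, assuming a recent JUCE version:
// MidiMessage::getDescription() already produces a human-readable summary, so a
// logger that does not need the exact layout above can be reduced to a few lines.
// The range-based MidiBuffer loop requires JUCE 6 or later.
static void logMidiBuffer (const juce::MidiBuffer& buffer, juce::TextEditor& output)
{
    for (const auto metadata : buffer)
        output.insertTextAtCaret (metadata.getMessage().getDescription() + "\n");
}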
//============================================================================== void MidiTransform::processEvents (MidiBuffer& midiMessages, const int blockSize) { int timeStamp; MidiMessage message (0xf4, 0.0); MidiBuffer::Iterator it (midiMessages); MidiBuffer midiOutput; switch (command) { case MidiTransform::KeepEvents: break; case MidiTransform::DiscardEvents: { midiMessages.clear (); break; } case MidiTransform::RemapChannel: { while (it.getNextEvent (message, timeStamp)) { message.setChannel (channelNumber); midiOutput.addEvent (message, timeStamp); } midiMessages = midiOutput; break; } case MidiTransform::ScaleNotes: { while (it.getNextEvent (message, timeStamp)) { if (message.isNoteOnOrOff ()) { message.setNoteNumber (roundFloatToInt (message.getNoteNumber () * noteScale)); midiOutput.addEvent (message, timeStamp); } } midiMessages = midiOutput; break; } case MidiTransform::InvertNotes: { while (it.getNextEvent (message, timeStamp)) { if (message.isNoteOnOrOff ()) { message.setNoteNumber (127 - message.getNoteNumber ()); midiOutput.addEvent (message, timeStamp); } } midiMessages = midiOutput; break; } case MidiTransform::TransposeNotes: { while (it.getNextEvent (message, timeStamp)) { if (message.isNoteOnOrOff ()) { message.setNoteNumber (jmax (0, jmin (127, message.getNoteNumber () - noteTranspose))); midiOutput.addEvent (message, timeStamp); } } midiMessages = midiOutput; break; } case MidiTransform::ScaleVelocity: { while (it.getNextEvent (message, timeStamp)) { if (message.isNoteOn ()) { message.setVelocity ((message.getVelocity () / 127.0f) * velocityScale); midiOutput.addEvent (message, timeStamp); } } midiMessages = midiOutput; break; } case MidiTransform::InvertVelocity: { while (it.getNextEvent (message, timeStamp)) { if (message.isNoteOn ()) { message.setVelocity ((127 - message.getVelocity ()) / 127.0f); midiOutput.addEvent (message, timeStamp); } } midiMessages = midiOutput; break; } case MidiTransform::TransposeVelocity: { while (it.getNextEvent (message, timeStamp)) { if (message.isNoteOn ()) { message.setVelocity (jmax (0, jmin (127, message.getVelocity () - velocityTranspose)) / 127.0f); midiOutput.addEvent (message, timeStamp); } } midiMessages = midiOutput; break; } case MidiTransform::TriggerCC: { break; } case MidiTransform::TriggerNote: { break; } } }
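//==============================================================================
// A minimal sketch of the RemapChannel case above, rewritten against the range-based
// MidiBuffer interface that superseded MidiBuffer::Iterator in JUCE 6; assuming that
// newer JUCE version, the per-event copy/modify/collect pattern stays the same.
static void remapChannel (juce::MidiBuffer& midiMessages, int channelNumber)
{
    juce::MidiBuffer midiOutput;
    for (const auto metadata : midiMessages)
    {
        juce::MidiMessage message = metadata.getMessage(); // copy the event, then edit the copy
        message.setChannel (channelNumber);
        midiOutput.addEvent (message, metadata.samplePosition);
    }
    midiMessages.swapWith (midiOutput);
}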
void MIDIReceiver::handleIncomingMidiMessage (MidiInput *source, const MidiMessage &message) { DBG("midi in"); // is this midi input source enabled? if(_MidiDeviceManager->isMidiInputEnabled(source->getName())) { DBG(source->getName()); if(message.isController() && _MidiDeviceManager->isCcEnabled(source->getName(), true) && appProperties->getUserSettings()->getIntValue("midiInputChannel") == message.getChannel()) { DBG(message.getControllerNumber() ); // reference counted Signal ( string command, string origin ) Signal::SignalP ledStateSignal = new Signal("SEND_OSC", "RCV_MIDI"); ledStateSignal->addStringArg("/nomestate/grid/led/set"); // get the x position: LED number % 8 ledStateSignal->addIntArg(message.getControllerNumber() % 8); // get the y position: LED number / 8 ledStateSignal->addIntArg(message.getControllerNumber() / 8); // get the LED state: toggleState int ledState = 0; if (message.getControllerValue() > 0) { ledState = 1; } ledStateSignal->addIntArg(ledState); _mCenter->handleSignal(*ledStateSignal); } else if (message.isNoteOn() && _MidiDeviceManager->isNoteEnabled(source->getName(), true) && appProperties->getUserSettings()->getIntValue("midiInputChannel") == message.getChannel()) { // let's set the color based on the MIDI note velocity int MIDIVelocity = message.getVelocity(); int r = 0; int g = 0; int b = 0; if (MIDIVelocity > 0) { // red if (MIDIVelocity <= 22) { r = 127; } // yellow else if (MIDIVelocity <= 43) { r = 127; g = 127; } // green else if (MIDIVelocity <= 64) { g = 127; } // teal else if (MIDIVelocity <= 85) { g = 127; b = 127; } // blue else if (MIDIVelocity <= 106) { b = 127; } // purple else { b = 127; r = 127; } } // reference counted Signal ( string command, string origin ) Signal::SignalP ledColourSignal = new Signal("SEND_OSC", "RCV_MIDI"); ledColourSignal->addStringArg("/nomestate/grid/led/color"); // get the x position: LED number % 8 ledColourSignal->addIntArg(message.getNoteNumber() % 8); // get the y position: LED number / 8 ledColourSignal->addIntArg(message.getNoteNumber() / 8); // get the LED RED: ledColourSignal->addIntArg(r); // get the LED GREEN: ledColourSignal->addIntArg(g); // get the LED BLUE: ledColourSignal->addIntArg(b); _mCenter->handleSignal(*ledColourSignal); // reference counted Signal ( string command, string origin ) Signal::SignalP ledStateSignal = new Signal("SEND_OSC", "RCV_MIDI"); ledStateSignal->addStringArg("/nomestate/grid/led/set"); // get the x position: LED number % 8 ledStateSignal->addIntArg(message.getNoteNumber() % 8); // get the y position: LED number / 8 ledStateSignal->addIntArg(message.getNoteNumber() / 8); // get the LED state: toggleState ledStateSignal->addIntArg(1); _mCenter->handleSignal(*ledStateSignal); } else if (message.isNoteOff() && _MidiDeviceManager->isNoteEnabled(source->getName(), true) && appProperties->getUserSettings()->getIntValue("midiInputChannel") == message.getChannel()) { // reference counted Signal ( string command, string origin ) Signal::SignalP ledStateSignal = new Signal("SEND_OSC", "RCV_MIDI"); ledStateSignal->addStringArg("/nomestate/grid/led/set"); // get the x position: LED number % 8 ledStateSignal->addIntArg(message.getNoteNumber() % 8); // get the y position: LED number / 8 ledStateSignal->addIntArg(message.getNoteNumber() / 8); // get the LED state: toggleState ledStateSignal->addIntArg(0); _mCenter->handleSignal(*ledStateSignal); } } }
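//==============================================================================
// A hypothetical helper, not present in the receiver above, for the repeated
// "index % 8" / "index / 8" arithmetic; it assumes the same 8-column, row-major
// grid layout that the OSC messages above address.
struct GridPosition { int x, y; };

static GridPosition toGridPosition (int index) noexcept
{
    return { index % 8,   // x: column within the 8-wide row
             index / 8 }; // y: row
}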
void stepQuickEdit::messageTypeChanged() { MidiMessage *m = 0; MidiBuffer *mB = 0; if (editorComponent) { deleteAndZero (editorComponent); } if (midiMessage == 0) { return; } m = midiMessage->getMidiMessage(); if (m) { if (m->isNoteOn()) { addAndMakeVisible (editorComponent = new stepEditNote(midiMessage)); typeCombo->setSelectedId (noteOn, true); } else if (m->isProgramChange()) { typeCombo->setSelectedId (ProgramChange, true); } else if (m->isController()) { addAndMakeVisible (editorComponent = new stepEditController(midiMessage)); typeCombo->setSelectedId (Controller, true); } else if (m->isMidiMachineControlMessage()) { typeCombo->setSelectedId (MMC, true); } else if (m->isSysEx()) { addAndMakeVisible (editorComponent = new stepEditSysex(midiMessage)); typeCombo->setSelectedId (SysEx, true); } resized(); } mB = midiMessage->getMidiBuffer(); if (mB == 0) return; if (!m && mB) { MidiBuffer::Iterator i(*mB); int len; MidiMessage message (0xf4, 0.0); if (i.getNextEvent (message, len)) { uint8 *data = message.getRawData(); if (*data == 0xb0) { addAndMakeVisible (editorComponent = new stepEditController(midiMessage)); typeCombo->setSelectedId (Controller, true); } if (*data == 0xf0) { addAndMakeVisible (editorComponent = new stepEditSysex(midiMessage)); typeCombo->setSelectedId (SysEx, true); } resized(); } } }
void MiditoOscAudioProcessor::processBlock (AudioSampleBuffer& buffer, MidiBuffer& midiMessages) { static float cv[8], shiftcv[8]; static bool _calibMode; MidiBuffer processedMidi; MidiMessage m; int time; char oscBuffer[IP_MTU_SIZE]; osc::OutboundPacketStream p(oscBuffer, IP_MTU_SIZE); if (calibMode) // Calibration Mode A440Hz(MIDI number 69) { p << osc::BeginBundleImmediate << osc::BeginMessage( "/fader1" ) << calibMap[69] << osc::EndMessage << osc::BeginMessage( "/fader2" ) << calibMap[69] << osc::EndMessage << osc::BeginMessage( "/fader3" ) << calibMap[69] << osc::EndMessage << osc::BeginMessage( "/fader4" ) << calibMap[69] << osc::EndMessage << osc::BeginMessage( "/fader5" ) << calibMap[69] << osc::EndMessage << osc::BeginMessage( "/fader6" ) << calibMap[69] << osc::EndMessage << osc::BeginMessage( "/fader7" ) << calibMap[69] << osc::EndMessage << osc::BeginMessage( "/fader8" ) << calibMap[69] << osc::EndMessage << osc::BeginMessage( "/gate1" ) << 1 << osc::EndMessage << osc::BeginMessage( "/gate2" ) << 1 << osc::EndMessage << osc::EndBundle; sendOSCData(p); _calibMode = true; return; } else { if (_calibMode) { p << osc::BeginBundleImmediate << osc::BeginMessage( "/gate1" ) << 0 << osc::EndMessage << osc::BeginMessage( "/gate2" ) << 0 << osc::EndMessage << osc::EndBundle; sendOSCData(p); _calibMode = false; } } for (MidiBuffer::Iterator i (midiMessages); i.getNextEvent (m, time);) { p.Clear(); usleep(30); if (m.isNoteOn()) { if (monoMode) // mono Mode { uint32_t midiCh = m.getChannel(); if (midiCh == 0 || midiCh > 7) { midiCh = 1; } cv[midiCh - 1] = calibMap[m.getNoteNumber()]; switch (midiCh) { case 1: p << osc::BeginMessage("/fader1") << cv[0] << osc::EndMessage; break; case 2: p << osc::BeginMessage("/fader2") << cv[1] << osc::EndMessage; break; case 3: p << osc::BeginMessage("/fader3") << cv[2] << osc::EndMessage; break; case 4: p << osc::BeginMessage("/fader4") << cv[3] << osc::EndMessage; break; case 5: p << osc::BeginMessage("/fader5") << cv[4] << osc::EndMessage; break; case 6: p << osc::BeginMessage("/fader6") << cv[5] << osc::EndMessage; break; case 7: p << osc::BeginMessage("/fader7") << cv[6] << osc::EndMessage; break; case 8: p << osc::BeginMessage("/fader8") << cv[7] << osc::EndMessage; break; default: break; } sendOSCData(p); } else if (shiftMode) { // shift Mode cv[0] = calibMap[m.getNoteNumber()]; for (int i = 7; i > 0; i--) { shiftcv[i] = shiftcv[i-1]; } p << osc::BeginBundleImmediate << osc::BeginMessage( "/fader1" ) << cv[0] << osc::EndMessage << osc::BeginMessage( "/fader2" ) << shiftcv[1] << osc::EndMessage << osc::BeginMessage( "/fader3" ) << shiftcv[2] << osc::EndMessage << osc::BeginMessage( "/fader4" ) << shiftcv[3] << osc::EndMessage << osc::BeginMessage( "/fader5" ) << shiftcv[4] << osc::EndMessage << osc::BeginMessage( "/fader6" ) << shiftcv[5] << osc::EndMessage << osc::BeginMessage( "/fader7" ) << shiftcv[6] << osc::EndMessage << osc::BeginMessage( "/fader8" ) << shiftcv[7] << osc::EndMessage << osc::BeginMessage( "/gate1" ) << 1 << osc::EndMessage << osc::BeginMessage( "/gate2" ) << 1 << osc::EndMessage << osc::EndBundle; sendOSCData(p); shiftcv[0] = cv[0]; } else { // poly Mode cv[ch] = calibMap[m.getNoteNumber()]; if (currentMaxPoly == 1) { cv[1] = cv[0]; } p << osc::BeginBundleImmediate << osc::BeginMessage( "/fader1" ) << cv[0] << osc::EndMessage << osc::BeginMessage( "/fader2" ) << cv[1] << osc::EndMessage << osc::BeginMessage( "/fader3" ) << cv[2] << osc::EndMessage << osc::BeginMessage( "/fader4" ) << cv[3] << osc::EndMessage << 
osc::BeginMessage( "/fader5" ) << cv[4] << osc::EndMessage << osc::BeginMessage( "/fader6" ) << cv[5] << osc::EndMessage << osc::BeginMessage( "/fader7" ) << m.getFloatVelocity() << osc::EndMessage << osc::BeginMessage( "/gate1" ) << 1 << osc::EndMessage << osc::BeginMessage( "/gate2" ) << 1 << osc::EndMessage << osc::EndBundle; sendOSCData(p); ch++; gateCount++; if (ch >= currentMaxPoly) { ch = 0; } } } else if (m.isNoteOff()) { if (monoMode) { switch (m.getChannel()) { case 1: p << osc::BeginMessage( "/gate1" ) << 0 << osc::EndMessage; break; case 2: p << osc::BeginMessage( "/gate2" ) << 0 << osc::EndMessage; break; case 3: p << osc::BeginMessage( "/gate3" ) << 0 << osc::EndMessage; break; case 4: p << osc::BeginMessage( "/gate4" ) << 0 << osc::EndMessage; break; default: break; } sendOSCData(p); } else if (shiftMode) { p << osc::BeginBundleImmediate << osc::BeginMessage( "/gate1" ) << 0 << osc::EndMessage << osc::BeginMessage( "/gate2" ) << 0 << osc::EndMessage << osc::EndBundle; sendOSCData(p); } else { gateCount --; if (gateCount <= 0) { p << osc::BeginBundleImmediate << osc::BeginMessage( "/gate1" ) << 0 << osc::EndMessage << osc::BeginMessage( "/gate2" ) << 0 << osc::EndMessage << osc::EndBundle; sendOSCData(p); gateCount = 0; } ch--; if (ch == -1) { ch = 0; } } } else if (m.isControllerOfType(1)) { // Modulation Wheel float modulation = m.getControllerValue(); if (!monoMode && !shiftMode) { p << osc::BeginMessage("/fader8") << (modulation / 127) << osc::EndMessage; sendOSCData(p); } } processedMidi.addEvent (m, time); } midiMessages.swapWith (processedMidi); buffer.clear(); for (int channel = 0; channel < getNumInputChannels(); ++channel) { float* channelData = 0; } }
void PatternRecording::recordPattern(MidiBuffer &midiMessages, const int &numSamples) { // if we are recording the pattern, just rip the // MIDI messages from the incoming buffer if (isPatternRecording) { // get the iterator for the incoming buffer // so we can check through the messages MidiBuffer::Iterator i(midiMessages); MidiMessage message (0xf4, 0.0); int time; // if we are still during the precount, do nothing if (patternPrecountPosition > numSamples) patternPrecountPosition -= numSamples; // if the precount finishes during this buffer else if (patternPrecountPosition > 0) { // TODO: overdub could be option here? midiPattern.clear(); const int numSamplesToAdd = numSamples - patternPrecountPosition; // DBG("#1 recording " << numSamplesToAdd << " of midi, pos: " << patternPrecountPosition); // get messages from the main MIDI message queue while(i.getNextEvent(message, time)) { // and add them if they occur after the precount runs out if (time > patternPrecountPosition) { midiPattern.addEvent(message, time - patternPrecountPosition); // store noteOffs to fire at end if (message.isNoteOn()) { MidiMessage tempNoteOff = MidiMessage::noteOff(message.getChannel(), message.getNoteNumber(), message.getVelocity()); noteOffs.addEvent(tempNoteOff, 0); } } } // the precount has finished patternPrecountPosition = 0; // we are now numSamplesToAdd into the buffer patternPosition = numSamplesToAdd; } else { // if we are during recording (and not near the end), just // add the current input into the record buffer if (numSamples + patternPosition < patternLengthInSamples) { // DBG("#2 recording " << numSamples << " of midi, pos: " << patternPosition); // get messages from the main MIDI message queue while(i.getNextEvent(message, time)) { midiPattern.addEvent(message, time + patternPosition); // store noteOffs to fire at end if (message.isNoteOn()) { MidiMessage tempNoteOff = MidiMessage::noteOff(message.getChannel(), message.getNoteNumber(), message.getVelocity()); noteOffs.addEvent(tempNoteOff, 0); } } //midiPattern.addEvents(midiMessages, 0, numSamples, -patternPosition); patternPosition += numSamples; } // otherwise we are finishing up else { const int numSamplesLeftToRecord = patternLengthInSamples - patternPosition; // add remaining messages from the main MIDI message queue while(i.getNextEvent(message, time)) { if (time < numSamplesLeftToRecord) midiPattern.addEvent(message, time + patternPosition); // store noteOffs to fire at end if (message.isNoteOn()) { MidiMessage tempNoteOff = MidiMessage::noteOff(message.getChannel(), message.getNoteNumber(), message.getVelocity()); noteOffs.addEvent(tempNoteOff, 0); } } // add the note offs to clear any "phantom notes" midiPattern.addEvents(noteOffs, 0, 1, patternLengthInSamples - 1); // we are no longer recording isPatternRecording = false; // if we finish recording, let any listeners know // so they can redraw representations of the pattern sendChangeMessage(); DBG("pattern " << patternBank << " finished recording."); // start playing back from the start straight away isPatternStopping = false; isPatternPlaying = true; patternPosition = 0; // fill the remaining buffer with the newly recorded sequence const int samplesRemaining = numSamples - numSamplesLeftToRecord; if (patternPosition + samplesRemaining < patternLengthInSamples) { midiMessages.addEvents(midiPattern, patternPosition, samplesRemaining, numSamplesLeftToRecord); patternPosition += samplesRemaining; } } // Let the PatternStripControl know to recache pattern sendChangeMessage(); } } }
//============================================================================== int main (int argc, char* argv[]) { if (argc != 3) { cout << "Usage: <prog> <midi input file> <wav output file>" << endl; return 0; } File inMidiFile = File(argv[1]); File outWavFile = File(argv[2]); //File inMidiFile = File("C:\\Users\\GeorgeKrueger\\Documents\\GitHub\\pymusic\\out.mid"); //File outWavFile = File("C:\\Users\\GeorgeKrueger\\Documents\\GitHub\\pymusic\\out.wav"); FileInputStream fileStream(inMidiFile); juce::MidiFile midiFile; midiFile.readFrom(fileStream); int numTracks = midiFile.getNumTracks(); midiFile.convertTimestampTicksToSeconds(); std::cout << "Opened midi file: " << inMidiFile.getFileName() << " Tracks: " << numTracks << std::endl; playHead.posInfo.bpm = 120; playHead.posInfo.isPlaying = true; playHead.posInfo.timeInSamples = 0; playHead.posInfo.timeInSeconds = 0; playHead.posInfo.timeSigNumerator = 4; playHead.posInfo.timeSigDenominator = 4; for (int i = 0; i < numTracks; ++i) { const juce::MidiMessageSequence* msgSeq = midiFile.getTrack(i); double trackLengthSeconds = 0; String plugFile = ""; int program = 0; for (int j = 0; j < msgSeq->getNumEvents(); ++j) { juce::MidiMessageSequence::MidiEventHolder* midiEventHolder = msgSeq->getEventPointer(j); juce::MidiMessage midiMsg = midiEventHolder->message; if (midiMsg.isMetaEvent() && midiMsg.getMetaEventType() == 0x04) { // Instrument meta event int instrLength = midiMsg.getMetaEventLength(); const juce::uint8* instrChars = midiMsg.getMetaEventData(); String instrName((char*)instrChars, instrLength); plugFile = instrName; } if (midiMsg.isMetaEvent() && midiMsg.isEndOfTrackMetaEvent()) { //int oetDataLength = midiMsg.getMetaEventLength(); //const uint8* oetData = midiMsg.getMetaEventData(); //std::cout << "Found end of track event data size: " << oetDataLength << " data: " << oetData << std::endl; trackLengthSeconds = midiMsg.getTimeStamp(); std::cout << "Track length in seconds: " << trackLengthSeconds << std::endl; } } if (trackLengthSeconds == 0) { std::cerr << "Skipping track " << i << " since it has zero length" << std::endl; continue; } if (plugFile.isEmpty()) { plugFile = "C:\\VST\\helm.dll"; std::cout << "No plug found for track. Defaulting to: " << plugFile << std::endl; //std::cerr << "Skipping track " << i << ". No instrument found."
<< std::endl; //continue; } else { std::cout << "Found plugin file '" << plugFile << "' from track " << i << std::endl; } OwnedArray<PluginDescription> results; VSTPluginFormat vstFormat; vstFormat.findAllTypesForFile(results, plugFile); if (results.size() > 0) { std::cout << "Found " << results.size() << " plugin(s) in file '" << plugFile << "'" << std::endl; int blockSize = 1024; double sampleRate = 44100; int totalSizeInSamples = ((static_cast<int>(44100 * trackLengthSeconds) / 1024) + 1) * 1024; cout << "Total samples to render " << totalSizeInSamples << endl; juce::AudioPluginInstance* plugInst = vstFormat.createInstanceFromDescription(*results[0], sampleRate, blockSize); if (!plugInst) { cout << "Failed to load plugin " << plugFile << endl; continue; } AudioProcessorGraph* graph = new AudioProcessorGraph(); graph->setPlayConfigDetails(0, 2, sampleRate, blockSize); graph->setPlayHead(&playHead); graph->addNode(plugInst, 1000); int AUDIO_IN_ID = 101; int AUDIO_OUT_ID = 102; int MIDI_IN_ID = 103; juce::AudioPluginInstance* audioInNode = new AudioGraphIOProcessor(AudioGraphIOProcessor::audioInputNode); juce::AudioPluginInstance* audioOutNode = new AudioGraphIOProcessor(AudioGraphIOProcessor::audioOutputNode); juce::AudioPluginInstance* midiInNode = new AudioGraphIOProcessor(AudioGraphIOProcessor::midiInputNode); graph->addNode(audioInNode, AUDIO_IN_ID); graph->addNode(audioOutNode, AUDIO_OUT_ID); graph->addNode(midiInNode, MIDI_IN_ID); graph->addConnection(AUDIO_IN_ID, 0, 1000, 0); graph->addConnection(AUDIO_IN_ID, 1, 1000, 1); graph->addConnection(MIDI_IN_ID, AudioProcessorGraph::midiChannelIndex, 1000, AudioProcessorGraph::midiChannelIndex); graph->addConnection(1000, 0, AUDIO_OUT_ID, 0); graph->addConnection(1000, 1, AUDIO_OUT_ID, 1); plugInst->setCurrentProgram(program); int numInputChannels = plugInst->getTotalNumInputChannels(); int numOutputChannels = plugInst->getTotalNumOutputChannels(); cout << "----- Plugin Information -----" << endl; cout << "Input channels : " << numInputChannels << endl; cout << "Output channels : " << numOutputChannels << endl; cout << "Num Programs: " << plugInst->getNumPrograms() << endl; cout << "Current program: " << plugInst->getCurrentProgram() << endl; int numParams = plugInst->getNumParameters(); cout << "Num Parameters: " << numParams << endl; for (int p = 0; p < numParams; ++p) { std::cout << "Param " << p << ": " << plugInst->getParameterName(p); if (!plugInst->getParameterLabel(p).isEmpty()) { cout << "(" << plugInst->getParameterLabel(p) << ")"; } cout << " = " << plugInst->getParameter(p) << endl; } cout << "-----------------------------" << endl; int maxChannels = std::max(numInputChannels, numOutputChannels); AudioBuffer<float> entireAudioBuffer(maxChannels, totalSizeInSamples); entireAudioBuffer.clear(); unsigned int midiSeqPos = 0; graph->releaseResources(); graph->prepareToPlay(sampleRate, blockSize); cout << "Num midi events: " << msgSeq->getNumEvents() << endl; // Render the audio in blocks for (int t = 0; t < totalSizeInSamples; t += blockSize) { //cout << "processing block " << t << " to " << t + blockSize << endl; MidiBuffer midiBuffer; for (int j = midiSeqPos; j < msgSeq->getNumEvents(); ++j) { MidiMessageSequence::MidiEventHolder* midiEventHolder = msgSeq->getEventPointer(j); MidiMessage midiMsg = midiEventHolder->message; int samplePos = static_cast<int>(midiMsg.getTimeStamp() * sampleRate); if (samplePos >= t && samplePos < t + blockSize) { if (midiMsg.isNoteOnOrOff()) { if (midiMsg.isNoteOn()) { cout << "note on event (" << 
midiMsg.getNoteNumber() << ") at " << samplePos << "(" << midiMsg.getTimeStamp() << "s) bufferpos=" << (samplePos - t) << endl; } else if (midiMsg.isNoteOff()) { cout << "note off event (" << midiMsg.getNoteNumber() << ") at " << samplePos << "(" << midiMsg.getTimeStamp() << "s) bufferpos=" << (samplePos - t) << endl; } midiBuffer.addEvent(midiMsg, samplePos - t); } else if (midiMsg.isProgramChange()) { program = midiMsg.getProgramChangeNumber(); plugInst->setCurrentProgram(program); } midiSeqPos++; } else { break; } } playHead.posInfo.timeInSamples = t; playHead.posInfo.timeInSeconds = t / sampleRate; AudioBuffer<float> blockAudioBuffer(entireAudioBuffer.getNumChannels(), blockSize); blockAudioBuffer.clear(); graph->processBlock(blockAudioBuffer, midiBuffer); for (int ch = 0; ch < entireAudioBuffer.getNumChannels(); ++ch) { entireAudioBuffer.addFrom(ch, t, blockAudioBuffer, ch, 0, blockSize); } } if (outWavFile.exists()) { outWavFile.deleteFile(); } FileOutputStream* fileOutputStream = outWavFile.createOutputStream(); WavAudioFormat wavFormat; StringPairArray metadataValues; juce::AudioFormatWriter* wavFormatWriter = wavFormat.createWriterFor( fileOutputStream, sampleRate, 2, 16, metadataValues, 0); bool writeAudioDataRet = wavFormatWriter->writeFromAudioSampleBuffer(entireAudioBuffer, 0, entireAudioBuffer.getNumSamples()); wavFormatWriter->flush(); cout << "Done writing to output file " << outWavFile.getFileName() << " . Write return value: " << (int)writeAudioDataRet << endl; delete wavFormatWriter; } else { cerr << "Could not find plugin from file " << plugFile << endl; } } return 0; }
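//==============================================================================
// A minimal sketch of the WAV-writing step at the end of main(), assuming a JUCE
// version where std::unique_ptr ownership is convenient; the helper name and error
// handling are illustrative, not taken from the program above. Once createWriterFor()
// succeeds, the writer takes ownership of the output stream, so nothing here needs
// an explicit delete. (<memory> and the JUCE headers are assumed to be included.)
static bool writeWavFile (const juce::File& outWavFile, const juce::AudioBuffer<float>& audio, double sampleRate)
{
    outWavFile.deleteFile();
    std::unique_ptr<juce::FileOutputStream> stream (outWavFile.createOutputStream());
    if (stream == nullptr)
        return false;
    juce::WavAudioFormat wavFormat;
    std::unique_ptr<juce::AudioFormatWriter> writer (
        wavFormat.createWriterFor (stream.get(), sampleRate,
                                   (unsigned int) audio.getNumChannels(), 16, {}, 0));
    if (writer == nullptr)
        return false;
    stream.release(); // the writer now owns and will delete the stream
    return writer->writeFromAudioSampleBuffer (audio, 0, audio.getNumSamples());
}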