/*
 * Insert `event` into the queue for cycle `when`, keeping the per-cycle
 * event list sorted by ascending priority.  Among events with equal
 * priority, insertion order (FIFO) is preserved: a new event is placed
 * after existing events of the same priority.
 *
 * Side effects: stamps the event with its cycle via SetCycle(), and
 * lowers nextEventCycle if `when` is earlier than any pending event.
 *
 * NOTE(review): the `priority` parameter is only used for ordering here;
 * it is never stored on the event itself — callers that later query the
 * event's own priority presumably set it beforehand.  Confirm.
 */
void EventQueue::InsertEvent( Event *event, ncycle_t when, int priority )
{
    event->SetCycle( when );

    /* Track the earliest cycle that has pending events. */
    if( when < nextEventCycle )
    {
        nextEventCycle = when;
    }

    /* operator[] default-constructs an empty EventList when no mapping
     * exists for this cycle, so a single lookup covers both the new-cycle
     * and the existing-cycle case (the previous code did count() followed
     * by insert()/operator[] — two lookups). */
    EventList& eventList = eventMap[when];

    /* Find the first event with a strictly greater priority; inserting
     * before it (or at end() when none exists, which is equivalent to
     * push_back) keeps the list sorted and FIFO-stable. */
    EventList::iterator it;
    for( it = eventList.begin( ); it != eventList.end( ); ++it )
    {
        if( (*it)->GetPriority( ) > priority )
        {
            break;
        }
    }

    eventList.insert( it, event );
}
/* Remap a note event's channel so that each (track, channel) combination
 * gets its own output channel, and flag the event as muted when the
 * combination appears in `muted`.
 *
 * Shared by the Note On and Note Off cases in mergedTracks() below,
 * which previously duplicated this logic verbatim.
 *
 * NoteEventT must provide getChannel(), setChannel() and a `muted` member
 * (true for both NoteOnEvent and NoteOffEvent in this file's usage).
 */
template <typename NoteEventT>
static void remapNoteChannel(NoteEventT *e, int trackIndex,
                             const std::set<int> &muted,
                             std::map<int, int> &chanmap, int &nextchan)
{
    /* Combination key format: (track_nr << 4) | channel_nr. */
    int combination = trackIndex << 4 | e->getChannel();

    e->muted = (muted.find(combination) != muted.end());

    /* First time this combination is seen: assign the next free channel. */
    if (chanmap.find(combination) == chanmap.end()) {
        chanmap[combination] = nextchan;
        ++nextchan;
    }
    e->setChannel(chanmap[combination]);
}

/* Merge all tracks into one big track. This will not copy any event
 * and only return a list of pointers to the events in the right order.
 * Note though that this will modify the relative_* times!
 *
 * The tracks will not be modified but may not be used later for
 * playback since the times are messed up (but you could reconstruct
 * them using the absolute_* times)
 *
 * The parameter muted is a set of muted track/channel combinations in
 * the format ttttcccc, that is the last 4 bits are the channel and the
 * remaining bits are the track:
 * (track_nr << 4) | channel_nr
 */
EventList MidiFile::mergedTracks(std::set<int> muted)
{
    EventList result;

    /* Build one cursor per track: the track itself, the current read
     * position, its size, and its index (used for the combination key). */
    typedef std::vector<_track> trackv;
    trackv tracks;
    int tindex = 0;
    for (TrackList::iterator t = m_tracks.begin(); t != m_tracks.end(); ++t) {
        _track tentry;
        tentry.t = *t;
        tentry.pos = 0;
        tentry.size = (*t)->size();
        tentry.index = tindex;
        tracks.push_back(tentry);
        ++tindex;
    }

    std::map<int, int> chanmap;  /* combination -> remapped output channel */
    int nextchan = 0;

    /* k-way merge: repeatedly pick the not-yet-consumed event with the
     * smallest absolute_musec across all tracks, until all are drained. */
    for (;;) {
        bool exhausted = true;
        MidiEvent *min_event = 0;
        _track *min_track = 0;

        for (trackv::iterator t = tracks.begin(); t != tracks.end(); ++t) {
            if (t->pos < t->size) {
                exhausted = false;
                if (min_event == 0 ||
                    t->t->at(t->pos)->absolute_musec < min_event->absolute_musec) {
                    min_event = t->t->at(t->pos);
                    min_track = &(*t);
                }
            }
        }
        if (exhausted)
            break;
        ++min_track->pos;

        /* Note events get their channel remapped per (track, channel)
         * combination and their mute flag set; other events pass through. */
        if (min_event->type() == Event_Note_On) {
            remapNoteChannel(dynamic_cast<NoteOnEvent*>(min_event),
                             min_track->index, muted, chanmap, nextchan);
        } else if (min_event->type() == Event_Note_Off) {
            remapNoteChannel(dynamic_cast<NoteOffEvent*>(min_event),
                             min_track->index, muted, chanmap, nextchan);
        }

        result.push_back(min_event);
    }

    /* Recompute the relative_* times from the merged (global) ordering —
     * this is the documented clobbering of the events' relative times.
     * The first event's relative time is its absolute time. */
    for (size_t i = 0; i < result.size(); ++i) {
        int dticks;       /* assumes tick deltas fit in int — TODO confirm */
        double dmusec;
        if (i == 0) {
            dticks = result[i]->absolute_ticks;
            dmusec = result[i]->absolute_musec;
        } else {
            dticks = result[i]->absolute_ticks - result[i-1]->absolute_ticks;
            dmusec = result[i]->absolute_musec - result[i-1]->absolute_musec;
        }
        result[i]->relative_ticks = dticks;
        result[i]->relative_musec = dmusec;
    }

    return result;
}