void Song::setTempo() { Engine::mixer()->requestChangeInModel(); const bpm_t tempo = ( bpm_t ) m_tempoModel.value(); PlayHandleList & playHandles = Engine::mixer()->playHandles(); for( PlayHandleList::Iterator it = playHandles.begin(); it != playHandles.end(); ++it ) { NotePlayHandle * nph = dynamic_cast<NotePlayHandle *>( *it ); if( nph && !nph->isReleased() ) { nph->lock(); nph->resize( tempo ); nph->unlock(); } } Engine::mixer()->doneChangeInModel(); Engine::updateFramesPerTick(); m_vstSyncController.setTempo( tempo ); emit tempoChanged( tempo ); }
bool NotePlayHandle::operator==( const NotePlayHandle & _nph ) const
{
	// Two handles are considered equal only when every musical property
	// and every piece of playback state matches. Each mismatch bails out
	// immediately, in the same order the properties are compared, so the
	// evaluation sequence is identical to a single chained conjunction.
	if( length() != _nph.length() )
	{
		return false;
	}
	if( pos() != _nph.pos() )
	{
		return false;
	}
	if( key() != _nph.key() )
	{
		return false;
	}
	if( getVolume() != _nph.getVolume() )
	{
		return false;
	}
	if( getPanning() != _nph.getPanning() )
	{
		return false;
	}
	if( m_instrumentTrack != _nph.m_instrumentTrack )
	{
		return false;
	}
	if( m_frames != _nph.m_frames )
	{
		return false;
	}
	if( offset() != _nph.offset() )
	{
		return false;
	}
	if( m_totalFramesPlayed != _nph.m_totalFramesPlayed )
	{
		return false;
	}
	if( m_released != _nph.m_released )
	{
		return false;
	}
	if( m_hasParent != _nph.m_hasParent )
	{
		return false;
	}
	if( m_origBaseNote != _nph.m_origBaseNote )
	{
		return false;
	}
	if( m_muted != _nph.m_muted )
	{
		return false;
	}
	if( m_midiChannel != _nph.m_midiChannel )
	{
		return false;
	}
	// Last property: the handle's origin.
	return m_origin == _nph.m_origin;
}
// Render one mixer period of audio into _working_buffer.
// Pushes any pending MIDI pitch/pitch-range changes to fluidsynth, then
// interleaves rendering with noteon/noteoff events: pending note events in
// m_playingNotes are consumed in order of their frame offset within the
// period, rendering the audio between consecutive events so each event
// lands on its exact frame.
// NOTE(review): m_playingNotes is read ([0], size(), indexOf()) without
// holding m_playingNotesMutex; only removals are locked — presumably other
// writers are serialized elsewhere. Verify against the code that appends
// to m_playingNotes.
void sf2Instrument::play( sampleFrame * _working_buffer )
{
	const fpp_t frames = Engine::mixer()->framesPerPeriod();

	// set midi pitch for this period — only touch the synth (under
	// m_synthMutex) when the value actually changed, to avoid needless
	// locking on the audio path
	const int currentMidiPitch = instrumentTrack()->midiPitch();
	if( m_lastMidiPitch != currentMidiPitch )
	{
		m_lastMidiPitch = currentMidiPitch;
		m_synthMutex.lock();
		fluid_synth_pitch_bend( m_synth, m_channel, m_lastMidiPitch );
		m_synthMutex.unlock();
	}

	// same change-detection for the pitch-bend range (in semitones, per
	// the fluidsynth API)
	const int currentMidiPitchRange = instrumentTrack()->midiPitchRange();
	if( m_lastMidiPitchRange != currentMidiPitchRange )
	{
		m_lastMidiPitchRange = currentMidiPitchRange;
		m_synthMutex.lock();
		fluid_synth_pitch_wheel_sens( m_synth, m_channel,
						m_lastMidiPitchRange );
		m_synthMutex.unlock();
	}

	// if we have no new noteons/noteoffs, just render a period and call
	// it a day
	if( m_playingNotes.isEmpty() )
	{
		renderFrames( frames, _working_buffer );
		instrumentTrack()->processAudioBuffer( _working_buffer,
							frames, NULL );
		return;
	}

	// processing loop
	// go through noteplayhandles in processing order
	// currentFrame tracks how far into the period we have rendered so far
	f_cnt_t currentFrame = 0;

	while( ! m_playingNotes.isEmpty() )
	{
		// find the note with lowest offset, i.e. the earliest
		// unprocessed event in this period (linear scan; the list is
		// expected to be short)
		NotePlayHandle * currentNote = m_playingNotes[0];
		for( int i = 1; i < m_playingNotes.size(); ++i )
		{
			SF2PluginData * currentData =
				static_cast<SF2PluginData *>(
						currentNote->m_pluginData );
			SF2PluginData * iData =
				static_cast<SF2PluginData *>(
					m_playingNotes[i]->m_pluginData );
			if( currentData->offset > iData->offset )
			{
				currentNote = m_playingNotes[i];
			}
		}

		// process the current note:
		// first see if we're synced in frame count — render the gap
		// between the last rendered frame and this event's offset
		SF2PluginData * currentData = static_cast<SF2PluginData *>(
						currentNote->m_pluginData );

		if( currentData->offset > currentFrame )
		{
			renderFrames( currentData->offset - currentFrame,
					_working_buffer + currentFrame );
			currentFrame = currentData->offset;
		}
		if( currentData->isNew )
		{
			noteOn( currentData );
			if( currentNote->isReleased() ) // if the note is released during the same period, we have to process it again for noteoff
			{
				// requeue the same entry as a noteoff at the
				// release frame instead of removing it
				currentData->isNew = false;
				currentData->offset =
					currentNote->framesBeforeRelease();
			}
			else // otherwise remove the handle
			{
				m_playingNotesMutex.lock();
				m_playingNotes.remove(
					m_playingNotes.indexOf( currentNote ) );
				m_playingNotesMutex.unlock();
			}
		}
		else
		{
			// entry is a pending noteoff: fire it and drop it
			noteOff( currentData );
			m_playingNotesMutex.lock();
			m_playingNotes.remove(
				m_playingNotes.indexOf( currentNote ) );
			m_playingNotesMutex.unlock();
		}
	}

	// render whatever remains of the period after the last event
	if( currentFrame < frames )
	{
		renderFrames( frames - currentFrame,
				_working_buffer + currentFrame );
	}
	instrumentTrack()->processAudioBuffer( _working_buffer, frames, NULL );
}
// Schedule the notes of this track that start at _start for playback.
// Collects the relevant track content objects (a single TCO when
// _tco_num >= 0 — e.g. BB-pattern playback — otherwise every TCO in the
// played range), then for each non-muted Pattern acquires a NotePlayHandle
// for every note whose position equals the current start time and hands it
// to the mixer.
// Returns true when at least one note was scheduled; false when the track
// has no instrument, the lock could not be taken, or no TCOs matched.
// NOTE(review): notes are assumed to be sorted by position — the skip loop
// and the `pos() == cur_start` scan rely on it; confirm Pattern::notes()
// guarantees ordering.
bool InstrumentTrack::play( const MidiTime & _start, const fpp_t _frames,
					const f_cnt_t _offset, int _tco_num )
{
	// nothing to play without an instrument; tryLock avoids blocking the
	// audio thread if the track is busy elsewhere
	if( ! m_instrument || ! tryLock() )
	{
		return false;
	}
	const float frames_per_tick = Engine::framesPerTick();

	tcoVector tcos;
	::BBTrack * bb_track = NULL;
	if( _tco_num >= 0 )
	{
		// explicit TCO requested (BB-track playback)
		TrackContentObject * tco = getTCO( _tco_num );
		tcos.push_back( tco );
		bb_track = BBTrack::findBBTrack( _tco_num );
	}
	else
	{
		// song playback: gather every TCO overlapping this period
		getTCOsInRange( tcos, _start, _start + static_cast<int>(
					_frames / frames_per_tick ) );
	}

	// Handle automation: detuning — advance the per-note automation of
	// handles already in flight
	for( NotePlayHandleList::Iterator it = m_processHandles.begin();
				it != m_processHandles.end(); ++it )
	{
		( *it )->processMidiTime( _start );
	}

	if ( tcos.size() == 0 )
	{
		unlock();
		return false;
	}

	bool played_a_note = false; // will be return variable

	for( tcoVector::Iterator it = tcos.begin(); it != tcos.end(); ++it )
	{
		Pattern* p = dynamic_cast<Pattern*>( *it );
		// everything which is not a pattern or muted won't be played
		if( p == NULL || ( *it )->isMuted() )
		{
			continue;
		}
		// translate the song-global start time into pattern-local
		// time when playing the whole song
		MidiTime cur_start = _start;
		if( _tco_num < 0 )
		{
			cur_start -= p->startPosition();
		}

		// get all notes from the given pattern...
		const NoteVector & notes = p->notes();
		// ...and set our index to zero
		NoteVector::ConstIterator nit = notes.begin();

		// very effective algorithm for playing notes that are
		// positioned within the current sample-frame
		if( cur_start > 0 )
		{
			// skip notes which are positioned before the start
			// time
			while( nit != notes.end() &&
					( *nit )->pos() < cur_start )
			{
				++nit;
			}
		}

		// schedule every note starting exactly at cur_start
		Note * cur_note;
		while( nit != notes.end() &&
				( cur_note = *nit )->pos() == cur_start )
		{
			// zero-length notes are placeholders and not played
			if( cur_note->length() != 0 )
			{
				const f_cnt_t note_frames =
					cur_note->length().frames(
							frames_per_tick );

				NotePlayHandle* notePlayHandle =
					NotePlayHandleManager::acquire(
						this, _offset,
						note_frames, *cur_note );
				notePlayHandle->setBBTrack( bb_track );
				// are we playing global song?
				if( _tco_num < 0 )
				{
					// then set song-global offset of
					// pattern in order to properly
					// perform the note detuning
					notePlayHandle->
						setSongGlobalParentOffset(
							p->startPosition() );
				}

				Engine::mixer()->addPlayHandle(
							notePlayHandle );
				played_a_note = true;
			}
			++nit;
		}
	}
	unlock();
	return played_a_note;
}