Example #1
void patmanInstrument::setFile( const QString & _patch_file, bool _rename )
{
	if( _patch_file.size() <= 0 )
	{
		m_patchFile = QString::null;
		return;
	}

	// is current instrument-track-name equal to previous-filename??
	if( _rename &&
		( instrumentTrack()->name() ==
					QFileInfo( m_patchFile ).fileName() ||
				   	m_patchFile == "" ) )
	{
		// then set it to new one
		instrumentTrack()->setName( QFileInfo( _patch_file
								).fileName() );
	}
	// else we don't touch the instrument-track-name, because the user
	// named it themselves

	m_patchFile = sampleBuffer::tryToMakeRelative( _patch_file );
	LoadErrors error = loadPatch( sampleBuffer::tryToMakeAbsolute(
								_patch_file ) );
	if( error )
	{
		printf("Load error\n");
	}

	emit fileChanged();
}
Example #2
// Could we get iph-based instruments support sample-exact models by using a
// frame-length of 1 while rendering?
void sf2Instrument::play( sampleFrame * _working_buffer )
{
	const fpp_t frames = engine::mixer()->framesPerPeriod();

	m_synthMutex.lock();

	const int currentMidiPitch = instrumentTrack()->midiPitch();
	if( m_lastMidiPitch != currentMidiPitch )
	{
		m_lastMidiPitch = currentMidiPitch;
		fluid_synth_pitch_bend( m_synth, m_channel, m_lastMidiPitch );
	}

	const int currentMidiPitchRange = instrumentTrack()->midiPitchRange();
	if( m_lastMidiPitchRange != currentMidiPitchRange )
	{
		m_lastMidiPitchRange = currentMidiPitchRange;
		fluid_synth_pitch_wheel_sens( m_synth, m_channel, m_lastMidiPitchRange );
	}

	if( m_internalSampleRate < engine::mixer()->processingSampleRate() &&
							m_srcState != NULL )
	{
		const fpp_t f = frames * m_internalSampleRate / engine::mixer()->processingSampleRate();
#ifdef __GNUC__
		sampleFrame tmp[f];
#else
		sampleFrame * tmp = new sampleFrame[f];
#endif
		fluid_synth_write_float( m_synth, f, tmp, 0, 2, tmp, 1, 2 );

		SRC_DATA src_data;
		src_data.data_in = tmp[0];
		src_data.data_out = _working_buffer[0];
		src_data.input_frames = f;
		src_data.output_frames = frames;
		src_data.src_ratio = (double) frames / f;
		src_data.end_of_input = 0;
		int error = src_process( m_srcState, &src_data );
#ifndef __GNUC__
		delete[] tmp;
#endif
		if( error )
		{
			qCritical( "sf2Instrument: error while resampling: %s", src_strerror( error ) );
		}
		if( src_data.output_frames_gen > frames )
		{
			qCritical( "sf2Instrument: not enough frames: %ld / %d", src_data.output_frames_gen, frames );
		}
	}
	else
	{
		fluid_synth_write_float( m_synth, frames, _working_buffer, 0, 2, _working_buffer, 1, 2 );
	}
	m_synthMutex.unlock();

	instrumentTrack()->processAudioBuffer( _working_buffer, frames, NULL );
}
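
The resampling branch above uses libsamplerate. Below is a self-contained sketch of the same SRC_DATA / src_process sequence; the buffer sizes and the converter choice are illustrative rather than taken from the LMMS source:

#include <samplerate.h>
#include <cstdio>
#include <vector>

int main()
{
	const int channels = 2;		// interleaved stereo, like sampleFrame
	const long inFrames  = 128;	// frames rendered at the internal rate
	const long outFrames = 256;	// frames wanted at the processing rate

	std::vector<float> in( inFrames * channels, 0.0f );
	std::vector<float> out( outFrames * channels, 0.0f );

	int error = 0;
	SRC_STATE * state = src_new( SRC_SINC_MEDIUM_QUALITY, channels, &error );
	if( state == NULL )
	{
		std::fprintf( stderr, "src_new failed: %s\n", src_strerror( error ) );
		return 1;
	}

	SRC_DATA data;
	data.data_in       = in.data();
	data.data_out      = out.data();
	data.input_frames  = inFrames;
	data.output_frames = outFrames;
	data.src_ratio     = (double) outFrames / inFrames;	// same role as frames / f above
	data.end_of_input  = 0;					// streaming use, more input follows

	error = src_process( state, &data );
	if( error )
	{
		std::fprintf( stderr, "error while resampling: %s\n", src_strerror( error ) );
	}

	src_delete( state );
	return error;
}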
Example #3
void vestigeInstrument::loadFile( const QString & _file )
{
	m_pluginMutex.lock();
	const bool set_ch_name = ( m_plugin != NULL &&
        	instrumentTrack()->name() == m_plugin->name() ) ||
            	instrumentTrack()->name() == InstrumentTrack::tr( "Default preset" ) ||
            	instrumentTrack()->name() == displayName();

	m_pluginMutex.unlock();

	if ( m_plugin != NULL )
	{
		closePlugin();
	}

	m_pluginDLL = _file;
	textFloat * tf = textFloat::displayMessage(
			tr( "Loading plugin" ),
			tr( "Please wait while loading VST-plugin..." ),
			PLUGIN_NAME::getIconPixmap( "logo", 24, 24 ), 0 );

	m_pluginMutex.lock();
	m_plugin = new VstPlugin( m_pluginDLL );
	if( m_plugin->failed() )
	{
		m_pluginMutex.unlock();
		closePlugin();
		delete tf;
		QMessageBox::information( 0,
				tr( "Failed loading VST-plugin" ),
				tr( "The VST-plugin %1 could not "
					"be loaded for some reason.\n"
					"If it runs with other VST-"
					"software under Linux, please "
					"contact an LMMS-developer!"
					).arg( m_pluginDLL ),
						QMessageBox::Ok );
		return;
	}

	m_plugin->showEditor( NULL, false );

	if( set_ch_name )
	{
		instrumentTrack()->setName( m_plugin->name() );
	}

	m_pluginMutex.unlock();

	emit dataChanged();

	delete tf;
}
Example #4
void Pattern::clearNotes()
{
	instrumentTrack()->lock();
	for( NoteVector::Iterator it = m_notes.begin(); it != m_notes.end();
									++it )
	{
		delete *it;
	}
	m_notes.clear();
	instrumentTrack()->unlock();

	checkType();
	emit dataChanged();
}
Example #5
void ZynAddSubFxInstrument::updatePitchRange()
{
    m_pluginMutex.lock();
    if( m_remotePlugin )
    {
        m_remotePlugin->sendMessage( RemotePlugin::message( IdZasfSetPitchWheelBendRange ).
                                     addInt( instrumentTrack()->midiPitchRange() ) );
    }
    else
    {
        m_plugin->setPitchWheelBendRange( instrumentTrack()->midiPitchRange() );
    }
    m_pluginMutex.unlock();
}
Example #6
CarlaInstrument::~CarlaInstrument()
{
    Engine::mixer()->removePlayHandles( instrumentTrack() );

    if (fHost.resourceDir != NULL)
    {
        std::free((char*)fHost.resourceDir);
        fHost.resourceDir = NULL;
    }

    if (fHost.uiName != NULL)
    {
        std::free((char*)fHost.uiName);
        fHost.uiName = NULL;
    }

    if (fHandle == NULL)
        return;

    if (fDescriptor->deactivate != NULL)
        fDescriptor->deactivate(fHandle);

    if (fDescriptor->cleanup != NULL)
        fDescriptor->cleanup(fHandle);

    fHandle = NULL;
}
Example #7
void NotePlayHandle::noteOff( const f_cnt_t _s )
{
	if( m_released )
	{
		return;
	}

	// first note-off all sub-notes
	for( NotePlayHandleList::Iterator it = m_subNotes.begin(); it != m_subNotes.end(); ++it )
	{
		( *it )->noteOff( _s );
	}

	// then set some variables indicating release-state
	m_framesBeforeRelease = _s;
	m_releaseFramesToDo = qMax<f_cnt_t>( 0, m_instrumentTrack->m_soundShaping.releaseFrames() );

	if( hasParent() || !instrumentTrack()->isArpeggioEnabled() )
	{
		// send MidiNoteOff event
		m_instrumentTrack->processOutEvent(
			MidiEvent( MidiNoteOff, midiChannel(), midiKey(), 0 ),
			MidiTime::fromFrames( m_framesBeforeRelease, engine::framesPerTick() ) );
	}

	// inform attached components that the MIDI note has finished (used for recording in Piano Roll)
	if( m_origin == OriginMidiInput )
	{
		setLength( MidiTime( static_cast<f_cnt_t>( totalFramesPlayed() / engine::framesPerTick() ) ) );
		m_instrumentTrack->midiNoteOff( *this );
	}

	m_released = true;
}
Example #8
void patmanInstrument::playNote( notePlayHandle * _n,
						sampleFrame * _working_buffer )
{
	if( m_patchFile.isEmpty() )
	{
		return;
	}

	const fpp_t frames = _n->framesLeftForCurrentPeriod();

	if( !_n->m_pluginData )
	{
		selectSample( _n );
	}
	handle_data * hdata = (handle_data *)_n->m_pluginData;

	float play_freq = hdata->tuned ? _n->frequency() :
						hdata->sample->frequency();

	if( hdata->sample->play( _working_buffer, hdata->state, frames,
					play_freq, m_loopedModel.value() ) )
	{
		applyRelease( _working_buffer, _n );
		instrumentTrack()->processAudioBuffer( _working_buffer,
								frames, _n );
	}
}
Example #9
ZynAddSubFxInstrument::ZynAddSubFxInstrument(
									InstrumentTrack * _instrumentTrack ) :
	Instrument( _instrumentTrack, &zynaddsubfx_plugin_descriptor ),
	m_hasGUI( false ),
	m_plugin( NULL ),
	m_remotePlugin( NULL ),
	m_portamentoModel( 0, 0, 127, 1, this, tr( "Portamento" ) ),
	m_filterFreqModel( 64, 0, 127, 1, this, tr( "Filter Frequency" ) ),
	m_filterQModel( 64, 0, 127, 1, this, tr( "Filter Resonance" ) ),
	m_bandwidthModel( 64, 0, 127, 1, this, tr( "Bandwidth" ) ),
	m_fmGainModel( 127, 0, 127, 1, this, tr( "FM Gain" ) ),
	m_resCenterFreqModel( 64, 0, 127, 1, this, tr( "Resonance Center Frequency" ) ),
	m_resBandwidthModel( 64, 0, 127, 1, this, tr( "Resonance Bandwidth" ) ),
	m_forwardMidiCcModel( true, this, tr( "Forward MIDI Control Change Events" ) )
{
	initPlugin();

	connect( &m_portamentoModel, SIGNAL( dataChanged() ), this, SLOT( updatePortamento() ) );
	connect( &m_filterFreqModel, SIGNAL( dataChanged() ), this, SLOT( updateFilterFreq() ) );
	connect( &m_filterQModel, SIGNAL( dataChanged() ), this, SLOT( updateFilterQ() ) );
	connect( &m_bandwidthModel, SIGNAL( dataChanged() ), this, SLOT( updateBandwidth() ) );
	connect( &m_fmGainModel, SIGNAL( dataChanged() ), this, SLOT( updateFmGain() ) );
	connect( &m_resCenterFreqModel, SIGNAL( dataChanged() ), this, SLOT( updateResCenterFreq() ) );
	connect( &m_resBandwidthModel, SIGNAL( dataChanged() ), this, SLOT( updateResBandwidth() ) );

	// now we need a play-handle which takes care of calling play()
	InstrumentPlayHandle * iph = new InstrumentPlayHandle( this, _instrumentTrack );
	Engine::mixer()->addPlayHandle( iph );

	connect( Engine::mixer(), SIGNAL( sampleRateChanged() ),
			this, SLOT( reloadPlugin() ) );

	connect( instrumentTrack()->pitchRangeModel(), SIGNAL( dataChanged() ),
				this, SLOT( updatePitchRange() ) );
}
Example #10
void CarlaInstrument::play(sampleFrame* workingBuffer)
{
    const uint bufsize = Engine::mixer()->framesPerPeriod();

    std::memset(workingBuffer, 0, sizeof(sample_t)*bufsize*DEFAULT_CHANNELS);

    if (fHandle == NULL)
    {
        instrumentTrack()->processAudioBuffer(workingBuffer, bufsize, NULL);
        return;
    }

    // set time info
    Song * const s = Engine::getSong();
    fTimeInfo.playing  = s->isPlaying();
    fTimeInfo.frame    = s->getPlayPos(s->playMode()).frames(Engine::framesPerTick());
    fTimeInfo.usecs    = s->getMilliseconds()*1000;
    fTimeInfo.bbt.bar  = s->getTacts() + 1;
    fTimeInfo.bbt.beat = s->getBeat() + 1;
    fTimeInfo.bbt.tick = s->getBeatTicks();
    fTimeInfo.bbt.barStartTick   = ticksPerBeat*s->getTimeSigModel().getNumerator()*s->getTacts();
    fTimeInfo.bbt.beatsPerBar    = s->getTimeSigModel().getNumerator();
    fTimeInfo.bbt.beatType       = s->getTimeSigModel().getDenominator();
    fTimeInfo.bbt.ticksPerBeat   = ticksPerBeat;
    fTimeInfo.bbt.beatsPerMinute = s->getTempo();

    float buf1[bufsize];
    float buf2[bufsize];
    float* rBuf[] = { buf1, buf2 };
    std::memset(buf1, 0, sizeof(float)*bufsize);
    std::memset(buf2, 0, sizeof(float)*bufsize);

    {
        const QMutexLocker ml(&fMutex);
        fDescriptor->process(fHandle, rBuf, rBuf, bufsize, fMidiEvents, fMidiEventCount);
        fMidiEventCount = 0;
    }

    for (uint i=0; i < bufsize; ++i)
    {
        workingBuffer[i][0] = buf1[i];
        workingBuffer[i][1] = buf2[i];
    }

    instrumentTrack()->processAudioBuffer(workingBuffer, bufsize, NULL);
}
Example #11
void vibed::playNote( notePlayHandle * _n, sampleFrame * _working_buffer )
{
	if ( _n->totalFramesPlayed() == 0 || _n->m_pluginData == NULL )
	{
		_n->m_pluginData = new stringContainer( _n->frequency(),
				engine::getMixer()->processingSampleRate(),
						__sampleLength );
		
		for( Uint8 i = 0; i < 9; ++i )
		{
			if( m_powerButtons[i]->value() )
			{
				static_cast<stringContainer*>(
					_n->m_pluginData )->addString(
				m_harmonics[i]->value(),
				m_pickKnobs[i]->value(),
				m_pickupKnobs[i]->value(),
				m_graphs[i]->samples(),
				m_randomKnobs[i]->value(),
				m_stiffnessKnobs[i]->value(),
				m_detuneKnobs[i]->value(),
				static_cast<int>(
					m_lengthKnobs[i]->value() ),
				m_impulses[i]->value(),
				i );
			}
		}
	}

	const fpp_t frames = _n->framesLeftForCurrentPeriod();
	stringContainer * ps = static_cast<stringContainer *>(
							_n->m_pluginData );

	for( fpp_t i = 0; i < frames; ++i )
	{
		_working_buffer[i][0] = 0.0f;
		_working_buffer[i][1] = 0.0f;
		Uint8 s = 0;
		for( Uint8 string = 0; string < 9; ++string )
		{
			if( ps->exists( string ) )
			{
				// pan: 0 -> left, 1 -> right
				const float pan = (
					m_panKnobs[string]->value() + 1 ) /
									2.0f;
				const sample_t sample =
						ps->getStringSample( s ) *
					m_volumeKnobs[string]->value() / 100.0f;
				_working_buffer[i][0] += ( 1.0f - pan ) * sample;
				_working_buffer[i][1] += pan * sample;
				s++;
			}
		}
	}

	instrumentTrack()->processAudioBuffer( _working_buffer, frames, _n );
}
Example #12
void lb302Synth::play( sampleFrame * _working_buffer )
{
	//printf(".");
	const fpp_t frames = engine::getMixer()->framesPerPeriod();

	process( _working_buffer, frames); 
	instrumentTrack()->processAudioBuffer( _working_buffer, frames,
									NULL );
}
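
Despite differences between the synths, every play() above follows the same shape: fetch the period length from the mixer, render into the working buffer, then hand the buffer to instrumentTrack()->processAudioBuffer(). A minimal sketch of that pattern follows; ExampleInstrument and example_plugin_descriptor are placeholders, not LMMS names, and the Engine/engine spelling varies between the LMMS versions shown above:

class ExampleInstrument : public Instrument
{
public:
	ExampleInstrument( InstrumentTrack * _track ) :
		Instrument( _track, &example_plugin_descriptor )	// placeholder descriptor
	{
	}

	virtual void play( sampleFrame * _working_buffer )
	{
		// one mixer period, as in the examples above
		const fpp_t frames = Engine::mixer()->framesPerPeriod();

		// render something into the working buffer (here: silence)
		for( fpp_t f = 0; f < frames; ++f )
		{
			_working_buffer[f][0] = 0.0f;	// left
			_working_buffer[f][1] = 0.0f;	// right
		}

		// hand the finished period to the owning track;
		// NULL because track-level play() has no NotePlayHandle
		instrumentTrack()->processAudioBuffer( _working_buffer, frames, NULL );
	}
};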
Example #13
Note * Pattern::addNote( const Note & _new_note, const bool _quant_pos )
{
	Note * new_note = new Note( _new_note );
	if( _quant_pos && gui->pianoRoll() )
	{
		new_note->quantizePos( gui->pianoRoll()->quantization() );
	}

	instrumentTrack()->lock();
	if( m_notes.size() == 0 || m_notes.back()->pos() <= new_note->pos() )
	{
		m_notes.push_back( new_note );
	}
	else
	{
		// simple algorithm for inserting the note between two
		// notes with smaller and greater position
		// maybe it could be optimized by starting in the middle and
		// going forward or backward but note-inserting isn't that
		// time-critical since it is usually not done while playing...
		long new_note_abs_time = new_note->pos();
		NoteVector::Iterator it = m_notes.begin();

		while( it != m_notes.end() &&
					( *it )->pos() < new_note_abs_time )
		{
			++it;
		}

		m_notes.insert( it, new_note );
	}
	instrumentTrack()->unlock();

	checkType();
	changeLength( length() );

	emit dataChanged();

	updateBBTrack();

	return new_note;
}
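
The comment in the insertion branch above suggests the linear scan could be replaced by a binary search. Here is a sketch of that idea using std::lower_bound on a position-sorted vector; Note and its pos field are simplified stand-ins for the LMMS classes:

#include <algorithm>
#include <vector>

struct Note
{
	long pos;	// stand-in for Note::pos()
};

void insertSorted( std::vector<Note *> & notes, Note * new_note )
{
	// first element whose position is not smaller than the new note's,
	// i.e. the same spot the linear while-loop above ends up at
	std::vector<Note *>::iterator it = std::lower_bound(
			notes.begin(), notes.end(), new_note,
			[]( const Note * a, const Note * b )
			{
				return a->pos < b->pos;
			} );

	notes.insert( it, new_note );
}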
Example #14
ZynAddSubFxInstrument::~ZynAddSubFxInstrument()
{
	Engine::mixer()->removePlayHandles( instrumentTrack() );

	m_pluginMutex.lock();
	delete m_plugin;
	delete m_remotePlugin;
	m_plugin = NULL;
	m_remotePlugin = NULL;
	m_pluginMutex.unlock();
}
Example #15
sf2Instrument::~sf2Instrument()
{
	engine::mixer()->removePlayHandles( instrumentTrack() );
	freeFont();
	delete_fluid_synth( m_synth );
	delete_fluid_settings( m_settings );
	if( m_srcState != NULL )
	{
		src_delete( m_srcState );
	}

}
Example #16
void lb302Synth::play( sampleFrame * _working_buffer )
{
	while( ! m_notes.isEmpty() )
	{
		processNote( m_notes.takeFirst() );
	};
	
	const fpp_t frames = engine::mixer()->framesPerPeriod();

	process( _working_buffer, frames );
	instrumentTrack()->processAudioBuffer( _working_buffer, frames, NULL );
	release_frame = 0;
}
Example #17
sf2Instrument::~sf2Instrument()
{
	Engine::mixer()->removePlayHandlesOfTypes( instrumentTrack(),
				PlayHandle::TypeNotePlayHandle
				| PlayHandle::TypeInstrumentPlayHandle );
	freeFont();
	delete_fluid_synth( m_synth );
	delete_fluid_settings( m_settings );
	if( m_srcState != NULL )
	{
		src_delete( m_srcState );
	}

}
Example #18
void Pattern::removeNote( const Note * _note_to_del )
{
	instrumentTrack()->lock();
	NoteVector::Iterator it = m_notes.begin();
	while( it != m_notes.end() )
	{
		if( *it == _note_to_del )
		{
			delete *it;
			m_notes.erase( it );
			break;
		}
		++it;
	}
	instrumentTrack()->unlock();

	checkType();
	changeLength( length() );

	emit dataChanged();

	updateBBTrack();
}
Example #19
void ZynAddSubFxInstrument::play( sampleFrame * _buf )
{
    m_pluginMutex.lock();
    if( m_remotePlugin )
    {
        m_remotePlugin->process( NULL, _buf );
    }
    else
    {
        m_plugin->processAudio( _buf );
    }
    m_pluginMutex.unlock();
    instrumentTrack()->processAudioBuffer( _buf, Engine::mixer()->framesPerPeriod(), NULL );
}
Example #20
void sf2Instrument::playNote( NotePlayHandle * _n, sampleFrame * )
{
	if( _n->isMasterNote() || ( _n->hasParent() && _n->isReleased() ) )
	{
		return;
	}
	
	const f_cnt_t tfp = _n->totalFramesPlayed();

	if( tfp == 0 )
	{
		const float LOG440 = 2.643452676f;

		int midiNote = (int)floor( 12.0 * ( log2( _n->unpitchedFrequency() ) - LOG440 ) - 4.0 );

		// out of range?
		if( midiNote <= 0 || midiNote >= 128 )
		{
			return;
		}
		const int baseVelocity = instrumentTrack()->midiPort()->baseVelocity();
		
		SF2PluginData * pluginData = new SF2PluginData;
		pluginData->midiNote = midiNote;
		pluginData->lastPanning = 0;
		pluginData->lastVelocity = _n->midiVelocity( baseVelocity );
		pluginData->fluidVoice = NULL;
		pluginData->isNew = true;
		pluginData->offset = _n->offset();
		pluginData->noteOffSent = false;

		_n->m_pluginData = pluginData;
		
		// insert the nph into the playing notes vector
		m_playingNotesMutex.lock();
		m_playingNotes.append( _n );
		m_playingNotesMutex.unlock();
	}
	else if( _n->isReleased() ) // note is released during this period
	{
		SF2PluginData * pluginData = static_cast<SF2PluginData *>( _n->m_pluginData );
		pluginData->offset = _n->framesBeforeRelease();
		pluginData->isNew = false;
		
		m_playingNotesMutex.lock();
		m_playingNotes.append( _n );
		m_playingNotesMutex.unlock();

	}
}
Example #21
void lb302Synth::play( sampleFrame * _working_buffer )
{
	m_notesMutex.lock();
	while( ! m_notes.isEmpty() )
	{
		processNote( m_notes.takeFirst() );
	};
	m_notesMutex.unlock();
	
	const fpp_t frames = Engine::mixer()->framesPerPeriod();

	process( _working_buffer, frames );
	instrumentTrack()->processAudioBuffer( _working_buffer, frames, NULL );
//	release_frame = 0; //removed for issue # 1432
}
Example #22
vestigeInstrument::~vestigeInstrument()
{
	if (p_subWindow != NULL) {
		delete p_subWindow;
		p_subWindow = NULL;
	}

	if (knobFModel != NULL) {
		delete []knobFModel;
		knobFModel = NULL;
	}

	engine::mixer()->removePlayHandles( instrumentTrack() );
	closePlugin();
}
Example #23
f_cnt_t NotePlayHandle::framesLeft() const
{
	if( instrumentTrack()->isSustainPedalPressed() )
	{
		return 4*engine::mixer()->framesPerPeriod();
	}
	else if( m_released && actualReleaseFramesToDo() == 0 )
	{
		return m_framesBeforeRelease;
	}
	else if( m_released )
	{
		return m_framesBeforeRelease + m_releaseFramesToDo - m_releaseFramesDone;
	}
	return m_frames+actualReleaseFramesToDo()-m_totalFramesPlayed;
}
Example #24
void vestigeInstrument::play( sampleFrame * _buf )
{
	m_pluginMutex.lock();
	if( m_plugin == NULL )
	{
		m_pluginMutex.unlock();
		return;
	}

	m_plugin->process( NULL, _buf );

	const fpp_t frames = engine::mixer()->framesPerPeriod();

	instrumentTrack()->processAudioBuffer( _buf, frames, NULL );

	m_pluginMutex.unlock();
}
Example #25
void kickerInstrument::playNote( NotePlayHandle * _n,
						sampleFrame * _working_buffer )
{
	const fpp_t frames = _n->framesLeftForCurrentPeriod();
	const f_cnt_t offset = _n->noteOffset();
	const float decfr = m_decayModel.value() *
		Engine::mixer()->processingSampleRate() / 1000.0f;
	const f_cnt_t tfp = _n->totalFramesPlayed();

	if ( tfp == 0 )
	{
		_n->m_pluginData = new SweepOsc(
					DistFX( m_distModel.value(),
							m_gainModel.value() ),
					m_startNoteModel.value() ? _n->frequency() : m_startFreqModel.value(),
					m_endNoteModel.value() ? _n->frequency() : m_endFreqModel.value(),
					m_noiseModel.value() * m_noiseModel.value(),
					m_clickModel.value() * 0.25f,
					m_slopeModel.value(),
					m_envModel.value(),
					m_distModel.value(),
					m_distEndModel.value(),
					decfr );
	}
	else if( tfp > decfr && !_n->isReleased() )
	{
		_n->noteOff();
	}

	SweepOsc * so = static_cast<SweepOsc *>( _n->m_pluginData );
	so->update( _working_buffer + offset, frames, Engine::mixer()->processingSampleRate() );

	if( _n->isReleased() )
	{
		const float done = _n->releaseFramesDone();
		const float desired = desiredReleaseFrames();
		for( fpp_t f = 0; f < frames; ++f )
		{
			const float fac = ( done+f < desired ) ? ( 1.0f - ( ( done+f ) / desired ) ) : 0;
			_working_buffer[f+offset][0] *= fac;
			_working_buffer[f+offset][1] *= fac;
		}
	}

	instrumentTrack()->processAudioBuffer( _working_buffer, frames + offset, _n );
}
Example #26
void bitInvader::playNote( NotePlayHandle * _n,
						sampleFrame * _working_buffer )
{
	if ( _n->totalFramesPlayed() == 0 || _n->m_pluginData == NULL )
	{
	
		float factor;
		if( !m_normalize.value() )
		{
			factor = 1.0f;
		}
		else
		{
			factor = m_normalizeFactor;
		}

		_n->m_pluginData = new bSynth(
					const_cast<float*>( m_graph.samples() ),
					m_graph.length(),
					_n,
					m_interpolation.value(), factor,
				Engine::mixer()->processingSampleRate() );
	}

	const fpp_t frames = _n->framesLeftForCurrentPeriod();
	const f_cnt_t offset = _n->noteOffset();

	bSynth * ps = static_cast<bSynth *>( _n->m_pluginData );
	for( fpp_t frame = offset; frame < frames + offset; ++frame )
	{
		const sample_t cur = ps->nextStringSample();
		for( ch_cnt_t chnl = 0; chnl < DEFAULT_CHANNELS; ++chnl )
		{
			_working_buffer[frame][chnl] = cur;
		}
	}

	applyRelease( _working_buffer, _n );

	instrumentTrack()->processAudioBuffer( _working_buffer, frames + offset, _n );
}
Example #27
void sfxrInstrument::playNote( NotePlayHandle * _n, sampleFrame * _working_buffer )
{
    float currentSampleRate = Engine::mixer()->processingSampleRate();

    fpp_t frameNum = _n->framesLeftForCurrentPeriod();
    const f_cnt_t offset = _n->noteOffset();
    if ( _n->totalFramesPlayed() == 0 || _n->m_pluginData == NULL )
    {
        _n->m_pluginData = new SfxrSynth( this );
    }
    else if( static_cast<SfxrSynth*>(_n->m_pluginData)->isPlaying() == false )
    {
        _n->noteOff();
        return;
    }

    int32_t pitchedFrameNum = (_n->frequency()/BaseFreq)*frameNum;

    pitchedFrameNum /= ( currentSampleRate / 44100 );

// debug code
//	qDebug( "pFN %d", pitchedFrameNum );

    sampleFrame * pitchedBuffer = new sampleFrame[pitchedFrameNum];
    static_cast<SfxrSynth*>(_n->m_pluginData)->update( pitchedBuffer, pitchedFrameNum );
    for( fpp_t i=0; i<frameNum; i++ )
    {
        for( ch_cnt_t j=0; j<DEFAULT_CHANNELS; j++ )
        {
            _working_buffer[i+offset][j] = pitchedBuffer[i*pitchedFrameNum/frameNum][j];
        }
    }

    delete[] pitchedBuffer;

    applyRelease( _working_buffer, _n );

    instrumentTrack()->processAudioBuffer( _working_buffer, frameNum + offset, _n );

}
Example #28
void NotePlayHandle::noteOff( const f_cnt_t _s )
{
	if( m_released )
	{
		return;
	}
	m_released = true;

	// first note-off all sub-notes
	for( NotePlayHandle * n : m_subNotes )
	{
		n->lock();
		n->noteOff( _s );
		n->unlock();
	}

	// then set some variables indicating release-state
	m_framesBeforeRelease = _s;
	m_releaseFramesToDo = qMax<f_cnt_t>( 0, actualReleaseFramesToDo() );

	if( hasParent() || ! m_instrumentTrack->isArpeggioEnabled() )
	{
		// send MidiNoteOff event
		m_instrumentTrack->processOutEvent(
				MidiEvent( MidiNoteOff, midiChannel(), midiKey(), 0 ),
				MidiTime::fromFrames( _s, Engine::framesPerTick() ),
				_s );
	}

	// inform attached components that the MIDI note has finished (used for recording in Piano Roll)
	if (!instrumentTrack()->isSustainPedalPressed())
	{
		if( m_origin == OriginMidiInput )
		{
			setLength( MidiTime( static_cast<f_cnt_t>( totalFramesPlayed() / Engine::framesPerTick() ) ) );
			m_instrumentTrack->midiNoteOff( *this );
		}
	}
}
Example #29
void ZynAddSubFxInstrument::loadFile( const QString & _file )
{
    const std::string fn = QSTR_TO_STDSTR( _file );
    if( m_remotePlugin )
    {
        m_remotePlugin->lock();
        m_remotePlugin->sendMessage( RemotePlugin::message( IdLoadPresetFile ).addString( fn ) );
        m_remotePlugin->waitForMessage( IdLoadPresetFile );
        m_remotePlugin->unlock();
    }
    else
    {
        m_pluginMutex.lock();
        m_plugin->loadPreset( fn );
        m_pluginMutex.unlock();
    }

    instrumentTrack()->setName( QFileInfo( _file ).baseName().replace( QRegExp( "^[0-9]{4}-" ), QString() ) );

    m_modifiedControllers.clear();

    emit settingsChanged();
}
Example #30
void sf2Instrument::playNote( NotePlayHandle * _n, sampleFrame * )
{
	const float LOG440 = 2.643452676f;

	const f_cnt_t tfp = _n->totalFramesPlayed();

	int midiNote = (int)floor( 12.0 * ( log2( _n->unpitchedFrequency() ) - LOG440 ) - 4.0 );

	// out of range?
	if( midiNote <= 0 || midiNote >= 128 )
	{
		return;
	}

	if( tfp == 0 )
	{
		SF2PluginData * pluginData = new SF2PluginData;
		pluginData->midiNote = midiNote;
		pluginData->lastPanning = -1;
		pluginData->lastVelocity = 127;
		pluginData->fluidVoice = NULL;

		_n->m_pluginData = pluginData;

		m_synthMutex.lock();

		// get list of current voice IDs so we can easily spot the new
		// voice after the fluid_synth_noteon() call
		const int poly = fluid_synth_get_polyphony( m_synth );
		fluid_voice_t * voices[poly];
		unsigned int id[poly];
		fluid_synth_get_voicelist( m_synth, voices, poly, -1 );
		for( int i = 0; i < poly; ++i )
		{
			id[i] = 0;
		}
		for( int i = 0; i < poly && voices[i]; ++i )
		{
			id[i] = fluid_voice_get_id( voices[i] );
		}

		const int baseVelocity = instrumentTrack()->midiPort()->baseVelocity();

		fluid_synth_noteon( m_synth, m_channel, midiNote, _n->midiVelocity( baseVelocity ) );

		// get new voice and save it
		fluid_synth_get_voicelist( m_synth, voices, poly, -1 );
		for( int i = 0; i < poly && voices[i]; ++i )
		{
			const unsigned int newID = fluid_voice_get_id( voices[i] );
			if( id[i] != newID || newID == 0 )
			{
				pluginData->fluidVoice = voices[i];
				break;
			}
		}

		m_synthMutex.unlock();

		m_notesRunningMutex.lock();
		++m_notesRunning[midiNote];
		m_notesRunningMutex.unlock();
	}

	SF2PluginData * pluginData = static_cast<SF2PluginData *>(
							_n->m_pluginData );
#ifdef SOMEONE_FIXED_PER_NOTE_PANNING
	if( pluginData->fluidVoice &&
			pluginData->lastPanning != _n->getPanning() )
	{
		const float pan = -500 +
			  ( (float)( _n->getPanning() - PanningLeft ) ) /
			  ( (float)( PanningRight - PanningLeft ) ) * 1000;

		m_synthMutex.lock();
		fluid_voice_gen_set( pluginData->fluidVoice, GEN_PAN, pan );
		fluid_voice_update_param( pluginData->fluidVoice, GEN_PAN );
		m_synthMutex.unlock();

		pluginData->lastPanning = _n->getPanning();
	}
#endif

	const float currentVelocity = _n->volumeLevel( tfp ) * instrumentTrack()->midiPort()->baseVelocity();
	if( pluginData->fluidVoice &&
			pluginData->lastVelocity != currentVelocity )
	{
		m_synthMutex.lock();
		fluid_voice_gen_set( pluginData->fluidVoice, GEN_VELOCITY, currentVelocity );
		fluid_voice_update_param( pluginData->fluidVoice, GEN_VELOCITY );
		// make sure FluidSynth modulates our changed GEN_VELOCITY via its internal
		// attenuation modulator, so the change takes effect (7 = Volume CC)
		fluid_synth_cc( m_synth, m_channel, 7, 127 );
		m_synthMutex.unlock();

		pluginData->lastVelocity = currentVelocity;
	}
}