Example #1
0
void AutomationPattern::flipY( int min, int max )
{
	timeMap tempMap = m_timeMap;
	timeMap::ConstIterator iterate = m_timeMap.lowerBound(0);
	float tempValue = 0;

	int numPoints = 0;

	for( int i = 0; ( iterate + i + 1 ) != m_timeMap.end() && ( iterate + i ) != m_timeMap.end() ; i++)
	{
		numPoints++;
	}

	for( int i = 0; i <= numPoints; i++ )
	{

		if ( min < 0 )
		{
			tempValue = valueAt( ( iterate + i ).key() ) * -1;
			putValue( MidiTime( (iterate + i).key() ) , tempValue, false);
		}
		else
		{
			tempValue = max - valueAt( ( iterate + i ).key() );
			putValue( MidiTime( (iterate + i).key() ) , tempValue, false);
		}
	}

	generateTangents();
	emit dataChanged();
}
Example #2
0
void pattern::ensureBeatNotes()
{
	// make sure, that all step-note exist
	for( int i = 0; i < m_steps; ++i )
	{
		bool found = false;
		for( NoteVector::Iterator it = m_notes.begin();
						it != m_notes.end(); ++it )
		{
			if( ( *it )->pos() ==
				i * MidiTime::ticksPerTact() /
					MidiTime::stepsPerTact() &&
							( *it )->length() <= 0 )
			{
				found = true;
				break;
			}
		}
		if( found == false )
		{
			addNote( note( MidiTime( 0 ), MidiTime( i *
				MidiTime::ticksPerTact() /
					MidiTime::stepsPerTact() ) ), false );
		}
	}
}
Example #3
0
void Pattern::ensureBeatNotes()
{
	// make sure, that all step-note exist
	for( int i = 0; i < m_steps; ++i )
	{
		bool found = false;
		NoteVector::Iterator it;

		for( it = m_notes.begin(); it != m_notes.end(); ++it )
		{
			Note *note = *it;
			// if a note in this position is the one we want
			if( note->pos() ==
				( i * MidiTime::ticksPerTact() ) / MidiTime::stepsPerTact()
				&& note->length() <= 0 )
			{
				found = true;
				break;
			}
		}
		if( found == false )
		{
			addNote( Note( MidiTime( 0 ), MidiTime( ( i *
				MidiTime::ticksPerTact() ) /
					MidiTime::stepsPerTact() ) ), false );
		}
	}

	// remove notes we no longer need:
	// that is, disabled notes that no longer fall to the steps of the new time sig

	for( NoteVector::Iterator it = m_notes.begin(); it != m_notes.end(); )
	{
		bool needed = false;
		Note *note = *it;

		for( int i = 0; i < m_steps; ++i )
		{
			if( note->pos() == ( i * MidiTime::ticksPerTact() ) / MidiTime::stepsPerTact()
				|| note->length() != 0 )
			{
				needed = true;
				break;
			}
		}
		if( needed == false )
		{
			delete note;
			it = m_notes.erase( it );
		}
		else {
			++it;
		}
	}
}
Example #4
0
// Connect _obj to this pattern.  When _search_dup is set and the model is
// already attached, nothing changes and the user is notified with a
// floating message.
void AutomationPattern::addObject( AutomatableModel * _obj, bool _search_dup )
{
	if( _search_dup )
	{
		objectVector::iterator it = m_objects.begin();
		while( it != m_objects.end() )
		{
			if( *it == _obj )
			{
				TextFloat::displayMessage( _obj->displayName(), tr( "Model is already connected "
												"to this pattern." ), embed::getIconPixmap( "automation" ), 2000 );
				return;
			}
			++it;
		}
	}

	// An unconnected, empty pattern gets seeded with the model's current
	// (inverse-scaled) value at position 0.
	if( m_objects.isEmpty() && !hasAutomation() )
	{
		putValue( MidiTime(0), _obj->inverseScaledValue( _obj->value<float>() ), false );
	}

	m_objects += _obj;

	// clean up our reference automatically when the model is destroyed
	connect( _obj, SIGNAL( destroyed( jo_id_t ) ),
			this, SLOT( objectDestroyed( jo_id_t ) ),
						Qt::DirectConnection );

	emit dataChanged();
}
// Connect _obj to this pattern.
// Returns false (and changes nothing) when _search_dup is set and the
// model is already attached; true on success.
bool AutomationPattern::addObject( AutomatableModel * _obj, bool _search_dup )
{
	if( _search_dup )
	{
		objectVector::iterator it = m_objects.begin();
		while( it != m_objects.end() )
		{
			if( *it == _obj )
			{
				return false;
			}
			++it;
		}
	}

	// An unconnected, empty pattern gets seeded with the model's current
	// (inverse-scaled) value at position 0.
	if( m_objects.isEmpty() && !hasAutomation() )
	{
		putValue( MidiTime(0), _obj->inverseScaledValue( _obj->value<float>() ), false );
	}

	m_objects += _obj;

	// clean up our reference automatically when the model is destroyed
	connect( _obj, SIGNAL( destroyed( jo_id_t ) ),
			this, SLOT( objectDestroyed( jo_id_t ) ),
						Qt::DirectConnection );

	emit dataChanged();

	return true;
}
// Handle a drop of an automatable model onto the automation track:
// create a new automation pattern at the drop position and attach the
// dropped model to it.
void AutomationTrackView::dropEvent( QDropEvent * _de )
{
	QString type = StringPairDrag::decodeKey( _de );
	QString val = StringPairDrag::decodeValue( _de );
	if( type == "automatable_model" )
	{
		AutomatableModel * mod = dynamic_cast<AutomatableModel *>(
				Engine::projectJournal()->
					journallingObject( val.toInt() ) );
		if( mod != NULL )
		{
			// translate the drop x-coordinate into song ticks,
			// snapped to a whole tact
			MidiTime pos = MidiTime( trackContainerView()->
							currentPosition() +
				( _de->pos().x() -
					getTrackContentWidget()->x() ) *
						MidiTime::ticksPerTact() /
		static_cast<int>( trackContainerView()->pixelsPerTact() ) )
				.toAbsoluteTact();

			if( pos.getTicks() < 0 )
			{
				pos.setTicks( 0 );
			}

			TrackContentObject * tco = getTrack()->createTCO( pos );
			AutomationPattern * pat = dynamic_cast<AutomationPattern *>( tco );
			// BUGFIX: guard against a failed cast before dereferencing
			if( pat != NULL )
			{
				pat->addObject( mod );
				pat->movePosition( pos );
			}
		}
	}

	update();
}
Example #7
0
// Release this note at frame offset _s within the current period.
// Recursively releases all sub-notes first, then enters release state.
// Idempotent: calling it again after release is a no-op.
void NotePlayHandle::noteOff( const f_cnt_t _s )
{
	if( m_released )
	{
		return;
	}

	// first note-off all sub-notes
	for( NotePlayHandleList::Iterator it = m_subNotes.begin(); it != m_subNotes.end(); ++it )
	{
		( *it )->noteOff( _s );
	}

	// then set some variables indicating release-state
	m_framesBeforeRelease = _s;
	m_releaseFramesToDo = qMax<f_cnt_t>( 0, actualReleaseFramesToDo() );

	// only top-level notes (or notes of non-arpeggiated tracks) emit the
	// MIDI note-off themselves
	if( hasParent() || ! m_instrumentTrack->isArpeggioEnabled() )
	{
		// send MidiNoteOff event
		m_instrumentTrack->processOutEvent(
				MidiEvent( MidiNoteOff, midiChannel(), midiKey(), 0 ),
				MidiTime::fromFrames( _s, engine::framesPerTick() ), 
				_s );
	}

	// inform attached components about MIDI finished (used for recording in Piano Roll)
	if( m_origin == OriginMidiInput )
	{
		// note length = frames actually played, converted to ticks
		setLength( MidiTime( static_cast<f_cnt_t>( totalFramesPlayed() / engine::framesPerTick() ) ) );
		m_instrumentTrack->midiNoteOff( *this );
	}

	m_released = true;
}
Example #8
0
// Construct an automation pattern on the given track with default
// settings (discrete progression, tension 1.0) and an initial length of
// one tact.  Patterns inside a Beat/Bassline container auto-resize with
// the container; song patterns do not.
AutomationPattern::AutomationPattern( AutomationTrack * _auto_track ) :
	TrackContentObject( _auto_track ),
	m_autoTrack( _auto_track ),
	m_objects(),
	m_tension( 1.0 ),
	m_progressionType( DiscreteProgression ),
	m_dragging( false ),
	m_isRecording( false ),
	m_lastRecordedValue( 0 )
{
	changeLength( MidiTime( 1, 0 ) );
	if( getTrack() )
	{
		const bool inBBContainer =
			getTrack()->trackContainer()->type() ==
						TrackContainer::BBContainer;
		setAutoResize( inBBContainer );
	}
}
Example #9
0
// Connect _obj to this pattern; when _search_dup is set, a model that is
// already attached is silently ignored.
void AutomationPattern::addObject( AutomatableModel * _obj, bool _search_dup )
{
	if( _search_dup )
	{
		objectVector::iterator it = m_objects.begin();
		while( it != m_objects.end() && *it != _obj )
		{
			++it;
		}
		if( it != m_objects.end() )
		{
			// Already exists
			// TODO: Maybe let the user know in some non-annoying way
			return;
		}
	}

	// An unconnected, empty pattern gets seeded with the model's current
	// value at position 0.
	if( m_objects.isEmpty() && !hasAutomation() )
	{
		putValue( MidiTime(0), _obj->value<float>(), false );
	}

	m_objects += _obj;

	// clean up our reference automatically when the model is destroyed
	connect( _obj, SIGNAL( destroyed( jo_id_t ) ),
			this, SLOT( objectDestroyed( jo_id_t ) ),
						Qt::DirectConnection );

	emit dataChanged();
}
Example #10
0
	// Record one imported automation value for a MIDI CC.
	// Lazily creates a new AutomationPattern when none exists yet, or
	// when the incoming event lies more than one tact past the previous
	// one (gap); the value is then stored relative to the pattern's
	// start and the pattern is grown to cover it.  Returns *this for
	// chaining.
	smfMidiCC & putValue( MidiTime time, AutomatableModel * objModel, float value )
	{
		if( !ap || time > lastPos + DefaultTicksPerTact )
		{
			// start a fresh pattern aligned to the event's tact
			MidiTime pPos = MidiTime( time.getTact(), 0 );
			ap = dynamic_cast<AutomationPattern*>(
				at->createTCO(0) );
			ap->movePosition( pPos );
			ap->addObject( objModel );
		}

		lastPos = time;
		// re-base the event time relative to the pattern start
		time = time - ap->startPosition();
		ap->putValue( time, value, false );
		ap->changeLength( MidiTime( time.getTact() + 1, 0 ) ); 

		return *this;
	}
Example #11
0
File: Song.cpp Project: LMMS/lmms
// Prepare the song for export rendering: freeze the export time span
// (either the timeline loop markers or the whole song), compute the
// effective rendered length including loop repetitions, then start
// playback in exporting mode.
void Song::startExport()
{
	stop();
	if (m_renderBetweenMarkers)
	{
		// render exactly the span between the timeline loop markers
		m_exportSongBegin = m_exportLoopBegin = m_playPos[Mode_PlaySong].m_timeLine->loopBegin();
		m_exportSongEnd = m_exportLoopEnd = m_playPos[Mode_PlaySong].m_timeLine->loopEnd();

		m_playPos[Mode_PlaySong].setTicks( m_playPos[Mode_PlaySong].m_timeLine->loopBegin().getTicks() );
	}
	else
	{
		m_exportSongEnd = MidiTime(m_length, 0);

		// Handle potentially ridiculous loop points gracefully.
		if (m_loopRenderCount > 1 && m_playPos[Mode_PlaySong].m_timeLine->loopEnd() > m_exportSongEnd) 
		{
			m_exportSongEnd = m_playPos[Mode_PlaySong].m_timeLine->loopEnd();
		}

		// one extra tact of tail padding unless exporting a seamless loop
		if (!m_exportLoop) 
			m_exportSongEnd += MidiTime(1,0);

		m_exportSongBegin = MidiTime(0,0);
		// the loop markers only count when they lie entirely inside the
		// exported span; otherwise the loop section collapses to zero
		m_exportLoopBegin = m_playPos[Mode_PlaySong].m_timeLine->loopBegin() < m_exportSongEnd && 
			m_playPos[Mode_PlaySong].m_timeLine->loopEnd() <= m_exportSongEnd ?
			m_playPos[Mode_PlaySong].m_timeLine->loopBegin() : MidiTime(0,0);
		m_exportLoopEnd = m_playPos[Mode_PlaySong].m_timeLine->loopBegin() < m_exportSongEnd && 
			m_playPos[Mode_PlaySong].m_timeLine->loopEnd() <= m_exportSongEnd ?
			m_playPos[Mode_PlaySong].m_timeLine->loopEnd() : MidiTime(0,0);

		m_playPos[Mode_PlaySong].setTicks( 0 );
	}

	// pre-loop part + loop section repeated m_loopRenderCount times + tail
	m_exportEffectiveLength = (m_exportLoopBegin - m_exportSongBegin) + (m_exportLoopEnd - m_exportLoopBegin) 
		* m_loopRenderCount + (m_exportSongEnd - m_exportLoopEnd);
	m_loopRenderRemaining = m_loopRenderCount;

	playSong();

	m_exporting = true;

	m_vstSyncController.setPlaybackState( true );
}
Example #12
0
// Return the (begin, end) time span that export rendering covers:
// the timeline loop markers when rendering between markers, otherwise
// the whole song — with one tact of tail padding (for reverb etc.)
// unless a seamless loop is being exported.
std::pair<MidiTime, MidiTime> Song::getExportEndpoints() const
{
	if ( m_renderBetweenMarkers )
	{
		return std::pair<MidiTime, MidiTime>(
			m_playPos[Mode_PlaySong].m_timeLine->loopBegin(),
			m_playPos[Mode_PlaySong].m_timeLine->loopEnd()
		);
	}

	// if not exporting as a loop, we leave one bar of padding at the end of the song to accomodate reverb, etc.
	const MidiTime songEnd = m_exportLoop ?
			MidiTime( m_length, 0 ) : MidiTime( m_length + 1, 0 );
	return std::pair<MidiTime, MidiTime>( MidiTime( 0, 0 ), songEnd );
}
Example #13
0
// Construct an automation pattern on the given track with default
// settings (discrete progression, tension 1.0, no attached objects) and
// an initial length of one tact.
AutomationPattern::AutomationPattern( AutomationTrack * _auto_track ) :
	trackContentObject( _auto_track ),
	m_autoTrack( _auto_track ),
	m_objects(),
	m_tension( 1.0 ),
	m_progressionType( DiscreteProgression ),
	m_dragging( false ),
	m_isRecording( false ),
	m_lastRecordedValue( 0 )
{
	changeLength( MidiTime( 1, 0 ) );
}
Example #14
0
// Construct a beat/bassline TCO on the given track; a zero _color means
// "use the default color".  The TCO adopts the length of the
// corresponding beat/bassline without creating an undo journal entry.
bbTCO::bbTCO( track * _track, unsigned int _color ) :
	trackContentObject( _track ),
	m_color( _color > 0 ? _color : defaultColor() )
{
	const tact_t bbLength =
		engine::getBBTrackContainer()->lengthOfBB( bbTrackIndex() );
	if( bbLength > 0 )
	{
		saveJournallingState( false );
		changeLength( MidiTime( bbLength, 0 ) );
		restoreJournallingState();
	}
}
Example #15
0
	// Append an imported note to this channel's current pattern.
	// Lazily creates a new Pattern when none exists yet, or when the
	// note starts more than one tact after the previous note ended
	// (gap); the new pattern is aligned to the note's tact.
	void addNote( Note & n )
	{
		if( !p || n.pos() > lastEnd + DefaultTicksPerTact )
		{
			MidiTime pPos = MidiTime( n.pos().getTact(), 0 );
			p = dynamic_cast<Pattern*>( it->createTCO( 0 ) );
			p->movePosition( pPos );
		}
		hasNotes = true;
		lastEnd = n.pos() + n.length();
		// re-base the note position relative to the pattern start
		n.setPos( n.pos( p->startPosition() ) );
		p->addNote( n, false );
	}
Example #16
0
// Construct a beat/bassline TCO on the given track; a zero _color falls
// back to the hard-coded default blue.  The TCO adopts the length of the
// beat/bassline it represents, without creating an undo journal entry.
bbTCO::bbTCO( track * _track, unsigned int _color ) :
	trackContentObject( _track ),
	m_color( _color > 0 ? _color : qRgb( 64, 128, 255 ) )
{
	const tact_t bbLength = engine::getBBTrackContainer()->lengthOfBB(
					bbTrack::numOfBBTrack( getTrack() ) );
	if( bbLength > 0 )
	{
		saveJournallingState( false );
		changeLength( MidiTime( bbLength, 0 ) );
		restoreJournallingState();
	}
}
Example #17
0
// Construct a beat/bassline TCO with the default (style-derived) color.
// The TCO adopts the length of the corresponding beat/bassline without
// creating an undo journal entry.
BBTCO::BBTCO( Track * _track ) :
	TrackContentObject( _track ),
	m_color( 128, 128, 128 ),
	m_useStyleColor( true )
{
	const tact_t bbLength =
		Engine::getBBTrackContainer()->lengthOfBB( bbTrackIndex() );
	if( bbLength > 0 )
	{
		saveJournallingState( false );
		changeLength( MidiTime( bbLength, 0 ) );
		restoreJournallingState();
	}
}
Example #18
0
// Restore this pattern from the given XML element (project load /
// journalling): type, name, position, length and mute state, then the
// contained notes and the step count, finally rebuilds derived state
// (step notes, pattern type) and notifies listeners.
void pattern::loadSettings( const QDomElement & _this )
{
	unfreeze();

	m_patternType = static_cast<PatternTypes>( _this.attribute( "type"
								).toInt() );
	setName( _this.attribute( "name" ) );
	if( _this.attribute( "pos" ).toInt() >= 0 )
	{
		movePosition( _this.attribute( "pos" ).toInt() );
	}
	changeLength( MidiTime( _this.attribute( "len" ).toInt() ) );
	if( _this.attribute( "muted" ).toInt() != isMuted() )
	{
		toggleMute();
	}

	// drop the current notes before restoring the saved ones
	clearNotes();

	QDomNode node = _this.firstChild();
	while( !node.isNull() )
	{
		// skip metadata elements; every other element is a note
		if( node.isElement() &&
			!node.toElement().attribute( "metadata" ).toInt() )
		{
			// ownership passes to m_notes (freed via clearNotes())
			note * n = new note;
			n->restoreState( node.toElement() );
			m_notes.push_back( n );
		}
		node = node.nextSibling();
        }

	// a missing "steps" attribute (0) falls back to the time
	// signature's steps per tact
	m_steps = _this.attribute( "steps" ).toInt();
	if( m_steps == 0 )
	{
		m_steps = MidiTime::stepsPerTact();
	}

	ensureBeatNotes();
	checkType();
/*	if( _this.attribute( "frozen" ).toInt() )
	{
		freeze();
	}*/

	emit dataChanged();

	updateBBTrack();
}
Example #19
0
// Length of the pattern: beat patterns use their own rule; melody
// patterns are at least one tact long, rounded up to the next full tact
// after the end of the last real (length > 0) note.
MidiTime Pattern::length() const
{
	if( m_patternType == BeatPattern )
	{
		return beatPatternLength();
	}

	tick_t lastTick = MidiTime::ticksPerTact();
	for( NoteVector::ConstIterator it = m_notes.begin();
						it != m_notes.end(); ++it )
	{
		// step notes (length <= 0) don't count toward the length
		if( ( *it )->length() > 0 )
		{
			lastTick = qMax<tick_t>( lastTick, ( *it )->endPos() );
		}
	}
	return MidiTime( lastTick ).nextFullTact() * MidiTime::ticksPerTact();
}
	// Regression test: inline (note-detuning) automation with linear
	// progression must interpolate linearly between the two set points.
	void testInlineAutomation()
	{
		auto song = Engine::getSong();

		InstrumentTrack* instrumentTrack =
				dynamic_cast<InstrumentTrack*>(Track::create(Track::InstrumentTrack, song));

		// a four-tact note carrying a detuning automation pattern
		Pattern* notePattern = dynamic_cast<Pattern*>(instrumentTrack->createTCO(0));
		notePattern->changeLength(MidiTime(4, 0));
		Note* note = notePattern->addNote(Note(MidiTime(4, 0)), false);
		note->createDetuning();

		// automate the detuning from 0.0 (tact 0) to 1.0 (tact 4)
		DetuningHelper* dh = note->detuning();
		auto pattern = dh->automationPattern();
		pattern->setProgressionType( AutomationPattern::LinearProgression );
		pattern->putValue(MidiTime(0, 0), 0.0);
		pattern->putValue(MidiTime(4, 0), 1.0);

		// linear interpolation: value == tact / 4
		QCOMPARE(pattern->valueAt(MidiTime(0, 0)), 0.0f);
		QCOMPARE(pattern->valueAt(MidiTime(1, 0)), 0.25f);
		QCOMPARE(pattern->valueAt(MidiTime(2, 0)), 0.5f);
		QCOMPARE(pattern->valueAt(MidiTime(4, 0)), 1.0f);
	}
Example #21
0
// Length of a beat pattern: at least one tact, extended to cover every
// enabled step note (length < 0); a non-default step count overrides the
// note-derived length.  The result is rounded up to a full tact.
MidiTime Pattern::beatPatternLength() const
{
	tick_t maxTicks = MidiTime::ticksPerTact();

	for( NoteVector::ConstIterator it = m_notes.begin();
						it != m_notes.end(); ++it )
	{
		// an enabled step note occupies one step's worth of ticks
		if( ( *it )->length() < 0 )
		{
			maxTicks = qMax<tick_t>( maxTicks,
				( *it )->pos() + MidiTime::ticksPerTact() /
						MidiTime::stepsPerTact() );
		}
	}

	if( m_steps != MidiTime::stepsPerTact() )
	{
		maxTicks = m_steps * MidiTime::ticksPerTact() /
						MidiTime::stepsPerTact();
	}

	return MidiTime( maxTicks ).nextFullTact() * MidiTime::ticksPerTact();
}
Example #22
0
// Length of the pattern: zero when empty, otherwise the position of the
// last automation point rounded up to a whole number of tacts (min. one).
MidiTime AutomationPattern::length() const
{
	if( m_timeMap.isEmpty() )
	{
		return 0;
	}
	timeMap::const_iterator last = m_timeMap.end() - 1;
	const int tacts = qMax( MidiTime( last.key() ).getTact() + 1, 1 );
	return MidiTime( tacts, 0 );
}
Example #23
0
// Flip the pattern horizontally (mirror the automation points in time).
// When length >= 0 the requested length defines the mirror axis (padding
// the pattern first if it is shorter); otherwise the last point's
// position is used.
// BUGFIX: cleanObjects() was called once per point inside the mirror
// loop although it is loop-invariant — hoisted out.  The last point's
// key is an integral tick position, so it is now kept in an int instead
// of a float (avoids precision loss on large positions).
void AutomationPattern::flipX( int length )
{
	timeMap tempMap;

	timeMap::ConstIterator iterate = m_timeMap.lowerBound(0);
	float tempValue = 0;
	int numPoints = 0;

	// count the points after the first one (the loops below run over the
	// inclusive index range 0..numPoints)
	for( int i = 0; ( iterate + i + 1 ) != m_timeMap.end() && ( iterate + i ) != m_timeMap.end() ; i++)
	{
		numPoints++;
	}

	// tick position of the last point
	const int realLength = ( iterate + numPoints ).key();

	if ( length != -1 && length != realLength)
	{
		if ( realLength < length )
		{
			// pad the pattern up to the requested length first,
			// then mirror around it
			tempValue = valueAt( ( iterate + numPoints ).key() );
			putValue( MidiTime( length ) , tempValue, false);
			numPoints++;
			for( int i = 0; i <= numPoints; i++ )
			{
				tempValue = valueAt( ( iterate + i ).key() );
				MidiTime newTime = MidiTime( length - ( iterate + i ).key() );
				tempMap[newTime] = tempValue;
			}
		}
		else
		{
			for( int i = 0; i <= numPoints; i++ )
			{
				tempValue = valueAt( ( iterate + i ).key() );
				MidiTime newTime;

				// points beyond the mirror axis keep their position
				if ( ( iterate + i ).key() <= length )
				{
					newTime = MidiTime( length - ( iterate + i ).key() );
				}
				else
				{
					newTime = MidiTime( ( iterate + i ).key() );
				}
				tempMap[newTime] = tempValue;
			}
		}
	}
	else
	{
		// no explicit length (or length equals the real one):
		// mirror around the last point's position
		cleanObjects();
		for( int i = 0; i <= numPoints; i++ )
		{
			tempValue = valueAt( ( iterate + i ).key() );
			MidiTime newTime = MidiTime( realLength - ( iterate + i ).key() );
			tempMap[newTime] = tempValue;
		}
	}

	m_timeMap.clear();

	m_timeMap = tempMap;

	generateTangents();
	emit dataChanged();
}
Example #24
0
// Convert an absolute frame count into a MidiTime, truncating the
// fractional tick (exactly like the original integer cast).
MidiTime MidiTime::fromFrames( const f_cnt_t frames, const float framesPerTick )
{
	const int ticks = static_cast<int>( frames / framesPerTick );
	return MidiTime( ticks );
}
Example #25
0
bool FlpImport::tryImport( TrackContainer* tc )
{
	const int mappedFilter[] =
	{
		BasicFilters<>::LowPass,// fast LP
		BasicFilters<>::LowPass,
		BasicFilters<>::BandPass_CSG,
		BasicFilters<>::HiPass,
		BasicFilters<>::Notch,
		BasicFilters<>::NumFilters+BasicFilters<>::LowPass,
		BasicFilters<>::LowPass,
		BasicFilters<>::NumFilters+BasicFilters<>::LowPass
	} ;

	const InstrumentFunctionArpeggio::ArpDirections mappedArpDir[] =
	{
		InstrumentFunctionArpeggio::ArpDirUp,
		InstrumentFunctionArpeggio::ArpDirUp,
		InstrumentFunctionArpeggio::ArpDirDown,
		InstrumentFunctionArpeggio::ArpDirUpAndDown,
		InstrumentFunctionArpeggio::ArpDirUpAndDown,
		InstrumentFunctionArpeggio::ArpDirRandom
	} ;

	QMap<QString, int> mappedPluginTypes;

	// instruments
	mappedPluginTypes["sampler"] = FL_Plugin::Sampler;
	mappedPluginTypes["ts404"] = FL_Plugin::TS404;
	mappedPluginTypes["3x osc"] = FL_Plugin::Fruity_3x_Osc;
	mappedPluginTypes["beepmap"] = FL_Plugin::BeepMap;
	mappedPluginTypes["buzz generator adapter"] = FL_Plugin::BuzzGeneratorAdapter;
	mappedPluginTypes["fruit kick"] = FL_Plugin::FruitKick;
	mappedPluginTypes["fruity drumsynth live"] = FL_Plugin::FruityDrumSynthLive;
	mappedPluginTypes["fruity dx10"] = FL_Plugin::FruityDX10;
	mappedPluginTypes["fruity granulizer"] = FL_Plugin::FruityGranulizer;
	mappedPluginTypes["fruity slicer"] = FL_Plugin::FruitySlicer;
	mappedPluginTypes["fruity soundfont player"] = FL_Plugin::FruitySoundfontPlayer;
	mappedPluginTypes["fruity vibrator"] = FL_Plugin::FruityVibrator;
	mappedPluginTypes["midi out"] = FL_Plugin::MidiOut;
	mappedPluginTypes["plucked!"] = FL_Plugin::Plucked;
	mappedPluginTypes["simsynth"] = FL_Plugin::SimSynth;
	mappedPluginTypes["sytrus"] = FL_Plugin::Sytrus;
	mappedPluginTypes["wasp"] = FL_Plugin::WASP;

	// effects
	mappedPluginTypes["fruity 7 band EQ"] = FL_Plugin::Fruity7BandEq;
	mappedPluginTypes["fruity balance"] = FL_Plugin::FruityBalance;
	mappedPluginTypes["fruity bass boost"] = FL_Plugin::FruityBassBoost;
	mappedPluginTypes["fruity big clock"] = FL_Plugin::FruityBigClock;
	mappedPluginTypes["fruity blood overdrive"] = FL_Plugin::FruityBloodOverdrive;
	mappedPluginTypes["fruity center"] = FL_Plugin::FruityCenter;
	mappedPluginTypes["fruity chorus"] = FL_Plugin::FruityChorus;
	mappedPluginTypes["fruity compressor"] = FL_Plugin::FruityCompressor;
	mappedPluginTypes["fruity db meter"] = FL_Plugin::FruityDbMeter;
	mappedPluginTypes["fruity delay"] = FL_Plugin::FruityDelay;
	mappedPluginTypes["fruity delay 2"] = FL_Plugin::FruityDelay2;
	mappedPluginTypes["fruity fast dist"] = FL_Plugin::FruityFastDist;
	mappedPluginTypes["fruity fast lp"] = FL_Plugin::FruityFastLP;
	mappedPluginTypes["fruity filter"] = FL_Plugin::FruityFilter;
	mappedPluginTypes["fruity flanger"] = FL_Plugin::FruityFlanger;
	mappedPluginTypes["fruity formula controller"] = FL_Plugin::FruityFormulaController;
	mappedPluginTypes["fruity free filter"] = FL_Plugin::FruityFreeFilter;
	mappedPluginTypes["fruity html notebook"] = FL_Plugin::FruityHTMLNotebook;
	mappedPluginTypes["fruity lsd"] = FL_Plugin::FruityLSD;
	mappedPluginTypes["fruity mute 2"] = FL_Plugin::FruityMute2;
	mappedPluginTypes["fruity notebook"] = FL_Plugin::FruityNotebook;
	mappedPluginTypes["fruity panomatic"] = FL_Plugin::FruityPanOMatic;
	mappedPluginTypes["fruity parametric eq"] = FL_Plugin::FruityParametricEQ;
	mappedPluginTypes["fruity peak controller"] = FL_Plugin::FruityPeakController;
	mappedPluginTypes["fruity phase inverter"] = FL_Plugin::FruityPhaseInverter;
	mappedPluginTypes["fruity phaser"] = FL_Plugin::FruityPhaser;
	mappedPluginTypes["fruity reeverb"] = FL_Plugin::FruityReeverb;
	mappedPluginTypes["fruity scratcher"] = FL_Plugin::FruityScratcher;
	mappedPluginTypes["fruity send"] = FL_Plugin::FruitySend;
	mappedPluginTypes["fruity soft clipper"] = FL_Plugin::FruitySoftClipper;
	mappedPluginTypes["fruity spectroman"] = FL_Plugin::FruitySpectroman;
	mappedPluginTypes["fruity stereo enhancer"] = FL_Plugin::FruityStereoEnhancer;
	mappedPluginTypes["fruity x-y controller"] = FL_Plugin::FruityXYController;


	FL_Project p;

	if( openFile() == false )
	{
		return false;
	}

	if( readID() != makeID( 'F', 'L', 'h', 'd' ) )
	{
		qWarning( "FlpImport::tryImport(): not a valid FL project\n" );
		return false;
	}

	const int header_len = read32LE();
	if( header_len != 6 )
	{
		qWarning( "FlpImport::tryImport(): invalid file format\n" );
		return false;
	}

	const int type = read16LE();
	if( type != 0 )
	{
		qWarning( "FlpImport::tryImport(): type %d format is not "
							"supported\n", type );
		return false;
	}

	p.numChannels = read16LE();
	if( p.numChannels < 1 || p.numChannels > 1000 )
	{
		qWarning( "FlpImport::tryImport(): invalid number of channels "
						"(%d)\n", p.numChannels );
		return false;
	}

	const int ppq = read16LE();
	if( ppq < 0 )
	{
		qWarning( "FlpImport::tryImport(): invalid ppq\n" );
		return false;
	}

	QProgressDialog progressDialog(
			TrackContainer::tr( "Importing FLP-file..." ),
			TrackContainer::tr( "Cancel" ), 0, p.numChannels );
	progressDialog.setWindowTitle( TrackContainer::tr( "Please wait..." ) );
	progressDialog.show();

	bool valid = false;

	// search for FLdt chunk
	while( 1 )
	{
		int32_t id = readID();
		const int len = read32LE();
		if( file().atEnd() )
		{
			qWarning( "FlpImport::tryImport(): unexpected "
						"end of file\n" );
			return false;
		}
		if( len < 0 || len >= 0x10000000 )
		{
			qWarning( "FlpImport::tryImport(): invalid "
						"chunk length %d\n", len );
			return false;
		}
		if( id == makeID( 'F', 'L', 'd', 't' ) )
		{
			valid = true;
			break;
		}
		skip( len );
	}

	if( valid == false )
	{
		return false;
	}

	for( int i = 0; i < p.numChannels; ++i )
	{
		p.channels += FL_Channel();
	}

	qDebug( "channels: %d\n", p.numChannels );


	char * text = NULL;
	int text_len = 0;
	FL_Plugin::PluginTypes last_plugin_type = FL_Plugin::UnknownPlugin;
	
	int cur_channel = -1;

	const bool is_journ = Engine::projectJournal()->isJournalling();
	Engine::projectJournal()->setJournalling( false );


	while( file().atEnd() == false )
	{
		FLP_Events ev = static_cast<FLP_Events>( readByte() );
		uint32_t data = readByte();

		if( ev >= FLP_Word && ev < FLP_Text )
		{
			data = data | ( readByte() << 8 );
		}

		if( ev >= FLP_Int && ev < FLP_Text )
		{
			data = data | ( readByte() << 16 );
			data = data | ( readByte() << 24 );
		}


		if( ev >= FLP_Text )
		{
			text_len = data & 0x7F;
			uint8_t shift = 0;
			while( data & 0x80 )
			{
				data = readByte();
				text_len = text_len | ( ( data & 0x7F ) <<
							( shift += 7 ) );
			}

			delete[] text;
			text = new char[text_len+1];
			if( readBlock( text, text_len ) <= 0 )
			{
				qWarning( "could not read string (len: %d)\n",
							text_len );
			}

			text[text_len] = 0;
		}
		const unsigned char * puc = (const unsigned char*) text;
		const int * pi = (const int *) text;


		FL_Channel * cc = cur_channel >= 0 ?
					&p.channels[cur_channel] : NULL;

		switch( ev )
		{
			// BYTE EVENTS
			case FLP_Byte:
				qDebug( "undefined byte %d\n", data );
				break;

			case FLP_NoteOn:
				qDebug( "note on: %d\n", data );
				// data = pos   how to handle?
				break;

			case FLP_Vol:
				qDebug( "vol %d\n", data );
				break;

			case FLP_Pan:
				qDebug( "pan %d\n", data );
				break;

			case FLP_LoopActive:
				qDebug( "active loop: %d\n", data );
				break;

			case FLP_ShowInfo:
				qDebug( "show info: %d\n", data );
				break;

			case FLP_Shuffle:
				qDebug( "shuffle: %d\n", data );
				break;

			case FLP_MainVol:
				p.mainVolume = data * 100 / 128;
				break;

			case FLP_PatLength:
				qDebug( "pattern length: %d\n", data );
				break;

			case FLP_BlockLength:
				qDebug( "block length: %d\n", data );
				break;

			case FLP_UseLoopPoints:
				cc->sampleUseLoopPoints = true;
				break;

			case FLP_LoopType:
				qDebug( "loop type: %d\n", data );
				break;

			case FLP_ChanType:
				qDebug( "channel type: %d\n", data );
				if( cc )
				{
		switch( data )
		{
			case 0: cc->pluginType = FL_Plugin::Sampler; break;
			case 1: cc->pluginType = FL_Plugin::TS404; break;
//			case 2: cc->pluginType = FL_Plugin::Fruity_3x_Osc; break;
			case 3: cc->pluginType = FL_Plugin::Layer; break;
			default:
				break;
		}
				}
				break;

			case FLP_MixSliceNum:
				cc->fxChannel = data+1;
				break;

			case FLP_EffectChannelMuted:
if( p.currentEffectChannel <= NumFLFxChannels )
{
	p.effectChannels[p.currentEffectChannel].isMuted =
					( data & 0x08 ) > 0 ? false : true;
}
				break;


			// WORD EVENTS
			case FLP_NewChan:
				cur_channel = data;
				qDebug( "new channel: %d\n", data );
				break;

			case FLP_NewPat:
				p.currentPattern = data - 1;
				if( p.currentPattern > p.maxPatterns )
				{
					p.maxPatterns = p.currentPattern;
				}
				break;

			case FLP_Tempo:
				p.tempo = data;
				break;

			case FLP_CurrentPatNum:
				p.activeEditPattern = data;
				break;

			case FLP_FX:
				qDebug( "FX: %d\n", data );
				break;

			case FLP_Fade_Stereo:
				if( data & 0x02 )
				{
					cc->sampleReversed = true;
				}
				else if( data & 0x100 )
				{
					cc->sampleReverseStereo = true;
				}
				qDebug( "fade stereo: %d\n", data );
				break;

			case FLP_CutOff:
				qDebug( "cutoff (sample): %d\n", data );
				break;

			case FLP_PreAmp:
				cc->sampleAmp = 100 + data * 100 / 256;
				break;

			case FLP_Decay:
				qDebug( "decay (sample): %d\n", data );
				break;

			case FLP_Attack:
				qDebug( "attack (sample): %d\n", data );
				break;

			case FLP_MainPitch:
				p.mainPitch = data;
				break;

			case FLP_Resonance:
				qDebug( "reso (sample): %d\n", data );
				break;

			case FLP_LoopBar:
				qDebug( "loop bar: %d\n", data );
				break;

			case FLP_StDel:
				qDebug( "stdel (delay?): %d\n", data );
				break;

			case FLP_FX3:
				qDebug( "FX 3: %d\n", data );
				break;

			case FLP_ShiftDelay:
				qDebug( "shift delay: %d\n", data );
				break;

			case FLP_Dot:
				cc->dots.push_back( ( data & 0xff ) +
						( p.currentPattern << 8 ) );
				break;

			case FLP_LayerChans:
				p.channels[data].layerParent = cur_channel;
				break;

			// DWORD EVENTS
			case FLP_Color:
				cc->color = data;
				break;

			case FLP_PlayListItem:
			{
				FL_PlayListItem i;
				i.position = ( data & 0xffff ) *
							DefaultTicksPerTact;
				i.length = DefaultTicksPerTact;
				i.pattern = ( data >> 16 ) - 1;
				p.playListItems.push_back( i );
				if( i.pattern > p.maxPatterns )
				{
					p.maxPatterns = i.pattern;
				}
				break;
			}

			case FLP_FXSine:
				qDebug( "fx sine: %d\n", data );
				break;

			case FLP_CutCutBy:
				qDebug( "cut cut by: %d\n", data );
				break;

			case FLP_MiddleNote:
				cc->baseNote = data+9;
				break;

			case FLP_DelayReso:
				qDebug( "delay resonance: %d\n", data );
				break;

			case FLP_Reverb:
				qDebug( "reverb (sample): %d\n", data );
				break;

			case FLP_IntStretch:
				qDebug( "int stretch (sample): %d\n", data );
				break;

			// TEXT EVENTS
			case FLP_Text_ChanName:
				cc->name = text;
				break;

			case FLP_Text_PatName:
				p.patternNames[p.currentPattern] = text;
				break;

			case FLP_Text_CommentRTF:
			{
				QByteArray ba( text, text_len );
				QBuffer buf( &ba );
				buf.open( QBuffer::ReadOnly );
				lineno = 0;
				attr_clear_all();
				op = html_init();
				hash_init();
				Word * word = word_read( &buf );
				QString out;
				word_print( word, out );
				word_free( word );
				op_free( op );

				p.projectNotes = out;
				outstring = "";
				break;
			}

			case FLP_Text_Title:
				p.projectTitle = text;
				break;

			case FLP_Text_SampleFileName:
			{
				QString f = text;
/*				if( f.mid( 1, 11 ) == "Instruments" )
				{
					f = "\\Patches\\Packs" +
								f.mid( 12 );
				}*/
				f.replace( '\\', QDir::separator() );
				if( QFileInfo( ConfigManager::inst()->flDir() +
						"/Data/" ).exists() )
				{
					f = ConfigManager::inst()->flDir() +
								"/Data/" + f;
				}
				else
				{
					// FL 3 compat
					f = ConfigManager::inst()->flDir() +
							"/Samples/" + f;
				}
				cc->sampleFileName = f;
				break;
			}

			case FLP_Text_Version:
			{
				qDebug( "FLP version: %s\n", text );
				p.versionString = text;
				QStringList l = p.versionString.split( '.' );
				p.version = ( l[0].toInt() << 8 ) +
						( l[1].toInt() << 4 ) +
						( l[2].toInt() << 0 );
				if( p.version >= 0x600 )
				{
					p.versionSpecificFactor = 100;
				}
				break;
			}

			case FLP_Text_PluginName:
				if( mappedPluginTypes.
					contains( QString( text ).toLower() ) )
				{
	const FL_Plugin::PluginTypes t = static_cast<FL_Plugin::PluginTypes>(
				mappedPluginTypes[QString( text ).toLower()] );
					if( t > FL_Plugin::EffectPlugin )
					{
						qDebug( "recognized new effect %s\n", text );
						p.effects.push_back( FL_Effect( t ) );
					}
					else if( cc )
					{
						qDebug( "recognized new plugin %s\n", text );
						cc->pluginType = t;
					}
					last_plugin_type = t;
				}
				else
				{
					qDebug( "unsupported plugin: %s!\n", text );
				}
				break;

			case FLP_Text_EffectChanName:
				++p.currentEffectChannel;
				if( p.currentEffectChannel <= NumFLFxChannels )
				{
					p.effectChannels[p.currentEffectChannel].name = text;
				}
				break;

			case FLP_Text_Delay:
				qDebug( "delay data: " );
				// pi[1] seems to be volume or similiar and
				// needs to be divided
				// by p.versionSpecificFactor
				dump_mem( text, text_len );
				break;

			case FLP_Text_TS404Params:
				if( cc && cc->pluginType == FL_Plugin::UnknownPlugin &&
						cc->pluginSettings == NULL )
				{
					cc->pluginSettings = new char[text_len];
					memcpy( cc->pluginSettings, text, text_len );
					cc->pluginSettingsLength = text_len;
					cc->pluginType = FL_Plugin::TS404;
				}
				break;

			case FLP_Text_NewPlugin:
				if( last_plugin_type > FL_Plugin::EffectPlugin )
				{
					FL_Effect * e = &p.effects.last();
					e->fxChannel = puc[0];
					e->fxPos = puc[4];
					qDebug( "new effect: " );
				}
				else
				{
					qDebug( "new plugin: " );
				}
				dump_mem( text, text_len );
				break;

			case FLP_Text_PluginParams:
				if( cc && cc->pluginSettings == NULL )
				{
					cc->pluginSettings = new char[text_len];
					memcpy( cc->pluginSettings, text,
								text_len );
					cc->pluginSettingsLength = text_len;
				}
				qDebug( "plugin params: " );
				dump_mem( text, text_len );
				break;

			case FLP_Text_ChanParams:
				cc->arpDir = mappedArpDir[pi[10]];
				cc->arpRange = pi[11];
				cc->selectedArp = pi[12];
	if( cc->selectedArp < 8 )
	{
		const int mappedArps[] = { 0, 1, 5, 6, 2, 3, 4 } ;
		cc->selectedArp = mappedArps[cc->selectedArp];
	}
				cc->arpTime = ( ( pi[13]+1 ) * p.tempo ) /
								( 4*16 ) + 1;
				cc->arpGate = ( pi[14] * 100.0f ) / 48.0f;
				cc->arpEnabled = pi[10] > 0;

				qDebug( "channel params: " );
				dump_mem( text, text_len );
				break;

			case FLP_Text_EnvLfoParams:
			{
				const float scaling = 1.0 / 65536.0f;
				FL_Channel_Envelope e;

		switch( cc->envelopes.size() )
		{
			case 1:
				e.target = InstrumentSoundShaping::Volume;
				break;
			case 2:
				e.target = InstrumentSoundShaping::Cut;
				break;
			case 3:
				e.target = InstrumentSoundShaping::Resonance;
				break;
			default:
				e.target = InstrumentSoundShaping::NumTargets;
				break;
		}
				e.predelay = pi[2] * scaling;
				e.attack = pi[3] * scaling;
				e.hold = pi[4] * scaling;
				e.decay = pi[5] * scaling;
				e.sustain = 1-pi[6] / 128.0f;
				e.release = pi[7] * scaling;
				if( e.target == InstrumentSoundShaping::Volume )
				{
					e.amount = pi[1] ? 1 : 0;
				}
				else
				{
					e.amount = pi[8] / 128.0f;
				}
//				e.lfoAmount = pi[11] / 128.0f;
				cc->envelopes.push_back( e );

				qDebug( "envelope and lfo params:\n" );
				dump_mem( text, text_len );

				break;
			}

			case FLP_Text_BasicChanParams:
		cc->volume = ( pi[1] / p.versionSpecificFactor ) * 100 / 128;
		cc->panning = ( pi[0] / p.versionSpecificFactor ) * 200 / 128 -
								PanningRight;
				if( text_len > 12 )
				{
			cc->filterType = mappedFilter[puc[20]];
			cc->filterCut = puc[12] / ( 255.0f * 2.5f );
			cc->filterRes = 0.01f + puc[16] / ( 256.0f * 2 );
			cc->filterEnabled = ( puc[13] == 0 );
			if( puc[20] >= 6 )
			{
				cc->filterCut *= 0.5f;
			}
				}
				qDebug( "basic chan params: " );
				dump_mem( text, text_len );
				break;

			case FLP_Text_OldFilterParams:
				cc->filterType = mappedFilter[puc[8]];
				cc->filterCut = puc[0] / ( 255.0f * 2.5 );
				cc->filterRes = 0.1f + puc[4] / ( 256.0f * 2 );
				cc->filterEnabled = ( puc[1] == 0 );
				if( puc[8] >= 6 )
				{
					cc->filterCut *= 0.5;
				}
				qDebug( "old filter params: " );
				dump_mem( text, text_len );
				break;

			case FLP_Text_AutomationData:
			{
				const int bpae = 12;
				const int imax = text_len / bpae;
				qDebug( "automation data (%d items)\n", imax );
				for( int i = 0; i < imax; ++i )
				{
					FL_Automation a;
					a.pos = pi[3*i+0] /
						( 4*ppq / DefaultTicksPerTact );
					a.value = pi[3*i+2];
					a.channel = pi[3*i+1] >> 16;
					a.control = pi[3*i+1] & 0xffff;
					if( a.channel >= 0 &&
						a.channel < p.numChannels )
					{
						qDebug( "add channel %d at %d  val %d  control:%d\n",
									a.channel, a.pos, a.value, a.control );
						p.channels[a.channel].automationData += a;
					}
//					dump_mem( text+i*bpae, bpae );
				}
				break;
			}

			case FLP_Text_PatternNotes:
			{
				//dump_mem( text, text_len );
				const int bpn = 20;
				const int imax = ( text_len + bpn - 1 ) / bpn;
				for( int i = 0; i < imax; ++i )
				{
					int ch = *( puc + i*bpn + 6 );
					int pan = *( puc + i*bpn + 16 );
					int vol = *( puc + i*bpn + 17 );
					int pos = *( (int *)( puc + i*bpn ) );
					int key = *( puc + i*bpn + 12 );
					int len = *( (int*)( puc + i*bpn +
									8 ) );
					pos /= (4*ppq) / DefaultTicksPerTact;
					len /= (4*ppq) / DefaultTicksPerTact;
					Note n( len, pos, key, vol * 100 / 128,
							pan*200 / 128 - 100 );
					if( ch < p.numChannels )
					{
	p.channels[ch].notes.push_back( qMakePair( p.currentPattern, n ) );
					}
					else
					{
						qDebug( "invalid " );
					}
					qDebug( "note: " );
					dump_mem( text+i*bpn, bpn );
				}
				break;
			}

			case FLP_Text_ChanGroupName:
				qDebug( "channel group name: %s\n", text );
				break;

			// case 216: pi[2] /= p.versionSpecificFactor
			// case 229: pi[1] /= p.versionSpecificFactor

			case FLP_Event_EffectParams:
			{
				enum FLP_EffectParams
				{
					EffectParamVolume = 0x1fc0
				} ;

				const int bpi = 12;
				const int imax = text_len / bpi;
				for( int i = 0; i < imax; ++i )
				{
					const int param = pi[i*3+1] & 0xffff;
					const int ch = ( pi[i*3+1] >> 22 )
									& 0x7f;
					if( ch < 0 || ch > NumFLFxChannels )
					{
						continue;
					}
					const int val = pi[i*3+2];
					if( param == EffectParamVolume )
					{
p.effectChannels[ch].volume = ( val / p.versionSpecificFactor ) * 100 / 128;
					}
					else
					{
qDebug( "FX-ch: %d  param: %x  value:%x\n", ch, param, val );
					}
				}
				break;
			}

			case FLP_Event_PlaylistItems:	// playlist items
			{
				const int bpi = 28;
				const int imax = text_len / bpi;
				for( int i = 0; i < imax; ++i )
				{
const int pos = pi[i*bpi/sizeof(int)+0] / ( (4*ppq) / DefaultTicksPerTact );
const int len = pi[i*bpi/sizeof(int)+2] / ( (4*ppq) / DefaultTicksPerTact );
const int pat = pi[i*bpi/sizeof(int)+3] & 0xfff;
if( pat > 2146 && pat <= 2278 )	// whatever these magic numbers are for...
{
	FL_PlayListItem i;
	i.position = pos;
	i.length = len;
	i.pattern = 2278 - pat;
	p.playListItems += i;
}
else
{
	qDebug( "unknown playlist item: " );
	dump_mem( text+i*bpi, bpi );
}
				}
				break;
			}

			default:
				if( ev >= FLP_Text )
				{
					qDebug( "!! unhandled text (ev: %d, len: %d): ",
								ev, text_len );
					dump_mem( text, text_len );
				}
				else
				{
					qDebug( "!! handling of FLP-event %d not implemented yet "
							"(data=%d).\n", ev, data );
				}
				break;
		}
	}


	// now create a project from FL_Project data structure
	Engine::getSong()->clearProject();

	// configure the mixer
	for( int i=0; i<NumFLFxChannels; ++i )
	{
		Engine::fxMixer()->createChannel();
	}
	gui->fxMixerView()->refreshDisplay();

	// set global parameters
	Engine::getSong()->setMasterVolume( p.mainVolume );
	Engine::getSong()->setMasterPitch( p.mainPitch );
	Engine::getSong()->setTempo( p.tempo );

	// set project notes
	gui->getProjectNotes()->setText( p.projectNotes );


	progressDialog.setMaximum( p.maxPatterns + p.channels.size() +
								p.effects.size() );
	int cur_progress = 0;

	// create BB tracks
	QList<BBTrack *> bb_tracks;
	QList<InstrumentTrack *> i_tracks;

	while( Engine::getBBTrackContainer()->numOfBBs() <= p.maxPatterns )
	{
		const int cur_pat = bb_tracks.size();
		BBTrack * bbt = dynamic_cast<BBTrack *>(
			Track::create( Track::BBTrack, Engine::getSong() ) );
		if( p.patternNames.contains( cur_pat ) )
		{
			bbt->setName( p.patternNames[cur_pat] );
		}
		bb_tracks += bbt;
		progressDialog.setValue( ++cur_progress );
		qApp->processEvents();
	}

	// create instrument-track for each channel
	for( QList<FL_Channel>::Iterator it = p.channels.begin();
						it != p.channels.end(); ++it )
	{
		InstrumentTrack * t = dynamic_cast<InstrumentTrack *>(
			Track::create( Track::InstrumentTrack,
					Engine::getBBTrackContainer() ) );
		Engine::getBBTrackContainer()->updateAfterTrackAdd();
		i_tracks.push_back( t );
		switch( it->pluginType )
		{
			case FL_Plugin::Fruity_3x_Osc:
				it->instrumentPlugin =
					t->loadInstrument( "tripleoscillator" );
				break;
			case FL_Plugin::Plucked:
				it->instrumentPlugin =
					t->loadInstrument( "vibedstrings" );
				break;
			case FL_Plugin::FruitKick:
				it->instrumentPlugin =
					t->loadInstrument( "kicker" );
				break;
			case FL_Plugin::TS404:
				it->instrumentPlugin =
					t->loadInstrument( "lb302" );
				break;
			case FL_Plugin::FruitySoundfontPlayer:
				it->instrumentPlugin =
					t->loadInstrument( "sf2player" );
				break;
			case FL_Plugin::Sampler:
			case FL_Plugin::UnknownPlugin:
			default:
				it->instrumentPlugin =
					t->loadInstrument( "audiofileprocessor" );
				break;
		}
		processPluginParams( &( *it ) );

		t->setName( it->name );
		t->volumeModel()->setValue( it->volume );
		t->panningModel()->setValue( it->panning );
		t->baseNoteModel()->setValue( it->baseNote );
		t->effectChannelModel()->setValue( it->fxChannel );

		InstrumentSoundShaping * iss = &t->m_soundShaping;
		iss->m_filterModel.setValue( it->filterType );
		iss->m_filterCutModel.setValue( it->filterCut *
			( iss->m_filterCutModel.maxValue() -
				iss->m_filterCutModel.minValue() ) +
					iss->m_filterCutModel.minValue() );
		iss->m_filterResModel.setValue( it->filterRes *
			( iss->m_filterResModel.maxValue() -
				iss->m_filterResModel.minValue() ) +
					iss->m_filterResModel.minValue() );
		iss->m_filterEnabledModel.setValue( it->filterEnabled );

		for( QList<FL_Channel_Envelope>::iterator jt = it->envelopes.begin();
					jt != it->envelopes.end(); ++jt )
		{
			if( jt->target != InstrumentSoundShaping::NumTargets )
			{
				EnvelopeAndLfoParameters * elp =
					iss->m_envLfoParameters[jt->target];

				elp->m_predelayModel.setValue( jt->predelay );
				elp->m_attackModel.setValue( jt->attack );
				elp->m_holdModel.setValue( jt->hold );
				elp->m_decayModel.setValue( jt->decay );
				elp->m_sustainModel.setValue( jt->sustain );
				elp->m_releaseModel.setValue( jt->release );
				elp->m_amountModel.setValue( jt->amount );
				elp->updateSampleVars();
			}
		}

		InstrumentFunctionArpeggio * arp = &t->m_arpeggio;
		arp->m_arpDirectionModel.setValue( it->arpDir );
		arp->m_arpRangeModel.setValue( it->arpRange );
		arp->m_arpModel.setValue( it->selectedArp );
		arp->m_arpTimeModel.setValue( it->arpTime );
		arp->m_arpGateModel.setValue( it->arpGate );
		arp->m_arpEnabledModel.setValue( it->arpEnabled );

		// process all dots
		for( QList<int>::ConstIterator jt = it->dots.begin();
						jt != it->dots.end(); ++jt )
		{
			const int pat = *jt / 256;
			const int pos = *jt % 256;
			Pattern* p = dynamic_cast<Pattern*>( t->getTCO( pat ) );
			if( p == NULL )
			{
				continue;
			}
			p->setStep( pos, true );
		}

		// TODO: use future layering feature
		if( it->layerParent >= 0 )
		{
			it->notes += p.channels[it->layerParent].notes;
		}

		// process all notes
		for( FL_Channel::noteVector::ConstIterator jt = it->notes.begin();
						jt != it->notes.end(); ++jt )
		{
			const int pat = jt->first;

			if( pat > 100 )
			{
				continue;
			}
			Pattern* p = dynamic_cast<Pattern*>( t->getTCO( pat ) );
			if( p != NULL )
			{
				p->addNote( jt->second, false );
			}
		}

		// process automation data
		for( QList<FL_Automation>::ConstIterator jt =
						it->automationData.begin();
					jt != it->automationData.end(); ++jt )
		{
			AutomatableModel * m = NULL;
			float value = jt->value;
			bool scale = false;
			switch( jt->control )
			{
				case FL_Automation::ControlVolume:
					m = t->volumeModel();
					value *= ( 100.0f / 128.0f ) / p.versionSpecificFactor;
					break;
				case FL_Automation::ControlPanning:
					m = t->panningModel();
	value = ( value / p.versionSpecificFactor ) *200/128 - PanningRight;
					break;
				case FL_Automation::ControlPitch:
					m = t->pitchModel();
					break;
				case FL_Automation::ControlFXChannel:
					m = t->effectChannelModel();
					value = value*200/128 - PanningRight;
					break;
				case FL_Automation::ControlFilterCut:
					scale = true;
					m = &t->m_soundShaping.m_filterCutModel;
					value /= ( 255 * 2.5f );
					break;
				case FL_Automation::ControlFilterRes:
					scale = true;
					m = &t->m_soundShaping.m_filterResModel;
					value = 0.1f + value / ( 256.0f * 2 );
					break;
				case FL_Automation::ControlFilterType:
					m = &t->m_soundShaping.m_filterModel;
					value = mappedFilter[jt->value];
					break;
				default:
					qDebug( "handling automation data of "
							"control %d not implemented "
							"yet\n", jt->control );
					break;
			}
			if( m )
			{
if( scale )
{
	value = m->minValue<float>() + value *
				( m->maxValue<float>() - m->minValue<float>() );
}
AutomationPattern * p = AutomationPattern::globalAutomationPattern( m );
p->putValue( jt->pos, value, false );
			}
		}

		progressDialog.setValue( ++cur_progress );
		qApp->processEvents();
	}

	// process all effects
	EffectKeyList effKeys;
	Plugin::DescriptorList pluginDescs;
	Plugin::getDescriptorsOfAvailPlugins( pluginDescs );
	for( Plugin::DescriptorList::ConstIterator it = pluginDescs.begin();
											it != pluginDescs.end(); ++it )
	{
		if( it->type != Plugin::Effect )
		{
			continue;
		}
		if( it->subPluginFeatures )
		{
			it->subPluginFeatures->listSubPluginKeys( &( *it ), effKeys );
		}
		else
		{
			effKeys << EffectKey( &( *it ), it->name );
		}
	}

	for( int fx_ch = 0; fx_ch <= NumFLFxChannels ; ++fx_ch )
	{
		FxChannel * ch = Engine::fxMixer()->effectChannel( fx_ch );
		if( !ch )
		{
			continue;
		}
		FL_EffectChannel * flch = &p.effectChannels[fx_ch];
		if( !flch->name.isEmpty() )
		{
			ch->m_name = flch->name;
		}
		ch->m_volumeModel.setValue( flch->volume / 100.0f );
		ch->m_muteModel.setValue( flch->isMuted );
	}

	for( QList<FL_Effect>::ConstIterator it = p.effects.begin();
										it != p.effects.end(); ++it )
	{
		QString effName;
		switch( it->pluginType )
		{
			case FL_Plugin::Fruity7BandEq:
				effName = "C* Eq2x2";
				break;
			case FL_Plugin::FruityBassBoost:
				effName = "BassBooster";
				break;
			case FL_Plugin::FruityChorus:
				effName = "TAP Chorus";
				break;
			case FL_Plugin::FruityCompressor:
				//effName = "C* Compress";
				effName = "Fast Lookahead limiter";
				break;
			case FL_Plugin::FruityDelay:
			case FL_Plugin::FruityDelay2:
//				effName = "Feedback Delay Line (Maximum Delay 5s)";
				break;
			case FL_Plugin::FruityBloodOverdrive:
			case FL_Plugin::FruityFastDist:
			case FL_Plugin::FruitySoftClipper:
				effName = "C* Clip";
				break;
			case FL_Plugin::FruityFastLP:
				effName = "Low Pass Filter";
				break;
			case FL_Plugin::FruityPhaser:
				effName = "C* PhaserI";
				break;
			case FL_Plugin::FruityReeverb:
				effName = "C* Plate2x2";
				break;
			case FL_Plugin::FruitySpectroman:
				effName = "Spectrum Analyzer";
				break;
			default:
				break;
		}
		if( effName.isEmpty() || it->fxChannel < 0 ||
						it->fxChannel > NumFLFxChannels )
		{
			continue;
		}
		EffectChain * ec = &Engine::fxMixer()->
					effectChannel( it->fxChannel )->m_fxChain;
		qDebug( "adding %s to %d\n", effName.toUtf8().constData(),
								it->fxChannel );
		for( EffectKeyList::Iterator jt = effKeys.begin();
						jt != effKeys.end(); ++jt )
		{
			if( QString( jt->desc->displayName ).contains( effName ) ||
				( jt->desc->subPluginFeatures != NULL &&
					jt->name.contains( effName ) ) )
			{
				qDebug( "instantiate %s\n", jt->desc->name );
				::Effect * e = Effect::instantiate( jt->desc->name, ec, &( *jt ) );
				ec->appendEffect( e );
				ec->setEnabled( true );
				break;
			}
		}

		progressDialog.setValue( ++cur_progress );
		qApp->processEvents();
	}



	// process all playlist-items
	for( QList<FL_PlayListItem>::ConstIterator it = p.playListItems.begin();
					it != p.playListItems.end(); ++it )
	{
		if( it->pattern > p.maxPatterns )
		{
			continue;
		}
		TrackContentObject * tco = bb_tracks[it->pattern]->createTCO( MidiTime() );
		tco->movePosition( it->position );
		if( it->length != DefaultTicksPerTact )
		{
			tco->changeLength( it->length );
		}
	}



	// set current pattern
	if( p.activeEditPattern < Engine::getBBTrackContainer()->numOfBBs() )
	{
		Engine::getBBTrackContainer()->setCurrentBB(
							p.activeEditPattern );
	}

	// restore journalling settings
	Engine::projectJournal()->setJournalling( is_journ );

	return true;
}
Example #26
0
MidiTime AutomationPattern::timeMapLength() const
{
	// With no automation points the pattern has no extent.
	if( m_timeMap.isEmpty() )
	{
		return 0;
	}

	// Round the position of the last point up to the next full tact.
	timeMap::const_iterator last = m_timeMap.end();
	--last;
	return MidiTime( MidiTime( last.key() ).nextFullTact(), 0 );
}
Example #27
0
void Song::stop()
{
	// do not stop/reset things again if we're stopped already
	if( m_playMode == Mode_None )
	{
		return;
	}

	TimeLineWidget * tl = m_playPos[m_playMode].m_timeLine;
	m_paused = false;
	// NOTE(review): m_recording is set to *true* when stopping - this
	// looks inverted; confirm it is intentional (e.g. re-arming record
	// for the next session) before changing it.
	m_recording = true;

	if( tl != NULL )
	{

		// the timeline widget decides where the play position ends up
		switch( tl->behaviourAtStop() )
		{
			case TimeLineWidget::BackToZero:
				m_playPos[m_playMode].setTicks( 0 );
				m_elapsedMilliSeconds = 0;
				// scroll the song editor back too, if auto-scroll is on
				if( gui && gui->songEditor() &&
						( tl->autoScroll() == TimeLineWidget::AutoScrollEnabled ) )
				{
					gui->songEditor()->m_editor->updatePosition(0);
				}
				break;

			case TimeLineWidget::BackToStart:
				// jump back to the position stored when playback
				// started; a negative saved position means "none"
				if( tl->savedPos() >= 0 )
				{
					m_playPos[m_playMode].setTicks( tl->savedPos().getTicks() );
					setToTime(tl->savedPos());

					if( gui && gui->songEditor() &&
							( tl->autoScroll() == TimeLineWidget::AutoScrollEnabled ) )
					{
						gui->songEditor()->m_editor->updatePosition( MidiTime(tl->savedPos().getTicks() ) );
					}
					// consume the saved position
					tl->savePos( -1 );
				}
				break;

			case TimeLineWidget::KeepStopPosition:
			default:
				break;
		}
	}
	else
	{
		// no timeline attached - simply rewind to the start
		m_playPos[m_playMode].setTicks( 0 );
		m_elapsedMilliSeconds = 0;
	}
	m_playing = false;

	m_playPos[m_playMode].setCurrentFrame( 0 );

	// keep external VST hosts in sync with our transport state
	m_vstSyncController.setPlaybackState( m_exporting );
	m_vstSyncController.setAbsolutePosition( m_playPos[m_playMode].getTicks() );

	// remove all note-play-handles that are active
	Engine::mixer()->clear();

	m_playMode = Mode_None;

	emit playbackStateChanged();
}
Example #28
0
void MidiAlsaSeq::run()
{
	// watch the pipe and sequencer input events
	int pollfd_count = snd_seq_poll_descriptors_count( m_seqHandle,
								POLLIN );
	struct pollfd * pollfd_set = new struct pollfd[pollfd_count + 1];
	snd_seq_poll_descriptors( m_seqHandle, pollfd_set + 1, pollfd_count,
								POLLIN );
	pollfd_set[0].fd = m_pipe[0];
	pollfd_set[0].events = POLLIN;
	++pollfd_count;

	while( m_quit == false )
	{
		int pollRet = poll( pollfd_set, pollfd_count, EventPollTimeOut );
		if( pollRet == 0 )
		{
			continue;
		}
		else if( pollRet == -1 )
		{
			// gdb may interrupt the poll
			if( errno == EINTR )
			{
				continue;
			}
			qCritical( "error while polling ALSA sequencer handle" );
			break;
		}
		// shutdown?
		if( m_quit )
		{
			break;
		}

		m_seqMutex.lock();

		// while event queue is not empty
		while( snd_seq_event_input_pending( m_seqHandle, true ) > 0 )
		{
			snd_seq_event_t * ev;
			if( snd_seq_event_input( m_seqHandle, &ev ) < 0 )
			{
				m_seqMutex.unlock();

				qCritical( "error while fetching MIDI event from sequencer" );
				break;
			}
			m_seqMutex.unlock();

			snd_seq_addr_t * source = NULL;
			MidiPort * dest = NULL;
			for( int i = 0; i < m_portIDs.size(); ++i )
			{
				if( m_portIDs.values()[i][0] == ev->dest.port )
				{
					dest = m_portIDs.keys()[i];
				}
				if( ( m_portIDs.values()[i][1] != -1 &&
						m_portIDs.values()[i][1] == ev->source.port ) ||
							m_portIDs.values()[i][0] == ev->source.port )
				{
					source = &ev->source;
				}
			}

			if( dest == NULL )
			{
				continue;
			}

			switch( ev->type )
			{
				case SND_SEQ_EVENT_NOTEON:
					dest->processInEvent( MidiEvent( MidiNoteOn,
								ev->data.note.channel,
								ev->data.note.note -
								KeysPerOctave,
								ev->data.note.velocity,
								source
								),
							MidiTime( ev->time.tick ) );
					break;

				case SND_SEQ_EVENT_NOTEOFF:
					dest->processInEvent( MidiEvent( MidiNoteOff,
								ev->data.note.channel,
								ev->data.note.note -
								KeysPerOctave,
								ev->data.note.velocity,
								source
								),
							MidiTime( ev->time.tick) );
					break;

				case SND_SEQ_EVENT_KEYPRESS:
					dest->processInEvent( MidiEvent(
									MidiKeyPressure,
								ev->data.note.channel,
								ev->data.note.note -
								KeysPerOctave,
								ev->data.note.velocity,
								source
								), MidiTime() );
					break;

				case SND_SEQ_EVENT_CONTROLLER:
					dest->processInEvent( MidiEvent(
								MidiControlChange,
							ev->data.control.channel,
							ev->data.control.param,
							ev->data.control.value, source ),
									MidiTime() );
					break;

				case SND_SEQ_EVENT_PGMCHANGE:
					dest->processInEvent( MidiEvent(
								MidiProgramChange,
							ev->data.control.channel,
							ev->data.control.param,
							ev->data.control.value, source ),
									MidiTime() );
					break;

				case SND_SEQ_EVENT_CHANPRESS:
					dest->processInEvent( MidiEvent(
								MidiChannelPressure,
							ev->data.control.channel,
							ev->data.control.param,
							ev->data.control.value, source ),
									MidiTime() );
					break;

				case SND_SEQ_EVENT_PITCHBEND:
					dest->processInEvent( MidiEvent( MidiPitchBend,
							ev->data.control.channel,
							ev->data.control.value + 8192, 0, source ),
									MidiTime() );
					break;

				case SND_SEQ_EVENT_SENSING:
				case SND_SEQ_EVENT_CLOCK:
					break;

				default:
					fprintf( stderr,
						"ALSA-sequencer: unhandled input "
							"event %d\n", ev->type );
					break;
			}	// end switch

			m_seqMutex.lock();

		}	// end while

		m_seqMutex.unlock();

	}

	delete[] pollfd_set;
}
// Dispatch an incoming MIDI event to this track: note events manage the
// per-key NotePlayHandle table, controller/pitch events update the models.
// Anything not handled here is forwarded to the attached instrument.
void InstrumentTrack::processInEvent( const MidiEvent& event, const MidiTime& time, f_cnt_t offset )
{
	bool eventHandled = false;

	switch( event.type() )
	{
		// we don't send MidiNoteOn, MidiNoteOff and MidiKeyPressure
		// events to instrument as NotePlayHandle will send them on its
		// own
		case MidiNoteOn:
			if( event.velocity() > 0 )
			{
				NotePlayHandle* nph;
				m_notesMutex.lock();
				// only start a new handle if no note is
				// currently sounding on this key
				if( m_notes[event.key()] == NULL )
				{
					// near-infinite length; the note ends
					// when the matching NoteOff arrives
					nph = NotePlayHandleManager::acquire( this, offset,
								typeInfo<f_cnt_t>::max() / 2,
								Note( MidiTime(), MidiTime(), event.key(), event.volume( midiPort()->baseVelocity() ) ),
								NULL, event.channel(),
								NotePlayHandle::OriginMidiInput );
					m_notes[event.key()] = nph;
					if( ! Engine::mixer()->addPlayHandle( nph ) )
					{
						// mixer rejected the handle -
						// drop our reference to it
						m_notes[event.key()] = NULL;
					}
				}
				m_notesMutex.unlock();
				eventHandled = true;
				break;
			}
			// intentional fallthrough: a NoteOn with velocity 0
			// is a NoteOff per the MIDI specification

		case MidiNoteOff:
			m_notesMutex.lock();
			if( m_notes[event.key()] != NULL )
			{
				// do actual note off and remove internal reference to NotePlayHandle (which itself will
				// be deleted later automatically)
				m_notes[event.key()]->noteOff( offset );
				m_notes[event.key()] = NULL;
			}
			m_notesMutex.unlock();
			eventHandled = true;
			break;

		case MidiKeyPressure:
			// NOTE(review): m_notes is read here without taking
			// m_notesMutex, unlike the note-on/off cases - confirm
			// whether this is safe on the calling thread
			if( m_notes[event.key()] != NULL )
			{
				// setVolume() calls processOutEvent() with MidiKeyPressure so the
				// attached instrument will receive the event as well
				m_notes[event.key()]->setVolume( event.volume( midiPort()->baseVelocity() ) );
			}
			eventHandled = true;
			break;

		case MidiPitchBend:
			// updatePitch() is connected to m_pitchModel::dataChanged() which will send out
			// MidiPitchBend events
			m_pitchModel.setValue( m_pitchModel.minValue() + event.pitchBend() * m_pitchModel.range() / MidiMaxPitchBend );
			break;

		case MidiControlChange:
			// sustain pedal state (CC 64): pressed above half range
			if( event.controllerNumber() == MidiControllerSustain )
			{
				if( event.controllerValue() > MidiMaxControllerValue/2 )
				{
					m_sustainPedalPressed = true;
				}
				else
				{
					m_sustainPedalPressed = false;
				}
			}
			// channel-mode messages that imply "all notes off"
			if( event.controllerNumber() == MidiControllerAllSoundOff ||
				event.controllerNumber() == MidiControllerAllNotesOff ||
				event.controllerNumber() == MidiControllerOmniOn ||
				event.controllerNumber() == MidiControllerOmniOff ||
				event.controllerNumber() == MidiControllerMonoOn ||
				event.controllerNumber() == MidiControllerPolyOn )
			{
				silenceAllNotes();
			}
			break;

		case MidiMetaEvent:
			// handle special cases such as note panning
			switch( event.metaEvent() )
			{
				case MidiNotePanning:
					if( m_notes[event.key()] != NULL )
					{
						eventHandled = true;
						m_notes[event.key()]->setPanning( event.panning() );
					}
					break;
				default:
					qWarning( "InstrumentTrack: unhandled MIDI meta event: %i", event.metaEvent() );
					break;
			}
			break;

		default:
			break;
	}

	// anything we didn't consume gets a chance to be handled by the
	// instrument plugin itself
	if( eventHandled == false && instrument()->handleMidiEvent( event, time, offset ) == false )
	{
		qWarning( "InstrumentTrack: unhandled MIDI event %d", event.type() );
	}

}
Example #30
0
// Generate arpeggio sub-notes for the given base note: walk the selected
// chord over the configured range/direction, emitting one short
// NotePlayHandle per arp step that falls inside the current audio period.
void InstrumentFunctionArpeggio::processNote( NotePlayHandle * _n )
{
	const int base_note_key = _n->key();
	// never arpeggiate notes we generated ourselves, and do nothing if
	// the arp is disabled or the note is fully released
	if( _n->origin() == NotePlayHandle::OriginArpeggio ||
		_n->origin() == NotePlayHandle::OriginNoteStacking ||
		!m_arpEnabledModel.value() ||
		( _n->isReleased() && _n->releaseFramesDone() >= _n->actualReleaseFramesToDo() ) )
	{
		return;
	}


	const int selected_arp = m_arpModel.value();

	// all notes currently playing on this track (for sorted/sync modes)
	ConstNotePlayHandleList cnphv = NotePlayHandle::nphsOfInstrumentTrack( _n->instrumentTrack() );

	if( m_arpModeModel.value() != FreeMode && cnphv.size() == 0 )
	{
		// maybe we're playing only a preset-preview-note?
		cnphv = PresetPreviewPlayHandle::nphsOfInstrumentTrack( _n->instrumentTrack() );
		if( cnphv.size() == 0 )
		{
			// still nothing found here, so lets return
			//return;
			cnphv.push_back( _n );
		}
	}

	const InstrumentFunctionNoteStacking::ChordTable & chord_table = InstrumentFunctionNoteStacking::ChordTable::getInstance();
	const int cur_chord_size = chord_table[selected_arp].size();
	// number of distinct arp steps = chord size * octave range
	const int range = (int)( cur_chord_size * m_arpRangeModel.value() );
	const int total_range = range * cnphv.size();

	// number of frames that every note should be played
	const f_cnt_t arp_frames = (f_cnt_t)( m_arpTimeModel.value() / 1000.0f * Engine::mixer()->processingSampleRate() );
	const f_cnt_t gated_frames = (f_cnt_t)( m_arpGateModel.value() * arp_frames / 100.0f );

	// used for calculating remaining frames for arp-note, we have to add
	// arp_frames-1, otherwise the first arp-note will not be setup
	// correctly... -> arp_frames frames silence at the start of every note!
	int cur_frame = ( ( m_arpModeModel.value() != FreeMode ) ?
						cnphv.first()->totalFramesPlayed() :
						_n->totalFramesPlayed() ) + arp_frames - 1;
	// used for loop
	f_cnt_t frames_processed = ( m_arpModeModel.value() != FreeMode ) ? cnphv.first()->noteOffset() : _n->noteOffset();

	// walk through the current audio period, arp step by arp step
	while( frames_processed < Engine::mixer()->framesPerPeriod() )
	{
		const f_cnt_t remaining_frames_for_cur_arp = arp_frames - ( cur_frame % arp_frames );
		// does current arp-note fill whole audio-buffer?
		if( remaining_frames_for_cur_arp > Engine::mixer()->framesPerPeriod() )
		{
			// then we don't have to do something!
			break;
		}

		frames_processed += remaining_frames_for_cur_arp;

		// in sorted mode: is it our turn or do we have to be quiet for
		// now?
		if( m_arpModeModel.value() == SortMode &&
				( ( cur_frame / arp_frames ) % total_range ) / range != (f_cnt_t) _n->index() )
		{
			// Set master note if not playing arp note or it will play as an ordinary note
			_n->setMasterNote();
			// update counters
			frames_processed += arp_frames;
			cur_frame += arp_frames;
			continue;
		}

		// Skip notes randomly
		if( m_arpSkipModel.value() )
		{

			if( 100 * ( (float) rand() / (float)( RAND_MAX + 1.0f ) ) < m_arpSkipModel.value() )
			{
				// Set master note to prevent the note to extend over skipped notes
				// This may only be needed for lb302
				_n->setMasterNote();
				// update counters
				frames_processed += arp_frames;
				cur_frame += arp_frames;
				continue;
			}
		}

		int dir = m_arpDirectionModel.value();

		// Miss notes randomly. We intercept int dir and abuse it
		// after need.  :)

		if( m_arpMissModel.value() )
		{
			if( 100 * ( (float) rand() / (float)( RAND_MAX + 1.0f ) ) < m_arpMissModel.value() )
			{
				dir = ArpDirRandom;
			}
		}

		int cur_arp_idx = 0;
		// process according to arpeggio-direction...
		if( dir == ArpDirUp )
		{
			cur_arp_idx = ( cur_frame / arp_frames ) % range;
		}
		else if( dir == ArpDirDown )
		{
			cur_arp_idx = range - ( cur_frame / arp_frames ) %
								range - 1;
		}
		else if( dir == ArpDirUpAndDown && range > 1 )
		{
			// imagine, we had to play the arp once up and then
			// once down -> makes 2 * range possible notes...
			// because we don't play the lower and upper notes
			// twice, we have to subtract 2
			cur_arp_idx = ( cur_frame / arp_frames ) % ( range * 2 - 2 );
			// if greater than range, we have to play down...
			// looks like the code for arp_dir==DOWN... :)
			if( cur_arp_idx >= range )
			{
				cur_arp_idx = range - cur_arp_idx % ( range - 1 ) - 1;
			}
		}
		else if( dir == ArpDirDownAndUp && range > 1 )
		{
			// copied from ArpDirUpAndDown above
			cur_arp_idx = ( cur_frame / arp_frames ) % ( range * 2 - 2 );
			// if greater than range, we have to play down...
			// looks like the code for arp_dir==DOWN... :)
			if( cur_arp_idx >= range )
			{
				cur_arp_idx = range - cur_arp_idx % ( range - 1 ) - 1;
			}
			// inverts direction
			cur_arp_idx = range - cur_arp_idx - 1;
		}
		else if( dir == ArpDirRandom )
		{
			// just pick a random chord-index
			cur_arp_idx = (int)( range * ( (float) rand() / (float) RAND_MAX ) );
		}

		// Cycle notes
		if( m_arpCycleModel.value() && dir != ArpDirRandom )
		{
			cur_arp_idx *= m_arpCycleModel.value() + 1;
			cur_arp_idx %= range;
		}

		// now calculate final key for our arp-note
		const int sub_note_key = base_note_key + (cur_arp_idx / cur_chord_size ) *
							KeysPerOctave + chord_table[selected_arp][cur_arp_idx % cur_chord_size];

		// range-checking
		// NOTE(review): this continue skips the counter updates at the
		// bottom of the loop; frames_processed still advances each
		// pass, but cur_frame does not - confirm this is intended
		if( sub_note_key >= NumKeys ||
			sub_note_key < 0 ||
			Engine::mixer()->criticalXRuns() )
		{
			continue;
		}

		// follow the release envelope of the base note
		float vol_level = 1.0f;
		if( _n->isReleased() )
		{
			vol_level = _n->volumeLevel( cur_frame + gated_frames );
		}

		// create new arp-note

		// create sub-note-play-handle, only ptr to note is different
		// and is_arp_note=true
		Engine::mixer()->addPlayHandle(
				NotePlayHandleManager::acquire( _n->instrumentTrack(),
							frames_processed,
							gated_frames,
							Note( MidiTime( 0 ), MidiTime( 0 ), sub_note_key, (volume_t) qRound( _n->getVolume() * vol_level ),
									_n->getPanning(), _n->detuning() ),
							_n, -1, NotePlayHandle::OriginArpeggio )
				);

		// update counters
		frames_processed += arp_frames;
		cur_frame += arp_frames;
	}

	// make sure note is handled as arp-base-note, even
	// if we didn't add a sub-note so far
	if( m_arpModeModel.value() != FreeMode )
	{
		_n->setMasterNote();
	}
}