Example #1
MidiTime AutomationPattern::putValue( const MidiTime & _time,
							const float _value,
							const bool _quant_pos )
{
	cleanObjects();

	MidiTime newTime = _quant_pos && engine::automationEditor() ?
		note::quantized( _time,
			engine::automationEditor()->quantization() ) :
		_time;

	m_timeMap[newTime] = _value;
	timeMap::const_iterator it = m_timeMap.find( newTime );
	if( it != m_timeMap.begin() )
	{
		it--;
	}
	generateTangents(it, 3);

	// we need to maximize our length in case we're part of a hidden
	// automation track as the user can't resize this pattern
	if( getTrack() && getTrack()->type() == track::HiddenAutomationTrack )
	{
		changeLength( length() );
	}

	emit dataChanged();

	return newTime;
}
Example #2
void AutomationPattern::removeValue( const MidiTime & _time,
									 const bool _quant_pos )
{
	cleanObjects();

	MidiTime newTime = _quant_pos && engine::automationEditor() ?
		note::quantized( _time,
			engine::automationEditor()->quantization() ) :
		_time;

	m_timeMap.remove( newTime );
	m_tangents.remove( newTime );
	timeMap::const_iterator it = m_timeMap.lowerBound( newTime );
	if( it != m_timeMap.begin() )
	{
		it--;
	}
	generateTangents(it, 3);

	if( getTrack() &&
		getTrack()->type() == track::HiddenAutomationTrack )
	{
		changeLength( length() );
	}

	emit dataChanged();
}
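
Both putValue() and removeValue() finish with the same idiom: take the first map entry at or after the edited position, step back to its predecessor if there is one, and regenerate tangents from that neighbour so the curve around the change stays consistent. A minimal standalone sketch of that idiom using std::map instead of LMMS's timeMap (the data and names below are purely illustrative):

#include <iostream>
#include <map>

int main()
{
	std::map<int, float> timeMap = { { 0, 0.0f }, { 48, 1.0f }, { 96, 0.5f } };

	const int removedTime = 48;
	timeMap.erase( removedTime );

	// First element at or after the removed position...
	auto it = timeMap.lower_bound( removedTime );

	// ...then step back to its predecessor, if any, so the
	// neighbourhood of the gap can be re-interpolated from there.
	if( it != timeMap.begin() )
	{
		--it;
	}

	if( it != timeMap.end() )
	{
		std::cout << "regenerate from tick " << it->first << '\n';
	}
	return 0;
}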
Example #3
AutomationPattern::AutomationPattern( AutomationTrack * _auto_track ) :
	TrackContentObject( _auto_track ),
	m_autoTrack( _auto_track ),
	m_objects(),
	m_tension( 1.0 ),
	m_progressionType( DiscreteProgression ),
	m_dragging( false ),
	m_isRecording( false ),
	m_lastRecordedValue( 0 )
{
	changeLength( MidiTime( 1, 0 ) );
	if( getTrack() )
	{
		switch( getTrack()->trackContainer()->type() )
		{
			case TrackContainer::BBContainer:
				setAutoResize( true );
				break;

			case TrackContainer::SongContainer:
				// fall through
			default:
				setAutoResize( false );
				break;
		}
	}
}
Example #4
//--------------------------------------------------------------------------------
void ofxTLUIHeader::onButtonEvent(ofxDatGuiButtonEvent e)
{
    // we have a couple ways to figure out which button was clicked //
    
    // we can compare our button pointer to the target of the event //
//    if (e.target == guiTrackName)
//    {
//        
//        
//    }

    // else // or we can check against the label of the event target //
    cout << "ofxTLUIHeader :: button event from : " << e.target->getLabel() <<endl;

    if(e.target == guiTrackName)
    {
        string newTrackName = ofSystemTextBoxDialog("Track name ?");
        if(newTrackName!="")
        {
            guiTrackName->setLabel(newTrackName);
            getTrack()->setDisplayName(newTrackName);
        }
    }
    
    if (e.target->getLabel() == "OSC OUT")
    {
        modified = true;

    }
    else if(e.target->getLabel() == "OSC IN")
    {
        modified = true;
        
    }
    else if(e.target->getLabel() == "PLAY SOLO")
    {
        //if(e.target->getEnabled())
        {
            cout << "PLAY SOLO PRESSED" << endl;
            modified = true;
            getTrack()->togglePlay();
        }
    }
    else if(e.target->getLabel() == "DELETE")
    {
        setShouldDelete(true);
        modified = true;
    }
    else if(e.target->getLabel()=="OSC ADDRESS     ::")
    {
        cout << "TLUHeader:: OSC Folder Event";
        string add = ofSystemTextBoxDialog("OSC address ?");
        if(add!="") guiOscAddressLabel->setLabel(add);
    }

    
}
Example #5
SampleTCO::SampleTCO( Track * _track ) :
	TrackContentObject( _track ),
	m_sampleBuffer( new SampleBuffer ),
	m_isPlaying( false )
{
	saveJournallingState( false );
	setSampleFile( "" );
	restoreJournallingState();

	// we need to receive bpm-change-events, because then we have to
	// change length of this TCO
	connect( Engine::getSong(), SIGNAL( tempoChanged( bpm_t ) ),
					this, SLOT( updateLength() ), Qt::DirectConnection );
	connect( Engine::getSong(), SIGNAL( timeSignatureChanged( int,int ) ),
					this, SLOT( updateLength() ) );

	//care about positionmarker
	TimeLineWidget * timeLine = Engine::getSong()->getPlayPos( Engine::getSong()->Mode_PlaySong ).m_timeLine;
	if( timeLine )
	{
		connect( timeLine, SIGNAL( positionMarkerMoved() ), this, SLOT( playbackPositionChanged() ) );
	}
	//playbutton clicked or space key / on Export Song set isPlaying to false
	connect( Engine::getSong(), SIGNAL( playbackStateChanged() ),
			this, SLOT( playbackPositionChanged() ), Qt::DirectConnection );
	//care about loops
	connect( Engine::getSong(), SIGNAL( updateSampleTracks() ),
			this, SLOT( playbackPositionChanged() ), Qt::DirectConnection );
	//care about mute TCOs
	connect( this, SIGNAL( dataChanged() ), this, SLOT( playbackPositionChanged() ) );
	//care about mute track
	connect( getTrack()->getMutedModel(), SIGNAL( dataChanged() ),
			this, SLOT( playbackPositionChanged() ), Qt::DirectConnection );
	//care about TCO position
	connect( this, SIGNAL( positionChanged() ), this, SLOT( updateTrackTcos() ) );

	switch( getTrack()->trackContainer()->type() )
	{
		case TrackContainer::BBContainer:
			setAutoResize( true );
			break;

		case TrackContainer::SongContainer:
			// fall through
		default:
			setAutoResize( false );
			break;
	}
	updateTrackTcos();
}
Example #6
void TrackView::editBiasValue(float amount)
{
	SyncDocument *doc = getDocument();

	if (0 == getTrackCount()) {
		QApplication::beep();
		return;
	}

	QRect selection = getSelection();

	doc->beginMacro("bias");
	for (int track = selection.left(); track <= selection.right(); ++track) {
		Q_ASSERT(track < getTrackCount());
		SyncTrack *t = getTrack(track);

		for (int row = selection.top(); row <= selection.bottom(); ++row) {
			if (t->isKeyFrame(row)) {
				SyncTrack::TrackKey k = t->getKeyFrame(row); // copy old key
				k.value += amount; // modify value

				// add sub-command
				doc->setKeyFrame(t, k);
			}
		}
	}
	doc->endMacro();

	dirtyCurrentValue();
}
Example #7
void TrackView::editToggleInterpolationType()
{
	SyncDocument *doc = getDocument();

	if (editTrack < getTrackCount()) {
		SyncTrack *t = getTrack(editTrack);
		QMap<int, SyncTrack::TrackKey> keyMap = t->getKeyMap();

		QMap<int, SyncTrack::TrackKey>::const_iterator it = keyMap.lowerBound(editRow);
		if (it != keyMap.constBegin() && it.key() != editRow)
			--it;

		// check for the end iterator first so it is never dereferenced
		if (it == keyMap.constEnd() || it.key() > editRow) {
			QApplication::beep();
			return;
		}

		// copy and modify
		SyncTrack::TrackKey newKey = *it;
		newKey.type = (SyncTrack::TrackKey::KeyType)
		    ((newKey.type + 1) % SyncTrack::TrackKey::KEY_TYPE_COUNT);

		// apply change to data-set
		doc->setKeyFrame(t, newKey);

		// update user interface
		dirtyCurrentValue();
	} else
		QApplication::beep();
}
Example #8
void TrackView::editEnterValue()
{
	SyncDocument *doc = getDocument();
	if (!lineEdit->isVisible())
		return;

	if (lineEdit->text().length() > 0 && editTrack < getTrackCount()) {
		SyncTrack *t = getTrack(editTrack);

		SyncTrack::TrackKey newKey;
		newKey.type = SyncTrack::TrackKey::STEP;
		newKey.row = editRow;
		if (t->isKeyFrame(editRow))
			newKey = t->getKeyFrame(editRow); // copy old key
		QString text = lineEdit->text();
		text.remove(lineEdit->validator()->locale().groupSeparator()); // workaround QTBUG-40456
		newKey.value = lineEdit->validator()->locale().toFloat(text); // modify value

		doc->setKeyFrame(t, newKey);

		dirtyCurrentValue();
	} else
		QApplication::beep();

	lineEdit->hide();
	setFocus();
}
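
The text.remove( lineEdit->validator()->locale().groupSeparator() ) call above is a workaround for QTBUG-40456: locale-aware float conversion can fail while the text still contains digit-group separators. A small self-contained sketch of the same workaround, assuming Qt 5 and using a German locale purely for illustration:

#include <QDebug>
#include <QLocale>
#include <QString>

int main()
{
	// German locale: '.' groups digits, ',' is the decimal mark.
	QLocale locale( QLocale::German );

	QString text = "1.234,5";
	// Strip group separators before conversion (workaround for QTBUG-40456).
	text.remove( locale.groupSeparator() );

	bool ok = false;
	float value = locale.toFloat( text, &ok );
	qDebug() << ok << value;   // true 1234.5
	return 0;
}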
Example #9
void AutomationTrackView::dropEvent( QDropEvent * _de )
{
	QString type = StringPairDrag::decodeKey( _de );
	QString val = StringPairDrag::decodeValue( _de );
	if( type == "automatable_model" )
	{
		AutomatableModel * mod = dynamic_cast<AutomatableModel *>(
				Engine::projectJournal()->
					journallingObject( val.toInt() ) );
		if( mod != NULL )
		{
			MidiTime pos = MidiTime( trackContainerView()->currentPosition() +
					( _de->pos().x() - getTrackContentWidget()->x() ) *
						MidiTime::ticksPerTact() /
						static_cast<int>( trackContainerView()->pixelsPerTact() ) )
				.toAbsoluteTact();

			if( pos.getTicks() < 0 )
			{
				pos.setTicks( 0 );
			}

			TrackContentObject * tco = getTrack()->createTCO( pos );
			AutomationPattern * pat = dynamic_cast<AutomationPattern *>( tco );
			pat->addObject( mod );
			pat->movePosition( pos );
		}
	}

	update();
}
Example #10
  void Hunter<MuonAccuracy, SingleAccuracy, EntryAccuracy>::processCurrentEntry(InfoAccessor& infoAccessor, cereal::BinaryOutputArchive& outputArchive){

    auto flavour = infoAccessor.getFlavour(entrySorter);
    muonShowerWindow.setEndTime(infoAccessor.getEntry().triggerTime + 1);

    if(flavour == Flavour::Muon){

      auto muon = infoAccessor.getAsMuon<MuonAccuracy>();
      if(muon.getTrack() != Segment<MuonAccuracy>())
        muonShowerWindow.emplaceEvent(std::move(muon), neutronTimeWindowBounds);

    }
    else if(flavour == Flavour::Neutron){

      auto neutron = infoAccessor.getAsSingle<SingleAccuracy>();
      pairSeeker.catchDelayed(neutron);

      if(pairSeeker.caughtDelayed()){

        outputArchive(CandidateTree<SingleAccuracy,MuonAccuracy>(pairSeeker.getCandidatePair(), muonShowerWindow));
        pairSeeker.reset();

      }
      else{

        for(auto& muonShower : muonShowerWindow)
          muonShower.emplaceFollower(neutron);

      }

    }
    else if(flavour == Flavour::Candidate) pairSeeker.catchPrompt(infoAccessor.getAsSingle<SingleAccuracy>());

  }
Example #11
/** Finds the starting position which is closest to the kart.
 *  \param kart The kart for which a rescue position needs to be determined.
 */
unsigned int WorldWithRank::getRescuePositionIndex(AbstractKart *kart)
{
    // find closest point to drop kart on
    const int start_spots_amount = getNumberOfRescuePositions();
    assert(start_spots_amount > 0);

    int closest_id = -1;
    float closest_distance = 999999999.0f;

    for (int n=0; n<start_spots_amount; n++)
    {
        const btTransform &s = getTrack()->getStartTransform(n);
        const Vec3 &v = s.getOrigin();

        float abs_distance = (v - kart->getXYZ()).length();

        if (abs_distance < closest_distance)
        {
            closest_distance = abs_distance;
            closest_id = n;
        }
    }

    assert(closest_id != -1);
    return closest_id;
}   // getRescuePositionIndex
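
Stripped of the kart and track plumbing, getRescuePositionIndex() is a plain linear nearest-point search over the start transforms. A self-contained sketch of the same logic (the Vec3 struct and the sample data below are made up for illustration):

#include <cassert>
#include <cmath>
#include <vector>

struct Vec3 { float x, y, z; };

static float distance(const Vec3 &a, const Vec3 &b)
{
    float dx = a.x - b.x, dy = a.y - b.y, dz = a.z - b.z;
    return std::sqrt(dx * dx + dy * dy + dz * dz);
}

// Returns the index of the spot closest to pos; spots must not be empty.
static unsigned int closestSpot(const std::vector<Vec3> &spots, const Vec3 &pos)
{
    assert(!spots.empty());

    int closest_id = -1;
    float closest_distance = 999999999.0f;

    for (unsigned int n = 0; n < spots.size(); n++)
    {
        float d = distance(spots[n], pos);
        if (d < closest_distance)
        {
            closest_distance = d;
            closest_id = n;
        }
    }

    assert(closest_id != -1);
    return closest_id;
}

int main()
{
    std::vector<Vec3> spots = { {0, 0, 0}, {5, 0, 0}, {1, 1, 0} };
    return (int)closestSpot(spots, {0.9f, 0.9f, 0.0f});   // picks index 2
}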
Example #12
SampleTCO::SampleTCO( Track * _track ) :
	TrackContentObject( _track ),
	m_sampleBuffer( new SampleBuffer )
{
	saveJournallingState( false );
	setSampleFile( "" );
	restoreJournallingState();

	// we need to receive bpm-change-events, because then we have to
	// change length of this TCO
	connect( Engine::getSong(), SIGNAL( tempoChanged( bpm_t ) ),
					this, SLOT( updateLength( bpm_t ) ) );
	switch( getTrack()->trackContainer()->type() )
	{
		case TrackContainer::BBContainer:
			setAutoResize( true );
			break;

		case TrackContainer::SongContainer:
			// fall through
		default:
			setAutoResize( false );
			break;
	}
}
Example #13
void K3b::AudioDoc::addTracks( const QList<QUrl>& urls, int position )
{
    QList<QUrl> allUrls = extractUrlList( K3b::convertToLocalUrls(urls) );
    QList<QUrl>::iterator end( allUrls.end());
    for( QList<QUrl>::iterator it = allUrls.begin(); it != end; it++, position++ ) {
        QUrl& url = *it;
        if( url.toLocalFile().right(3).toLower() == "cue" ) {
            // try adding a cue file
            if( K3b::AudioTrack* newAfter = importCueFile( url.toLocalFile(), getTrack(position) ) ) {
                position = newAfter->trackNumber();
                continue;
            }
        }

        if( K3b::AudioTrack* track = createTrack( url ) ) {
            addTrack( track, position );

            K3b::AudioDecoder* dec = static_cast<K3b::AudioFile*>( track->firstSource() )->decoder();
            track->setTitle( dec->metaInfo( K3b::AudioDecoder::META_TITLE ) );
            track->setArtist( dec->metaInfo( K3b::AudioDecoder::META_ARTIST ) );
            track->setSongwriter( dec->metaInfo( K3b::AudioDecoder::META_SONGWRITER ) );
            track->setComposer( dec->metaInfo( K3b::AudioDecoder::META_COMPOSER ) );
            track->setCdTextMessage( dec->metaInfo( K3b::AudioDecoder::META_COMMENT ) );
        }
    }

    emit changed();
}
Example #14
status NISTFile::writeHeader()
{
    Track *track = getTrack();

    char header[NIST_SPHERE_HEADER_LENGTH];
    int printed = snprintf(header, NIST_SPHERE_HEADER_LENGTH,
                           "NIST_1A\n   1024\n"
                           "channel_count -i %d\n"
                           "sample_count -i %d\n"
                           "sample_rate -i %d\n"
                           "sample_n_bytes -i %d\n"
                           "sample_byte_format -s%d %s\n"
                           "sample_sig_bits -i %d\n"
                           "sample_coding -s%d %s\n"
                           "end_head\n",
                           track->f.channelCount,
                           (int) track->totalfframes,
                           (int) track->f.sampleRate,
                           (int) _af_format_sample_size(&track->f, false),
                           (int) _af_format_sample_size(&track->f, false), sample_byte_format(&track->f),
                           track->f.sampleWidth,
                           (int) strlen(sample_coding(&track->f)), sample_coding(&track->f));

    /* Fill the remaining space in the buffer with space characters. */
    if (printed < NIST_SPHERE_HEADER_LENGTH)
        memset(header + printed, ' ', NIST_SPHERE_HEADER_LENGTH - printed);

    return m_fh->write(header, NIST_SPHERE_HEADER_LENGTH) == NIST_SPHERE_HEADER_LENGTH ? AF_SUCCEED : AF_FAIL;
}
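
The useful pattern here is independent of libaudiofile: snprintf() the fields into a fixed-size buffer, then pad the unused tail with spaces so the header always occupies exactly NIST_SPHERE_HEADER_LENGTH bytes. A minimal standalone sketch (the 1024-byte length and the field values are illustrative):

#include <cstdio>
#include <cstring>

int main()
{
	const int kHeaderLength = 1024;   // fixed size, matching the "1024" field below
	char header[kHeaderLength];

	int printed = snprintf( header, kHeaderLength,
	                        "NIST_1A\n   1024\n"
	                        "channel_count -i %d\n"
	                        "sample_rate -i %d\n"
	                        "end_head\n",
	                        2, 44100 );

	// Pad the remainder with spaces so the header keeps its fixed size.
	if ( printed > 0 && printed < kHeaderLength )
		memset( header + printed, ' ', kHeaderLength - printed );

	fwrite( header, 1, kHeaderLength, stdout );
	return 0;
}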
Example #15
void pattern::updateBBTrack()
{
	if( getTrack()->trackContainer() == engine::getBBTrackContainer() )
	{
		engine::getBBTrackContainer()->updateBBTrack( this );
	}
}
Example #16
void SmackerDecoder::handleAudioTrack(byte track, uint32 chunkSize, uint32 unpackedSize) {
	if (_header.audioInfo[track].hasAudio && chunkSize > 0 && track == 0) {
		// Get the audio track, which starts at offset 1 (the first track is video)
		SmackerAudioTrack *audioTrack = (SmackerAudioTrack *)getTrack(track + 1);

		// If it's track 0, play the audio data
		byte *soundBuffer = (byte *)malloc(chunkSize + 1);
		// Padding to keep the SmallHuffmanTrees from reading past the data end
		soundBuffer[chunkSize] = 0x00;

		_fileStream->read(soundBuffer, chunkSize);

		if (_header.audioInfo[track].compression == kCompressionRDFT || _header.audioInfo[track].compression == kCompressionDCT) {
			// TODO: Compressed audio (Bink RDFT/DCT encoded)
			free(soundBuffer);
			return;
		} else if (_header.audioInfo[track].compression == kCompressionDPCM) {
			// Compressed audio (Huffman DPCM encoded)
			audioTrack->queueCompressedBuffer(soundBuffer, chunkSize + 1, unpackedSize);
			free(soundBuffer);
		} else {
			// Uncompressed audio (PCM)
			audioTrack->queuePCM(soundBuffer, chunkSize);
		}
	} else {
		// Ignore the rest of the audio tracks, if they exist
		// TODO: Are there any Smacker videos with more than one audio stream?
		// If yes, we should play the rest of the audio streams as well
		if (chunkSize > 0)
			_fileStream->skip(chunkSize);
	}
}
Example #17
bool MoviePlayerDXA::processFrame() {
	Graphics::Surface *screen = _vm->_system->lockScreen();
	copyFrameToBuffer((byte *)screen->getPixels(), (_vm->_screenWidth - getWidth()) / 2, (_vm->_screenHeight - getHeight()) / 2, screen->pitch);
	_vm->_system->unlockScreen();

	uint32 soundTime = _mixer->getSoundElapsedTime(_bgSound);
	uint32 nextFrameStartTime = ((Video::VideoDecoder::VideoTrack *)getTrack(0))->getNextFrameStartTime();

	if ((_bgSoundStream == NULL) || soundTime < nextFrameStartTime) {

		if (_bgSoundStream && _mixer->isSoundHandleActive(_bgSound)) {
			while (_mixer->isSoundHandleActive(_bgSound) && soundTime < nextFrameStartTime) {
				_vm->_system->delayMillis(10);
				soundTime = _mixer->getSoundElapsedTime(_bgSound);
			}
			// In case the background sound ends prematurely, update
			// _ticks so that we can still fall back on the no-sound
			// sync case for the subsequent frames.
			_ticks = _vm->_system->getMillis();
		} else {
			_ticks += getTimeToNextFrame();
			while (_vm->_system->getMillis() < _ticks)
				_vm->_system->delayMillis(10);
		}

		return true;
	}

	warning("dropped frame %i", getCurFrame());
	return false;
}
Example #18
Pattern::Pattern( const Pattern& other ) :
	TrackContentObject( other.m_instrumentTrack ),
	m_instrumentTrack( other.m_instrumentTrack ),
	m_patternType( other.m_patternType ),
	m_steps( other.m_steps )
{
	for( NoteVector::ConstIterator it = other.m_notes.begin(); it != other.m_notes.end(); ++it )
	{
		m_notes.push_back( new Note( **it ) );
	}

	init();
	switch( getTrack()->trackContainer()->type() )
	{
		case TrackContainer::BBContainer:
			setAutoResize( true );
			break;

		case TrackContainer::SongContainer:
			// fall through
		default:
			setAutoResize( false );
			break;
	}
}
Example #19
status IFFFile::writeBODY()
{
	uint32_t chunkSize;

	Track *track = getTrack();

	if (m_BODY_offset == 0)
		m_BODY_offset = m_fh->tell();
	else
		m_fh->seek(m_BODY_offset, File::SeekFromBeginning);

	m_fh->write("BODY", 4);

	/*
		IFF/8SVX supports only one channel, so the number of
		frames is equal to the number of samples, and each
		sample is one byte.
	*/
	chunkSize = track->totalfframes;
	writeU32(&chunkSize);

	if (track->fpos_first_frame == 0)
		track->fpos_first_frame = m_fh->tell();

	/* Add a pad byte to the end of the chunk if the chunk size is odd. */
	if ((chunkSize % 2) == 1)
	{
		uint8_t zero = 0;
		m_fh->seek(m_BODY_offset + 8 + chunkSize, File::SeekFromBeginning);
		writeU8(&zero);
	}

	return AF_SUCCEED;
}
Example #20
AutomationPattern::AutomationPattern( const AutomationPattern & _pat_to_copy ) :
	TrackContentObject( _pat_to_copy.m_autoTrack ),
	m_autoTrack( _pat_to_copy.m_autoTrack ),
	m_objects( _pat_to_copy.m_objects ),
	m_tension( _pat_to_copy.m_tension ),
	m_progressionType( _pat_to_copy.m_progressionType )
{
	for( timeMap::const_iterator it = _pat_to_copy.m_timeMap.begin();
				it != _pat_to_copy.m_timeMap.end(); ++it )
	{
		m_timeMap[it.key()] = it.value();
		m_tangents[it.key()] = _pat_to_copy.m_tangents[it.key()];
	}
	switch( getTrack()->trackContainer()->type() )
	{
		case TrackContainer::BBContainer:
			setAutoResize( true );
			break;

		case TrackContainer::SongContainer:
			// fall through
		default:
			setAutoResize( false );
			break;
	}
}
Example #21
status WAVEFile::writeFrameCount()
{
	uint32_t factSize = 4;
	uint32_t totalFrameCount;

	Track *track = getTrack();

	/* Omit the fact chunk only for uncompressed integer audio formats. */
	if (track->f.compressionType == AF_COMPRESSION_NONE &&
		(track->f.sampleFormat == AF_SAMPFMT_TWOSCOMP ||
		track->f.sampleFormat == AF_SAMPFMT_UNSIGNED))
		return AF_SUCCEED;

	/*
		If the offset for the fact chunk hasn't been set yet,
		set it to the file's current position.
	*/
	if (factOffset == 0)
		factOffset = fh->tell();
	else
		fh->seek(factOffset, File::SeekFromBeginning);

	fh->write("fact", 4);
	writeU32(&factSize);

	totalFrameCount = track->totalfframes;
	writeU32(&totalFrameCount);

	return AF_SUCCEED;
}
Example #22
/*
	Parse voice header chunk.
*/
status IFFFile::parseVHDR(const Tag &type, size_t size)
{
	assert(type == "VHDR");

	Track *track = getTrack();

	uint32_t oneShotSamples, repeatSamples, samplesPerRepeat;
	uint16_t sampleRate;
	uint8_t octaves, compression;
	uint32_t volume;

	readU32(&oneShotSamples);
	readU32(&repeatSamples);
	readU32(&samplesPerRepeat);
	readU16(&sampleRate);
	readU8(&octaves);
	readU8(&compression);
	readU32(&volume);

	track->f.sampleWidth = 8; 
	track->f.sampleRate = sampleRate;
	track->f.sampleFormat = AF_SAMPFMT_TWOSCOMP;
	track->f.compressionType = AF_COMPRESSION_NONE;
	track->f.byteOrder = AF_BYTEORDER_BIGENDIAN;
	track->f.channelCount = 1;

	track->f.framesPerPacket = 1;
	track->f.computeBytesPerPacketPCM();

	_af_set_sample_format(&track->f, track->f.sampleFormat, track->f.sampleWidth);

	return AF_SUCCEED;
}
Example #23
/** Initializes the soccer world. It sets up the data structure
 *  to keep track of points etc. for each kart.
 */
void SoccerWorld::init()
{
    m_kart_team_map.clear();
    m_kart_position_map.clear();
    WorldWithRank::init();
    m_display_rank = false;
    m_goal_timer = 0.0f;
    m_ball_hitter = -1;
    m_ball = NULL;
    m_ball_body = NULL;
    m_goal_target = race_manager->getMaxGoal();
    m_goal_sound = SFXManager::get()->createSoundSource("goal_scored");

    TrackObjectManager* tom = getTrack()->getTrackObjectManager();
    assert(tom);
    PtrVector<TrackObject>& objects = tom->getObjects();
    for (unsigned int i = 0; i < objects.size(); i++)
    {
        TrackObject* obj = objects.get(i);
        if(!obj->isSoccerBall())
            continue;
        m_ball = obj;
        m_ball_body = m_ball->getPhysicalObject()->getBody();
        // Handle one ball only
        break;
    }
    if (!m_ball)
        Log::fatal("SoccerWorld","Ball is missing in soccer field, abort.");

    m_bgd.init(m_ball->getPhysicalObject()->getRadius());

}   // init
Example #24
void AVIDecoder::readNextPacket() {
	uint32 nextTag = _fileStream->readUint32BE();
	uint32 size = _fileStream->readUint32LE();

	if (_fileStream->eos())
		return;

	if (nextTag == ID_LIST) {
		// A list of audio/video chunks
		int32 startPos = _fileStream->pos();

		if (_fileStream->readUint32BE() != ID_REC)
			error("Expected 'rec ' LIST");

		size -= 4; // subtract list type

		// Decode chunks in the list
		while (_fileStream->pos() < startPos + (int32)size)
			readNextPacket();

		return;
	} else if (nextTag == ID_JUNK || nextTag == ID_IDX1) {
		skipChunk(size);
		return;
	}

	Track *track = getTrack(getStreamIndex(nextTag));

	if (!track)
		error("Cannot get track from tag '%s'", tag2str(nextTag));

	Common::SeekableReadStream *chunk = 0;

	if (size != 0) {
		chunk = _fileStream->readStream(size);
		_fileStream->skip(size & 1);
	}

	if (track->getTrackType() == Track::kTrackTypeAudio) {
		if (getStreamType(nextTag) != kStreamTypeAudio)
			error("Invalid audio track tag '%s'", tag2str(nextTag));

		assert(chunk);
		((AVIAudioTrack *)track)->queueSound(chunk);
	} else {
		AVIVideoTrack *videoTrack = (AVIVideoTrack *)track;

		if (getStreamType(nextTag) == kStreamTypePaletteChange) {
			// Palette Change
			videoTrack->loadPaletteFromChunk(chunk);
		} else if (getStreamType(nextTag) == kStreamTypeRawVideo) {
			// TODO: Check if this really is uncompressed. Many videos
			// falsely put compressed data in here.
			error("Uncompressed AVI frame found");
		} else {
			// Otherwise, assume it's a compressed frame
			videoTrack->decodeFrame(chunk);
		}
	}
}
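
At the byte level, every chunk this loop dispatches on is just a big-endian FourCC tag followed by a little-endian size, with the payload padded to an even length. A minimal standalone walker over the top-level chunks of a RIFF/AVI file, assuming plain stdio in place of ScummVM's stream classes:

#include <cstdint>
#include <cstdio>

static bool readU32BE(FILE *f, uint32_t *v) {
	uint8_t b[4];
	if (fread(b, 1, 4, f) != 4) return false;
	*v = (uint32_t(b[0]) << 24) | (uint32_t(b[1]) << 16) | (uint32_t(b[2]) << 8) | b[3];
	return true;
}

static bool readU32LE(FILE *f, uint32_t *v) {
	uint8_t b[4];
	if (fread(b, 1, 4, f) != 4) return false;
	*v = (uint32_t(b[3]) << 24) | (uint32_t(b[2]) << 16) | (uint32_t(b[1]) << 8) | b[0];
	return true;
}

int main(int argc, char **argv) {
	if (argc < 2) return 1;
	FILE *f = fopen(argv[1], "rb");
	if (!f) return 1;

	// Skip the 12-byte RIFF header: "RIFF" <file size> "AVI ".
	fseek(f, 12, SEEK_SET);

	// Walk the top-level chunks: big-endian tag, little-endian size,
	// and chunk data padded to an even number of bytes.
	uint32_t tag, size;
	while (readU32BE(f, &tag) && readU32LE(f, &size)) {
		printf("%c%c%c%c  %u bytes\n",
		       char(tag >> 24), char(tag >> 16), char(tag >> 8), char(tag),
		       (unsigned)size);
		fseek(f, long(size + (size & 1)), SEEK_CUR);
	}

	fclose(f);
	return 0;
}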
Example #25
//-----------------------------------------------------------------------------
// 
// VEvent::setTriggerTime( pTime );
// 
// Apply the given trigger time to the object.
// 
// If the project was built using the VT_EDITOR preprocessor argument, then
// the validity of the passed value is verified. It also cannot be changed
// while the controller is playing.
// 
//-----------------------------------------------------------------------------
void VEvent::setTriggerTime( const S32 &pTime )
{
#ifdef VT_EDITOR

    VTrack *track = getTrack();
    if ( !track )
    {
        // Apply Time.
        mTriggerTime = pTime;

        return;
    }

    if ( track->isControllerPlaying() )
    {
        // Don't Change While Playing.
        return;
    }

    /*
    // Check For Overlap.
    for ( ITreeNode *node = mChildNode; node != NULL; node = node->mSiblingNextNode )
    {
        VEvent *event = ( VEvent* )node;
        if ( event == this )
        {
            // Skip.
            continue;
        }

        const U32 startTime  = getStartTime();
        const U32 finishTime = getFinishTime();

        if ( ( pTime > startTime && pTime < finishTime )
             || ( ( pTime + mDuration ) > startTime && ( pTime + mDuration ) < finishTime )
             || ( pTime < startTime && ( pTime + mDuration ) > finishTime ) )
        {
            // Overlap!
            return;
        }
    }
    */

    // Apply Time.
    mTriggerTime = mClamp( pTime, 0, getControllerDuration() );

    // Sort Events.
    track->sort();

    // Reset Track.
    track->onControllerReset( getControllerTime(), isControllerPlayingForward() );

#else

    // Apply Time.
    mTriggerTime = pTime;

#endif
}
Example #26
const Common::List<Common::Rect> *FlicDecoder::getDirtyRects() const {
	const Track *track = getTrack(0);

	if (track)
		return ((const FlicVideoTrack *)track)->getDirtyRects();

	return 0;
}
Example #27
/*! \brief Create and assign a new FX Channel for this track */
void InstrumentTrackView::createFxLine()
{
	int channelIndex = gui->fxMixerView()->addNewChannel();

	Engine::fxMixer()->effectChannel( channelIndex )->m_name = getTrack()->name();

	assignFxLine(channelIndex);
}
Example #28
void SampleTCO::updateTrackTcos()
{
	SampleTrack * sampletrack = dynamic_cast<SampleTrack*>( getTrack() );
	if( sampletrack)
	{
		sampletrack->updateTcos();
	}
}
Example #29
/* Parse an adtl sub-chunk within a LIST chunk. */
status WAVEFile::parseADTLSubChunk(const Tag &id, uint32_t size)
{
	Track *track = getTrack();

	AFfileoffset endPos = fh->tell() + size;

	while (fh->tell() < endPos)
	{
		Tag chunkID;
		uint32_t chunkSize;

		readTag(&chunkID);
		readU32(&chunkSize);

		if (chunkID == "labl" || chunkID == "note")
		{
			uint32_t id;
			long length=chunkSize-4;
			char *p = (char *) _af_malloc(length);

			readU32(&id);
			fh->read(p, length);

			Marker *marker = track->getMarker(id);

			if (marker)
			{
				if (chunkID == "labl")
				{
					free(marker->name);
					marker->name = p;
				}
				else if (chunkID == "note")
				{
					free(marker->comment);
					marker->comment = p;
				}
				else
					free(p);
			}
			else
				free(p);

			/*
				If chunkSize is odd, skip an extra byte
				at the end of the chunk.
			*/
			if ((chunkSize % 2) != 0)
				fh->seek(1, File::SeekFromCurrent);
		}
		else
		{
			/* If chunkSize is odd, skip an extra byte. */
			fh->seek(chunkSize + (chunkSize % 2), File::SeekFromCurrent);
		}
	}
	return AF_SUCCEED;
}
Example #30
void TrackView::paintTrack(QStylePainter &painter, const QRegion &region, int track)
{
	const QRect &rect = region.boundingRect();
	int firstRow = qBound(0, getRowFromPhysicalY(qMax(rect.top(), topMarginHeight)), getRows() - 1);
	int lastRow = qBound(0, getRowFromPhysicalY(qMax(rect.bottom(), topMarginHeight)), getRows() - 1);

	QRect selection = getSelection();

	const SyncTrack *t = getTrack(track);

	for (int row = firstRow; row <= lastRow; ++row) {
		QRect patternDataRect(getPhysicalX(track), getPhysicalY(row), trackWidth, rowHeight);
		if (!region.intersects(patternDataRect))
			continue;

		const SyncTrack::TrackKey *key = t->getPrevKeyFrame(row);

		SyncTrack::TrackKey::KeyType interpolationType = key ? key->type : SyncTrack::TrackKey::STEP;
		bool selected = selection.contains(track, row);

		QBrush baseBrush = bgBaseBrush;
		QBrush darkBrush = bgDarkBrush;

		if (selected) {
			baseBrush = selectBaseBrush;
			darkBrush = selectDarkBrush;
		}

		QBrush bgBrush = (row % 8 == 0) ? darkBrush : baseBrush;

		QRect fillRect = patternDataRect;
		painter.fillRect(fillRect, bgBrush);
		if (row % 8 == 0) {
			painter.setPen(selected ? rowSelectPen : rowPen);
			painter.drawLine(QPointF(patternDataRect.left() + 0.5, patternDataRect.top() + 0.5),
			                 QPointF(patternDataRect.right() + 0.5, patternDataRect.top() + 0.5));
		}

		if (interpolationType != SyncTrack::TrackKey::STEP) {
			painter.setPen(getInterpolationPen(interpolationType));
			painter.drawLine(QPoint(patternDataRect.right(), patternDataRect.top() + 1),
			                 QPoint(patternDataRect.right(), patternDataRect.bottom()));
		}

		if (row == editRow && track == editTrack) {
			QRectF selectRect = QRectF(patternDataRect).adjusted(0.5, 0.5, -0.5, -0.5);
			painter.setPen(QColor(0, 0, 0));
			painter.drawRect(selectRect);
		}

		painter.setPen(selected ?
		    palette().color(QPalette::HighlightedText) :
		    palette().color(QPalette::WindowText));
		painter.drawText(patternDataRect, t->isKeyFrame(row) ?
		                 QString::number(t->getKeyFrame(row).value, 'f', 2) :
		                 "  ---");
	}
}