Example #1
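// Loads a dropped audio file as the new source, seeks back to the start, hands it to the active player (BufferPlayerNode or FilePlayerNode), and updates the loop sliders' range.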
void SamplePlayerNodeTestApp::fileDrop( FileDropEvent event )
{
	const fs::path &filePath = event.getFile( 0 );
	CI_LOG_V( "File dropped: " << filePath );

	setSourceFile( loadFile( filePath ) );
	mSamplePlayerNode->seek( 0 );

	CI_LOG_V( "output samplerate: " << mSourceFile->getSampleRate() );

	auto bufferPlayer = dynamic_pointer_cast<audio::BufferPlayerNode>( mSamplePlayerNode );
	if( bufferPlayer ) {
		bufferPlayer->loadBuffer( mSourceFile );
		mWaveformPlot.load( bufferPlayer->getBuffer(), getWindowBounds() );
	}
	else {
		auto filePlayer = dynamic_pointer_cast<audio::FilePlayerNode>( mSamplePlayerNode );
		CI_ASSERT_MSG( filePlayer, "expected sample player to be either BufferPlayerNode or FilePlayerNode" );

		filePlayer->setSourceFile( mSourceFile );
	}

	mLoopBeginSlider.mMax = mLoopEndSlider.mMax = (float)mSamplePlayerNode->getNumSeconds();

	CI_LOG_V( "loaded and set new source buffer, channels: " << mSourceFile->getNumChannels() << ", frames: " << mSourceFile->getNumFrames() );
	PRINT_GRAPH( audio::master() );
}
Example #2
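// Rebuilds the graph around a FilePlayerNode (optionally reading the file asynchronously), reusing the existing MonitorNode if one was already created, and restores the loop settings from the UI.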
void SamplePlayerNodeTestApp::setupFilePlayerNode()
{
	mGain->disconnectAllInputs();

	auto ctx = audio::master();

//	mSourceFile->setMaxFramesPerRead( 8192 );
	bool asyncRead = mAsyncButton.mEnabled;
	CI_LOG_V( "async read: " << asyncRead );
	mSamplePlayerNode = ctx->makeNode( new audio::FilePlayerNode( mSourceFile, asyncRead ) );

	// TODO: it is surprising if you recreate mMonitor here without checking whether one has already been added:
	//	the user no longer sees the old mMonitor, but the context still owns a reference to it, so another gets added each time this method is called.
	if( ! mMonitor )
		mMonitor = ctx->makeNode( new audio::MonitorNode( audio::MonitorNode::Format().windowSize( 1024 ) ) );

	// when these connections are made, some nodes (GainNode and Pan) will already be connected, but that is okay; they silently no-op.

	// the monitor is connected in series off the pan (it is added to the Context's 'auto pulled list')
	mSamplePlayerNode >> mGain >> mPan >> ctx->getOutput();
	mPan >> mMonitor;

	mSamplePlayerNode->setLoopEnabled( mLoopButton.mEnabled );
	mSamplePlayerNode->setLoopBeginTime( mLoopBeginSlider.mValueScaled );
	mSamplePlayerNode->setLoopEndTime( mLoopEndSlider.mValueScaled != 0 ? mLoopEndSlider.mValueScaled : mSamplePlayerNode->getNumSeconds() );

	PRINT_GRAPH( audio::master() );
}
Example #3
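// Routes a drag to whichever slider it hits (gain, pan, loop begin, loop end); drags over the lower half of the window seek within the sample.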
void SamplePlayerNodeTestApp::processDrag( Vec2i pos )
{
	if( mGainSlider.hitTest( pos ) )
		mGain->setValue( mGainSlider.mValueScaled );
	else if( mPanSlider.hitTest( pos ) )
		mPan->setPos( mPanSlider.mValueScaled );
	else if( mLoopBeginSlider.hitTest( pos ) )
		mSamplePlayerNode->setLoopBeginTime( mLoopBeginSlider.mValueScaled );
	else if( mLoopEndSlider.hitTest( pos ) )
		mSamplePlayerNode->setLoopEndTime( mLoopEndSlider.mValueScaled );
	else if( pos.y > getWindowCenter().y )
		seek( pos.x );
}
Example #4
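// Keyboard shortcuts: 'c' runs the converter test, 'w' runs the write test, 's' seeks to 1.0 second.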
void SamplePlayerNodeTestApp::keyDown( KeyEvent event )
{
	if( event.getCode() == KeyEvent::KEY_c )
		testConverter();
	if( event.getCode() == KeyEvent::KEY_w )
		testWrite();
	if( event.getCode() == KeyEvent::KEY_s )
		mSamplePlayerNode->seekToTime( 1.0 );
}
Example #5
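// Newer variant of processDrag(): pan can be applied as a 0.6 second ramp when TEST_STEREO_INPUT_PANNING is defined, and the trigger-delay slider is handled here as well.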
void SamplePlayerNodeTestApp::processDrag( ivec2 pos )
{
	if( mGainSlider.hitTest( pos ) )
		mGain->setValue( mGainSlider.mValueScaled );
	else if( mPanSlider.hitTest( pos ) ) {
#if TEST_STEREO_INPUT_PANNING
		mPan->getParamPos()->applyRamp( mPanSlider.mValueScaled, 0.6f );
#else
		mPan->setPos( mPanSlider.mValueScaled );
#endif
	}
	else if( mLoopBeginSlider.hitTest( pos ) )
		mSamplePlayerNode->setLoopBeginTime( mLoopBeginSlider.mValueScaled );
	else if( mLoopEndSlider.hitTest( pos ) )
		mSamplePlayerNode->setLoopEndTime( mLoopEndSlider.mValueScaled );
	else if( mTriggerDelaySlider.hitTest( pos ) ) {
		// nothing else to do here; the slider's value is read in triggerStartStop()
	}
	else if( pos.y > getWindowCenter().y )
		seek( pos.x );
}
Example #6
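// Starts or stops the player, either immediately or scheduled 'delaySeconds' ahead of the context's current processed time.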
void SamplePlayerNodeTestApp::triggerStartStop( bool start )
{
	float delaySeconds = mTriggerDelaySlider.mValueScaled;
	if( delaySeconds <= 0.001f ) {
		if( start )
			mSamplePlayerNode->start();
		else
			mSamplePlayerNode->stop();
	}
	else {
		CI_LOG_V( "scheduling " << ( start ? "start" : "stop" ) << " with delay: " << delaySeconds
				 << "\n\tprocessed frames: " << audio::master()->getNumProcessedFrames() << ", seconds: " << audio::master()->getNumProcessedSeconds() );

		double when = audio::master()->getNumProcessedSeconds() + delaySeconds;
		if( start )
			mSamplePlayerNode->start( when );
		else
			mSamplePlayerNode->stop( when );
	}
}
Example #7
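// Per-frame housekeeping: fades the xrun indicators, logs player start/stop transitions, shows or hides the recorder widgets, and (when auto resize is on) grows the recorder's buffer before it fills up.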
void SamplePlayerNodeTestApp::update()
{
	// light up rects if an xrun was detected
	const float xrunFadeTime = 1.3f;
	auto filePlayer = dynamic_pointer_cast<audio::FilePlayerNode>( mSamplePlayerNode );
	if( filePlayer ) {
		if( filePlayer->getLastUnderrun() )
			timeline().apply( &mUnderrunFade, 1.0f, 0.0f, xrunFadeTime );
		if( filePlayer->getLastOverrun() )
			timeline().apply( &mOverrunFade, 1.0f, 0.0f, xrunFadeTime );
	}

	// print SamplePlayerNode start / stop times
	if( mSamplePlayerNodeEnabledState != mSamplePlayerNode->isEnabled() ) {
		mSamplePlayerNodeEnabledState = mSamplePlayerNode->isEnabled();
		string stateStr = mSamplePlayerNodeEnabledState ? "started" : "stopped";
		CI_LOG_V( "mSamplePlayerNode " << stateStr << " at " << to_string( getElapsedSeconds() ) << ", isEof: " << boolalpha << mSamplePlayerNode->isEof() << dec );
	}

	bool testIsRecorder = ( mTestSelector.currentSection() == "recorder" );
	mRecordButton.mHidden = mWriteButton.mHidden = mAutoResizeButton.mHidden = ! testIsRecorder;

	// test auto resizing the Recorder's buffer depending on how full it is
	if( testIsRecorder && mAutoResizeButton.mEnabled ) {
		CI_ASSERT( mRecorder );

		size_t writePos = mRecorder->getWritePosition();
		size_t numFrames = mRecorder->getNumFrames();

		if( writePos + mRecorder->getSampleRate() / 2 > numFrames ) {
			size_t resizeFrames = numFrames + mRecorder->getSampleRate();
			CI_LOG_V( "writePos: " << writePos << ", numFrames: " << numFrames << ", resizing frames to: " << resizeFrames );
			mRecorder->setNumFrames( resizeFrames );
		}

		if( mRecorder->getLastOverrun() )
			timeline().apply( &mRecorderOverrunFade, 1.0f, 0.0f, xrunFadeTime );
	}

}
Example #8
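// Draws either the recorded buffer or the current player's waveform with a read-position marker, followed by the xrun indicators and the widgets.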
void SamplePlayerNodeTestApp::draw()
{
	gl::clear();

	if( mTestSelector.currentSection() == "recorder" ) {
		audio::BufferRef recordedBuffer = mRecorder->getRecordedCopy();
		drawAudioBuffer( *recordedBuffer, getWindowBounds() );
	}
	else {
		auto bufferPlayer = dynamic_pointer_cast<audio::BufferPlayerNode>( mSamplePlayerNode );
		if( bufferPlayer )
			mWaveformPlot.draw();
		else if( mMonitor && mMonitor->isInitialized() )
			drawAudioBuffer( mMonitor->getBuffer(), getWindowBounds() );

		float readPos = (float)getWindowWidth() * mSamplePlayerNode->getReadPosition() / mSamplePlayerNode->getNumFrames();
		gl::color( ColorA( 0, 1, 0, 0.7f ) );
		gl::drawSolidRoundedRect( Rectf( readPos - 2, 0, readPos + 2, (float)getWindowHeight() ), 2 );
	}

	if( mUnderrunFade > 0.0001f ) {
		gl::color( ColorA( 1, 0.5f, 0, mUnderrunFade ) );
		gl::drawSolidRect( mUnderrunRect );
		gl::drawStringCentered( "play underrun", mUnderrunRect.getCenter(), Color::black() );
	}
	if( mOverrunFade > 0.0001f ) {
		gl::color( ColorA( 1, 0.5f, 0, mOverrunFade ) );
		gl::drawSolidRect( mOverrunRect );
		gl::drawStringCentered( "play overrun", mOverrunRect.getCenter(), Color::black() );
	}

	if( mRecorderOverrunFade > 0.0001f ) {
		gl::color( ColorA( 1, 0.5f, 0, mRecorderOverrunFade ) );
		gl::drawSolidRect( mRecorderOverrunRect );
		gl::drawStringCentered( "rec overrun", mRecorderOverrunRect.getCenter(), Color::black() );
	}

	drawWidgets( mWidgets );
}
Example #9
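// Dispatches taps to the buttons and the test selector; switching to a different test section rebuilds the corresponding node graph.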
void SamplePlayerNodeTestApp::processTap( ivec2 pos )
{
	if( mEnableSamplePlayerNodeButton.hitTest( pos ) )
		mSamplePlayerNode->setEnabled( ! mSamplePlayerNode->isEnabled() );
	else if( mStartPlaybackButton.hitTest( pos ) )
		triggerStartStop( true );
	else if( mStopPlaybackButton.hitTest( pos ) )
		triggerStartStop( false );
	else if( mLoopButton.hitTest( pos ) )
		mSamplePlayerNode->setLoopEnabled( ! mSamplePlayerNode->isLoopEnabled() );
	else if( mRecordButton.hitTest( pos ) ) {
		if( mRecordButton.mEnabled )
			mRecorder->start();
		else
			mRecorder->disable();
	}
	else if( mWriteButton.hitTest( pos ) )
		writeRecordedToFile();
	else if( mAutoResizeButton.hitTest( pos ) )
		; // hitTest() toggles the button; mEnabled is read each frame in update()
	else if( mAsyncButton.hitTest( pos ) )
		; // hitTest() toggles the button; mEnabled is read the next time a player node is set up
	else if( pos.y > getWindowCenter().y )
		seek( pos.x );

	size_t currentIndex = mTestSelector.mCurrentSectionIndex; // capture before hitTest(), which may change the selection
	if( mTestSelector.hitTest( pos ) && currentIndex != mTestSelector.mCurrentSectionIndex ) {
		string currentTest = mTestSelector.currentSection();
		CI_LOG_V( "selected: " << currentTest );

		if( currentTest == "BufferPlayerNode" )
			setupBufferPlayerNode();
		if( currentTest == "FilePlayerNode" )
			setupFilePlayerNode();
		if( currentTest == "recorder" )
			setupBufferRecorderNode();
	}
}
Example #10
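// Rebuilds the graph around a BufferPlayerNode; the source file can be decoded synchronously or on a background thread, with the graph reconnected on the main thread once loading finishes.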
void SamplePlayerNodeTestApp::setupBufferPlayerNode()
{
	auto bufferPlayer = audio::master()->makeNode( new audio::BufferPlayerNode() );

	auto loadFn = [bufferPlayer, this] {
		bufferPlayer->loadBuffer( mSourceFile );
		mWaveformPlot.load( bufferPlayer->getBuffer(), getWindowBounds() );
		CI_LOG_V( "loaded source buffer, frames: " << bufferPlayer->getBuffer()->getNumFrames() );

	};

	auto connectFn = [bufferPlayer, this] {
		mGain->disconnectAllInputs();
		mSamplePlayerNode = bufferPlayer;
		mSamplePlayerNode >> mGain >> mPan >> audio::master()->getOutput();
		PRINT_GRAPH( audio::master() );
		mSamplePlayerNode->setLoopEnabled( mLoopButton.mEnabled );
		mSamplePlayerNode->setLoopBeginTime( mLoopBeginSlider.mValueScaled );
		mSamplePlayerNode->setLoopEndTime( mLoopEndSlider.mValueScaled != 0 ? mLoopEndSlider.mValueScaled : mSamplePlayerNode->getNumSeconds() );
	};

	bool asyncLoad = mAsyncButton.mEnabled;
	CI_LOG_V( "async load: " << boolalpha << asyncLoad << dec );
	if( asyncLoad ) {
		mWaveformPlot.clear();
		mAsyncLoadFuture = std::async( std::launch::async, [=] { // explicit launch policy so the load is not deferred
			loadFn();
			dispatchAsync( [=] {
				connectFn();
			} );
		} );
	}
	else {
		loadFn();
		connectFn();
	}
}
Example #11
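// Maps an x coordinate in window space to a frame position and seeks the player there.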
void SamplePlayerNodeTestApp::seek( size_t xPos )
{
	mSamplePlayerNode->seek( mSamplePlayerNode->getNumFrames() * xPos / getWindowWidth() );
}
Example #12
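// Lays out the buttons, sliders, test selector, and xrun indicator rects, and connects the mouse and touch signals to processTap() / processDrag().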
void SamplePlayerNodeTestApp::setupUI()
{
	const float padding = 10.0f;

	auto buttonRect = Rectf( padding, padding, 200, 60 );
	mEnableSamplePlayerNodeButton.mIsToggle = true;
	mEnableSamplePlayerNodeButton.mTitleNormal = "player off";
	mEnableSamplePlayerNodeButton.mTitleEnabled = "player on";
	mEnableSamplePlayerNodeButton.mBounds = buttonRect;
	mWidgets.push_back( &mEnableSamplePlayerNodeButton );

	buttonRect += Vec2f( buttonRect.getWidth() + padding, 0 );
	mStartPlaybackButton.mIsToggle = false;
	mStartPlaybackButton.mTitleNormal = "start";
	mStartPlaybackButton.mBounds = buttonRect;
	mWidgets.push_back( &mStartPlaybackButton );

	buttonRect += Vec2f( buttonRect.getWidth() + padding, 0 );
	buttonRect.x2 -= 30;
	mLoopButton.mIsToggle = true;
	mLoopButton.mTitleNormal = "loop off";
	mLoopButton.mTitleEnabled = "loop on";
	mLoopButton.setEnabled( mSamplePlayerNode->isLoopEnabled() );
	mLoopButton.mBounds = buttonRect;
	mWidgets.push_back( &mLoopButton );

	buttonRect += Vec2f( buttonRect.getWidth() + padding, 0 );
	mAsyncButton.mIsToggle = true;
	mAsyncButton.mTitleNormal = "async off";
	mAsyncButton.mTitleEnabled = "async on";
	mAsyncButton.mBounds = buttonRect;
	mWidgets.push_back( &mAsyncButton );

	buttonRect = Rectf( padding, buttonRect.y2 + padding, padding + buttonRect.getWidth(), buttonRect.y2 + buttonRect.getHeight() + padding );
	mRecordButton.mIsToggle = true;
	mRecordButton.mTitleNormal = "record off";
	mRecordButton.mTitleEnabled = "record on";
	mRecordButton.mBounds = buttonRect;
	mWidgets.push_back( &mRecordButton );

	buttonRect += Vec2f( buttonRect.getWidth() + padding, 0 );
	mWriteButton.mIsToggle = false;
	mWriteButton.mTitleNormal = "write to file";
	mWriteButton.mBounds = buttonRect;
	mWidgets.push_back( &mWriteButton );

	buttonRect += Vec2f( buttonRect.getWidth() + padding, 0 );
	mAutoResizeButton.mIsToggle = true;
	mAutoResizeButton.mTitleNormal = "auto resize off";
	mAutoResizeButton.mTitleEnabled = "auto resize on";
	mAutoResizeButton.mBounds = buttonRect;
	mWidgets.push_back( &mAutoResizeButton );

	Vec2f sliderSize( 200, 30 );
	Rectf selectorRect( getWindowWidth() - sliderSize.x - padding, padding, getWindowWidth() - padding, sliderSize.y * 3 + padding );
	mTestSelector.mSegments.push_back( "BufferPlayerNode" );
	mTestSelector.mSegments.push_back( "FilePlayerNode" );
	mTestSelector.mSegments.push_back( "recorder" );
	mTestSelector.mBounds = selectorRect;
	mWidgets.push_back( &mTestSelector );

	Rectf sliderRect( selectorRect.x1, selectorRect.y2 + padding, selectorRect.x2, selectorRect.y2 + padding + sliderSize.y );
//	Rectf sliderRect( getWindowWidth() - 200.0f, kPadding, getWindowWidth(), 50.0f );
	mGainSlider.mBounds = sliderRect;
	mGainSlider.mTitle = "GainNode";
	mGainSlider.set( mGain->getValue() );
	mWidgets.push_back( &mGainSlider );

	sliderRect += Vec2f( 0, sliderRect.getHeight() + padding );
	mPanSlider.mBounds = sliderRect;
	mPanSlider.mTitle = "Pan";
	mPanSlider.set( mPan->getPos() );
	mWidgets.push_back( &mPanSlider );

	sliderRect += Vec2f( 0, sliderRect.getHeight() + padding );
	mLoopBeginSlider.mBounds = sliderRect;
	mLoopBeginSlider.mTitle = "Loop Begin";
	mLoopBeginSlider.mMax = (float)mSamplePlayerNode->getNumSeconds();
	mLoopBeginSlider.set( (float)mSamplePlayerNode->getLoopBeginTime() );
	mWidgets.push_back( &mLoopBeginSlider );

	sliderRect += Vec2f( 0, sliderRect.getHeight() + padding );
	mLoopEndSlider.mBounds = sliderRect;
	mLoopEndSlider.mTitle = "Loop End";
	mLoopEndSlider.mMax = (float)mSamplePlayerNode->getNumSeconds();
	mLoopEndSlider.set( (float)mSamplePlayerNode->getLoopEndTime() );
	mWidgets.push_back( &mLoopEndSlider );

	Vec2f xrunSize( 80, 26 );
	mUnderrunRect = Rectf( padding, getWindowHeight() - xrunSize.y - padding, xrunSize.x + padding, getWindowHeight() - padding );
	mOverrunRect = mUnderrunRect + Vec2f( xrunSize.x + padding, 0 );
	mRecorderOverrunRect = mOverrunRect + Vec2f( xrunSize.x + padding, 0 );

	getWindow()->getSignalMouseDown().connect( [this] ( MouseEvent &event ) { processTap( event.getPos() ); } );
	getWindow()->getSignalMouseDrag().connect( [this] ( MouseEvent &event ) { processDrag( event.getPos() ); } );
	getWindow()->getSignalTouchesBegan().connect( [this] ( TouchEvent &event ) { processTap( event.getTouches().front().getPos() ); } );
	getWindow()->getSignalTouchesMoved().connect( [this] ( TouchEvent &event ) {
		for( const TouchEvent::Touch &touch : getActiveTouches() )
			processDrag( touch.getPos() );
	} );

	gl::enableAlphaBlending();
}