Example #1
void DeviceTestApp::setup()
{
    console() << audio::Device::printDevicesToString();

    auto ctx = audio::master();

    mMonitor = ctx->makeNode( new audio::MonitorNode( audio::MonitorNode::Format().windowSize( 1024 ) ) );
    mGain = ctx->makeNode( new audio::GainNode() );
    mGain->setValue( 0.4f );

    mGain->connect( mMonitor );

    setOutputDevice( audio::Device::getDefaultOutput() );
    setInputDevice( audio::Device::getDefaultInput() );
    //setInputDevice( audio::Device::getDefaultInput(), 1 ); // force mono input

    //setupMultiChannelDevice( "PreSonus FIREPOD (1431)" );
    //setupMultiChannelDeviceWindows( "MOTU Analog (MOTU Audio Wave for 64 bit)" );

    mRecorder = ctx->makeNode( new audio::BufferRecorderNode( RECORD_SECONDS * ctx->getSampleRate() ) );
    mRecorder->setEnabled( false );
    mGain >> mRecorder;


//	setupInputPulled();
//	setupIOClean();

    PRINT_GRAPH( ctx );

    setupUI();

    CI_LOG_V( "Context samplerate: " << ctx->getSampleRate() );
}
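
The BufferRecorderNode above is only wired into the graph here; starting a recording and writing it out happen in response to UI input elsewhere in the app. A minimal sketch of that, assuming a hypothetical button handler (the handler name and output path are illustrative):

void DeviceTestApp::toggleRecording()
{
    if( mRecordButton.mEnabled ) {
        // begin capturing the recorder's input (mGain) into its internal buffer
        mRecorder->start();
    }
    else {
        // dump whatever has been captured so far to a wav file
        mRecorder->writeToFile( "recorder_out.wav" );
    }
}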
Example #2
void SamplePlayerNodeTestApp::setupFilePlayerNode()
{
	mGain->disconnectAllInputs();

	auto ctx = audio::master();

//	mSourceFile->setMaxFramesPerRead( 8192 );
	bool asyncRead = mAsyncButton.mEnabled;
	CI_LOG_V( "async read: " << asyncRead );
	mSamplePlayerNode = ctx->makeNode( new audio::FilePlayerNode( mSourceFile, asyncRead ) );

	// TODO: recreating mMonitor here without checking whether one has already been added is surprising:
	//	- the user no longer sees the old mMonitor, but the context still owns a reference to it, so another gets added each time this method is called.
	if( ! mMonitor )
		mMonitor = ctx->makeNode( new audio::MonitorNode( audio::MonitorNode::Format().windowSize( 1024 ) ) );

	// when these connections are made, some nodes (GainNode and Pan) will already be connected; that is okay, they silently no-op.

	// connect in series; the MonitorNode branch below is added to the Context's 'auto pulled list'
	mSamplePlayerNode >> mGain >> mPan >> ctx->getOutput();
	mPan >> mMonitor;

	mSamplePlayerNode->setLoopEnabled( mLoopButton.mEnabled );
	mSamplePlayerNode->setLoopBeginTime( mLoopBeginSlider.mValueScaled );
	mSamplePlayerNode->setLoopEndTime( mLoopEndSlider.mValueScaled != 0 ? mLoopEndSlider.mValueScaled : mSamplePlayerNode->getNumSeconds() );

	PRINT_GRAPH( audio::master() );
}
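
Note that nothing here begins playback; the FilePlayerNode is only constructed and connected. A sketch of how playback might be started from a button handler (the handler name is illustrative):

void SamplePlayerNodeTestApp::startPlayback()
{
	// rewind to the beginning and begin reading the file
	mSamplePlayerNode->seekToTime( 0 );
	mSamplePlayerNode->start();
}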
Example #3
void ParamTestApp::setup()
{
	auto ctx = audio::master();
	mGain = ctx->makeNode( new audio::GainNode() );
	mGain->setValue( 0.8f );

	mPan = ctx->makeNode( new audio::Pan2dNode() );

	mGen = ctx->makeNode( new audio::GenSineNode() );
//	mGen = ctx->makeNode( new audio::GenTriangleNode() );
//	mGen = ctx->makeNode( new audio::GenPhasorNode() );
	mGen = ctx->makeNode( new audio::GenPulseNode );

	mGen->setFreq( 220 );

	mLowPass = ctx->makeNode( new audio::FilterLowPassNode() );

	setupBasic();

	setupUI();

	PRINT_GRAPH( ctx );

	testApply();
//	testApply2();
//	connectProcessor();
}
Example #4
void SamplePlayerNodeTestApp::setup()
{
	mUnderrunFade = mOverrunFade = mRecorderOverrunFade = 0;
	mSamplePlayerNodeEnabledState = false;

	printSupportedExtensions();

	setSourceFile( loadResource( INITIAL_AUDIO_RES ) );

	auto ctx = audio::master();

	mPan = ctx->makeNode( new audio::Pan2dNode() );
//	mPan->setStereoInputModeEnabled( false );

	mGain = ctx->makeNode( new audio::GainNode() );
	mGain->setValue( 0.6f );

	mGain >> mPan >> ctx->getOutput();

	setupBufferPlayerNode();
//	setupFilePlayerNode();

	setupUI();

	ctx->enable();
	mEnableSamplePlayerNodeButton.setEnabled( true );

	CI_LOG_V( "context samplerate: " << ctx->getSampleRate() );
}
Example #5
void DelayFeedback::mouseDrag( MouseEvent event )
{
	float freq = quantizePitch( event.getPos() );
	float gain = 1.0f - (float)event.getPos().y / (float)getWindowHeight();

	gain *= MAX_VOLUME;

	mOsc->getParamFreq()->applyRamp( freq, 0.04f );
	mGain->getParam()->applyRamp( gain, 0.1f );

	addSplash( event.getPos() );
}
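
quantizePitch() is not shown in this example. One plausible implementation maps the x position to a MIDI note, snaps it to a scale, and converts it to hertz with audio::midiToFreq(); the scale table and pitch range below are illustrative (requires <vector>, <algorithm>, and <cmath>):

float DelayFeedback::quantizePitch( const ivec2 &pos )
{
	// degrees of the C major pentatonic scale: C, D, E, G, A
	const std::vector<int> scaleDegrees = { 0, 2, 4, 7, 9 };

	// map window x to a MIDI note between C3 (48) and C5 (72)
	int midi = (int)lroundf( lmap( (float)pos.x, 0.0f, (float)getWindowWidth(), 48.0f, 72.0f ) );

	// walk down to the nearest note that belongs to the scale
	while( std::find( scaleDegrees.begin(), scaleDegrees.end(), midi % 12 ) == scaleDegrees.end() )
		midi -= 1;

	return audio::midiToFreq( (float)midi );
}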
Example #6
void SamplePlayerNodeTestApp::processDrag( Vec2i pos )
{
	if( mGainSlider.hitTest( pos ) )
		mGain->setValue( mGainSlider.mValueScaled );
	else if( mPanSlider.hitTest( pos ) )
		mPan->setPos( mPanSlider.mValueScaled );
	else if( mLoopBeginSlider.hitTest( pos ) )
		mSamplePlayerNode->setLoopBeginTime( mLoopBeginSlider.mValueScaled );
	else if( mLoopEndSlider.hitTest( pos ) )
		mSamplePlayerNode->setLoopEndTime( mLoopEndSlider.mValueScaled );
	else if( pos.y > getWindowCenter().y )
		seek( pos.x );
}
Example #7
void DeviceTestApp::setupIOProcessed()
{
    auto ctx = audio::master();
    auto mod = ctx->makeNode( new audio::GenSineNode( audio::Node::Format().autoEnable() ) );
    mod->setFreq( 200 );

    auto ringMod = audio::master()->makeNode( new audio::GainNode );
    ringMod->setName( "RingModGain" );
    ringMod->getParam()->setProcessor( mod );

    mGain->disconnectAllInputs();
    mInputDeviceNode >> ringMod >> mGain;

    mInputDeviceNode->enable();
}
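
mInputDeviceNode is created elsewhere (see setInputDevice() in Example #1). A simplified sketch of how such a node might be obtained from the master Context; the real setInputDevice() also handles channel counts and reconfiguring an existing node:

void DeviceTestApp::setInputDevice( const audio::DeviceRef &device )
{
    // the Context creates the platform-specific input node for this device
    mInputDeviceNode = audio::master()->createInputDeviceNode( device );
    CI_LOG_V( "input device: " << device->getName() );
}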
Example #8
void WebAudioApp::setup()
{

  Context::setMaster( new ContextWebAudio, new DeviceManagerWebAudio );

  auto ctx = audio::master();
  mGen = ctx->makeNode( new audio::GenSineNode );
  mGain = ctx->makeNode( new audio::GainNode );

  mGen->setFreq( 220 );
  mGain->setValue( 0.5f );

  // connections can be made this way or with connect(). The master Context's getOutput() is the speakers by default.
  mGen >> mGain >> ctx->getOutput();
  mGen->enable();
  ctx->enable();
}
Example #9
void NodeBasic::setup()
{
	// You use the audio::Context to make new audio::Node instances (audio::master() is the speaker-facing Context).
	auto ctx = audio::master();
	mGen = ctx->makeNode( new audio::GenSineNode );
	mGain = ctx->makeNode( new audio::GainNode );

	mGen->setFreq( 220 );
	mGain->setValue( 0.5f );

	// connections can be made this way or with connect(). The master Context's getOutput() is the speakers by default.
	mGen >> mGain >> ctx->getOutput();

	// Nodes need to be enabled to process audio. EffectNodes are enabled by default, while NodeSources (like Gen) need to be switched on.
	mGen->enable();

	// The Context must also be started; starting and stopping it controls the entire DSP graph.
	ctx->enable();
}
Example #10
void SamplePlayerNodeTestApp::processDrag( ivec2 pos )
{
	if( mGainSlider.hitTest( pos ) )
		mGain->setValue( mGainSlider.mValueScaled );
	else if( mPanSlider.hitTest( pos ) ) {
#if TEST_STEREO_INPUT_PANNING
		mPan->getParamPos()->applyRamp( mPanSlider.mValueScaled, 0.6f );
#else
		mPan->setPos( mPanSlider.mValueScaled );
#endif
	}
	else if( mLoopBeginSlider.hitTest( pos ) )
		mSamplePlayerNode->setLoopBeginTime( mLoopBeginSlider.mValueScaled );
	else if( mLoopEndSlider.hitTest( pos ) )
		mSamplePlayerNode->setLoopEndTime( mLoopEndSlider.mValueScaled );
	else if( mTriggerDelaySlider.hitTest( pos ) ) {
	}
	else if( pos.y > getWindowCenter().y )
		seek( pos.x );
}
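
seek() is not shown here. One plausible implementation maps the window x coordinate to a frame offset within the sample:

void SamplePlayerNodeTestApp::seek( size_t xPos )
{
	// scale the tap position to the player's length in frames
	mSamplePlayerNode->seek( mSamplePlayerNode->getNumFrames() * xPos / getWindowWidth() );
}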
Example #11
void NodeAdvancedApp::setup()
{
	auto ctx = audio::Context::master();

	// Here we're using a GenTriangleNode, which generates a triangle waveform that contains many upper harmonics.
	// To reduce the sharpness, a lowpass filter is used to cut down the higher frequencies.
	mGen = ctx->makeNode( new audio::GenTriangleNode( audio::Node::Format().autoEnable() ) );
	mLowpass = ctx->makeNode( new audio::FilterLowPassNode );
	mGain = ctx->makeNode( new audio::GainNode );
	mMonitor = ctx->makeNode( new audio::MonitorNode );

	mLowpass->setFreq( 400 );

	// Below we tell the Gain's Param to ramp from 0 to 0.5 over 2 seconds, making it slowly fade in.
	mGain->getParam()->applyRamp( 0, 0.5f, 2.0f );

	// make the synthesis connection
	mGen >> mLowpass >> mGain >> ctx->getOutput();

	// Also feed the Gain to our MonitorNode so that we can see what the waveform looks like.
	mGain >> mMonitor;

	ctx->enable();

	// It is often easier to specify musical pitches as MIDI note numbers, which are linear, than in hertz.
	// Below are the notes of the C major pentatonic scale from C3 to C5, represented as MIDI values.
	mCPentatonicScale.push_back( 48 );
	mCPentatonicScale.push_back( 50 );
	mCPentatonicScale.push_back( 52 );
	mCPentatonicScale.push_back( 55 );
	mCPentatonicScale.push_back( 57 );
	mCPentatonicScale.push_back( 60 );
	mCPentatonicScale.push_back( 62 );
	mCPentatonicScale.push_back( 64 );
	mCPentatonicScale.push_back( 67 );
	mCPentatonicScale.push_back( 69 );
	mCPentatonicScale.push_back( 72 );

	mFreqRampTime = 0.015f;
}
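
The scale and ramp time set up above would typically be used per-frame to glide the generator between pitches. A sketch of what that might look like in update(), assuming ci::randInt() is used to pick a note:

void NodeAdvancedApp::update()
{
	// every 15 frames, pick a random note from the scale and ramp the oscillator to it
	if( getElapsedFrames() % 15 == 0 ) {
		float midiPitch = mCPentatonicScale.at( randInt( mCPentatonicScale.size() ) );
		mGen->getParamFreq()->applyRamp( audio::midiToFreq( midiPitch ), mFreqRampTime );
	}
}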
Example #12
void DeviceTestApp::setupTest( string test )
{
    if( test.empty() )
        test = "sinewave";

    CI_LOG_V( "test: " << test );

    // FIXME: Switching from 'noise' to 'i/o' on mac is causing a deadlock when initializing InputDeviceNodeAudioUnit.
    //	- it shouldn't have to be stopped, need to check why.
    //  - temp fix: stop / start context around reconfig
    audio::master()->disable();

    mGain->disconnectAllInputs();

    if( test == "sinewave" )
        setupSine();
    else if( test == "noise" )
        setupNoise();
    else if( test == "input (pulled)" )
        setupInputPulled();
    else if( test == "I/O (clean)" )
        setupIOClean();
    else if( test == "I/O (processed)" )
        setupIOProcessed();
    else if( test == "I/O and sine" )
        setupIOAndSine();
    else if( test == "send" )
        setupSend();
    else if( test == "send stereo" )
        setupSendStereo();
    else
        CI_ASSERT_NOT_REACHABLE();

    if( mPlayButton.mEnabled )
        audio::master()->enable();

    PRINT_GRAPH( audio::master() );
}
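
The individual setup methods are not shown. The simplest, setupSine(), might look something like this, assuming an mGen member for the generator (the frequency is illustrative); it builds a generator and feeds it into the mGain node that the rest of the graph hangs off of:

void DeviceTestApp::setupSine()
{
    mGen = audio::master()->makeNode( new audio::GenSineNode() );
    mGen->setFreq( 440 );

    // mGain is already connected to the output; feed the generator into it
    mGen >> mGain;
    mGen->enable();
}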
Example #13
void SamplePlayerNodeTestApp::setupBufferPlayerNode()
{
	auto bufferPlayer = audio::master()->makeNode( new audio::BufferPlayerNode() );

	auto loadFn = [bufferPlayer, this] {
		bufferPlayer->loadBuffer( mSourceFile );
		mWaveformPlot.load( bufferPlayer->getBuffer(), getWindowBounds() );
		CI_LOG_V( "loaded source buffer, frames: " << bufferPlayer->getBuffer()->getNumFrames() );

	};

	auto connectFn = [bufferPlayer, this] {
		mGain->disconnectAllInputs();
		mSamplePlayerNode = bufferPlayer;
		mSamplePlayerNode >> mGain >> mPan >> audio::master()->getOutput();
		PRINT_GRAPH( audio::master() );
		mSamplePlayerNode->setLoopEnabled( mLoopButton.mEnabled );
		mSamplePlayerNode->setLoopBeginTime( mLoopBeginSlider.mValueScaled );
		mSamplePlayerNode->setLoopEndTime( mLoopEndSlider.mValueScaled != 0 ? mLoopEndSlider.mValueScaled : mSamplePlayerNode->getNumSeconds() );
	};

	bool asyncLoad = mAsyncButton.mEnabled;
	CI_LOG_V( "async load: " << boolalpha << asyncLoad << dec );
	if( asyncLoad ) {
		mWaveformPlot.clear();
		mAsyncLoadFuture = std::async( [=] {
			loadFn();
			dispatchAsync( [=] {
				connectFn();
			} );
		} );
	}
	else {
		loadFn();
		connectFn();
	}
}
Example #14
void NodeEffectsTestApp::makeNodes()
{
	auto ctx = audio::master();

	mGain = ctx->makeNode( new audio::GainNode );
	mGain->setValue( 0.25f );

	mPan = ctx->makeNode( new audio::Pan2dNode );

	CI_LOG_V( "gen button enabled: " << mGenButton.mEnabled );
	auto genFmt = audio::Node::Format().autoEnable();
	if( mGenButton.mEnabled )
		mGen = ctx->makeNode( new audio::GenSineNode( 220, genFmt ) );
	else
		mGen = ctx->makeNode( new audio::GenNoiseNode( genFmt ) );

	mLowPass = ctx->makeNode( new audio::FilterLowPassNode() );
	mLowPass->setCutoffFreq( 400 );
//	mLowPass = ctx->makeNode( new audio::FilterHighPassNode() );

	mDelay = ctx->makeNode( new audio::DelayNode );
	mDelay->setDelaySeconds( 0.5f );
//	mDelay->setDelaySeconds( 100.0f / (float)ctx->getSampleRate() );
}
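
makeNodes() only constructs the nodes; wiring them up happens in a separate step. One possible series connection using everything built above (the method name is illustrative):

void NodeEffectsTestApp::connectNodes()
{
	// generator -> filter -> delay -> gain -> pan -> speakers
	mGen >> mLowPass >> mDelay >> mGain >> mPan >> audio::master()->getOutput();
}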
Example #15
void DelayFeedback::mouseUp( MouseEvent event )
{
	mGain->getParam()->applyRamp( 0, 1.5, audio::Param::Options().rampFn( &audio::rampOutQuad ) );
}
Example #16
void DeviceTestApp::processDrag( ivec2 pos )
{
    if( mGainSlider.hitTest( pos ) )
        mGain->getParam()->applyRamp( mGainSlider.mValueScaled, 0.025f );
}
Example #17
void DeviceTestApp::setupUI()
{
    mInputDeviceNodeUnderrunFade = mInputDeviceNodeOverrunFade = mOutputDeviceNodeClipFade = 0;
    mViewYOffset = 0;

    mPlayButton = Button( true, "stopped", "playing" );
    mWidgets.push_back( &mPlayButton );

    mRecordButton = Button( false, "record" );
    mWidgets.push_back( &mRecordButton );

    mTestSelector.mSegments.push_back( "sinewave" );
    mTestSelector.mSegments.push_back( "noise" );
    mTestSelector.mSegments.push_back( "input (pulled)" );
    mTestSelector.mSegments.push_back( "I/O (clean)" );
    mTestSelector.mSegments.push_back( "I/O (processed)" );
    mTestSelector.mSegments.push_back( "I/O and sine" );
    mTestSelector.mSegments.push_back( "send" );
    mTestSelector.mSegments.push_back( "send stereo" );
    mWidgets.push_back( &mTestSelector );

#if defined( CINDER_COCOA_TOUCH )
    mPlayButton.mBounds = Rectf( 0, 0, 120, 60 );
    mRecordButton.mBounds = Rectf( 130, 0, 190, 34 );
    mTestSelector.mBounds = Rectf( getWindowWidth() - 190, 0, getWindowWidth(), 180 );
#else
    mPlayButton.mBounds = Rectf( 0, 0, 200, 60 );
    mRecordButton.mBounds = Rectf( 210, 0, 310, 40 );
    mTestSelector.mBounds = Rectf( getWindowCenter().x + 110, 0, (float)getWindowWidth(), 180 );
#endif

    mGainSlider.mBounds = Rectf( mTestSelector.mBounds.x1, mTestSelector.mBounds.y2 + 10, mTestSelector.mBounds.x2, mTestSelector.mBounds.y2 + 50 );
    mGainSlider.mTitle = "GainNode";
    mGainSlider.set( mGain->getValue() );
    mWidgets.push_back( &mGainSlider );

    mOutputSelector.mTitle = "Output Devices";
    mOutputSelector.mBounds = Rectf( mTestSelector.mBounds.x1, getWindowCenter().y + 40, (float)getWindowWidth(), (float)getWindowHeight() );
    if( mOutputDeviceNode ) {
        for( const auto &dev : audio::Device::getOutputDevices() ) {
            if( dev == mOutputDeviceNode->getDevice() )
                mOutputSelector.mCurrentSectionIndex = mOutputSelector.mSegments.size();
            mOutputSelector.mSegments.push_back( dev->getName() );
        }
    }
    mWidgets.push_back( &mOutputSelector );

    mInputSelector.mTitle = "Input Devices";
    mInputSelector.mBounds = mOutputSelector.mBounds - vec2( mOutputSelector.mBounds.getWidth() + 10, 0 );
    if( mInputDeviceNode ) {
        for( const auto &dev : audio::Device::getInputDevices() ) {
            if( dev == mInputDeviceNode->getDevice() )
                mInputSelector.mCurrentSectionIndex = mInputSelector.mSegments.size();
            mInputSelector.mSegments.push_back( dev->getName() );
        }
    }
    mWidgets.push_back( &mInputSelector );

    Rectf textInputBounds( 0, getWindowCenter().y + 40, 200, getWindowCenter().y + 70  );
    mSamplerateInput.mBounds = textInputBounds;
    mSamplerateInput.mTitle = "samplerate";
    mSamplerateInput.setValue( audio::master()->getSampleRate() );
    mWidgets.push_back( &mSamplerateInput );

    textInputBounds += vec2( 0, textInputBounds.getHeight() + 24 );
    mFramesPerBlockInput.mBounds = textInputBounds;
    mFramesPerBlockInput.mTitle = "frames per block";
    mFramesPerBlockInput.setValue( audio::master()->getFramesPerBlock() );
    mWidgets.push_back( &mFramesPerBlockInput );

    textInputBounds += vec2( 0, textInputBounds.getHeight() + 24 );
    mNumInChannelsInput.mBounds = textInputBounds;
    mNumInChannelsInput.mTitle = "num inputs";
    if( mInputDeviceNode )
        mNumInChannelsInput.setValue( mInputDeviceNode->getNumChannels() );
    mWidgets.push_back( &mNumInChannelsInput );

    textInputBounds += vec2( 0, textInputBounds.getHeight() + 24 );
    mNumOutChannelsInput.mBounds = textInputBounds;
    mNumOutChannelsInput.mTitle = "num outputs";
    if( mOutputDeviceNode )
        mNumOutChannelsInput.setValue( mOutputDeviceNode->getNumChannels() );
    mWidgets.push_back( &mNumOutChannelsInput );

    textInputBounds += vec2( 0, textInputBounds.getHeight() + 24 );
    mSendChannelInput.mBounds = textInputBounds;
    mSendChannelInput.mTitle = "send channel";
    mSendChannelInput.setValue( 2 );
    mWidgets.push_back( &mSendChannelInput );

    vec2 xrunSize( 80, 26 );
    mUnderrunRect = Rectf( 0, mPlayButton.mBounds.y2 + 10, xrunSize.x, mPlayButton.mBounds.y2 + xrunSize.y + 10 );
    mOverrunRect = mUnderrunRect + vec2( xrunSize.x + 10, 0 );
    mClipRect = mOverrunRect + vec2( xrunSize.x + 10, 0 );

    getWindow()->getSignalMouseDown().connect( [this] ( MouseEvent &event ) {
        processTap( event.getPos() );
    } );
    getWindow()->getSignalMouseDrag().connect( [this] ( MouseEvent &event ) {
        processDrag( event.getPos() );
    } );
    getWindow()->getSignalTouchesBegan().connect( [this] ( TouchEvent &event ) {
        processTap( event.getTouches().front().getPos() );
    } );
    getWindow()->getSignalTouchesMoved().connect( [this] ( TouchEvent &event ) {
        for( const TouchEvent::Touch &touch : getActiveTouches() )
            processDrag( touch.getPos() );
    } );

#if defined( CINDER_COCOA_TOUCH )
    getSignalKeyboardWillShow().connect( [this] { timeline().apply( &mViewYOffset, -100.0f, 0.3f, EaseInOutCubic() );	} );
    getSignalKeyboardWillHide().connect( [this] { timeline().apply( &mViewYOffset, 0.0f, 0.3f, EaseInOutCubic() ); } );
#endif

    gl::enableAlphaBlending();
}
Example #18
void NodeBasic::draw()
{
	gl::clear( Color( 0, mGain->getValue(), 0.2f ) );
}
Example #19
void NodeBasic::mouseDrag( MouseEvent event )
{
	mGen->setFreq( event.getPos().x );
	mGain->setValue( 1.0f - (float)event.getPos().y / (float)getWindowHeight() );
}
Example #20
void SamplePlayerNodeTestApp::setupUI()
{
	const float padding = 10.0f;

	auto buttonRect = Rectf( padding, padding, 200, 60 );
	mEnableSamplePlayerNodeButton.mIsToggle = true;
	mEnableSamplePlayerNodeButton.mTitleNormal = "player off";
	mEnableSamplePlayerNodeButton.mTitleEnabled = "player on";
	mEnableSamplePlayerNodeButton.mBounds = buttonRect;
	mWidgets.push_back( &mEnableSamplePlayerNodeButton );

	buttonRect += Vec2f( buttonRect.getWidth() + padding, 0 );
	mStartPlaybackButton.mIsToggle = false;
	mStartPlaybackButton.mTitleNormal = "start";
	mStartPlaybackButton.mBounds = buttonRect;
	mWidgets.push_back( &mStartPlaybackButton );

	buttonRect += Vec2f( buttonRect.getWidth() + padding, 0 );
	buttonRect.x2 -= 30;
	mLoopButton.mIsToggle = true;
	mLoopButton.mTitleNormal = "loop off";
	mLoopButton.mTitleEnabled = "loop on";
	mLoopButton.setEnabled( mSamplePlayerNode->isLoopEnabled() );
	mLoopButton.mBounds = buttonRect;
	mWidgets.push_back( &mLoopButton );

	buttonRect += Vec2f( buttonRect.getWidth() + padding, 0 );
	mAsyncButton.mIsToggle = true;
	mAsyncButton.mTitleNormal = "async off";
	mAsyncButton.mTitleEnabled = "async on";
	mAsyncButton.mBounds = buttonRect;
	mWidgets.push_back( &mAsyncButton );

	buttonRect = Rectf( padding, buttonRect.y2 + padding, padding + buttonRect.getWidth(), buttonRect.y2 + buttonRect.getHeight() + padding );
	mRecordButton.mIsToggle = true;
	mRecordButton.mTitleNormal = "record off";
	mRecordButton.mTitleEnabled = "record on";
	mRecordButton.mBounds = buttonRect;
	mWidgets.push_back( &mRecordButton );

	buttonRect += Vec2f( buttonRect.getWidth() + padding, 0 );
	mWriteButton.mIsToggle = false;
	mWriteButton.mTitleNormal = "write to file";
	mWriteButton.mBounds = buttonRect;
	mWidgets.push_back( &mWriteButton );

	buttonRect += Vec2f( buttonRect.getWidth() + padding, 0 );
	mAutoResizeButton.mIsToggle = true;
	mAutoResizeButton.mTitleNormal = "auto resize off";
	mAutoResizeButton.mTitleEnabled = "auto resize on";
	mAutoResizeButton.mBounds = buttonRect;
	mWidgets.push_back( &mAutoResizeButton );

	Vec2f sliderSize( 200, 30 );
	Rectf selectorRect( getWindowWidth() - sliderSize.x - padding, padding, getWindowWidth() - padding, sliderSize.y * 3 + padding );
	mTestSelector.mSegments.push_back( "BufferPlayerNode" );
	mTestSelector.mSegments.push_back( "FilePlayerNode" );
	mTestSelector.mSegments.push_back( "recorder" );
	mTestSelector.mBounds = selectorRect;
	mWidgets.push_back( &mTestSelector );

	Rectf sliderRect( selectorRect.x1, selectorRect.y2 + padding, selectorRect.x2, selectorRect.y2 + padding + sliderSize.y );
//	Rectf sliderRect( getWindowWidth() - 200.0f, kPadding, getWindowWidth(), 50.0f );
	mGainSlider.mBounds = sliderRect;
	mGainSlider.mTitle = "GainNode";
	mGainSlider.set( mGain->getValue() );
	mWidgets.push_back( &mGainSlider );

	sliderRect += Vec2f( 0, sliderRect.getHeight() + padding );
	mPanSlider.mBounds = sliderRect;
	mPanSlider.mTitle = "Pan";
	mPanSlider.set( mPan->getPos() );
	mWidgets.push_back( &mPanSlider );

	sliderRect += Vec2f( 0, sliderRect.getHeight() + padding );
	mLoopBeginSlider.mBounds = sliderRect;
	mLoopBeginSlider.mTitle = "Loop Begin";
	mLoopBeginSlider.mMax = (float)mSamplePlayerNode->getNumSeconds();
	mLoopBeginSlider.set( (float)mSamplePlayerNode->getLoopBeginTime() );
	mWidgets.push_back( &mLoopBeginSlider );

	sliderRect += Vec2f( 0, sliderRect.getHeight() + padding );
	mLoopEndSlider.mBounds = sliderRect;
	mLoopEndSlider.mTitle = "Loop End";
	mLoopEndSlider.mMax = (float)mSamplePlayerNode->getNumSeconds();
	mLoopEndSlider.set( (float)mSamplePlayerNode->getLoopEndTime() );
	mWidgets.push_back( &mLoopEndSlider );

	Vec2f xrunSize( 80, 26 );
	mUnderrunRect = Rectf( padding, getWindowHeight() - xrunSize.y - padding, xrunSize.x + padding, getWindowHeight() - padding );
	mOverrunRect = mUnderrunRect + Vec2f( xrunSize.x + padding, 0 );
	mRecorderOverrunRect = mOverrunRect + Vec2f( xrunSize.x + padding, 0 );

	getWindow()->getSignalMouseDown().connect( [this] ( MouseEvent &event ) { processTap( event.getPos() ); } );
	getWindow()->getSignalMouseDrag().connect( [this] ( MouseEvent &event ) { processDrag( event.getPos() ); } );
	getWindow()->getSignalTouchesBegan().connect( [this] ( TouchEvent &event ) { processTap( event.getTouches().front().getPos() ); } );
	getWindow()->getSignalTouchesMoved().connect( [this] ( TouchEvent &event ) {
		for( const TouchEvent::Touch &touch : getActiveTouches() )
			processDrag( touch.getPos() );
	} );

	gl::enableAlphaBlending();
}