void _TBOX_PREFIX_App::update()
{
    // grab the most recent samples recorded by the MonitorNode
    mPcmBuffer = mMonitorNode->getBuffer();

    // hand the PCM data to ciXtract for feature extraction
    if( ! mPcmBuffer.isEmpty() )
        mXtract->update( mPcmBuffer.getData() );
}
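// The update() above assumes a setup() that routes an input device into the MonitorNode
// whose Buffer is polled each frame. A minimal sketch of that wiring, assuming members
// mInputDeviceNode and mMonitorNode; the ciXtract construction is omitted since it varies
// between versions of that block:
void _TBOX_PREFIX_App::setup()
{
    auto ctx = audio::master();

    // route the default input device into a MonitorNode so its samples can be read
    // safely from the main thread
    mInputDeviceNode = ctx->createInputDeviceNode();
    mMonitorNode = ctx->makeNode( new audio::MonitorNode );
    mInputDeviceNode >> mMonitorNode;

    mInputDeviceNode->enable();
    ctx->enable();
}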
void NodeSubclassingApp::draw()
{
	gl::clear();

	if( mMonitorNode && mMonitorNode->isEnabled() ) {
		Rectf scopeRect( 10, 10, (float)getWindowWidth() - 10, (float)getWindowHeight() - 10 );
		drawAudioBuffer( mMonitorNode->getBuffer(), scopeRect, true, Color( 0.9f, 0.4f, 0 ) );
	}
}
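// drawAudioBuffer(), used above and in the examples below, comes from Cinder's shared
// audio sample utilities (AudioDrawUtils.h). A rough sketch of what such a helper does,
// assuming the signature implied by the calls in these examples, not the library's
// exact implementation:
void drawAudioBuffer( const audio::Buffer &buffer, const Rectf &bounds, bool drawFrame = false,
                      const ColorA &color = ColorA( 0, 0.9f, 0, 1 ) )
{
	gl::color( color );

	// lay out one waveform per channel, stacked vertically inside the bounds
	const float waveHeight = bounds.getHeight() / (float)buffer.getNumChannels();
	const float xScale = bounds.getWidth() / (float)buffer.getNumFrames();

	float yOffset = bounds.y1;
	for( size_t ch = 0; ch < buffer.getNumChannels(); ch++ ) {
		PolyLine2f waveform;
		const float *channel = buffer.getChannel( ch );
		for( size_t i = 0; i < buffer.getNumFrames(); i++ ) {
			float x = bounds.x1 + i * xScale;
			float y = ( 1 - ( channel[i] * 0.5f + 0.5f ) ) * waveHeight + yOffset;
			waveform.push_back( vec2( x, y ) );
		}
		gl::draw( waveform );
		yOffset += waveHeight;
	}

	// optionally outline the drawing area
	if( drawFrame ) {
		gl::color( color.r, color.g, color.b, 0.6f );
		gl::drawStrokedRect( bounds );
	}
}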
void DeviceTestApp::draw()
{
    gl::clear();
    gl::color( 0, 0.9f, 0 );

    gl::pushMatrices();
    gl::translate( 0, mViewYOffset );

    if( mMonitor && mMonitor->isEnabled() ) {
        const audio::Buffer &buffer = mMonitor->getBuffer();

        // stack one waveform per channel, scaled to fill the window
        float padding = 20;
        float waveHeight = ( (float)getWindowHeight() - padding * 3.0f ) / (float)buffer.getNumChannels();

        float yOffset = padding;
        float xScale = (float)getWindowWidth() / (float)buffer.getNumFrames();
        for( size_t ch = 0; ch < buffer.getNumChannels(); ch++ ) {
            PolyLine2f waveform;
            const float *channel = buffer.getChannel( ch );
            for( size_t i = 0; i < buffer.getNumFrames(); i++ ) {
                float x = i * xScale;
                float y = ( channel[i] * 0.5f + 0.5f ) * waveHeight + yOffset;
                waveform.push_back( vec2( x, y ) );
            }
            gl::draw( waveform );
            yOffset += waveHeight + padding;
        }

        // draw the MonitorNode's current volume as a bar beneath the gain slider
        float volume = mMonitor->getVolume();
        Rectf volumeRect( mGainSlider.mBounds.x1, mGainSlider.mBounds.y2 + padding,
                          mGainSlider.mBounds.x1 + mGainSlider.mBounds.getWidth() * volume, mGainSlider.mBounds.y2 + padding + 20 );
        gl::drawSolidRect( volumeRect );
    }

    drawWidgets( mWidgets );

    if( mInputDeviceNodeUnderrunFade > 0.0001f ) {
        gl::color( ColorA( 0.8f, 0.2f, 0, mInputDeviceNodeUnderrunFade ) );
        gl::drawSolidRect( mUnderrunRect );
        gl::drawStringCentered( "in underrun", mUnderrunRect.getCenter(), Color::black() );
    }
    if( mInputDeviceNodeOverrunFade > 0.0001f ) {
        gl::color( ColorA( 0.8f, 0.2f, 0, mInputDeviceNodeOverrunFade ) );
        gl::drawSolidRect( mOverrunRect );
        gl::drawStringCentered( "in overrun", mOverrunRect.getCenter(), Color::black() );
    }
    if( mOutputDeviceNodeClipFade > 0.0001f ) {
        gl::color( ColorA( 0.8f, 0.2f, 0, mOutputDeviceNodeClipFade ) );
        gl::drawSolidRect( mClipRect );
        gl::drawStringCentered( "out clip", mClipRect.getCenter(), Color::black() );
    }

    gl::popMatrices();
}
void VoiceTestApp::draw()
{
	gl::clear();

	if( mMonitor && mMonitor->getNumConnectedInputs() ) {
		vec2 padding( 20, 4 );

		Rectf scopeRect( padding.x, padding.y, getWindowWidth() - padding.x, getWindowHeight() - padding.y );
		drawAudioBuffer( mMonitor->getBuffer(), scopeRect, true );
	}

	drawWidgets( mWidgets );
}
void SamplePlayerNodeTestApp::draw()
{
	gl::clear();

	if( mTestSelector.currentSection() == "recorder" ) {
		audio::BufferRef recordedBuffer = mRecorder->getRecordedCopy();
		drawAudioBuffer( *recordedBuffer, getWindowBounds() );
	}
	else {
		auto bufferPlayer = dynamic_pointer_cast<audio::BufferPlayerNode>( mSamplePlayerNode );
		if( bufferPlayer )
			mWaveformPlot.draw();
		else if( mMonitor && mMonitor->isInitialized() )
			drawAudioBuffer( mMonitor->getBuffer(), getWindowBounds() );

		// draw the playhead at the player's current read position
		float readPos = (float)getWindowWidth() * mSamplePlayerNode->getReadPosition() / mSamplePlayerNode->getNumFrames();
		gl::color( ColorA( 0, 1, 0, 0.7f ) );
		gl::drawSolidRoundedRect( Rectf( readPos - 2, 0, readPos + 2, (float)getWindowHeight() ), 2 );
	}

	if( mUnderrunFade > 0.0001f ) {
		gl::color( ColorA( 1, 0.5f, 0, mUnderrunFade ) );
		gl::drawSolidRect( mUnderrunRect );
		gl::drawStringCentered( "play underrun", mUnderrunRect.getCenter(), Color::black() );
	}
	if( mOverrunFade > 0.0001f ) {
		gl::color( ColorA( 1, 0.5f, 0, mOverrunFade ) );
		gl::drawSolidRect( mOverrunRect );
		gl::drawStringCentered( "play overrun", mOverrunRect.getCenter(), Color::black() );
	}

	if( mRecorderOverrunFade > 0.0001f ) {
		gl::color( ColorA( 1, 0.5f, 0, mRecorderOverrunFade ) );
		gl::drawSolidRect( mRecorderOverrunRect );
		gl::drawStringCentered( "rec overrun", mRecorderOverrunRect.getCenter(), Color::black() );
	}

	drawWidgets( mWidgets );
}
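// getRecordedCopy(), used in the "recorder" branch above, returns a snapshot of the frames
// captured by an audio::BufferRecorderNode. A minimal sketch of wiring such a recorder,
// assuming the member names used in this test and a hypothetical setupRecorder() helper:
void SamplePlayerNodeTestApp::setupRecorder()
{
	auto ctx = audio::master();

	// record the player's output so it can be drawn (and replayed) later
	mRecorder = ctx->makeNode( new audio::BufferRecorderNode );
	mSamplePlayerNode >> mRecorder;
	mRecorder->start();
}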
void NodeAdvancedApp::draw()
{
	gl::clear();

	// Draw the MonitorNode's recorded Buffer in the upper right.
	if( mMonitor && mMonitor->getNumConnectedInputs() ) {
		Rectf scopeRect( getWindowWidth() - 210, 10, getWindowWidth() - 10, 110 );
		drawAudioBuffer( mMonitor->getBuffer(), scopeRect, true );
	}

	// Visualize the Gen's current pitch with a circle.

	float pitchMin = mCPentatonicScale.front();
	float pitchMax = mCPentatonicScale.back();
	float currentPitch = audio::freqToMidi( mGen->getFreq() ); // MIDI values do not have to be integers for us.

	float percent = ( currentPitch - pitchMin ) / ( pitchMax - pitchMin );

	float circleX = percent * getWindowWidth();

	gl::color( 0, 0.8f, 0.8f );
	gl::drawSolidCircle( vec2( circleX, getWindowCenter().y ), 50 );
}
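// The pitch visualization above assumes a GenNode (mGen) whose frequency is picked from a
// pentatonic scale stored as MIDI note numbers. A minimal sketch of that state, assuming
// these member names and a hypothetical setupOscillator() helper; the actual note list and
// oscillator type may differ:
void NodeAdvancedApp::setupOscillator()
{
	// C major pentatonic across two octaves, as MIDI note numbers
	mCPentatonicScale = { 48, 50, 52, 55, 57, 60, 62, 64, 67, 69, 72 };

	auto ctx = audio::master();
	mGen = ctx->makeNode( new audio::GenTriangleNode( audio::Node::Format().autoEnable() ) );
	mGen->setFreq( audio::midiToFreq( mCPentatonicScale.front() ) );

	mGen >> ctx->getOutput();
	ctx->enable();
}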