Example No. 1
void MilluminApp::draw()
{    
	gl::enableAlphaBlending();
	gl::clear( Color( 0.1f, 0.1f, 0.1f ) );

	//draw our spiral
	gl::pushModelView();
	gl::translate(Vec2f(getWindowWidth()/2, getWindowHeight()/2));
	gl::rotate(mRot);
	gl::scale(Vec3f(4.f, 4.f, 1.f));
	gl::color(ColorA(1.f, 0.f, 0.f, 1.f));
	archimedes.draw();
	gl::popModelView();
	
	//draw our publishable texture
	if(mTex){
		gl::color(ColorA(1.f, 1.f, 1.f, 1.f));
		gl::draw(mTex);
	}
	
	mScreenSyphon.publishScreen(); //publish the screen
	mTextureSyphon.publishTexture(&mTex); //publish our texture
	
	//anything that we draw after here will not be published
	
	mClientSyphon.draw(Vec2f(300.0f, 0.0f)); //draw our client image
}
void SyphonBasicApp::setup()
{
	try {
		mLogo = gl::Texture::create( loadImage( loadAsset("cinder_logo_alpha.png") ) );
	}
	catch( ... ) {
		std::cout << "unable to load the texture file!" << std::endl;
	}
	
	try {
		mShader = gl::GlslProg::create( loadAsset("passThru_vert.glsl"), loadAsset("gaussianBlur_frag.glsl") );
	}
	catch( gl::GlslProgCompileExc &exc ) {
		std::cout << "Shader compile error: " << std::endl;
		std::cout << exc.what();
	}
	catch( ... ) {
		std::cout << "Unable to load shader" << std::endl;
	}
	
	mAngle = 0.0f;
	
	mScreenSyphon.setName("Screen Output"); // set a name for each item to be published
	mTextureSyphon.setName("Texture Output");
	
	mClientSyphon.setup();
    
	// in order for this to work, you must run Simple Server, which is a Syphon test application
	// feel free to change the app and server name for your specific case
	mClientSyphon.set("", "Simple Server");

	mClientSyphon.bind();
}
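These snippets assume a few members declared on the app class. A minimal sketch of what SyphonBasicApp might declare, assuming the Cinder-Syphon block (the syphonServer/syphonClient class names and the cinderSyphon.h header name are assumptions, not shown in the original examples):

#include "cinder/app/AppBasic.h"
#include "cinder/gl/Texture.h"
#include "cinder/gl/GlslProg.h"
#include "cinderSyphon.h" // assumed header name for the Cinder-Syphon block

class SyphonBasicApp : public ci::app::AppBasic {
  public:
	void setup();
	void draw();

	ci::gl::TextureRef   mLogo;          // logo texture, published as-is
	ci::gl::GlslProgRef  mShader;        // blur shader applied only to the on-screen copy
	float                mAngle;

	syphonServer         mScreenSyphon;  // publishes the whole window via publishScreen()
	syphonServer         mTextureSyphon; // publishes a single texture via publishTexture()
	syphonClient         mClientSyphon;  // receives frames from the "Simple Server" test app
};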
Example No. 3
void MilluminApp::setup()
{
	listener.setup(5001);
	host = "127.0.0.1";
	port = 5000;
	sender.setup(host, port);
    
    
	mTex = gl::Texture(200, 100); //create our texture to publish
	mSurface = Surface8u(200, 100, false); //create a surface to manipulate
	randomizeSurface(&mSurface); //randomize our surface
	mTex.update(mSurface); //tell the texture about our changes
	
	archimedes.set(100.f, 0.6f); //set up and calculate our spiral
	archimedes.calc();
	mRot = 0.f;
	
	mScreenSyphon.setName("Cinder Screen"); // set a name for each item to be published
	mTextureSyphon.setName("Cinder Texture");
	
	mClientSyphon.setup();
    
	// in order for this to work, you must run Simple Server from the testapps directory
	// any other Syphon server you create would work as well; just change the name
	mClientSyphon.setApplicationName("Simple Server");
	mClientSyphon.setServerName("");
	
	mClientSyphon.bind();
}
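randomizeSurface() is not shown in this example; a hypothetical reconstruction that simply fills the Surface8u with random RGB values (the Surface iterator and ci::Rand are the only assumptions) could look like this:

void MilluminApp::randomizeSurface( Surface8u *surface )
{
	// walk every pixel and assign random RGB values (the surface was created without alpha)
	Surface8u::Iter iter = surface->getIter();
	while( iter.line() ) {
		while( iter.pixel() ) {
			iter.r() = Rand::randInt( 256 );
			iter.g() = Rand::randInt( 256 );
			iter.b() = Rand::randInt( 256 );
		}
	}
}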
void AwesomeSilkApp::setup()
{
    setWindowPos(Vec2i(100,200));
	// set Awesomium logging to verbose
	Awesomium::WebConfig cnf;
	cnf.log_level = Awesomium::kLogLevel_Verbose;
#if defined( CINDER_MAC )
	std::string frameworkPath = ( getAppPath() / "Contents" / "MacOS" ).string();
	cnf.package_path = Awesomium::WebString::CreateFromUTF8( frameworkPath.c_str(), frameworkPath.size() );
#endif
    
	// initialize the Awesomium web engine
	mWebCorePtr = Awesomium::WebCore::Initialize( cnf );
    
	// create a webview
	mWebViewPtr = mWebCorePtr->CreateWebView( getWindowWidth(), getWindowHeight() );
	mWebViewPtr->LoadURL( Awesomium::WebURL( Awesomium::WSLit( "http://localhost/~vgusev/silk/silk/WebContent/silk.html" ) ) );
	mWebViewPtr->Focus();
    
	// load and create a "loading" icon
	try { mLoadingTexture = gl::Texture( loadImage( loadAsset( "loading.png" ) ) ); }
	catch( const std::exception &e ) { console() << "Error loading asset: " << e.what() << std::endl; }
    
    DialogWebViewListener *listener = new DialogWebViewListener();
    mWebViewPtr->set_dialog_listener(listener);
    
    mTextureSyphon.setName("Cinder Texture");
    useSyphon = false;
}
void AwesomeSilkApp::draw()
{
	gl::clear();
    
	if( mWebTexture )
	{
		gl::color( Color::white() );
		gl::draw( mWebTexture );
        if (useSyphon){
            mTextureSyphon.publishTexture(&mWebTexture); //publish our texture
        }
	}
    
	// show spinner while loading
	if( mLoadingTexture && mWebViewPtr && mWebViewPtr->IsLoading() )
	{
		gl::pushModelView();
        
		gl::translate( 0.5f * Vec2f( getWindowSize() ) );
		gl::scale( 0.5f, 0.5f );
		gl::rotate( 180.0f * float( getElapsedSeconds() ) );
		gl::translate( -0.5f * Vec2f( mLoadingTexture.getSize() ) );
		
		gl::color( Color::white() );
		gl::enableAlphaBlending();
		gl::draw( mLoadingTexture );
		gl::disableAlphaBlending();
        
		gl::popModelView();
	}
}
Example No. 6
void BBPulseApp::setup()
{
    mFrequency = 440.0f;
    mPhase = 0.0f;
    mPhaseAdd = 0.0f;
    
    mModFrequency = 0.0f;
    mModPhase = 0.0f;
    mModPhaseAdd = 0.0f;
    
    audio::Output::play( audio::createCallback( this, &BBPulseApp::audioCallback ) );
    
    mDelay = 200;
    mMix = 0.2f;
    mFeedback = 0.3f;
    
    mDelaySize = mDelay * 44.1f;
    for( int i=0; i<mDelaySize; i++ ){
        mDelayLine.push_back( 0.0f );
    }
    mDelayIndex = 0;
    
    BBPulseSyphon.setName("BBSyphon Screen");
    
}
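The audio callback registered through audio::createCallback() above is not part of the example. A minimal sketch, assuming Cinder's legacy callback signature and that mOutput is a std::vector<float>; the pulse-plus-delay math here is purely illustrative, not the original BBPulseApp code:

void BBPulseApp::audioCallback( uint64_t inSampleOffset, uint32_t ioSampleCount, audio::Buffer32f *ioBuffer )
{
    mPhaseAdd = mFrequency / 44100.0f;        // assumes 44.1 kHz, matching mDelaySize in setup()
    mOutput.assign( ioSampleCount, 0.0f );    // waveform buffer drawn in BBPulseApp::draw()

    for( uint32_t i = 0; i < ioSampleCount; i++ ){
        mPhase += mPhaseAdd;
        mPhase -= math<float>::floor( mPhase );
        float dry = ( mPhase < 0.5f ) ? 1.0f : -1.0f;           // naive pulse oscillator

        float delayed = mDelayLine[mDelayIndex];                 // read the delay tap
        mDelayLine[mDelayIndex] = dry + delayed * mFeedback;     // write input plus feedback
        if( ++mDelayIndex >= mDelaySize )
            mDelayIndex = 0;

        float sample = dry * ( 1.0f - mMix ) + delayed * mMix;
        mOutput[i] = sample;

        // write the same sample to every output channel
        for( uint32_t c = 0; c < ioBuffer->mNumberChannels; c++ )
            ioBuffer->mData[i * ioBuffer->mNumberChannels + c] = sample;
    }
}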
Example No. 7
void ardroneApp::setup()
{
    setupMovie();
    
    mSyphonServer.setName("ffmpeg-to-syphon");
    getWindow()->setTitle("eight_io: ffmpeg-to-syphon");

    renderFbo = gl::Fbo( getWindowWidth(), getWindowHeight() );

}
void SyphonBasicApp::draw()
{
	gl::enableAlphaBlending();
	gl::clear(Color::white());
    gl::color(ColorA(1.f, 1.f, 1.f, 1.f));
	
    mShader->bind();
    mShader->uniform( "tex0", 0 );
    mShader->uniform( "sampleOffset", Vec2f( cos( mAngle ), sin( mAngle ) ) * ( 3.0f / getWindowWidth() ) );
    gl::draw(mLogo, Vec2f::zero());
    mShader->unbind();
    
    mClientSyphon.draw(Vec2f(16.f, 64.f)); //draw our client image
    
	mScreenSyphon.publishScreen(); //publish the screen's output
	mTextureSyphon.publishTexture(mLogo); //publish our texture without the shader applied
	
	//anything that we draw after here will not be published
    gl::drawStringCentered("This text will not be published to Syphon.", Vec2f(getWindowCenter().x, 20.f), ColorA::black());
	
}
Example No. 9
void syphonImpApp::draw()
{
	gl::enableAlphaBlending();
	gl::clear( Color( 0.1f, 0.1f, 0.1f ) );

    renderSceneToFbo();
    mTextureSyphon.publishTexture(&myFbo.getTexture()); //publish our texture

    Vec2f upperLeftCorner = Vec2f( 0, WIDTH );
    Vec2f lowerRightCorner = Vec2f(WIDTH,WIDTH-(WIDTH/1.33) );
    Rectf rect = Rectf( upperLeftCorner, lowerRightCorner );
    gl::draw( myFbo.getTexture(0), rect );
}
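renderSceneToFbo() is not included in this example; the usual Cinder pattern it implies (bind myFbo, draw into it, unbind, then publish its texture) is sketched below. The circle drawn here is placeholder content only:

void syphonImpApp::renderSceneToFbo()
{
    // draw into myFbo so its color texture can be handed to publishTexture() in draw()
    myFbo.bindFramebuffer();
    gl::setViewport( myFbo.getBounds() );
    gl::setMatricesWindow( myFbo.getSize(), false );

    gl::clear( Color( 0.2f, 0.2f, 0.2f ) );
    gl::color( Color::white() );
    gl::drawSolidCircle( Vec2f( myFbo.getWidth() * 0.5f, myFbo.getHeight() * 0.5f ), 50.0f );

    myFbo.unbindFramebuffer();
    gl::setViewport( getWindowBounds() );      // restore the window viewport
    gl::setMatricesWindow( getWindowSize() );  // and the window matrices for the caller
}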
Example No. 10
void ardroneApp::setup()
{
    setupMovie();
    
    mSyphonServer.setName("ffmpeg-to-syphon");
    getWindow()->setTitle("eight_io: ffmpeg-to-syphon");

    renderFbo = gl::Fbo( getWindowWidth(), getWindowHeight() );
    
    drone.connect();
    
    // setup command history lengths for debugging and dumping onscreen (OPTIONAL)
    drone.controller.commandHistory.setMaxLength(30);
    drone.dataReceiver.commandHistory.setMaxLength(30);
    
    // setup the simulator so we have a display in the viewport (OPTIONAL)
    droneSimulator.setup(&drone);
}
Example No. 11
void ardroneApp::draw()
{
    renderFbo.bindFramebuffer();

	gl::clear( Color( 0, 0, 0 ) );
    gl::color( Color::white() );
	if ( mFrameTexture ) {
		Rectf centeredRect = Rectf( mFrameTexture.getBounds() ).getCenteredFit( getWindowBounds(), true );
		gl::draw( mFrameTexture, centeredRect  );
	}
    
    renderFbo.blitToScreen(renderFbo.getBounds(), getWindowBounds());
    mSyphonServer.publishTexture(renderFbo.getTexture(), false);

    renderFbo.unbindFramebuffer(); // return rendering to the window's own frame buffer

	if( mInfoTexture ) {
		glDisable( GL_TEXTURE_RECTANGLE_ARB );
		gl::draw( mInfoTexture, Vec2f( 5, getWindowHeight() - 5 - mInfoTexture.getHeight() ) );
	}
}
Example No. 12
void BBPulseApp::draw()
{
	// clear out the window with black
	gl::clear( Color( 0, 0, 0 ) );
    
    if( mOutput.size() > 0 ){
        Vec2f scale;
        scale.x = (float)getWindowWidth() / (float)mOutput.size();
        scale.y = 100.0f;
        float centerY = getWindowHeight() / 2.0f;
        
        gl::begin( GL_LINE_STRIP );
        for( size_t i = 0; i < mOutput.size(); i++ ){
            float x = (float)i * scale.x;
            float y = mOutput[i] * scale.y + centerY;
            gl::vertex( x,  y );
        }
        
        gl::end();
        BBPulseSyphon.publishScreen();
        
    }
}
void EpicMonsterApp::draw()
{
	gl::clear( Color::black() );
    
	gl::setMatrices( mMayaCam.getCamera() );
    gl::setViewport( getWindowBounds() );

	gl::enableDepthWrite();
	gl::enableDepthRead();
    
	gl::color( Color::white() );
    
	if ( mEnableWireframe )
		gl::enableWireframe();
	gl::Light light( gl::Light::DIRECTIONAL, 0 );
	light.setAmbient( Color::white() );
	light.setDiffuse( Color::white() );
	light.setSpecular( Color::white() );
	light.lookAt( Vec3f( 0, 0, 0 ), Vec3f( 0, 5, 0 ) );
	light.update( mMayaCam.getCamera() );
	light.enable();
    
	gl::enable( GL_LIGHTING );
	gl::enable( GL_NORMALIZE );
    
    mAssimpLoader.draw();

    mNormalMap.bind( 2 );
    
    mPPFbo.bindTexture(0);
    mPPFbo.bindTexture(1);
    mDisplacementShader.bind();
    mDisplacementShader.uniform("displacementMap", 0 );
    mDisplacementShader.uniform("velocityMap", 1);
    mDisplacementShader.uniform("normalMap", 2);
        mDisplacementShader.uniform("fallDirection", mFallDirection);
    gl::draw( mVboMesh );
    mDisplacementShader.unbind();
    mPPFbo.unbindTexture();
    
    if(mEnableDebugTexture) {
        gl::Texture tex = mPPFbo.getTexture(0);
        //console() << getWindowBounds();
        gl::draw(tex, Rectf(-5.0f, -5.0f, 5.0f, 5.0f));
    }
    
    
	
    //gl::setMatricesWindow(getWindowSize());
    gl::drawString( toString( SIDE*SIDE ) + " vertices", Vec2f(32.0f, 32.0f));
    gl::drawString( toString((int) getAverageFps()) + " fps", Vec2f(32.0f, 52.0f));
    
	gl::disable( GL_LIGHTING );
    
	if ( mEnableWireframe )
		gl::disableWireframe();
    
    mScreenSyphon.publishScreen(); //publish the screen
    
	params::InterfaceGl::draw();
}
void EpicMonsterApp::setup()
{
    mScreenSyphon.setName("Epic Monster Demo");
    
    mFallDirection = Vec3f(0.0,-1.0,0.0);
    listener.setup(7000);
    
    // Slows down particle pulses
    mTimerSlower = 0.05;
    
    // Number of iterations for particle drawing to texture
    n = Vec3f(128, 128, 0);
    
    // Where texture baking shader starts drawing (obsolete)
    mParTexOffset = Vec3f(-1.0, 1.0, 0.0);
    timer = cinder::Timer(true);
    mStep = true;
    try {
        // Multiple render targets shader updates the positions/velocities
        mParticlesShader = gl::GlslProg( ci::app::loadResource( PASSTHROUGH_VERT ), ci::app::loadResource( PARTICLES_FRAG ));
        // Vertex displacement shader
        mDisplacementShader = gl::GlslProg( ci::app::loadResource( VERTEXDISPLACEMENT_VERT ), ci::app::loadResource( VERTEXDISPLACEMENT_FRAG ));
        
        mBakeShader = gl::GlslProg( ci::app::loadResource( BAKE_VERT ), ci::app::loadResource( BAKE_FRAG ));
    }
    catch( ci::gl::GlslProgCompileExc &exc ) {
        std::cout << "Shader compile error: " << endl;
        std::cout << exc.what();
    }
    catch( const std::exception& ex ) {
        std::cout << "Unable to load shader" << endl;
    }
    
    setupPingPongFbo();
    // the VBO has to be set up after the FBO
    setupVBO();
    
    // End of Particle setup
    
    try {
        mAssimpLoader = assimp::AssimpLoader( getAssetPath( "Monsu7b.dae" ) );
    }
    catch(const std::exception& ex ) {
        std::cout << "Model loading error: " << endl;
        std::cout << ex.what();
    }
    
	mNormalMap = gl::Texture( loadImage( loadResource( RES_NORMAL ) ) );
	mAssimpLoader.enableSkinning();
    
	mNodeNames = mAssimpLoader.getNodeNames();
	if ( mNodeNames.empty () )
	{
		mNodeNames.push_back( "NO BONES!" );
		mNoBones = true;
	}
	else
	{
		mNoBones = false;
	}
    
	// query original node orientations from model
	mNodeOrientations.assign( mNodeNames.size(), Quatf() );
	if ( !mNoBones )
	{
		for ( size_t i = 0; i < mNodeOrientations.size(); ++i )
		{
			mNodeOrientations[ i ] = mAssimpLoader.getNodeOrientation( mNodeNames[ i ] );
		}
	}
    
	// query original node positions from model
	mNodePositions.assign( mNodeNames.size(), Vec3f() );
	if ( !mNoBones )
	{
		for ( size_t i = 0; i < mNodePositions.size(); ++i )
		{
			mNodePositions[ i ] = mAssimpLoader.getNodePosition( mNodeNames[ i ] );
		}
	}
    
	mNodeIndex = 0;
    triangles = mAssimpLoader.totalTriangles();
	mEnableWireframe = false;
    mEnableDebugTexture = false;
    
	mParams = params::InterfaceGl( "Parameters", Vec2i( 200, 300 ) );
    
	setupParams();
    
    CameraPersp cam;
	cam.setPerspective( 60, getWindowAspectRatio(), 0.1f, 1000.0f );
	cam.setEyePoint( Vec3f( 0, 1, 3 ) );
	cam.setCenterOfInterestPoint( Vec3f( 0, 0, 0 ) );
	mMayaCam.setCurrentCam( cam );
}