Example #1
//--------------------------------------------------------------
void testApp::setup() {
    ofSetLogLevel(OF_LOG_VERBOSE);
    ofEnableDepthTest();
    ofSetVerticalSync(false);

    showOverlay = false;

    oculusRift.baseCamera = &cam;
    oculusRift.setup();

    for (int i = 0; i < 80; i++) {
        DemoBox d;
        demos.push_back(d);
    }
    setupBoxes();

    if (ofIsGLProgrammableRenderer())
        bshader.load("Shaders_GL3/simple.vert", "Shaders_GL3/simple.frag");

    // ofBox uses texture coordinates from 0-1, so you can load whatever
    // sized images you want and still use them to texture your box
    // but we have to explicitly normalize our tex coords here
    ofEnableNormalizedTexCoords();

    // loads the OF logo from disk
    ofLogo.loadImage("of.png");

    //enable mouse;
    cam.setAutoDistance(false);
    cam.begin();
    cam.end();

    // set camera y to user eye height
    cam.setGlobalPosition(0, oculusRift.getUserEyeHeight(), 3);
}
Example #2
void Effects::beginGrabPixels() {
	ofEnableNormalizedTexCoords();
	glBindFramebuffer(GL_FRAMEBUFFER, fbo_handle);
	glClearColor(0,0,0,1);
	glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
	glViewport(0,0,width, height);
}
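A matching "end" call would normally undo what beginGrabPixels() sets up before normal drawing resumes. The sketch below is an assumption, not part of the source: the endGrabPixels name, and the use of ofGetWidth()/ofGetHeight() to restore the window viewport, are hypothetical.
// Hypothetical companion to beginGrabPixels(): restore the default
// framebuffer, the window viewport, and the tex-coord mode.
void Effects::endGrabPixels() {
	glBindFramebuffer(GL_FRAMEBUFFER, 0);           // back to the window framebuffer
	glViewport(0, 0, ofGetWidth(), ofGetHeight());  // restore the window viewport
	ofDisableNormalizedTexCoords();                 // undo the state change from begin
}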
Example #3
//--------------------------------------------------------------
void testApp::setup() {
    ofEnableNormalizedTexCoords();
    ofImage img;
    img.loadImage("hci.png");
    img.mirror(false, true);
    tex.allocate(640, 640, GL_RGB, true);
    ang = 0;
    glEnable(GL_DEPTH_TEST); // enable depth testing, otherwise things will be drawn in the order they are rendered
    tex.loadData(img.getPixels(), 640, 640, GL_RGB);
    ofDisableNormalizedTexCoords();
    
    ofSetLogLevel(OF_LOG_VERBOSE);
    openNIDevice.setup();
    openNIDevice.setLogLevel(OF_LOG_VERBOSE);
    openNIDevice.addDepthGenerator();
    openNIDevice.addImageGenerator(); // comment this out
    openNIDevice.start();
    
    verdana.loadFont(ofToDataPath("verdana.ttf"), 24);
    selectedPoint = ofPoint(200,200);
}
Example #4
void ofkMeshRectRenderer::render()
{
    glMatrixMode(GL_MODELVIEW);
    ofPushMatrix();
    
    ofEnableNormalizedTexCoords();
    
    mTexture.bind();
    
    for (int vertical = 0; vertical < GridRES; vertical++) 
    {
        mVbos[vertical].draw(GL_TRIANGLE_STRIP, 0, 2*(GridRES+1));
        mVbos[vertical].draw(GL_LINE_STRIP, 0, 2*(GridRES+1));

    }
    
    ofDisableNormalizedTexCoords();
    
    mTexture.unbind();
    
    glMatrixMode(GL_MODELVIEW);
    ofPopMatrix();
    
    glMatrixMode(GL_TEXTURE);
    glLoadIdentity();
    
    glMatrixMode(GL_MODELVIEW);
    
    
}
Example #5
void Render::setup() {
    ofSetVerticalSync(vsync);
	ofSetFrameRate(framerate);
    ofBackground(100,100,100);
    ofEnableNormalizedTexCoords();
    ofDisableAntiAliasing();
}
Example #6
//--------------------------------------------------------------
void testApp::setup(){
	
	ofSetLogLevel(OF_LOG_VERBOSE);
    model.loadMeshes("astroBoy_walk.dae",meshes);
	tex.loadImage("boy_10.tga");
	
	ofEnableNormalizedTexCoords();

	for(int i =0; i < meshes.size();i++){
		vboMeshes.push_back(ofVboMesh());
		vboMeshes.back().meshElement = &meshes[i];
		vboMeshes.back().drawType = GL_STREAM_DRAW_ARB;
	}
	
//    ofEnableBlendMode(OF_BLENDMODE_ALPHA);
    
    glEnable(GL_DEPTH_TEST);
	
	lightsOn = true;
	glShadeModel(GL_SMOOTH); // GL_SMOOTH is a shade model, not a glEnable() capability
	glEnable(GL_POINT_SMOOTH);
	glPointSize(4);
	glEnable(GL_LIGHTING);
	glEnable(GL_LIGHT0);
	glEnable(GL_COLOR_MATERIAL);
	glEnable(GL_NORMALIZE);
}
Example #7
//--------------------------------------------------------------
void ofApp::setup() {

    #ifdef TARGET_OPENGLES
    // While this will work on normal OpenGL as well, it is
    // required for OpenGL ES because ARB textures are not supported.
    // If this IS set, then we conditionally normalize our 
    // texture coordinates below.
    ofEnableNormalizedTexCoords();
    #endif

	img.load("linzer.png");
	
	// OF_PRIMITIVE_TRIANGLES means every three vertices create a triangle
	mesh.setMode(OF_PRIMITIVE_TRIANGLES);
	int skip = 10;	// this controls the resolution of the mesh
	
	int width = img.getWidth();
	int height = img.getHeight();

	ofVec2f imageSize(width,height);

	ofVec3f zero(0, 0, 0);
	for(int y = 0; y < height - skip; y += skip) {
		for(int x = 0; x < width - skip; x += skip) {
			/*
			 To construct a mesh, we have to build a collection of quads made up of
			 the current pixel, the one to the right, to the bottom right, and
			 beneath. These are called nw, ne, se and sw. To get the texture coords
			 we need to use the actual image indices.
			 */
			ofVec3f nw = getVertexFromImg(img, x, y);
			ofVec3f ne = getVertexFromImg(img, x + skip, y);
			ofVec3f sw = getVertexFromImg(img, x, y + skip);
			ofVec3f se = getVertexFromImg(img, x + skip, y + skip);
			ofVec2f nwi(x, y);
			ofVec2f nei(x + skip, y);
			ofVec2f swi(x, y + skip);
			ofVec2f sei(x + skip, y + skip);
			
			// ignore any zero-data (where there is no depth info)
			if(nw != zero && ne != zero && sw != zero && se != zero) {
				addFace(mesh, nw, ne, se, sw);

				// Normalize our texture coordinates if normalized 
				// texture coordinates are currently enabled.
				if(ofGetUsingNormalizedTexCoords()) {
					nwi /= imageSize;
					nei /= imageSize;
					sei /= imageSize;
					swi /= imageSize;
				}

				addTexCoords(mesh, nwi, nei, sei, swi);
			}
		}
	}
	
	vboMesh = mesh;
}
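The helpers called above (getVertexFromImg, addFace, addTexCoords) are not shown in this example. The versions below are a plausible sketch of what they typically look like in openFrameworks mesh examples of this kind, assuming brightness is used as depth and dark pixels count as "no data"; the exact bodies are assumptions.
// Split one quad (nw, ne, se, sw) into two triangles and append them to the mesh.
void ofApp::addFace(ofMesh& mesh, ofVec3f nw, ofVec3f ne, ofVec3f se, ofVec3f sw) {
	mesh.addVertex(nw); mesh.addVertex(ne); mesh.addVertex(se);
	mesh.addVertex(nw); mesh.addVertex(se); mesh.addVertex(sw);
}

// Append matching texture coordinates for the two triangles added above.
void ofApp::addTexCoords(ofMesh& mesh, ofVec2f nw, ofVec2f ne, ofVec2f se, ofVec2f sw) {
	mesh.addTexCoord(nw); mesh.addTexCoord(ne); mesh.addTexCoord(se);
	mesh.addTexCoord(nw); mesh.addTexCoord(se); mesh.addTexCoord(sw);
}

// Use pixel brightness as depth; return the zero vector for dark pixels,
// which the setup() above skips as "no data".
ofVec3f ofApp::getVertexFromImg(ofImage& img, int x, int y) {
	ofColor color = img.getColor(x, y);
	if(color.getBrightness() > 5) {
		float z = ofMap(color.getBrightness(), 0, 255, -100, 100);
		return ofVec3f(x, y, z);
	}
	return ofVec3f(0, 0, 0);
}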
Example #8
//--------------------------------------------------------------
void testApp::setup(){
	ofBackground(0, 0, 0);
	ofSetVerticalSync(true);
	ofSetFullscreen(true);
	ofHideCursor();
	ofEnableNormalizedTexCoords();
	ofDisableArbTex();
	
	settings.loadFile("settings.xml");
	camId = settings.getValue("camera", 0);
	captureSize.set(settings.getValue("capture:width", 1280),
					settings.getValue("capture:height", 720));
	outputSize.set(settings.getValue("output:width", 1080),
					settings.getValue("output:height", 1920));
	outputPosition.set(settings.getValue("output:x", 2000),
				   settings.getValue("output:y", 0));
	
	ofSetWindowPosition(outputPosition.x, outputPosition.y);
	
	
	cam.setDeviceID(camId);
	cam.initGrabber(captureSize.x, captureSize.y);
	cam.setupControls(ofxUVCQTKitVideoGrabber::LOGITECH_C910);
	cam.setupGui("Camera", "camera.xml");
	
	colorsOfMovement.setup(captureSize/2, 30);

	warper.setup(0, 0, captureSize.x, captureSize.y);
	warper.load();
	warper.deactivate();

	enableAppGui = false;
	enableColorsOfMovementGui = false;
	enableCameraGui = false;
	enableWarpGui = false;
	showMouse = false;

	gui.setup("App", "app.xml");
	
	ofxFloatSlider * maskSizeSlider = new ofxFloatSlider();
	maskSizeSlider->setup("Size", 0.0, 0.0, 100);
	maskSizeSlider->addListener(this, &testApp::onGuiChange);
	gui.add(maskSizeSlider);
	
	ofxFloatSlider * maskXSlider = new ofxFloatSlider();
	maskXSlider->setup("x", 0.0, 0.0, outputSize.x);
	maskXSlider->addListener(this, &testApp::onGuiChange);
	gui.add(maskXSlider);
	
	ofxFloatSlider * maskYSlider = new ofxFloatSlider();
	maskYSlider->setup("y", 0.0, 0.0, outputSize.y);
	maskYSlider->addListener(this, &testApp::onGuiChange);
	gui.add(maskYSlider);
	
	gui.loadFromFile("app.xml");
}
Example #9
//--------------------------------------------------------------
void testApp::setup(){
    
    ofSetFrameRate(24);
    ofEnableNormalizedTexCoords();
    bDrawWireframe = false;
    
    moon.loadImage("moon.jpg");
    mars.loadImage("planet.jpg");
    
    
    
    vector<ofVec3f> verts;
    vector<ofVec2f> texCoords;
    
    
    /**
     We are going to populate 2 vectors with points that make up 'nSlices' triangles
     Each time through the for loop, we add 3 points to verts and 3 points to texCoords
     Think of cutting out a slice from the picture for each triangle in the circle
     */
    int nSlices = 30;
    float angle = 0;
    float x, y;
    for(int i=0; i<nSlices; i++)
    {
        // Add a triangle point at the center
        verts.push_back( ofPoint(0, 0) );
        texCoords.push_back( ofPoint(0.5, 0.5) );
        
        
        // Add triangle point at angle
        x = cos(angle);
        y = sin(angle);
        verts.push_back(ofPoint(x, y));
        x = 0.5 + cos(angle) * 0.5;
        y = 0.5 + sin(angle) * 0.5;
        texCoords.push_back(ofPoint(x, y));
        
        
        // increment the angle
        angle += (M_TWO_PI / (float)nSlices);
        
        
        // Add the third point of the triangle
        x = cos(angle);
        y = sin(angle);
        verts.push_back(ofPoint(x, y));
        x = 0.5 + cos(angle) * 0.5;
        y = 0.5 + sin(angle) * 0.5;
        texCoords.push_back(ofPoint(x, y));
    }
    
    
    unitCircleWithTexCoords.addVertices(verts);
    unitCircleWithTexCoords.addTexCoords(texCoords);
}
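One plausible draw() to go with this setup: bind one of the textures and draw the unit-circle mesh scaled up. Everything below (the radius, the use of the moon image, drawing at the window center) is an assumption for illustration.
//--------------------------------------------------------------
void testApp::draw(){
    ofSetColor(255);

    moon.bind();                           // the 0.5-centered tex coords sample the moon image
    ofPushMatrix();
    ofTranslate(ofGetWidth()/2, ofGetHeight()/2);
    ofScale(200, 200);                     // the unit circle becomes a 200px-radius disc
    if(bDrawWireframe) unitCircleWithTexCoords.drawWireframe();
    else unitCircleWithTexCoords.draw();   // default mesh mode is OF_PRIMITIVE_TRIANGLES
    ofPopMatrix();
    moon.unbind();
}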
Example #10
//--------------------------------------------------------------
void ofApp::setup()
{
    ofSetLogLevel( OF_LOG_VERBOSE );
    ofEnableDepthTest();
    ofSetVerticalSync( false );
    
    showOverlay = false;
    
    // CHUCK STUFF -------------------------------------------------------------
    
    // get singleton
    chuck = TheChucK::instance();
    // arguments
    const char * argv[] = { "the", "-v0" };
    
    // initialize (SHOULD HAPPEN BEFORE AUDIO STREAM STARTS)
    chuck->initialize( MY_SRATE, MY_BUFFERSIZE, MY_CHANNELS, 2, argv );
    
    // compile and run a file
    chuck->compileFile( "ck/dot-circle.ck" );
    
    // setup the sound stream...
    soundStream.setup( this,
                      MY_CHANNELS,     // output
                      0,               // input
                      MY_SRATE,        // sample rate
                      MY_BUFFERSIZE,   // buffer size
                      MY_NUMBUFFERS ); // num buffer
    
    // set location
    w = 250;
    h = 200;
    
    // OCULUS STUFF ------------------------------------------------------------
    
    oculusRift.baseCamera = &cam;
    oculusRift.setup();
    oculusRift.fullscreenOnRift();
    
    if (ofIsGLProgrammableRenderer())
        bshader.load("Shaders_GL3/simple.vert", "Shaders_GL3/simple.frag");
    
    // ofBox uses texture coordinates from 0-1, so you can load whatever
    // sized images you want and still use them to texture your box
    // but we have to explicitly normalize our tex coords here
    ofEnableNormalizedTexCoords();
    
    //enable mouse;
    cam.setAutoDistance(false);
    cam.begin();
    cam.end();
    
    // set camera y to user eye height
    cam.setGlobalPosition(0, oculusRift.getUserEyeHeight(), 3);
    
}
Example #11
void ofxSphereMeshRenderer::render()
{
    ofEnableNormalizedTexCoords();

    tex.bind();
    mSphere.draw();
    tex.unbind();
    ofDisableNormalizedTexCoords();
    
}
Example #12
void Effects::draw() {
	ofEnableNormalizedTexCoords();
	bind();
		glDrawArrays(GL_QUADS, 0, 4);
	unbind();
	
	if(cracks_enabled) {
		cracks.draw();
	}

}
Example #13
void CameraSource::draw(){
	ofClear(0);
	ofSetHexColor(0xffffff);
	
	if(_videoGrabber.isInitialized()){
		ofDisableNormalizedTexCoords();
		_videoGrabber.draw(0, 0);
		ofEnableNormalizedTexCoords();
	}else{
		ofDrawBitmapString("no camera", _cameraWidth / 2.0f - 40.0f, _cameraHeight / 2.0f + 10.0f);
	}
}
Example #14
void Dihedral4::setup(ofVec3f pos){
    this->pos = pos;
    
    ofEnableNormalizedTexCoords();
    design.loadImage("squareTex.png");
    ofxObjLoader::load("square.obj", meshy);
    meshy.enableTextures();
    //centering object
    ofVec3f centroid = meshy.getCentroid();
    for(int i = 0; i < meshy.getNumVertices(); i++){
        meshy.getVertices()[i] = meshy.getVertices()[i] -    centroid;
    }
}
Example #15
//--------------------------------------------------------------
void testApp::setup(){
    light.enable();
    ofEnableSeparateSpecularLight();
    
    shader.load("cheen");
    lut.loadImage("LUTs/cheen-lut_blueGreen2.png");
    
    
    glEnable(GL_DEPTH_TEST);
	glEnable(GL_TEXTURE_RECTANGLE_ARB);
    ofEnableAlphaBlending();
	ofEnableNormalizedTexCoords();
}
Example #16
void testApp::setup(){
    ofSetFrameRate(60);
    ofBackgroundHex(0xaaaaaa);
    easyCam.setDistance(22);

    ofSetLineWidth(3);
    
    ofSetVerticalSync(true);
    
	// this uses depth information for occlusion
	// rather than always drawing things on top of each other
	glEnable(GL_DEPTH_TEST);
	
	// ofBox uses texture coordinates from 0-1, so you can load whatever
	// sized images you want and still use them to texture your box
	// but we have to explicitly normalize our tex coords here
	ofEnableNormalizedTexCoords();
    
    //create a bunch of custom nodes
    for(double z = 0; z > -36*M_PI; z +=zStep){
        double x = -10;
        double y = tileLayoutFunction(z);
        double blueValue = fmod(-floor(z*2),255);

        CustomNode* node = new CustomNode();
        node->setPosition(x, y, z);
        node->red = 0;
        node->green = 60;
        node->blue = blueValue;
        customNodes.push_back(node);
        
    }
    firstNodePosition = customNodes[0]->getPosition();
    lastNodePosition = customNodes[customNodes.size()-1]->getPosition();
    
    
    light.enable();
    light.setPosition(0, 0, 150);
    light.setDirectional();
    ofFloatColor ambient_color(1.0, 0.0, 0.0,1.0);
    ofFloatColor diffuse_color(1.0, 1.0, 1.0);
    ofFloatColor specular_color(0.0, 1.0, 0.0);
    light.setAmbientColor(ambient_color);
    light.setDiffuseColor(diffuse_color);
    light.setSpecularColor(specular_color);

    easyCam.disableMouseInput();
}
Example #17
void testApp::setup() {
    ofSetFrameRate(60);
    ofSetVerticalSync(true);
    ofEnableNormalizedTexCoords(); ofDisableArbTex();
    graph.allocate(1200, 400, OF_IMAGE_COLOR);
    buffer.allocate(1200,400,GL_RGBA,1);
    testImg2.loadImage("graph.png");
    ofEnableSmoothing();
    center = ofVec3f();
    setupUI();
    setupVar();
    setupTCP();
    generateMesh(&points);
}
Example #18
void of3D::setup()
{

	ofEnableNormalizedTexCoords();
	
	ofImage img;
    img.loadImage("ofpic.png");
	img.mirror(false, true);
    text.allocate(128, 128, GL_RGBA, true);
    ang = 0;
	
	glEnable(GL_DEPTH_TEST); // enable depth testing, otherwise things will be drawn in the order they are rendered
    text.loadData(img.getPixels(), 128, 128, GL_RGBA);
	
}
Example #19
//--------------------------------------------------------------
void testApp::draw(){
    ofEnableNormalizedTexCoords();

    
    switch(switchInt)
    {
        case 1:
            posterize();
            break;
        case 2:
            rgbPosterize();
            break;
        case 3:
            wobble();
            break;
        case 4:
            abberation();
            break;
        case 5:
            perlinLab();
            break;
        case 6:
            wobbleAbberation();
            break;
        case 7:
            liquifyRender();
            break;
        case 8:
            perlinXyz();
            break;
        default:
            liquifyRenderS();
            break;
    }
    
    /*
    	cout << "\nMFCCS: ";
	ofSetColor(255, 0, 0,100);
	float xinc = 900.0 / 13;
	for(int i=0; i < 13; i++) {
		float height = mfccs[i] * 250.0;
		ofRect(100 + (i*xinc),600 - height,40, height);
        		cout << mfccs[i] << ",";
	}
     */
    
}
Example #20
void Dihedral3::setup(ofVec3f pos, float rotX, float rotY){
    this->pos = pos;
    this->rotX = rotX;
    this->rotY = rotY;
    
    ofEnableNormalizedTexCoords();
    design.loadImage("triTex2.png");
    //now my mesh is full of obj
    ofxObjLoader::load("triangle2.obj", meshy);
    meshy.enableTextures();
    //centering object
    ofVec3f centroid = meshy.getCentroid();
    for(int i = 0; i < meshy.getNumVertices(); i++){
        meshy.getVertices()[i] = meshy.getVertices()[i] -    centroid;
    }
    
}
Example #21
//--------------------------------------------------------------
void ofApp::setup(){
	ofSetVerticalSync(true);

	// this uses depth information for occlusion
	// rather than always drawing things on top of each other
	ofEnableDepthTest();
	
	// ofBox uses texture coordinates from 0-1, so you can load whatever
	// sized images you want and still use them to texture your box
	// but we have to explicitly normalize our tex coords here
	ofEnableNormalizedTexCoords();
	
	// loads the OF logo from disk
	ofLogo.loadImage("of.png");
	
	// draw the ofBox outlines with some weight
	ofSetLineWidth(10);
}
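A minimal sketch of a draw() that could pair with this setup: the box samples the logo with its built-in 0-1 texture coordinates, then a second pass draws the thick outlines. The box size, spin, and background color are assumptions.
//--------------------------------------------------------------
void ofApp::draw(){
	ofBackground(50);
	ofSetColor(255);

	ofPushMatrix();
	ofTranslate(ofGetWidth()/2, ofGetHeight()/2);   // center of the screen
	ofRotateY(ofGetElapsedTimef() * 20);            // slow spin so every face is visible
	ofRotateX(ofGetElapsedTimef() * 10);

	ofLogo.bind();       // textured pass: the box's 0-1 tex coords sample the logo
	ofFill();
	ofBox(200);
	ofLogo.unbind();

	ofNoFill();          // outline pass, using the 10px line width set in setup()
	ofSetColor(0);
	ofBox(200);

	ofPopMatrix();
}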
Example #22
//--------------------------------------------------------------
void testApp::setup(){

    ofBackground(0, 0, 0);
    
    ofEnableNormalizedTexCoords();
    
    marble.loadImage("moon.jpg");
    
    vector<ofPoint> vertices;
    //vector<ofColor> colors;
   // vector<ofFloatColor> colors;   // go from 0-1
    vector<ofVec2f> texCoods;// vec has 2 floats in it
    
    float nSlices = 30;
    float angle = 0;
    for(int i=0; i<nSlices; i++){
        vertices.push_back(ofPoint(0,0));
        texCoods.push_back(ofVec2f(0.5, 0.5));
        //texCoods.push_back(ofVec2f(marble.getWidth()/2, marble.getHeight()/2));
        //normalize means 0-1
        
        float x = cos(angle);
        float y = sin(angle);
        vertices.push_back(ofPoint(x,y));
        x=0.5+cos(angle)*0.5;
        y=0.5+sin(angle)*0.5;
        texCoods.push_back(ofVec2f(x, y));

        
        angle += M_TWO_PI /(float)nSlices; // M_TWO_PI = a full circle (360 degrees) in radians
        
        
        x=cos(angle);
        y=sin(angle);
        
        vertices.push_back(ofPoint(x,y));
        x=0.5+cos(angle)*0.5;
        y=0.5+sin(angle)*0.5;
        texCoods.push_back(ofVec2f(x, y));
    }
    
    circle.addVertices(vertices);
    circle.addTexCoords(texCoods);
}
Example #23
//--------------------------------------------------------------
void ofApp::setup(){
    ofSetFrameRate(60);
    ofSetVerticalSync(true);
    ofSetLogLevel(OF_LOG_VERBOSE);
    mode=false;
    leap.open();
    ofEnableNormalizedTexCoords();
    cam.setOrientation(ofPoint(-20, 0, 0));
    
    glEnable(GL_DEPTH_TEST);
    glEnable(GL_NORMALIZE);
    
    font.loadFont("SS_Adec2.0_main.otf", 150);
    
    ofHideCursor();
    ofToggleFullscreen();
    image[0].loadImage("apple.png");
    image[1].loadImage("orange.png");
}
Example #24
//--------------------------------------------------------------
void testApp::setup() {
	mode = E_MODE_CAMERA;
	ofBackground(0, 0, 0);
	ofSetWindowShape(1280, 800);
	
	// model settings
	modelBase.setPosition(0.0f, -0.204f, -0.272324f);
	// modelBase.setOrientation(ofVec3f(310.7939f, 0.0f, 0.0f));
	modelBase.setScale(0.01f);
	modelBase.tilt(49.2f);
	
	model.setScaleNormalization(false);
	model.loadModel("cube11.obj");
	
	for (int i = 0; i < model.getNumMeshes(); i++) {
		vertsNum += model.getMesh(i).getNumVertices();
	}
	
	ofEnableNormalizedTexCoords();
	
	// projector and camera settings
	projectorBase.setPosition(-0.05585744f, 0.0265303f, 0.02653026f);
	//projectorBase.setOrientation(ofVec3f(45.0f, 0.0f, 0.0f));
	projectorBase.tilt(-45.0f);
	
	projector.setParent(projectorBase);
	projector.setPosition(-0.0455f, 0.0f, -0.0375f);
	//projector.setOrientation(ofVec3f(349.177f, 0.0f, 0.0f));
	
	camera.setParent(projectorBase);
	camera.setPosition(0.0f, 0.075f, -0.0575f);
	// camera.setOrientation(ofVec3f(11.0f, 0.0f, 0.0f));
	// camera.tilt(-11.0f);
	
	// editor camera
	editorCamera.setDistance(1.0f);
	editorCamera.setNearClip(0.01f);
	editorCamera.setFarClip(100.0f);
	
	// content
	currentContent.reset(new LevelMeterContent());
}
Example #25
void Mask::setup(){

    if (maskFbo.getWidth() != width || maskFbo.getHeight() != height)
        maskFbo.allocate(width, height, GL_RGBA32F_ARB);

    maskFbo.begin();
        ofClear(255,255,255,0);
    maskFbo.end();

    if (maskFboImage->isAllocated()) {
        ofDisableAlphaBlending();
        ofDisableNormalizedTexCoords();
        maskFbo.begin();
        ofSetColor(255, 255, 255, 255);
            maskFboImage->draw(0,0, 1024, 768);
        maskFbo.end();
        ofEnableNormalizedTexCoords();
        ofEnableAlphaBlending();
    }
}
Example #26
//--------------------------------------------------------------
void testApp::setup(){	
    ofBackground(0, 0, 0);
    ofSetVerticalSync(true);
    
    ofEnableNormalizedTexCoords(); // You can also use ARB mode
    
    // We will use this flag to decide if we should draw the GUI or not
    drawGui = true;    
    
    // Pointer to the texture
    testcard.loadImage("testcard.png");
    ofTexture * texture = &(testcard.getTextureReference());
    
    // Size of the quad you will work with
    ofVec2f size(texture->getWidth(), texture->getHeight());
    
    // Subsection of the texture coordinates
    ofRectangle subsection(ofPoint(0.5,0.0), ofPoint(1.0,1.0));
    
    // Initial perspective corner positions (order is top-left, top-right, bottom-left, bottom-right).
    // In this example, let's position it against the right side of the screen.
    ofPoint corners[4];
    corners[0].x = ofGetWidth() - texture->getWidth();
    corners[0].y = 0;
    corners[1].x = ofGetWidth();
    corners[1].y = 0;
    corners[2].x = ofGetWidth();
    corners[2].y = texture->getHeight();
    corners[3].x = ofGetWidth() - texture->getWidth();
    corners[3].y = texture->getHeight();
    
    // Name for the controller panel (this will also define the folder in which the data will be saved)
    string name = "Warp/Blend";
    
    // Size of the GUI elements (a wider GUI gives your more precision to control the texture coordinates)
    float guiWidth = 250;
    float guiHeight = 15;
    
    // Setup!
    controller.setup(texture, size, subsection, corners, name, guiWidth, guiHeight);
}
Example #27
//--------------------------------------------------------------
void ofxBerkelium::draw(float x, float y, float w, float h) {

	glEnable(GL_TEXTURE_2D);
	glBindTexture(GL_TEXTURE_2D, web_texture);
	
	ofPushMatrix();
		ofTranslate(x, y);
		ofEnableNormalizedTexCoords();
			glBegin(GL_QUADS);
				glTexCoord2f(0.f, 0.f); glVertex3f(0, 0, 0.f);
				glTexCoord2f(0.f, 1.f); glVertex3f(0, h, 0.f);
				glTexCoord2f(1.f, 1.f); glVertex3f(w,  h, 0.f);
				glTexCoord2f(1.f, 0.f); glVertex3f(w, 0, 0.f);
			glEnd();
		ofDisableNormalizedTexCoords();
	ofPopMatrix();

	glBindTexture(GL_TEXTURE_2D, 0);
	glDisable(GL_TEXTURE_2D);
	
}
Example #28
//--------------------------------------------------------------
void ofApp::draw(){
	ofBackground(0);
	
	cam.begin();
	ofScale(1, -1, -1);
	ofTranslate(-center);
	
	ofEnableNormalizedTexCoords();
	for( int i = 0; i < meshes.size(); i++ ) {
		textures.at(i).bind();
		if(ofGetKeyPressed('w'))
			meshes.at(i).drawWireframe();
		else
			meshes.at(i).draw();
		textures.at(i).unbind();
	}
	ofSetColor(255);
	meshResidual.drawVertices();
	
	cam.end();
}
Example #29
void testApp::setup(){
	ofEnableAlphaBlending();
	ofBackground(255,255,255);
	ofSetVerticalSync(true);
	
	camWidth 		= 640;
	camHeight 		= 480;
	
	ptamm.init(camWidth,camHeight);
	
	grabber.initGrabber(camWidth,camHeight);
    video.allocate(camWidth, camHeight, OF_IMAGE_COLOR);
    
    // ofBox uses texture coordinates from 0-1, so you can load whatever
	// sized images you want and still use them to texture your box
	// but we have to explicitly normalize our tex coords here
	ofEnableNormalizedTexCoords();
	
	// loads the OF logo from disk
    logo.loadImage("of.png");
}
Example #30
//--------------------------------------------------------------
void ofApp::draw() {
    
    //----------------------------------------------------------
    gui.draw();
    
    int w = 512;
    int h = 256;
    int x = 20;
    int y = ofGetHeight() - h - 20;
    fftLive.draw(x, y, w, h);
    
    //----------------------------------------------------------
    ofEnableDepthTest();
    
    float dist = 400;
    camera.setDistance(dist);
    camera.begin();
    
    if(bUseTexture == true) {
        ofEnableNormalizedTexCoords();
        meshTexture.bind();
    }
    
    ofSetColor(ofColor::white);
    meshWarped.drawFaces();
    
    if(bUseTexture == true) {
        meshTexture.unbind();
        ofDisableNormalizedTexCoords();
    }
    
    ofSetColor(ofColor::black);
    meshWarped.drawWireframe();
    
    camera.end();
    
    ofDisableDepthTest();
    ofSetColor(ofColor::white);
    
}