//-------------------------------------------------------------- void testApp::update(){ //we change the background color here based on the values //affected by the mouse position ofBackground((int)red,(int)green,(int)blue); }
// Renders one of two scenes into the `sceneBuffer` FBO:
//   sceneIndex == 1 : ship + aging tunnel/road/box geometry
//   otherwise       : instantaneous waveform + FFT history meshes
// When `flush` is true, translucent filled passes are drawn in addition to
// the wireframes, and the FBO is finally blitted to the window.
void ofApp::drawScene(int sceneIndex, bool flush) {
    sceneBuffer.begin();
    // Enable depth testing while rendering 3D geometry into the FBO.
    ofSetDepthTest(true);
    ofBackground(0, 0, 0);
    if (sceneIndex == 1) {
        // Draw ship.
        // Offset by an exaggerated (x10) perturbation — presumably audio- or
        // noise-driven shake set in update(); confirm against update().
        ofVec3f finalPosition = modelPosition + 10.f * modelPerturbation;
        ofPushMatrix();
        ofTranslate(finalPosition.x, finalPosition.y, finalPosition.z);
        // NOTE(review): rotates about the z axis using the x component of
        // modelRotation (a roll) — looks intentional, but verify.
        ofRotate(modelRotation.x, 0, 0, 1);
        ofSetColor(255, 0, 255, 255);
        ship.draw();
        ofSetColor(255, 255, 255, 255);
        ofPopMatrix();
        // Draw tunnel chunks.
        // `count` persists across calls and drives a slow color cycle; the
        // three phase-shifted sines produce a smooth rainbow in 0..255.
        static int count = 0;
        float frequency = 0.01;
        float red = sin(frequency * count + 0) * 127 + 128;
        float green = sin(frequency * count + 2) * 127 + 128;
        float blue = sin(frequency * count + 4) * 127 + 128;
        ofColor tunnelColor(red, green, blue);
        for (int i = 0; i < tunnelChunks.size(); i++) {
            ofPushMatrix();
            // Chunks recede toward -5000 as they age; +200 keeps fresh
            // chunks just in front of the camera.
            ofTranslate(0, 0, -5000.f * tunnelChunks[i].getAgePercent() + 200.f);
            // Fade brightness to black as the chunk ages.
            ofColor color = tunnelColor * (1.f - tunnelChunks[i].getAgePercent());
            ofSetColor(color);
            tunnelChunks[i].draw();
            ofSetColor(255, 255, 255, 255);
            ofPopMatrix();
        }
        count++;
        // Draw road chunks: optional translucent fill pass, then wireframe.
        for (int i = 0; i < roadChunks.size(); i++) {
            ofPushMatrix();
            ofTranslate(0, 0, -5000.f * roadChunks[i].getAgePercent() + 200.f);
            if (flush) {
                // Translucent filled pass (alpha 20).
                ofEnableAlphaBlending();
                ofSetColor(0, 200.f * (1.f - roadChunks[i].getAgePercent()), 255.f * (1.f - roadChunks[i].getAgePercent()), 20);
                roadChunks[i].setMode(OF_PRIMITIVE_TRIANGLE_FAN);
                roadChunks[i].draw();
                ofSetColor(255, 255, 255, 255);
                ofDisableAlphaBlending();
            }
            // Opaque wireframe pass with the same age-faded color.
            ofSetColor(0, 200.f * (1.f - roadChunks[i].getAgePercent()), 255.f * (1.f - roadChunks[i].getAgePercent()), 255);
            roadChunks[i].setMode(OF_PRIMITIVE_LINE_LOOP);
            roadChunks[i].draw();
            ofSetColor(255, 255, 255, 255);
            ofPopMatrix();
        }
        // Draw boxes — same fill+wireframe scheme, iterated in reverse order.
        for (int i = boxes.size() - 1; i >= 0; i--) {
            ofPushMatrix();
            ofTranslate(0, 0, -5000.f * boxes[i].getAgePercent() + 200.f);
            if (flush) {
                ofEnableAlphaBlending();
                ofSetColor(255.f * (1.f - boxes[i].getAgePercent()), 200.f * (1.f - boxes[i].getAgePercent()), 0, 20);
                boxes[i].setMode(OF_PRIMITIVE_TRIANGLE_FAN);
                boxes[i].draw();
                ofSetColor(255, 255, 255, 255);
                ofDisableAlphaBlending();
            }
            ofSetColor(200.f * (1.f - boxes[i].getAgePercent()), 200.f * (1.f - boxes[i].getAgePercent()), 0, 255);
            boxes[i].setMode(OF_PRIMITIVE_LINE_LOOP);
            boxes[i].draw();
            ofSetColor(255, 255, 255, 255);
            ofPopMatrix();
        }
    } else {
        // Draw instantaneous sound signal.
        ofSetColor(0, 255, 0, 255);
        timeMesh.draw();
        ofSetColor(255, 255, 255, 255);
        // Draw Fourier transformed signal.
        // Each spectrum mesh slides from z = -500 toward z = 0 as its age
        // percent goes 0 -> 1, brightening as it approaches.
        for (int i = 0; i < frequencyMeshes.size(); i++) {
            ofPushMatrix();
            ofTranslate(0, 0, -500.f * (1.f - frequencyMeshes[i].getAgePercent()));
            ofSetColor(0, 200.f * frequencyMeshes[i].getAgePercent(), 255.f * frequencyMeshes[i].getAgePercent(), 255);
            frequencyMeshes[i].draw();
            ofSetColor(255, 255, 255, 255);
            ofPopMatrix();
        }
    }
    // Disable depth testing before returning to 2D drawing.
    ofSetDepthTest(false);
    sceneBuffer.end();
    // Draw contents of framebuffer to output.
    if (flush) {
        sceneBuffer.getTextureReference().draw(0, 0, windowWidth, windowHeight);
    }
}
//-------------------------------------------------------------- void ofApp::rebuildScreen() { ofBackground(0); ofLog(OF_LOG_NOTICE, "Rebuilding Scene"); for (int i = 0; i < colorCount; i++) { if (i % 2 == 0) { hue[i] = (int)ofRandom(0, 255); sat[i] = 255; bri[i] = (int)ofRandom(0, 255); } else { hue[i] = 138; sat[i] = (int)ofRandom(0, 255); bri[i] = 255; } } int counter = 0; int rowCount = (int)ofRandom(5, 40); float rowHeight = (float)ofGetHeight() / (float)rowCount; vector<float> parts; parts.resize(20); for (int i = 0; i < rowCount; i++) { int partCount = i + 1; // Since I'm optimizing to use memory with parts // instead of creating everytime a new chunck of memory, // the correct size of the elements generated from // one pass can't be obtained with vector::size(), // that's why I'm using this variable. int partsSize = 1; for (int ii = 0; ii < partCount; ii++) { if (ofRandom(1.0F) < 0.075F) { int fragments = (int)ofRandom(2.0F, 20.0F); partCount = partCount + fragments; for (int iii = 0; iii < fragments; iii++) { parts.push_back(ofRandom(2.0F)); partsSize++; } } else { parts.push_back(ofRandom(2.0F, 20.0F)); } } int W = ofGetWidth(); float sumPartsTotal = 0; for (int ii = 0; ii < partCount; ii++) sumPartsTotal += parts[ii]; float sumPartsNow = 0; for (int ii = 0; ii < partsSize; ii++) { int index = counter % colorCount; ofSetColor(hue[index], sat[index], bri[index]); sumPartsNow += parts[ii]; ofDrawRectangle(ofMap(sumPartsNow, 0, sumPartsTotal, 0, W), rowHeight * i, ofMap(parts[ii], 0, sumPartsTotal, 0, W) * -1, rowHeight); counter++; } } }
//--------------------------------------------------------------
// Main render pass for the menu scene. Two paths:
//  - bDrawFbos: clear the four quadrant FBOs, let draw4Forces_fbos() fill
//    them, then composite them around `centroScreen`.
//  - otherwise: draw the forces (or two-color variant) directly with alpha
//    blending. Afterwards: TUIO touch widgets, hands, border, debug HUD.
void menu::draw() {
    ofBackground(ofColor::black);
    // clear fbos
    if(bDrawFbos) {
        ofSetColor(255,255,255);
        // Clear each quadrant FBO to black and leave alpha blending enabled
        // inside it for the upcoming force rendering.
        fbo1.begin();
        ofBackground(0,0,0);
        ofEnableAlphaBlending();
        fbo1.end();
        fbo2.begin();
        ofBackground(0,0,0);
        ofEnableAlphaBlending();
        fbo2.end();
        fbo3.begin();
        ofBackground(0,0,0);
        ofEnableAlphaBlending();
        fbo3.end();
        fbo4.begin();
        ofBackground(0,0,0);
        ofEnableAlphaBlending();
        fbo4.end();
        // Render the four forces into their FBOs.
        draw4Forces_fbos();
        // Switch blending back off inside each FBO.
        fbo1.begin();
        ofDisableAlphaBlending();
        fbo1.end();
        fbo2.begin();
        ofDisableAlphaBlending();
        fbo2.end();
        fbo3.begin();
        ofDisableAlphaBlending();
        fbo3.end();
        fbo4.begin();
        ofDisableAlphaBlending();
        fbo4.end();
        // Composite: fbo1 bottom-right of center, fbo2 bottom-left,
        // fbo3 top-left, fbo4 top-right.
        fbo1.draw(centroScreen.x, centroScreen.y);
        fbo2.draw(centroScreen.x-fbo2.getWidth(), centroScreen.y);
        fbo3.draw(centroScreen.x-fbo3.getWidth(), centroScreen.y-fbo3.getHeight());
        fbo4.draw(centroScreen.x, centroScreen.y-fbo4.getHeight());
    } else {
        if(bDraw4Forces) {
            ofEnableAlphaBlending();
            draw4Forces();
            ofDisableAlphaBlending();
        } else {
            ofEnableAlphaBlending();
            draw2Colors();
            ofDisableAlphaBlending();
        }
    }
    // TUIO buttons
    touchElements.draw();
    // TUIOS
    // tuioClient.drawCursors();
    // ofPushMatrix();
    // place the canvas centered
    // ofTranslate((ofGetWidth()-W_WIDTH)/2, 0);
    hands.draw();
    // ofPopMatrix();
    // Border drawn in its own style scope so it can't leak state.
    ofPushStyle();
    borde.draw();
    ofPopStyle();
    // Debug HUD: key bindings, toggles and counters.
    ofPushStyle();
    string info = "";
    info += "Press [c] for circles\n";
    info += "Press [b] for blocks\n";
    info += "(4) draw 4Forces: "+ofToString(bDraw4Forces)+"\n";
    info += "(r) Modo Fuerza Color: "+ofToString(fRed)+"\n";
    info += "(f) Modo FBOs: "+ofToString(bDrawFbos)+"\n";
    info += "(d) Fuerza Densidad: "+ofToString(swFuerzaDensidad)+"\n";
    info += "(m) mousePressed: " + ofToString(isMousePressed)+"\n";
    info += "Total Bodies: "+ofToString(box2d.getBodyCount())+"\n";
    info += "Total Joints: "+ofToString(box2d.getJointCount())+"\n\n";
    info += "FPS: "+ofToString(ofGetFrameRate(), 1)+"\n";
    info += "Button Dest" + ofToString(mdisplay.scene);
    // ofSetHexColor(0x444342);
    ofSetHexColor(0xAAAAAA);
    if( bshowdebug) ofDrawBitmapString(info, 30, 30);
    ofPopStyle();
    mdisplay.draw();
}
//-------------------------------------------------------------- void ofApp::setup(){ ofBackground(50); ofSetCircleResolution(50); Alfonzo.setup(); }
//-------------------------------------------------------------- void ofApp::setup(){ counter = 0; vagRounded.load("vag.ttf", 32); ofBackground(50,50,50); }
//--------------------------------------------------------------
// One-time initialisation: allocates the floor/config/OSC/synth objects,
// wires TUIO events into the SmurfBrain, builds the fluid-simulation GUI,
// configures the fluid solver, and finishes OSC + synth wiring.
// NOTE(review): the raw `new` allocations below appear to live for the
// whole app lifetime; no matching delete is visible in this chunk.
void testApp::setup(){
    //ofEnableSmoothing();
    smurfFloor = new Smurfloor(60, 5);
    smurfCfg = new SmurfCfg();
    sender = new ofxOscSender();
    receiver = new ofxOscReceiver();
    synthUtils = new FMFUtils();
    // Forward every TUIO object/cursor event to the brain.
    ofAddListener(tuio.objectAdded,smurfFloor->brain,&SmurfBrain::objectAdded);
    ofAddListener(tuio.objectRemoved,smurfFloor->brain,&SmurfBrain::objectRemoved);
    ofAddListener(tuio.objectUpdated,smurfFloor->brain,&SmurfBrain::objectUpdated);
    ofAddListener(tuio.cursorAdded,smurfFloor->brain,&SmurfBrain::tuioAdded);
    ofAddListener(tuio.cursorRemoved,smurfFloor->brain,&SmurfBrain::tuioRemoved);
    ofAddListener(tuio.cursorUpdated,smurfFloor->brain,&SmurfBrain::tuioUpdated);
    tuio.start(smurfCfg->getTuioPort());
    // Init GUI
    fluidSolver.setDeltaT(FLUID_DEFAULT_DT);
    gui.addSlider("fluidCellsX", fluidCellsX, 20, 400);
    gui.addButton("resizeFluid", resizeFluid);
    gui.addSlider("fs.viscocity", fluidSolver.viscocity, 0.0, 0.0001f, 0.5);
    gui.addSlider("fs.colorDiffusion", fluidSolver.colorDiffusion, 0.0, 0.1, 0.5);
    gui.addSlider("fs.fadeSpeed", fluidSolver.fadeSpeed, 0.0, .03, 0.5);
    gui.addSlider("fs.solverIterations", fluidSolver.solverIterations, 1, 20);
    gui.addSlider("fd.drawMode", fluidDrawer.drawMode, 0, FLUID_DRAW_MODE_COUNT-1);
    gui.addToggle("fs.doRGB", fluidSolver.doRGB);
    gui.addToggle("fs.doVorticityConfinement", fluidSolver.doVorticityConfinement);
    gui.addToggle("drawFluid", drawFluid);
    gui.addToggle("drawParticles", drawParticles);
    gui.addToggle("renderUsingVA", renderUsingVA);
    gui.addToggle("fs.wrapX", fluidSolver.wrap_x);
    gui.addToggle("fs.wrapY", fluidSolver.wrap_y);
    gui.setAutoSave(true);
    gui.loadFromXML();
    ofSetLogLevel(OF_LOG_VERBOSE);
    // setup fluid stuff
    // NOTE(review): these defaults may immediately be overridden by the
    // values gui.loadFromXML() restored above.
    fluidSolver.setup(100, 100);
    fluidSolver.enableRGB(true).setFadeSpeed(0.002).setDeltaT(0.5).setVisc(0.00015).setColorDiffusion(0.2);
    fluidDrawer.setDrawMode(0);
    fluidDrawer.setup(&fluidSolver);
    fluidCellsX = 120;
    drawFluid = true;
    drawParticles = false;
    renderUsingVA = false;
    ofBackground(33, 33, 76);
    ofSetVerticalSync(false);
    windowResized(ofGetWidth(), ofGetHeight()); // force this at start (cos I don't think it is called)
    resizeFluid = true;
    //---------------------------------------------------
    // OSC in/out endpoints come from the config object.
    sender->setup(smurfCfg->getOscSendAddress(), smurfCfg->getOscSendPort() );
    receiver->setup(smurfCfg->getOscReceivePort());
    // Use config file at some point here
    // BPM is set to 60
    smurfFloor->setOscSender(sender);
    smurfFloor->setOscReceiver(receiver);
    smurfFloor->setSmurfCfg(smurfCfg);
    smurfFloor->brain->loadGestures();
    // create a DAC
    FMFDac *DAC = synthUtils->createDAC(-440.0);
    smurfFloor->brain->getSmurfs()->push_back(DAC);
}
void setup() { ofSetWindowShape(512, 512); // Set the initial window size. ofBackground(0); // Set the background to 0 (black). }
//--------------------------------------------------------------
// Per-frame render for the Cardboard head-tracking demo: pulls the latest
// head view, builds the camera transform, draws debug HUD text, then the
// tracked-camera scene (planet + marker boxes) and two EasyCam views of
// the node and camera.
void ofApp::draw(){
    ofBackground(0, 0, 0);
    ofDrawBitmapStringHighlight("Rot :"+ofToString(view.getRotate()), 10, 500);
    // Latest head orientation from the tracker, combined with the current
    // transform's head view.
    ofMatrix4x4 headView;
    headView = tracking.getLastHeadView(transform.getHeadView());
    // Small x-offset (eye separation?) applied after the head view.
    // NOTE(review): 0.06 presumably meters for one eye — confirm.
    ofMatrix4x4 translate;
    translate.makeTranslationMatrix(ofVec3f(0.06, 0, 0));
    view.makeIdentityMatrix();
    view=headView*translate;
    transform.setMatrix(view);
    ofSetColor(255, 0, 255);
    // NOTE(review): this HUD block is drawn twice at identical coordinates
    // (here and again below) — the second pass just overdraws the first.
    ofDrawBitmapStringHighlight("HeadView: "+ofToString(transform.getHeadView(), 10), 10, 100);
    ofDrawBitmapStringHighlight("Gyro :" + ofToString(tracking.mTracker.getLastGyro()), 10, 200);
    ofDrawBitmapStringHighlight("Accel :"+ofToString(tracking.mTracker.getLastAccel()), 10, 300);
    // Drive the camera orientation from the node carrying the view matrix.
    node.setTransformMatrix(view);
    cam.setOrientation(node.getOrientationQuat());
    // cam.setOrientation(rot*cam.getOrientationQuat());
    ofSetColor(255, 0, 255);
    ofDrawBitmapStringHighlight("HeadView: "+ofToString(transform.getHeadView(), 10), 10, 100);
    ofDrawBitmapStringHighlight("Gyro :" + ofToString(tracking.mTracker.getLastGyro()), 10, 200);
    ofDrawBitmapStringHighlight("Accel :"+ofToString(tracking.mTracker.getLastAccel()), 10, 300);
    ofDrawBitmapStringHighlight("Rot :"+ofToString(view.getRotate()), 10, 400);
    ofDrawBitmapStringHighlight("Cardboard Camera", ofGetWidth()-ofGetHeight(), ofGetHeight()/2-20);
    // Head-tracked camera view in the lower band of the window.
    cam.begin(ofRectangle(ofGetWidth()-ofGetHeight(), ofGetHeight()/2, ofGetHeight()/2, ofGetHeight()/2));
    ofSetColor(255, 0, 255);
    ofPushMatrix();
    // ofVec3f axis;
    // float angle;
    // rot.getRotate(angle, axis);
    // ofRotate(angle, axis.x, axis.y, axis.z);
    planet.drawWireframe();
    // Yellow reference boxes placed on/around the axes so orientation
    // changes are visible.
    ofSetColor(255, 255, 0);
    ofDrawBox(50, 0, 0, 10, 10, 10);
    ofDrawBox(0, 50, 0, 10, 10, 10);
    ofDrawBox(0, 0, 50, 10, 10, 10);
    ofDrawBox(0, 50, 50, 10, 10, 10);
    ofDrawBox(50, 50, 50, 10, 10, 10);
    ofDrawBox(-50, 0, 0, 10, 10, 10);
    ofDrawBox(0, -50, 0, 10, 10, 10);
    ofDrawBox(0, 0, -50, 10, 10, 10);
    ofDrawBox(0, -50, -50, 10, 10, 10);
    ofDrawBox(-50, -50, -50, 10, 10, 10);
    ofPopMatrix();
    cam.end();
    ofDrawBitmapStringHighlight("EasyCam View", ofGetWidth()-ofGetHeight()/2, ofGetHeight()/2-20);
    // Debug view 1: EasyCam looking at the node.
    easycam.lookAt(node);
    easycam.begin(ofRectangle(ofGetWidth()-ofGetHeight()/2, ofGetHeight()/2, ofGetHeight()/2, ofGetHeight()/2));
    node.draw();
    easycam.end();
    // Debug view 2: EasyCam looking at the tracked camera itself.
    easycam.lookAt(cam);
    easycam.begin(ofRectangle(ofGetWidth()-ofGetHeight()/2, 0, ofGetHeight()/2, ofGetHeight()/2));
    cam.draw();
    easycam.end();
}
//-------------------------------------------------------------- void testApp::update() { ofBackground(backgroundColor, backgroundColor, backgroundColor); kinect.update(); //box2d.update(); kinectRawImage.setFromPixels(getKinectRGBPixels(), imageWidth, imageHeight, OF_IMAGE_COLOR, true); grayImage.setFromPixels(kinect.getDepthPixels(), imageWidth, imageHeight); if (bLearnBakground == true) { grayBg = grayImage; // the = sign copys the pixels from grayImage into grayBg (operator overloading) bLearnBakground = false; } // take the abs value of the difference between background and incoming and then threshold: grayDiff.absDiff(grayBg, grayImage); grayDiff.blur( blur ); grayDiff.threshold(threshold); contourFinder.blobs.clear(); fingersFound=false; handFound=false; numBlobs = contourFinder.findContours(grayDiff, minArea, maxArea, numConsidered, findHoles); //blobTracker.trackBlobs( contourFinder.blobs ); for(int i=0; i<lineStrips.size(); i++) { lineStrips[i].clear(); } lineStrips.clear(); simpleContours.clear(); for(int i=0; i<contourFinder.blobs.size(); i++) { if (contourFinder.blobs[i].nPts != -1) { int numPoints = contourFinder.blobs[i].nPts; contourReg.clear(); contourSmooth.clear(); contourSimple.clear(); contourReg.assign(numPoints, ofxPoint2f()); contourSmooth.assign(numPoints, ofxPoint2f()); for(int j = 0; j < numPoints; j++){ contourReg[j] = contourFinder.blobs[0].pts[j]; } contourSimp.smooth(contourReg, contourSmooth, smoothPct); contourSimp.simplify(contourSmooth, contourSimple, tolerance); simpleContours.push_back(contourSimple); ofxBox2dLine lineStrip; for (float f = contourSimple.size()-1; f >=0; f--) { lineStrip.addPoint(contourSimple[f].x, imageHeight+contourSimple[f].y); } lineStrip.setWorld(myWorld); lineStrip.bIsLoop = true; lineStrip.createShapeWithOptions(lineFriction, lineRestitution, lineDensity); lineStrips.push_back(lineStrip); } } if (doFingerTracking) { if(numBlobs > 0) { fingersFound=fingerFinder.findFingers(contourFinder.blobs[0]); 
handFound=fingerFinder.findHands(contourFinder.blobs[0]); if(doFingerFollow) { numFingerPoints = fingerFinder.fingerPoints.size(); for(int i=0; i<boxes.size(); i++) { int randomPoint = ofRandom(0, numFingerPoints); ofxPoint2f point = fingerFinder.fingerPoints[numFingerPoints]; boxes[i].addAttractionPoint(point.x, imageHeight+point.y, strength, minDis); boxes[i].addDamping(damping, damping); } } } } box2d.update(); if (forceGUIDraw) { gui.show(); }else { gui.hide(); } }
//-------------------------------------------------------------- void testApp::setup(){ ofBackground(0); isMove = true; isFullScreen = false; isTargetView = true; ofSetFullscreen(isFullScreen); ofSetFrameRate(30); cout<<"START LOADING PERFUME MEMBERS"<<endl; ofxObjLoader::load("model1.obj", aachan, true); ofxObjLoader::load("model2.obj", nocchi, true); ofxObjLoader::load("model3.obj", kashiyuka, true); cout << " num vertices " << aachan.getNumVertices() << endl; cout << " num texcords " << aachan.getNumTexCoords() << endl; cout << " num normals " << aachan.getNumNormals() << endl; cout << " num indices " << aachan.getNumIndices() << endl; //test mseX = 0; mseY = 0; mouseP.set(0.0f, 0.0f); mouseD.set(0.0f, 0.0f); mouseR.set(0.0f, 0.0f); //camera position //myCam.setDistance(1000); point.set(0.0f, 0.0f, -100.0f); origin.set(0, 1, 0); upVector.set(0.0f, 1.0f, 0.0f); target.set(1.0f, 0.0f, 0.0f); camVec.set(-100.0f, 0.0f, 500.0f); lensoffset.set(0.0f, 0.0f); myCam.setupPerspective(false, 45, 0, 10000, lensoffset); //myCam.lookAt(target, upVector); //myCam.setTarget(origin); //lighting /* myCam.setDistance(1000); //lighting mySun.enable(); mySun.setSpotlight(); mySun.setPosition(-300, 10, 1000); ambtCol = ofFloatColor(0.1, 0.1, 0.3); dffsCol = ofFloatColor(0.8, 0.8, 0.8); spclCol = ofFloatColor(0.9, 0.9, 0.9); mySun.setAmbientColor(ambtCol); mySun.setDiffuseColor(dffsCol); mySun.setSpecularColor(spclCol); */ //mySun.setAmbientColor( //flag initialize isAachan = true; isNOCCHi = true; isKashiyuka = true; //nocchi.enableIndices(); //nocchi.setMode(OF_PRIMITIVE_TRIANGLE_STRIP); glEnable(GL_DEPTH_TEST); cout<<"SET UP FINISHED."<<endl; }
//--------------------------------------------------------------
// Kinect gesture controller: background-subtracts the depth image, splits
// it into hand (top half) and foot (bottom half) regions, finds blobs, and
// translates hand/foot positions into movement commands via the
// directionF / footF state machines.
void testApp::update() {
    ofBackground(100, 100, 100);
    kinect.update();
    // load grayscale depth image from the kinect source
    grayImage.setFromPixels(kinect.getDepthPixels(), kinect.width, kinect.height);
    colorImg.setFromPixels(kinect.getPixels(), kinect.width, kinect.height);
    // NOTE(review): return value discarded — unclear why this call is here.
    colorImg.getCvImage();
    // Quick and dirty noise filter on the depth map. Needs work
    grayImage.dilate();
    grayImage.erode();
    // If the user pressed spacebar, capture the depth and RGB images and save for later
    if (bLearnBakground == true) {
        grayBg = grayImage;
        bLearnBakground = false;
    }
    // Subtract the saved background from the current one
    grayDiff = grayImage;
    grayDiff -= grayBg;
    grayDiff.threshold(1); // anything that is > 1 has changed, so keep it
    grayDiff *= grayImage; // multiply in the current depth values, to mask it
    // Copy the filtered depthmap so we can use it for detecting feet
    footDiff= grayDiff;
    handDiff=grayDiff;
    // for feet we want to focus on only the bottom part of the image (set the region of interest to the bottom half)
    footDiff.setROI(0,300,footDiff.width, footDiff.height/2);
    handDiff.setROI(0,0,handDiff.width, handDiff.height/2);
    // cut off anything that is too far away
    grayDiff.threshold(farThreshold);
    footDiff.threshold(farThreshold);
    handDiff.threshold(farThreshold);
    // since we set ROI, we need to reset it
    footDiff.resetROI();
    handDiff.resetROI();
    // also, since ROI was on when we did the above threshold we clear out all pixels that are not fully white
    // (which ends up being only the relevant half of the image)
    footDiff.threshold(nearThreshold);
    handDiff.threshold(nearThreshold);
    // Mirror horizontally so on-screen left/right matches the user's.
    handDiff.mirror(false,true);
    footDiff.mirror(false,true);
    // Find blobs (should be hands and foot) in the filtered depthmap
    contourFinder.findContours(handDiff, 1000, (kinect.width*kinect.height)/2, 5, false);
    footContourFinder.findContours(footDiff, 1000, (kinect.width*kinect.height)/2, 5, false);
    // if at least 2 blobs were detected (presumably 2 hands), figure out
    // their locations and calculate which way to "move"
    if (contourFinder.blobs.size() >= 2) {
        turnR = false;
        turnL = false;
        // Find the x,y cord of the center of the first 2 blobs
        float x1 = contourFinder.blobs[0].centroid.x;
        float y1 = contourFinder.blobs[0].centroid.y;
        float x2 = contourFinder.blobs[1].centroid.x;
        float y2 = contourFinder.blobs[1].centroid.y;
        // NOTE(review): the original comment claimed p1 is the rightmost
        // blob, but x1<x2 selects the SMALLER x — with the mirror above,
        // confirm which physical hand p1 really is.
        ofPoint p1(x1<x2 ? x1 : x2,x1<x2 ? y1 : y2, 0);
        ofPoint p2(x2<x1 ? x1 : x2,x2<x1 ? y1 : y2, 0);
        if(p1.y > p2.y ) // turning right if right hand raised
        {
            if(!rightDown) // if right is already down, dont send key event again
            {
                directionF=1;
                leftDown = false;
                rightDown = true;
            }
        }
        else if(p1.y < p2.y ) // turning left if left hand raised
        {
            if(!leftDown) // if left is already down, dont send key event again
            {
                directionF=2;
                rightDown = false;
                leftDown = true;
            }
        }
        else // hands centered so moving straight
        {
            if(leftDown) {
                directionF=0;
                leftDown = false;
            }
            if(rightDown) {
                directionF=0;
                rightDown = false;
            }
        }
    }
    /* one hand detected >> rotate*/
    else if(contourFinder.blobs.size() == 1) {
        // Find the x,y cord of the center of blob
        float x1 = contourFinder.blobs[0].centroid.x;
        float y1 = contourFinder.blobs[0].centroid.y;
        if(x1 < (handDiff.width/2) ) //left hand up so rotate left
        {
            if(!turnL) {
                directionF = 4;
                turnL = true;
                turnR = false;
            }
        }
        else if(x1 > (handDiff.width/2) ) // right hand up so rotate right
        {
            if(!turnR) {
                directionF = 3;
                turnR = true;
                turnL = false;
            }
        }
        else {
            if(turnR) {
                directionF=0;
                turnR = false;
            }
            if(turnL) {
                directionF=0;
                turnL = false;
            }
        }
    }
    // no hands detected so moving straight
    else {
        if(leftDown) {
            directionF=0;
            leftDown = false;
        }
        if(rightDown) {
            directionF=0;
            rightDown = false;
        }
        turnR = false;
        turnL = false;
        directionF=0;
    }
    // if any blob is detected in the foot map, it can be considered a foot
    if(footContourFinder.blobs.size() >= 1) {
        // Find the x,y cord of the center of blob
        float x1 = footContourFinder.blobs[0].centroid.x;
        //float y1 = contourFinder.blobs[0].centroid.y;
        if(x1 > (footDiff.width/2) ) //right leg in front
        {
            if(!footFDown) //moving forward
            {
                footF= 1;
                footFDown = true;
                footBDown = false;
            }
        }
        else // left leg in front
        {
            if(!footBDown) //moving forward
            {
                footF= 3;
                footBDown = true;
                footFDown = false;
            }
        }
        footDown = true;
    }
    else {
        //ofBackground(100,100,100);
        if(footDown) {
            footF=2; //stop
            footDown = false;
            footFDown = false;
            footBDown = false;
        }
    }
    // update the cv images
    grayImage.flagImageChanged();
}
//-------------------------------------------------------------- void ofApp::setup(){ #ifdef DEBUG #else ofSetDataPathRoot("../Resources/data"); #endif ofEnableAlphaBlending(); ofEnableDepthTest(); ofSetupScreen(); ofSetFrameRate(60); ofBackground(0); ofDisableArbTex(); ofEnablePointSprites(); processScreenWidth = 1280; processScreenHeight = 512; screenWidth = processScreenWidth; screenHeight = processScreenHeight; figureModel.loadModel("mesh04/mesh04.obj", false); figureModel.setScaleNormalization(false); mesh.setMode(OF_PRIMITIVE_POINTS); mesh = figureModel.getMesh(0); mesh.enableColors(); mesh.enableIndices(); modelIndex = 0; ofVec3f _centerMesh = mesh.getCentroid() + ofVec3f(0, 1.5, 3); numPoint = mesh.getNumVertices(); for (int i=0; i<numPoint; i++) { mesh.setVertex(i, mesh.getVertex(i) - _centerMesh); mesh.addColor(ofColor(255,30)); } cam.setAutoDistance(false); cam.setDistance(10); cam.setFarClip(0); captureW = processScreenWidth; captureH = processScreenHeight; line = 0; for (int i=0; i<514; i++) { sineBuffer[i] = sines[i]; } for (int i=0; i<INITIAL_BUFFER_SIZE ; i++) { outp[i] = 0; } for (int i=0; i<BIT ; i++) { amp[i] = 0; hertzScale[i] = 0; phases[i] = 0; } maxHertz = 6000; spectrum = new SpectrumDrawer( 1, maxHertz ); playerHead = new PlayerHead(); // ofSoundStreamSetup(2, 0, this, SAMPLE_RATE, INITIAL_BUFFER_SIZE, 4); // ??? 
// soundStream.printDeviceList(); // ofSoundStreamSetup( 2, 0, this, SAMPLE_RATE, INITIAL_BUFFER_SIZE, 4 ); auto devices = soundStream.getMatchingDevices("default"); ofSoundStreamSettings settings; // auto devices = soundStream.getDeviceList(); if (!devices.empty()) { settings.setOutDevice(devices[1]); } settings.setOutListener(this); settings.bufferSize = INITIAL_BUFFER_SIZE; settings.sampleRate = SAMPLE_RATE; settings.numInputChannels = 0; settings.numOutputChannels = 2; soundStream.setup(settings); gui.setup(); guiSetting(); // imageFormat.addListener(this, &ofApp::imageFormatButtonClick); errorLength.addListener(this, &ofApp::errorLengthChanged); texScreen.allocate(captureW, captureH, GL_RGB); captureImage.allocate(captureW, captureH, OF_IMAGE_COLOR); texProcessScreen.allocate(captureW, captureH, GL_RGB); captureProcessImage.allocate(captureW, captureH, OF_IMAGE_COLOR); originalFbo.allocate(captureW, captureH, GL_RGB); processingImagFbo.allocate(captureW, captureH, GL_RGB); bImageCapture = true; bGuiView = true; imageProcessCapture = false; bImageProcess = false; bImageProcessView = false; }
void testApp::setup(){ ofBackground(0, 0, 0); // 指定したIPアドレスとポート番号でサーバーに接続 sender.setup(HOST, PORT); }
//-------------------------------------------------------------- void ofApp::draw(){ ofBackground(ofColor::yellow); obj.draw(); }
//-------------------------------------------------------------- void ofApp::setup(){ counter = 0; ofBackground(255,255,255); ofSetFrameRate(60); // if vertical sync is off, we can go a bit fast... this caps the framerate at 60fps. }
//-------------------------------------------------------------- void ofApp::setup(){ ofBackground(ofColor::beige); ofEnableSmoothing(); v.setup(ofGetWidth()/2, ofGetHeight()/2); }
//---------------------------------------------------------- void ofBackground(int brightness, int alpha){ ofBackground(brightness, brightness, brightness, alpha); }
//--------------------------------------------------------------
// Per-frame logic: advances the Smurf brain, resizes the fluid grid when
// requested, pumps TUIO, and injects fluid forces from cursors, tracked
// objects, connections and waves before stepping the solver.
void testApp::update(){
    ofBackground(33, 33, 76);
    smurfFloor->think();
    // Lazily rebuild the fluid grid when the GUI slider changed it.
    if(resizeFluid) {
        fluidSolver.setSize(fluidCellsX, fluidCellsX / window.aspectRatio);
        fluidDrawer.setup(&fluidSolver);
        resizeFluid = false;
    }
    tuio.getMessage();
    ///------------------------------------------------------
    windowResized(ofGetWidth(), ofGetHeight()); // force this at start (cos I don't think it is called)
    // do finger stuff
    list<ofxTuioCursor*>cursorList = tuio.getTuioCursors();
    for(list<ofxTuioCursor*>::iterator it=cursorList.begin(); it != cursorList.end(); it++) {
        ofxTuioCursor *tcur = (*it);
        float vx = tcur->getXSpeed() * tuioCursorSpeedMult;
        float vy = tcur->getYSpeed() * tuioCursorSpeedMult;
        // Stationary cursors still stir the fluid a little.
        if(vx == 0 && vy == 0) {
            vx = ofRandom(-tuioStationaryForce, tuioStationaryForce);
            vy = ofRandom(-tuioStationaryForce, tuioStationaryForce);
        }
        addToFluid(tcur->getX(), tcur->getY(), vx, vy, true);
    }
    // Each tracked fiducial object stirs the fluid with its rotation accel.
    for (int i=0; i<smurfFloor->brain->getSmurfs()->size(); i++ ) {
        Smurf* s = smurfFloor->brain->getSmurfs()->at(i);
        ofxTuioObject *blob = (ofxTuioObject*) s->getTuio();
        //addToFluid(blob->getX(), blob->getY(), blob->getMotionSpeed(), blob->getMotionSpeed(), false);
        addToFluid(blob->getX(), blob->getY(), blob->getRotationAccel(), blob->getRotationAccel(), true);
    }
    for (int i=0; i<smurfFloor->brain->getConnections()->size(); i++ ) {
        SConnection *connection = smurfFloor->brain->getConnections()->at(i);
        if (connection != NULL) {
            handleAnchorsUpdate(connection);
        }
    }
    // Active waves inject force at 15 points around their circumference
    // (24-degree steps), normalized to 0..1 fluid coordinates.
    for (int i=0; i<smurfFloor->waves->size(); i++ ) {
        SWave* w = smurfFloor->waves->at(i);
        if (w->getFid() != -1) {
            for (float cc=0.0;cc<15.0;cc++) {
                // NOTE(review): vx/vy computed here are unused — the call
                // below passes the constant 0.001 instead. Verify intent.
                float vx = ofRandom(-tuioStationaryForce, tuioStationaryForce);
                float vy = ofRandom(-tuioStationaryForce, tuioStationaryForce);
                float x = (w->getX() + cosf(ofDegToRad(cc*24.0))*w->getRadius())/ofGetWidth();
                float y = (w->getY() + sinf(ofDegToRad(cc*24.0))*w->getRadius())/ofGetHeight();
                //Smurf *s = smurfFloor->brain->findSmurf(w->getFid());
                //if (s != NULL && w->getX(),w->getRadius()
                addToFluid( x, y, 0.001, 0.001, true );
            }
        }
    }
    fluidSolver.update();
    // save old mouse position (openFrameworks doesn't do this automatically like processing does)
    pmouseX = mouseX;
    pmouseY = mouseY;
}
//---------------------------------------------------------- void ofBackground(const ofColor & c){ ofBackground ( c.r, c.g, c.b, c.a); }
//-------------------------------------------------------------- void testApp::setup() { // ofSoundStreamListDevices(); ofSetBackgroundAuto(true); ofBackground(0,0,0); bg.loadImage("chrome/bg-dark.png"); bg.getTextureReference().setTextureWrap(GL_REPEAT, GL_REPEAT); #ifdef USE_CV ofSetFrameRate(VIDEO_FPS); #endif glutSetWindowTitle("A tangible interface for sonification of geo-spatial and phenological data at multiple time-scales."); font.loadFont("fonts/HelveticaBold.ttf", 12); font_sm.loadFont("fonts/Helvetica.ttf", 9); #ifdef USE_CV cvGrabber.setDesiredFrameRate(VIDEO_FPS); cvGrabber.initGrabber(VIDEO_SIZE); videoSize.set(VIDEO_SIZE); imRGB.allocate(VIDEO_SIZE); imBW.allocate(VIDEO_SIZE); #endif #ifdef USE_CAMERA cameraGrabber.setDesiredFrameRate(24); cameraGrabber.initGrabber(CAMERA_SIZE); cameraImage.allocate(CAMERA_SIZE, OF_IMAGE_COLOR); #endif #ifdef USE_FIDUCIAL_TRACKER fiducials.videoSize.x = videoSize.x; fiducials.videoSize.y = videoSize.y; fiducials.imBW = &imBW; fiducials.setup(); ofAddListener(fiducials.newFrame, this, &testApp::processFiducials); #endif #ifdef USE_SONIFICATION_ENGINE soundEngine.instruments[4*2] = "wav/flute"; soundEngine.instruments[1*2] = "wav/clarinet"; // soundEngine.instruments[3*2] = "wav/marimba"; // soundEngine.instruments[4*2] = "wav/flute"; soundEngine.instruments[5*2] = "wav/french_horn"; // soundEngine.instruments[6*2] = "wav/bass"; // soundEngine.instruments[3*2] = "wav/english_horn"; // soundEngine.instruments[8*2] = "wav/piano"; // soundEngine.instruments[9*2] = "wav/tuba"; #ifdef USE_GEO_DATA soundEngine.geoData = &geoData; #endif #endif #ifdef USE_GUI gui.setup(); <<<<<<< HEAD:ofSoniphenology/src/testApp.cpp
//-------------------------------------------------------------- void ofApp::setup(){ // ofSetBackgroundAuto(false); ofBackground(ofColor::wheat); ofSetRectMode(OF_RECTMODE_CENTER); ofSetFrameRate(1); }
// Audio-reactive visual app initialisation: screen/render state, FFT
// buffers, lighting, camera, post-processing chain, and the per-mode
// visualizer objects, then the input sound stream.
void testApp::setup(){
    // Basic screen settings.
    ofBackground(0);
    ofSetFrameRate(60);
    ofSetVerticalSync(true);
    ofSetCircleResolution(64);
    ofEnableBlendMode(OF_BLENDMODE_ADD);
    ofHideCursor();
    // Set the FFT size and the audio buffer size (buffer = 2x FFT bins).
    fft_size = 1024;
    buffer_size = fft_size * 2;
    // Allocate the analysis result arrays sized to the FFT.
    // NOTE(review): raw new[] with no visible delete[] — these presumably
    // live for the app lifetime; confirm teardown elsewhere.
    audio_input = new float[buffer_size];
    magnitude = new float[fft_size];
    phase = new float[fft_size];
    power = new float[fft_size];
    // Setup light
    light.setPosition(1000, 1000, 2000);
    cam.setDistance(500);
    cam.disableMouseInput();
    // Setup post-processing chain
    post.init(ofGetWidth(), ofGetHeight());
    //post.createPass<FxaaPass>()->setEnabled(false);
    post.createPass<BloomPass>()->setEnabled(true);
    //post.createPass<DofPass>()->setEnabled(false);
    //post.createPass<KaleidoscopePass>()->setEnabled(false);
    //post.createPass<NoiseWarpPass>()->setEnabled(false);
    //post.createPass<PixelatePass>()->setEnabled(false);
    //post.createPass<EdgePass>()->setEnabled(false);
    //GUI setup
    /* gui.setup("control panel test", 0, 0, 340, 400); gui.addPanel("panel 1", 1); gui.addSlider("Audio Level", "audio_level", 1.0, 0.0, 10.0, false); gui.loadSettings("controlPanel.xml"); gui.hide(); */
    mode = 0;
    // One visualizer object per scene mode (disabled ones kept for
    // reference).
    fnwr = new Fnwr();
    //fftRotate = new FFTRotate();
    fftCircle = new FFTCircle();
    fftSinewave = new FFTSinewave();
    fftBox = new FFTBox();
    //fftParticle = new FFTParticle();
    fftGlitch = new FFTGlitch();
    fftFnwrGlitch = new FFTFnwrGlitch();
    fftColor = new FFTColor();
    fftGrid = new FFTGrid();
    fftFnwrNoise = new FFTFnwrNoise();
    fftTracer = new FFTTracer();
    //ofSoundStreamListDevices();
    //Sound stream
    // Mono input at 44.1 kHz feeding audioIn() on this object.
    audioLevel = 1.0f;
    ofSoundStreamSetup(0, 1, this, 44100, buffer_size, 2);
}
//-------------------------------------------------------------- void testApp::update(){ ofBackground(255,255,255); }
//-------------------------------------------------------------- void ofApp::setup(){ ofBackground(ofColor::black); image.allocate(ofGetWidth(), ofGetHeight(), OF_IMAGE_GRAYSCALE); imageData = image.getPixels(); }
//-------------------------------------------------------------- void testApp::setup() { ofSetWindowTitle("DevArt"); ofBackground(255, 255, 255); //Syphon Setup float xInit = OFX_UI_GLOBAL_WIDGET_SPACING; float length = 320; gui = new ofxUICanvas(); gui->addLabel("Syphon Clients", OFX_UI_FONT_LARGE); vector<string> names; names.push_back("No Clients"); gui->autoSizeToFitWidgets(); ddl = gui->addDropDownList("Syphon Clients", names); ddl->setLabelText("No Clients"); ddl->setAllowMultiple(false); ddl->setAutoClose(true); ddl->setShowCurrentSelected(true); //gui->setTheme(OFX_UI_THEME_BARBIE); gui->autoSizeToFitWidgets(); gui->setDrawWidgetPadding(true); ofAddListener(gui->newGUIEvent, this, &testApp::guiEvent); //setup our directory dir.setup(); //setup our client client.setup(); //register for our directory's callbacks ofAddListener(dir.events.serverAnnounced, this, &testApp::serverAnnounced); // not yet implemented //ofAddListener(dir.events.serverUpdated, this, &testApp::serverUpdated); ofAddListener(dir.events.serverRetired, this, &testApp::serverRetired); dirIdx = -1; selectID = -1; serverName = ""; appName = ""; cameraRotation.set(0); zoom = -500; zoomTarget = 100; for (int i = 0; i < matrixNum; i++) { matrix[i] = new ledMatrix(0, 0, i * -40); } matrixSelect = 0; for (int i = 0; i < matrixNum; i++) { mState[i] = false; } for (int i = 0; i < matrixNum; i++) { matrix[i]->matrixOff(); } syphonFBO.allocate(400, 400); ofSetSmoothLighting(true); pointLight.setDiffuseColor( ofFloatColor(.85, .85, .55) ); pointLight.setSpecularColor( ofFloatColor(1.f, 1.f, 1.f)); // shininess is a value between 0 - 128, 128 being the most shiny // material.setShininess( 120 ); // the light highlight of the material // material.setSpecularColor(ofColor(255, 255, 255, 255)); }
//-------------------------------------------------------------- void testApp::draw(){ ofBackground(0); grayScott.draw(); }
//-------------------------------------------------------------- void ofApp::setup(){ ofBackground( 0, 0, 0 ); img.loadImage("IMG_6695.JPG"); }
//-------------------------------------------------------------- void ofApp::update(){ ofBackground(100,100,100); bool bNewFrame = false; #ifdef _USE_LIVE_VIDEO vidGrabber.update(); bNewFrame = vidGrabber.isFrameNew(); #else if(mNextFrame) { vidPlayer.update(); bNewFrame = vidPlayer.isFrameNew(); } #endif if (bNewFrame){ #ifdef _USE_LIVE_VIDEO colorImg.setFromPixels(vidGrabber.getPixels()); #else colorImg.setFromPixels(vidPlayer.getPixels()); #endif grayImage = colorImg; if (bLearnBakground == true){ grayBg = grayImage; // the = sign copys the pixels from grayImage into grayBg (operator overloading) bLearnBakground = false; } // take the abs value of the difference between background and incoming and then threshold: grayDiff.absDiff(grayBg, grayImage); grayDiff.threshold(Settings::sWhiteThreshold); // find contours which are between the size of 20 pixels and 1/3 the w*h pixels. // also, find holes is set to true so we will get interior contours as well.... contourFinder.findContours(grayDiff, 20, (340*240)/3, 10, false, false); // find holes for(int i = 0; i < mRecordObjects.size(); i++) { mRecordObjects[i]->startNewFrame(); } mBlobMapper.clear(); for (int i = 0; i < contourFinder.nBlobs; i++){ int lIndexClosest; getClosest(i, lIndexClosest); mBlobMapper.push_back(lIndexClosest); if(lIndexClosest >= 0) { mRecordObjects[lIndexClosest]->addNewMatch(contourFinder.blobs[i]); } } for(int i = 0; i < mRecordObjects.size(); i++) { mRecordObjects[i]->digestFrame(); if(mRecordObjects[i]->wantToSendMessage()) { ofxOscMessage lMess = mRecordObjects[i]->getOSCMessage(); sender.sendMessage(lMess, false); } } } }
//--------------------------------------------------------------
// One-time setup: loads mySettings.xml via ofxXmlSettings, reads the
// background color, restores the last stored stroke's points into
// dragPts, and loads the monospaced display font.
void testApp::setup(){

    ofBackground(255,255,255);

    //-----------
    //the string is printed at the top of the app
    //to give the user some feedback
    message = "loading mySettings.xml";

    //we load our settings file
    //if it doesn't exist we can still make one
    //by hitting the 's' key
    if( XML.loadFile("mySettings.xml") ){
        message = "mySettings.xml loaded!";
    }else{
        message = "unable to load mySettings.xml check data/ folder";
    }

    //read the colors from XML
    //if the settings file doesn't exist we assigns default values (170, 190, 240)
    red   = XML.getValue("BACKGROUND:COLOR:RED", 170);
    green = XML.getValue("BACKGROUND:COLOR:GREEN", 190);
    blue  = XML.getValue("BACKGROUND:COLOR:BLUE", 240);

    /*
     "BACKGROUND:COLOR:RED" referes to a structure like this:

     <BACKGROUND>
        <COLOR>
            <RED>101.103516</RED>
        </COLOR>
     </BACKGROUND>
    */

    //we initalize some of our variables
    lastTagNumber = 0;
    pointCount    = 0;
    lineCount     = 0;

    //-------
    //this is a more advanced use of ofXMLSettings
    //we are going to be reading multiple tags with the same name

    //lets see how many <STROKE> </STROKE> tags there are in the xml file
    int numDragTags = XML.getNumTags("STROKE:PT");

    //if there is at least one <STROKE> tag we can read the list of points
    //and then try and draw it as a line on the screen
    if(numDragTags > 0){

        //we push into the last STROKE tag
        //this temporarirly treats the tag as
        //the document root.
        XML.pushTag("STROKE", numDragTags-1);

        //we see how many points we have stored in <PT> tags
        int numPtTags = XML.getNumTags("PT");

        if(numPtTags > 0){
            //We then read those x y values into our
            //array - so that we can then draw the points as
            //a line on the screen

            //we have only allocated a certan amount of space for our array
            //so we don't want to read more than that amount of points
            int totalToRead = MIN(numPtTags, NUM_PTS);

            for(int i = 0; i < totalToRead; i++){
                //the last argument of getValue can be used to specify
                //which tag out of multiple tags you are refering to.
                int x = XML.getValue("PT:X", 0, i);
                int y = XML.getValue("PT:Y", 0, i);
                dragPts[i].set(x, y);
                pointCount++;
            }
        }

        //this pops us out of the STROKE tag
        //sets the root back to the xml document
        XML.popTag();
    }

    //load a monospaced font
    //which we will use to show part of the xml structure
    TTF.loadFont("mono.ttf", 7);
}