//-------------------------------------------------------------- void testApp::setup(){ ofSetFrameRate(60); img.allocate(600,200,OF_IMAGE_GRAYSCALE); img2.allocate(600,200,OF_IMAGE_GRAYSCALE); img3.allocate(600,200,OF_IMAGE_COLOR); unsigned char * pixels = img.getPixels(); for(int x = 0;x<600;++x){ for(int y=0;y<200;++y){ char brightess = ofMap(ofNoise(x*faktor,y*faktor),0,1,0,255); pixels[y*600+x] = brightess; } } img.update(); }
//-------------------------------------------------------------- void testApp::keyPressed (int key){ if(key =='e'){ mode = "edit"; ofSetWindowShape(1280, 900); reSort(); } if( key == 'p' ){ //startRender(); mode = "play-back"; whichVideo = 0; totalFrames = 0; framecounter = 0; nextVideo(); ofSetWindowShape(1024, 576); img.setUseTexture(false); img.allocate(1024, 576, OF_IMAGE_COLOR); } if( key == 'P' ){ //startRender(); mode = "play"; whichVideo = 0; totalFrames = 0; framecounter = 0; nextVideo(); ofSetWindowShape(1024, 576); img.setUseTexture(false); img.allocate(1024, 576, OF_IMAGE_COLOR); } if( key == 'r' ){ mode = "edit"; cout << "random shuffle"<<endl; ofRandomize(thumbs); } if( key == 'l' ){ loadOrder(); } if( key == 'm' ){ mode = "move"; ofSetWindowShape(1280, 900); reSort(); } }
// App setup: lock to 60 fps with vsync, size the window to hold the
// source and the synthesized image side by side, allocate the grayscale
// synthesis buffer, and load the first source image.
void ofApp::setup() {
	ofSetVerticalSync(true);
	ofSetFrameRate(60);
	// two panels wide: source on the left, synth on the right
	ofSetWindowShape(w * 2, h);
	synth.allocate(w, h, OF_IMAGE_GRAYSCALE);
	loadImage("1.jpg");
}
// Run the face tracker over img; when a face is found, draw the
// normalized face into the srcNormalized FBO and read it back into
// 'normalized'. Logs a warning when no face is detected.
void testApp::normalizeImage(ofImage& img, ofImage& normalized) {
	srcTracker.update(toCv(img));
	// guard clause: nothing to normalize without a detected face
	if(!srcTracker.getFound()) {
		ofLogWarning() << "couldn't find the face" << endl;
		return;
	}
	drawNormalized(srcTracker, img, srcNormalized);
	normalized.allocate(normalizedWidth, normalizedHeight, OF_IMAGE_COLOR);
	srcNormalized.readToPixels(normalized.getPixelsRef());
	normalized.update();
}
// Capture a single still frame into img using the camera's one-shot mode.
// BUG FIX: the OF_IMAGE_COLOR branch was empty, so color stills returned
// an allocated-but-uninitialized image. It now converts the raw camera
// frame to RGB8 and copies it, matching getOneShot().
void ofxLibdc::grabStill(ofImage& img) {
	setTransmit(false);
	flushBuffer();
	dc1394_video_set_one_shot(camera, DC1394_ON);
	// if possible, the following should be replaced with a call to grabFrame
	dc1394video_frame_t *frame;
	dc1394_capture_dequeue(camera, capturePolicy, &frame);
	img.allocate(width, height, imageType);
	if(imageType == OF_IMAGE_GRAYSCALE) {
		// grayscale frames are already 1 byte/pixel: straight copy
		memcpy(img.getPixels(), frame->image, width * height);
	} else if(imageType == OF_IMAGE_COLOR) {
		// convert the camera's native format to packed RGB8
		dc1394video_frame_t* rgbFrame = (dc1394video_frame_t*) calloc(1, sizeof(dc1394video_frame_t));
		rgbFrame->color_coding = DC1394_COLOR_CODING_RGB8;
		dc1394_convert_frames(frame, rgbFrame);
		memcpy(img.getPixels(), rgbFrame->image, 3 * width * height);
		// dc1394_convert_frames allocates rgbFrame->image; free both
		free(rgbFrame->image);
		free(rgbFrame);
	}
	// push pixels to the texture before returning the DMA buffer
	img.update();
	dc1394_capture_enqueue(camera, frame);
}
// Grab a video frame into img. With dropFrames set, the DMA queue is
// drained so img ends up holding the newest available frame; returns
// false only when no frame at all was available.
bool ofxLibdc::grabVideo(ofImage& img, bool dropFrames) {
	setTransmit(true);
	img.allocate(width, height, imageType);
	if(!dropFrames) {
		// blocking/simple path: a single grab decides the result
		return grabFrame(img);
	}
	// drain the queue: keep grabbing until the camera runs dry,
	// counting how many frames we actually pulled
	int grabbed = 0;
	while(grabFrame(img)) {
		++grabbed;
	}
	return grabbed > 0;
}
void ofxDepthImageCompressor::convertTo8BitImage(unsigned short* buf, ofImage& image){ int nearPlane = 500; int farPlane = 7000; if(!image.isAllocated()){ image.allocate(640,480,OF_IMAGE_GRAYSCALE); } unsigned char* pix = image.getPixels(); int stride = image.getPixelsRef().getNumChannels(); for(int i = 0; i < 640*480; i++){ //ofMap(buf[i], nearPlane, farPlane, 255, 0, true); unsigned char value = buf[i] == 0 ? 0 : 255 - (255 * (buf[i] - nearPlane) ) / farPlane;// + ofMap(buf[i], nearPlane, farPlane, 255, 0, true); for(int c = 0; c < stride; c++){ pix[i*stride+c] = value; } } image.update(); }
// Build six Turing patterns at geometrically increasing base sizes and
// seed the simulation grid with uniform random values in [-1, 1).
void ofApp::setup() {
	num = 512;
	const int cells = num * num;
	float scale = .009;
	float base = .008;
	patterns.clear();
	for(int i = 0; i < 6; i++) {
		// base sizes grow as powers of 2.5: 1, 2, 6, 15, 39, 97
		int baseSize = (int) powf(2.5, i);
		patterns.push_back(TuringPattern(num, baseSize, baseSize * 2, log(baseSize) * scale + base));
	}
	grid.resize(cells);
	for(int i = 0; i < cells; i++) {
		grid[i] = ofRandom(-1, 1);
	}
	buffer.allocate(num, num, OF_IMAGE_GRAYSCALE);
}
//-------------------------------------------------------------- void ofApp::setup(){ ofSetFrameRate(15); ofBackground(255); camWidth = 640; // try to grab at this size. camHeight = 480; currentFrame.allocate(camWidth, camHeight, OF_IMAGE_GRAYSCALE); vidGrabber.setVerbose(true); vidGrabber.initGrabber(camWidth,camHeight); font.loadFont("Courier New Bold.ttf", 9); ofEnableAlphaBlending(); }
void setup() { ofSetVerticalSync(true); ofBackground(0); config = ofLoadJson("../../../SharedData/shared/config.json"); float camWidth = config["camera"]["width"]; float camHeight = config["camera"]["height"]; float camFrameRate = config["camera"]["framerate"]; device = config["camera"]["device"]; if (device == "blackmagic") { cam = &blackmagicGrabber; } else { cam = &videoGrabber; } cam->setDesiredFrameRate(camFrameRate); cam->setup(camWidth, camHeight); clipping.allocate(camWidth, camHeight, OF_IMAGE_COLOR_ALPHA); toggleGrayscale = false; updateWindowShape(); }
// Capture a single frame into img using the camera's one-shot mode,
// converting to RGB8 when the target image is color.
// BUG FIX 1: removed the stray trailing img.setFromPixels(frame->image,
// ...) — for color images it overwrote the freshly converted RGB pixels
// with the raw (unconverted) camera frame, and for grayscale it
// redundantly re-copied the same data. img.update() now uploads the
// pixels instead.
// BUG FIX 2: dc1394_convert_frames allocates rgbFrame->image, which was
// leaked; it is now freed alongside the frame struct.
void ofxLibdc::getOneShot(ofImage& img) {
	setTransmit(false);
	flush();
	dc1394_video_set_one_shot(camera, DC1394_ON);
	dc1394video_frame_t *frame;
	dc1394_capture_dequeue(camera, DC1394_CAPTURE_POLICY_WAIT, &frame);
	img.allocate(width, height, imageType);
	if(imageType == OF_IMAGE_GRAYSCALE) {
		memcpy(img.getPixels(), frame->image, width * height);
	} else if(imageType == OF_IMAGE_COLOR) {
		// this shouldn't be reallocated every frame!
		dc1394video_frame_t* rgbFrame = (dc1394video_frame_t*) calloc(1, sizeof(dc1394video_frame_t));
		rgbFrame->color_coding = DC1394_COLOR_CODING_RGB8;
		dc1394_convert_frames(frame, rgbFrame);
		memcpy(img.getPixels(), rgbFrame->image, 3 * width * height);
		free(rgbFrame->image);
		free(rgbFrame);
	}
	// upload the copied pixels before returning the DMA buffer
	img.update();
	dc1394_capture_enqueue(camera, frame);
}
// this could also be done with OpenCV, cvResize + CV_INTER_NN // or even faster by precomputing a remap function void threadedScanLoader::resize(ofImage& from, ofImage& to, int toWidth, int toHeight) { to.allocate(toWidth, toHeight, OF_IMAGE_COLOR_ALPHA); unsigned char* fromPixels = from.getPixels(); unsigned char* toPixels = to.getPixels(); int toPosition = 0; int fromWidth = from.getWidth(); int fromHeight = from.getHeight(); for(int toy = 0; toy < toHeight; toy++) { int fromy = (toy * fromHeight) / toHeight; int fromPosition = fromy * fromWidth; for(int tox = 0; tox < toWidth; tox++) { int fromx = (tox * fromWidth) / toWidth; int cur = (fromPosition + fromx) * 4; toPixels[toPosition++] = fromPixels[cur + 0]; toPixels[toPosition++] = fromPixels[cur + 1]; toPixels[toPosition++] = fromPixels[cur + 2]; toPixels[toPosition++] = fromPixels[cur + 3]; } } }
// Bake per-face colors of a mesh into a square power-of-two texture
// (one texel per face) and give all three vertices of each face the
// same texture coordinate, centered on that face's texel. Consumes the
// mesh's color array and replaces it with texcoords.
void faceColorToTexture(ofMesh& mesh, ofImage& image) {
	vector<ofFloatColor> &faceColors = mesh.getColors();
	const int numFaces = faceColors.size() / 3;
	// smallest power-of-two square that holds one texel per face
	const int texSize = ofNextPow2(ceil(sqrt(numFaces)));

	// allocate with ARB (non-normalized) textures disabled, restoring
	// the previous global state afterwards
	const bool wasUsingArb = ofGetUsingArbTex();
	ofDisableArbTex();
	image.allocate(texSize, texSize, OF_IMAGE_COLOR);
	if (wasUsingArb) ofEnableArbTex();

	mesh.clearTexCoords();
	image.getPixelsRef().set(0);

	// half-texel offset so coordinates sample texel centers
	const float halfTexel = (1. / image.getWidth()) * 0.5;
	for (int i = 0; i < numFaces; i++) {
		const int u = i % texSize;
		const int v = i / texSize;
		// take the first vertex's color as the face color
		ofColor c = faceColors[i * 3];
		image.setColor(u, v, c);
		const float uu = (float)u / image.getWidth() + halfTexel;
		const float vv = (float)v / image.getHeight() + halfTexel;
		// all three vertices of the face share one texcoord
		mesh.addTexCoord(ofVec2f(uu, vv));
		mesh.addTexCoord(ofVec2f(uu, vv));
		mesh.addTexCoord(ofVec2f(uu, vv));
	}
	image.update();
	mesh.clearColors();
}
//-------------------------------------------------------------- void captureApp::getClipping(ofImage& img, ofImage& clipping) { int w = img.getWidth(); int h = img.getHeight(); clipping.allocate(w, h, OF_IMAGE_COLOR_ALPHA); unsigned char* imgPixels = img.getPixels(); unsigned char* clippingPixels = clipping.getPixels(); int n = w * h; for(int i = 0; i < n; i++) { if(imgPixels[i*3] == 0 || imgPixels[i*3] == 255 || imgPixels[i*3+1] == 0 || imgPixels[i*3+1] == 255 || imgPixels[i*3+2] == 0 || imgPixels[i*3+2] == 255 ) { clippingPixels[i * 4 + 0] = 255; clippingPixels[i * 4 + 1] = 255; clippingPixels[i * 4 + 2] = 255; clippingPixels[i * 4 + 3] = 255; } else { clippingPixels[i * 4 + 0] = 0; clippingPixels[i * 4 + 1] = 0; clippingPixels[i * 4 + 2] = 0; clippingPixels[i * 4 + 3] = 0; } } }
// --------------------------------------------------------------------------------------- // void resolutionChanged( int &_res ) { destImage.allocate( _res, _res, OF_IMAGE_COLOR ); }
// Scale source into destination by independent x/y factors, preserving
// the source's image type, then delegate the actual resampling to the
// interpolating resize overload.
void resize(ofImage& source, ofImage& destination, float xScale, float yScale, int interpolation) {
	const float newWidth = source.getWidth() * xScale;
	const float newHeight = source.getHeight() * yScale;
	destination.allocate(newWidth, newHeight, source.getPixelsRef().getImageType());
	resize(source, destination, interpolation);
}
//-------------------------------------------------------------- void ofApp::draw(){ //Merci Ludo pour ton aide currentFrame = vidGrabber.getPixelsRef(); currentFrameCopy.allocate(currentFrame.getWidth(), currentFrame.getHeight(), OF_IMAGE_GRAYSCALE); for(int x=0 ; x < 256 ; x++) { histogram[x] = 0; } for (int i = 0; i < camWidth; i++){ for (int j = 0; j < camHeight; j++){ int lightness = currentFrame.getColor(i,j).getLightness(); histogram[lightness] = histogram[lightness]+1; ofColor pixel; pixel.set(lightness, lightness, lightness); currentFrame.setColor(i, j, pixel); } } ofSetHexColor(0xffffff); currentFrame.reloadTexture(); currentFrame.draw(0,0); ofFill(); ofSetHexColor(0x000000); ofSetPolyMode(OF_POLY_WINDING_ODD); ofLine(770, 400, 770, 400-255); ofLine(770, 400, 770+255, 400); histogramMax = 0; maxIndex = 0; for(int x = 0 ; x < 256 ; x++) { if (histogram[x]>histogramMax) { histogramMax = histogram[x]; maxIndex = x; } histogram[x] = histogram[x]/100; //cout << x << " : " << histogram[x] << "\n"; ofLine(x+770, 400-histogram[x], x+770, 400); } ofSetColor(255,0,0); ofLine(maxIndex+770, 400-histogram[maxIndex], maxIndex+770, 400); ofSetColor(0); ofDrawBitmapString("Histogram : ", 770, 100); ofDrawBitmapString("0 255 ", 770, 415); ofDrawBitmapString("0", 755, 400); ofDrawBitmapString("???", 773, 150); threshold = 128; for(int y = 0; y < camHeight; y++) { for(int x = 0; x < camWidth; x++) { ofColor cur = currentFrame.getColor(x, y); int lightness = cur.getLightness(); ofColor pixel; if (lightness<threshold) pixel.set(0, 0, 0); else pixel.set(255, 255, 255); currentFrameCopy.setColor(x, y, pixel); } } ofSetColor(255); currentFrameCopy.reloadTexture(); currentFrameCopy.draw(0, 480); }