Example #1
ofTexture ofxImageTS::invert(ofImage image){
    
    ofTexture texture;
    ofPixels copy;
    copy.allocate(image.getWidth(), image.getHeight(), OF_PIXELS_RGB);
    texture.allocate(image);
    // step through the pixels three channels at a time (assumes an RGB source image)
    for(int i = 0; i + 2 < (int)image.getPixels().size(); i += 3){
        copy[i] = 255 - image.getPixels()[i];
        copy[i+1] = 255 - image.getPixels()[i+1];
        copy[i+2] = 255 - image.getPixels()[i+2];
    }
    texture.loadData(copy);
    return texture;
}
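These ofxImageTS filters copy the source pixels, modify them, and return an ofTexture. A minimal usage sketch, assuming an ofxImageTS instance named ts and an ofImage img loaded in setup() (both names are hypothetical):

// hypothetical ofApp::draw(): invert img and draw the resulting texture
ofTexture inverted = ts.invert(img);
inverted.draw(0, 0);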
Example #2
ofTexture ofxImageTS::greyScale(ofImage image) {
    
    int avg;
    ofTexture texture;
    ofPixels copy;
    copy.allocate(image.getWidth(), image.getHeight(), OF_PIXELS_RGB);
    texture.allocate(image);
    for(int i = 0; i + 2 < (int)image.getPixels().size(); i += 3){
        avg = (image.getPixels()[i] + image.getPixels()[i+1] + image.getPixels()[i+2])/3;
        copy[i] = avg;
        copy[i+1] = avg;
        copy[i+2] = avg;
    }
    texture.loadData(copy);
    return texture;
}
Example #3
ofTexture ofxImageTS::tanGB(ofImage image){
    
    ofTexture texture;
    ofPixels copy;
    copy.allocate(image.getWidth(), image.getHeight(), OF_PIXELS_RGB);
    texture.allocate(image);
    float Tan;
    for(int i = 0; i + 2 < (int)image.getPixels().size(); i += 3){
        Tan = tan(i);
        copy[i] =  image.getPixels()[i];
        copy[i+1] = Tan * image.getPixels()[i+1];
        copy[i+2] = Tan * image.getPixels()[i+2];
    }
    texture.loadData(copy);
    return texture;
}
Example #4
//--------------------------------------------------------------
ofImage ofxContrast::setBrightness(ofImage& _img, float brightnessAmount){
    ofxCvColorImage cvimg;
    cvimg.allocate(_img.width, _img.height);
    cvimg.setFromPixels(_img.getPixels(), _img.width, _img.height);
    
	float brightnessVal = MAX(-127, MIN(127, brightnessAmount));
	
	unsigned char data[ 256 ];
	CvMat * matrix;
	
	matrix = cvCreateMatHeader( 1, 256, CV_8UC1 );
    cvSetData( matrix, data, 0 );
	
	for( int i=0; i<256; i++ ) {
		int value = cvRound( i+brightnessVal );
		data[i]	= (unsigned char) min( max(0,value), 255 );
	}
	
    cvLUT( cvimg.getCvImage(), cvimg.getCvImage(), matrix );
	cvReleaseMat( &matrix );
    
    ofImage ofimg;
    ofimg.allocate(_img.width, _img.height, OF_IMAGE_COLOR);
    ofimg.setFromPixels(cvimg.getPixels(), _img.width, _img.height, OF_IMAGE_COLOR);
	return ofimg;
}
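A hedged usage sketch for the brightness helper above, assuming an ofxContrast object named contrast and a loaded ofImage img (names are hypothetical):

// hypothetical usage: brighten by +30 (the amount is clamped to [-127, 127] above)
ofxContrast contrast;
ofImage brighter = contrast.setBrightness(img, 30.0f);
brighter.draw(0, 0);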
Example #5
ofTexture ofxImageTS::noise(ofImage image, float mix) {
    
    float avg;
    ofTexture texture;
    ofPixels copy;
    copy.allocate(image.getWidth(), image.getHeight(), OF_PIXELS_RGB);
    texture.allocate(image);
    for(int i = 0; i + 2 < (int)image.getPixels().size(); i += 3){
        avg = (image.getPixels()[i] + image.getPixels()[i+1] + image.getPixels()[i+2])/3.0f;
        copy[i] = avg * (125 - avg) * mix;
        copy[i+1] = avg * (125 - avg) * mix;
        copy[i+2] = avg * (125 - avg) * mix;
    }
    texture.loadData(copy);
    return texture;
}
Example #6
void ofxQTVideoSaver::addFrame(ofImage newImg){

    // downscale if dims don't match
    if(newImg.width != vidOutputWidth || newImg.height != vidOutputHeight){
        // resize image before output
        newImg.resize(vidOutputWidth, vidOutputHeight);
    }

    video.addFrame(newImg.getPixels(), 1.0f / vidFrameRate);
    numRecordedFrames++;

    if(numRecordedFrames % videoFrameLength == 0 ) {

        numRecordedFrames = 1;
        string vidName = vidDir + "/"+ vidNamePrefix + ofToString(curVideoNum) +".mov";

        printf("save curVideoNum  \n");

        // Save the movie to disk
        video.finishMovie();
        video.setup(vidOutputWidth,vidOutputHeight,vidName);

        curVideoNum++;

        if(curVideoNum > maxVideos) {
            /* clean-up code should start to run here: this class will infinitely
               create .mov files, so you might want to delete some after a few
               have been made */
        }


    }

}
Example #7
string ofxBaseGui::saveStencilToHex(ofImage& img) {
	stringstream strm;
	int width = img.getWidth();
	int height = img.getHeight();
	int n = width * height;
	unsigned char cur = 0;
	int shift = 0;
	strm << "{";
	for(int i = 0; i < n;) {
		if(img.getPixels()[i * 4 + 3] > 0) { // read the alpha channel (assumes an RGBA image)
			cur |= 1 << shift;
		}
		i++;
		if(i % 8 == 0) {
			strm << "0x" << hex << (unsigned int) cur;
			cur = 0;
			shift = 0;
			if(i < n) {
				strm << ",";
			}
		} else {
			shift++;
		}
	}
	strm << "}";
	return strm.str();
}
Example #8
ofTexture ofxImageTS::sinusoidal(ofImage image){
    
    ofTexture texture;
    ofPixels copy;
    copy.allocate(image.getWidth(), image.getHeight(), OF_PIXELS_RGB);
    texture.allocate(image);
    float Sin;
    for(int i = 0; i + 2 < (int)image.getPixels().size(); i += 3){
        Sin = (sin(i) + 1)/2.0f;
        copy[i] = Sin * image.getPixels()[i];
        copy[i+1] = Sin *  image.getPixels()[i+1];
        copy[i+2] = Sin *  image.getPixels()[i+2];
    }
    texture.loadData(copy);
    return texture;
}
Example #9
// image2data converts an image to OctoWS2811's raw data format.
// The number of vertical pixels in the image must be a multiple
// of 8.  The data array must be the proper size for the image.
//--------------------------------------------------------------
void ofxTeensyOcto::image2data(ofImage image, unsigned char* data, bool layout)
{    
    int offset = 3;
    int x, y, xbegin, xend, xinc, mask;
    int linesPerPin = image.getHeight() / 8;
    int pixel[8]; // scratch buffer for one column of 8 pixels (no heap allocation to leak)
    
    // get the copied image pixels
    pixels2 = image.getPixels();

    // 2d array of our pixel colors
    for (int x = 0; x < ledWidth; x++)
    {
        for (int y = 0; y < (ledHeight * stripsPerPort * numPortsMain); y++)
        {
            int loc = x + y * ledWidth;
            colors[loc] = pixels2.getColor(x, y);
        }
    }
    
    for (y = 0; y < linesPerPin; y++)
    {
        if ((y & 1) == (layout ? 0 : 1))
        {
            // even numbered rows are left to right
            xbegin = 0;
            xend = image.getWidth();
            xinc = 1;
        }
        else
        {
            // odd numbered rows are right to left
            xbegin = image.getWidth() - 1;
            xend = -1;
            xinc = -1;
        }
        
        for (x = xbegin; x != xend; x += xinc)
        {
            for (int i=0; i < 8; i++)
            {
                int temploc = x + (y + linesPerPin * i) * image.getWidth();
                pixel[i] = colors[temploc].getHex();
                pixel[i] = colorWiring(pixel[i]);
            }
            
            // convert 8 pixels to 24 bytes
            for (mask = 0x800000; mask != 0; mask >>= 1)
            {
                unsigned char b = 0;
                for (int i=0; i < 8; i++)
                {
                    if ((pixel[i] & mask) != 0) b |= (1 << i);
                }
                data[offset++] = b;
            }
        }
    }
}
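A rough sizing sketch for the data buffer the comment above requires: the loop writes 24 bytes per image column for each group of 8 rows, starting at offset 3. Assuming an ofxTeensyOcto instance named teensy and a loaded ofImage img (both hypothetical names):

// rough buffer sizing for image2data(): 3 header bytes + 3 bytes per pixel
int dataSize = 3 + (int)img.getWidth() * (int)img.getHeight() * 3;
unsigned char* data = new unsigned char[dataSize];
teensy.image2data(img, data, true); // layout: row-direction flag (see the y & 1 test above)
// ... transmit data, then:
delete[] data;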
Example #10
void ofxGifEncoder::addFrame(ofImage & img, float _duration) {

    if(img.width != w || img.height != h) {
        ofLog(OF_LOG_WARNING, "ofxGifEncoder::addFrame image dimensions don't match, skipping frame");
        return;
    }
    
    addFrame(img.getPixels(), w, h, img.bpp,  _duration);
}
Example #11
//-----------------------------------------------------
void ofxSURFTracker::detect(ofImage &img) {

    int inputWidth = img.getWidth();
    int inputHeight = img.getHeight();

    if(inputWidth < width || inputHeight < height) {
        return; // detection impossible, because I can't crop out of this image
    }
    detect(img.getPixels(), inputWidth, inputHeight);
}
Example #12
//--------------------------------------------------------------
void testApp::setup(){

	quadmesh.create(150, 50, 400, 300, 8);

	quadimage.loadImage("1.jpg");
	quadtexture = genTex(quadimage.getWidth(), quadimage.getHeight(), quadimage.getPixels());

	// if use GL_TEXTURE_RECTANGLE, change texture coord from [1,1] to [width, height]
	//quadmesh.ResetTextureCoords(quadimage.getWidth(), quadimage.getHeight());
}
Example #13
//--------------------------------------------------------------
// create a visual representation of the simulation
void Rd::getImage(ofImage & image, const ofColor & c1, const ofColor & c2){
	unsigned char * pixels = image.getPixels();
	for(int indexImg = 0, indexA = 0; indexA < A.size(); indexImg += 3, indexA++){
		ofColor c = c1.getLerped(c2, A[indexA] * A[indexA]);
		// write the lerped color in RGB channel order
		pixels[indexImg] = c.r;
		pixels[indexImg + 1] = c.g;
		pixels[indexImg + 2] = c.b;
	}
	image.update();
}
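A hedged caller sketch: the image must be RGB and hold one pixel per element of the field A (rd, simWidth and simHeight are hypothetical names):

// hypothetical caller of Rd::getImage()
ofImage view;
view.allocate(simWidth, simHeight, OF_IMAGE_COLOR); // simWidth * simHeight == A.size()
rd.getImage(view, ofColor::black, ofColor::white);
view.draw(0, 0);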
Example #14
void ofApp::update() {
	step();
	
	unsigned char* pixels = buffer.getPixels();
	int n = num * num;
	for (int i = 0; i < n; i++) {
		pixels[i] = 128 + 100*grid[i];
	}
	buffer.update();
}
Example #15
void ofxLibdc::getOneShot(ofImage& img) {
	setTransmit(false);
	flush();
	dc1394_video_set_one_shot(camera, DC1394_ON);
	dc1394video_frame_t *frame;
	dc1394_capture_dequeue(camera, DC1394_CAPTURE_POLICY_WAIT, &frame);
	img.allocate(width, height, imageType);
	if(imageType == OF_IMAGE_GRAYSCALE) {
		memcpy(img.getPixels(), frame->image, width * height);
	} else if(imageType == OF_IMAGE_COLOR) {
		// this shouldn't be reallocated every frame!
		dc1394video_frame_t* rgbFrame = (dc1394video_frame_t*) calloc(1, sizeof(dc1394video_frame_t));
		rgbFrame->color_coding = DC1394_COLOR_CODING_RGB8;
		dc1394_convert_frames(frame, rgbFrame);
		memcpy(img.getPixels(), rgbFrame->image, 3 * width * height);
		free(rgbFrame);
	}
	img.update(); // refresh the texture from the pixels copied above
	dc1394_capture_enqueue(camera, frame);
}
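A possible refactor for the per-call allocation flagged in the comment above, keeping rgbFrame as a class member; this is a sketch under that assumption, not the addon's actual code:

// sketch: declare "dc1394video_frame_t* rgbFrame = NULL;" as a member so the
// conversion frame is set up once instead of on every call
if(rgbFrame == NULL) {
	rgbFrame = (dc1394video_frame_t*) calloc(1, sizeof(dc1394video_frame_t));
	rgbFrame->color_coding = DC1394_COLOR_CODING_RGB8;
}
dc1394_convert_frames(frame, rgbFrame);
memcpy(img.getPixels(), rgbFrame->image, 3 * width * height);
// release rgbFrame (and its image buffer, if libdc1394 allocated one) in the destructor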
Example #16
	void synthImage()
	{
		const int num_pixels = w * h;
		unsigned char *synth_pixels = synth.getPixels();
		unsigned char *img1_pixels = img1.getPixels();
		
		// accumulate pixel values (reset first so repeated calls do not carry over)
		avarage_pixel_value = 0;
		for (int i=0; i<num_pixels; i++)
			avarage_pixel_value += img1_pixels[i];
		
		// calc average
		avarage_pixel_value /= num_pixels;
		
		// set to synth image
		for (int i=0; i<num_pixels; i++)
			synth_pixels[i] = avarage_pixel_value;
		
		// update texture
		synth.update();
	}
Example #17
void CirclePacker::setup( int $_id, ofImage& $image, ofColor $targetColor, int $colorRange, float $minRadius ) {
	_iw					= $image.width;
	_ih					= $image.height;
	_id					= $_id;
	init();
	_pixels				= $image.getPixels();
	_targetColor		= $targetColor;
	_colorRange			= $colorRange;
	_minRadius			= $minRadius;
	_bSetup				= true;
}
Example #18
	int countBlackPixels(ofImage &img) {
		int retval = 0;
		
		for(int i = 0; i < img.height*img.width*3; i+=3){
			if(img.getPixels()[i]==0) {
				retval++;
			}
		}
		
		return retval;
	}
Example #19
int ofxCvHaarFinder::findHaarObjects(ofImage& input, int minWidth, int minHeight) {
	
	ofxCvGrayscaleImage gray;
	gray.allocate(input.width, input.height);

	if( input.type == OF_IMAGE_COLOR ){
		ofxCvColorImage color;
		color.allocate(input.width, input.height);
		color = input.getPixels();
		gray = color;
	}else if( input.type == OF_IMAGE_GRAYSCALE ){
		gray = input.getPixels();
	}else{
		ofLog(OF_LOG_ERROR, "ofxCvHaarFinder::findHaarObjects doesn't support OF_IMAGE_RGBA ofImage");
		return 0;
	}
	
	return findHaarObjects(gray, minWidth, minHeight);
	
}
Example #20
// this could also be done with OpenCV, cvResize + CV_INTER_NN
// or even faster by precomputing a remap function
void threadedScanLoader::resize(ofImage& from, ofImage& to, int toWidth, int toHeight) {
	to.allocate(toWidth, toHeight, OF_IMAGE_COLOR_ALPHA);
	unsigned char* fromPixels = from.getPixels();
	unsigned char* toPixels = to.getPixels();
	int toPosition = 0;
	int fromWidth = from.getWidth();
	int fromHeight = from.getHeight();
	for(int toy = 0; toy < toHeight; toy++) {
		int fromy = (toy * fromHeight) / toHeight;
		int fromPosition = fromy * fromWidth;
		for(int tox = 0; tox < toWidth; tox++) {
			int fromx = (tox * fromWidth) / toWidth;
			int cur = (fromPosition + fromx) * 4;
			toPixels[toPosition++] = fromPixels[cur + 0];
			toPixels[toPosition++] = fromPixels[cur + 1];
			toPixels[toPosition++] = fromPixels[cur + 2];
			toPixels[toPosition++] = fromPixels[cur + 3];
		}
	}
}
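A sketch of the precomputed-remap idea the comment above mentions: build the horizontal source indices once per target width so the inner loop avoids the divide (a sketch, not the class's actual implementation):

// precompute the horizontal remap once, then reuse it for every row
std::vector<int> xMap(toWidth);
for(int tox = 0; tox < toWidth; tox++) {
	xMap[tox] = (tox * fromWidth) / toWidth;
}
// the inner loop then becomes: int cur = (fromPosition + xMap[tox]) * 4;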
Example #21
static int getAvgBrightness(ofImage img) {
    int total = 0;
    int width = img.getWidth();
    int height = img.getHeight();
    for(int i = 0; i < width; i++) {
        for(int j = 0; j < height; j++) {
            total += img.getPixels().getColor(i, j).getBrightness();
        }
    }
    return total / (width*height);
}
Example #22
//--------------------------------------------------------------
void testApp::update(){
	unsigned char * pixels = img2.getPixels();
	for(int x = 0;x<600;++x){
		for(int y=0;y<200;++y){
			unsigned char brightness = ofMap(ofNoise(x*faktor,y*faktor,ofGetFrameNum()*faktor),0,1,0,255);
			pixels[y*600+x] = brightness;
		}
	}

	unsigned char * rgbPixels = img3.getPixels();
	for(int x = 0;x<600;++x){
		for(int y=0;y<200;++y){
			for(int c=0;c<3;++c){
				rgbPixels[(y*600+x)*3+c] = ofMap(ofNoise(x*faktor,y*faktor,c,ofGetFrameNum()*faktor),0,1,0,255);
			}
		}
	}

	img2.update();
	img3.update();
}
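The update() above assumes img2 is a 600x200 grayscale image and img3 a 600x200 RGB image; a minimal matching setup() sketch (faktor's value is a guess):

// hypothetical testApp::setup() matching the buffers used in update()
void testApp::setup(){
	faktor = 0.01f;                               // noise scale, value assumed
	img2.allocate(600, 200, OF_IMAGE_GRAYSCALE);  // one channel, indexed as y*600+x
	img3.allocate(600, 200, OF_IMAGE_COLOR);      // three channels, indexed as (y*600+x)*3+c
}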
Example #23
static ofPoint getAvgColor(ofImage img) {
    ofPoint total(0,0,0);
    int width = img.getWidth();
    int height = img.getHeight();
    ofPixels pixels = img.getPixels();
    for(int i = 0; i < width; i++) {
        for(int j = 0; j < height; j++) {
            ofColor col = pixels.getColor(i, j);
            total += ofPoint(col.r, col.g, col.b);
        }
    }
    return total / (width*height);
}
Example #24
ofTexture ofxImageTS::dualist(ofImage image, ofColor base, ofColor top) {
    int avg;
    ofTexture texture;
    ofPixels copy;
    copy.allocate(image.getWidth(), image.getHeight(), OF_PIXELS_RGB);
    texture.allocate(image);
    for(int i = 0; i + 2 < (int)image.getPixels().size(); i += 3){
        avg = (image.getPixels()[i] + image.getPixels()[i+1] + image.getPixels()[i+2])/3;
        if(avg < 128) {
            copy[i] = base.r;
            copy[i+1] = base.g;
            copy[i+2] = base.b;
        }
        else {
            copy[i] = top.r;
            copy[i+1] = top.g;
            copy[i+2] = top.b;
        }
    }
    texture.loadData(copy);
    return texture;
}
Example #25
void ofxDepthImageCompressor::convertTo8BitImage(unsigned short* buf, ofImage& image){
	int nearPlane = 500;
	int farPlane = 7000;
	unsigned char* pix = image.getPixels();
    int stride = image.getPixelsRef().getNumChannels();
	for(int i = 0; i < 640*480; i++){ // assumes a 640x480 depth buffer
        //ofMap(buf[i], nearPlane, farPlane, 255, 0, true);
        unsigned char value = buf[i] == 0 ? 0 : 255 - (255 * (buf[i] - nearPlane) ) / farPlane;// + ofMap(buf[i], nearPlane, farPlane, 255, 0, true);
        for(int c = 0; c < stride; c++){
            pix[i*stride+c] = value;
        }
	}
	image.update();
}
Example #26
ofTexture ofxImageTS::whiteBlack(ofImage image) {
    
    int avg;
    ofTexture texture;
    ofPixels copy;
    copy.allocate(image.getWidth(), image.getHeight(), OF_PIXELS_RGB);
    texture.allocate(image);
    for(int i = 0; i + 2 < (int)image.getPixels().size(); i += 3){
        avg = (image.getPixels()[i] + image.getPixels()[i+1] + image.getPixels()[i+2])/3;
        if(avg >= 128) {
            copy[i] = 254;
            copy[i+1] = 254;
            copy[i+2] = 254;
        }
        else {
            copy[i] = 0;
            copy[i+1] = 0;
            copy[i+2] = 0;
        }
    }
    texture.loadData(copy);
    return texture;
}
Example #27
bool ofxLibdc::grabFrame(ofImage& img) {
	dc1394video_frame_t *frame;
	dc1394_capture_dequeue(camera, capturePolicy, &frame);
	if(frame != NULL) {
		if(imageType == OF_IMAGE_GRAYSCALE) {
			memcpy(img.getPixels(), frame->image, width * height);
		} else if(imageType == OF_IMAGE_COLOR) {
			// color conversion is not handled in this snippet
		}
		dc1394_capture_enqueue(camera, frame);
		return true;
	} else {
		return false;
	}
}
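A hedged usage sketch, assuming an ofxLibdc object named camera configured for grayscale capture and an image sized to match the camera (camWidth and camHeight are hypothetical):

// hypothetical capture loop, e.g. in ofApp::update()
ofImage frame;
frame.allocate(camWidth, camHeight, OF_IMAGE_GRAYSCALE);
if(camera.grabFrame(frame)) {
	frame.update(); // refresh the texture after grabFrame's memcpy into the pixels
	frame.draw(0, 0);
}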
Example #28
void ofxLibdc::grabStill(ofImage& img) {
	setTransmit(false);
	flushBuffer();
	dc1394_video_set_one_shot(camera, DC1394_ON);
	// if possible, the following should be replaced with a call to grabFrame
	dc1394video_frame_t *frame;
	dc1394_capture_dequeue(camera, capturePolicy, &frame);
	img.allocate(width, height, imageType);
	if(imageType == OF_IMAGE_GRAYSCALE) {
		memcpy(img.getPixels(), frame->image, width * height);
	} else if(imageType == OF_IMAGE_COLOR) {
	}
	dc1394_capture_enqueue(camera, frame);
}
Example #29
//--------------------------------------------------------------
void captureApp::getClipping(ofImage& img, ofImage& clipping) {
	int w = img.getWidth();
	int h = img.getHeight();
	clipping.allocate(w, h, OF_IMAGE_COLOR_ALPHA);
	unsigned char* imgPixels = img.getPixels();
	unsigned char* clippingPixels = clipping.getPixels();
	int n = w * h;
	for(int i = 0; i < n; i++) {
		if(imgPixels[i*3] == 0 || imgPixels[i*3] == 255 ||
		   imgPixels[i*3+1] == 0 || imgPixels[i*3+1] == 255 ||
		   imgPixels[i*3+2] == 0 || imgPixels[i*3+2] == 255 ) {
			clippingPixels[i * 4 + 0] = 255;
			clippingPixels[i * 4 + 1] = 255;
			clippingPixels[i * 4 + 2] = 255;
			clippingPixels[i * 4 + 3] = 255;
		} else {
			clippingPixels[i * 4 + 0] = 0;
			clippingPixels[i * 4 + 1] = 0;
			clippingPixels[i * 4 + 2] = 0;
			clippingPixels[i * 4 + 3] = 0;
			
		}
	}
}
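A hedged usage sketch: getClipping() writes into clipping's pixels directly, so the caller needs an update() before drawing (cameraImage is a hypothetical RGB source image):

// hypothetical usage: overlay clipped (0 or 255) pixels in white on top of the source
ofImage clipping;
getClipping(cameraImage, clipping);
clipping.update(); // refresh the texture after the pixels were written
cameraImage.draw(0, 0);
ofEnableAlphaBlending();
clipping.draw(0, 0);
ofDisableAlphaBlending();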
Example #30
void testApp::makeMasked(ofImage mask, ofImage toBeMasked){
		// if you want blurrier outlines, try smaller numbers here; the down/up
		// resize costs fewer cycles than an actual blur.
		// mask.resize(40,30);
    mask.resize(80,60);
    mask.resize(WIDTH,HEIGHT);
    
    makeMe.setFromPixels(toBeMasked.getPixels(),WIDTH,HEIGHT,OF_IMAGE_COLOR);
    
    //then draw a quad for the top layer using our composite shader to set the alpha
	maskShader.begin();
        
        //our shader uses two textures, the top layer and the alpha
        //we can load two textures into a shader using the multi texture coordinate extensions
        glActiveTexture(GL_TEXTURE0_ARB);
        makeMe.getTextureReference().bind();
        
        glActiveTexture(GL_TEXTURE1_ARB);
        mask.getTextureReference().bind();
        //draw a quad the size of the frame
        glBegin(GL_QUADS);
            
            glMultiTexCoord2d(GL_TEXTURE0_ARB, 0, 0);
            glMultiTexCoord2d(GL_TEXTURE1_ARB, 0, 0);		
            glVertex2f( 0, 0);
            
            glMultiTexCoord2d(GL_TEXTURE0_ARB, WIDTH, 0);
            glMultiTexCoord2d(GL_TEXTURE1_ARB, WIDTH, 0);		
            glVertex2f( ofGetWidth(), 0);
            
            glMultiTexCoord2d(GL_TEXTURE0_ARB, WIDTH, HEIGHT);
            glMultiTexCoord2d(GL_TEXTURE1_ARB, WIDTH, HEIGHT);		
            glVertex2f( ofGetWidth(), ofGetHeight());
            
            glMultiTexCoord2d(GL_TEXTURE0_ARB, 0, HEIGHT);
            glMultiTexCoord2d(GL_TEXTURE1_ARB, 0, HEIGHT);		
            glVertex2f( 0, ofGetHeight());

        glEnd();
        
        //deactivate and clean up
        glActiveTexture(GL_TEXTURE1_ARB);
        mask.getTextureReference().unbind();
        //mask.unbind();
        glActiveTexture(GL_TEXTURE0_ARB);
        makeMe.getTextureReference().unbind();
        
	maskShader.end();
}