void testApp::checkForCommonFill( ofxCvGrayscaleImage& imageOut, ofxCvGrayscaleImage& image1, ofxCvGrayscaleImage& image2 ) {
    // Write into imageOut a binary intersection mask: a pixel becomes 255
    // only where BOTH input images are exactly 255, otherwise 0.
    int noPixels = imageOut.width * imageOut.height;

    // Fixed: guard against mismatched dimensions — the per-pixel loop below
    // would otherwise read past the end of the smaller input buffers.
    if( image1.width != imageOut.width || image1.height != imageOut.height ||
        image2.width != imageOut.width || image2.height != imageOut.height ) {
        ofLog(OF_LOG_ERROR, "in checkForCommonFill, images are different sizes");
        return;
    }

    unsigned char* image1Pixels   = image1.getPixels();
    unsigned char* image2Pixels   = image2.getPixels();
    unsigned char* imageOutPixels = new unsigned char[ noPixels ];

    for( int i = 0; i < noPixels; i++ ) {
        // Both sources must be fully "on" (255) for the output pixel to be set.
        imageOutPixels[ i ] = ( image1Pixels[ i ] == 255 && image2Pixels[ i ] == 255 ) ? 255 : 0;
    }

    imageOut.setFromPixels( imageOutPixels, imageOut.width, imageOut.height );
    delete[] imageOutPixels;
}
//-------------------------------------------------------------- void DepthHoleFiller::performMorphologicalOpen ( ofxCvGrayscaleImage &input){ // Clean up the holes using morphological close. // http://homepages.inf.ed.ac.uk/rbf/HIPR2/open.htm input.erode(); input.dilate(); }
void ofxCvOpticalFlowLK::calc( ofxCvGrayscaleImage & pastImage, ofxCvGrayscaleImage & currentImage, int size ) {
	// Compute Lucas-Kanade optical flow between the two frames using a
	// size x size averaging window; the per-pixel x/y velocity components
	// are written into the vel_x / vel_y member images.
	cvCalcOpticalFlowLK( pastImage.getCvImage(), currentImage.getCvImage(), cvSize( size, size), vel_x, vel_y );
}
void update() {
	// Advance the animated offset, wrapping back to 0 past 1.
	offset += 0.01;
	if (offset > 1) {
		offset = 0;
	}

	// Update our camera.
	grabber.update();

	// If the camera has a new frame to offer us ...
	if (grabber.isFrameNew()) {
		// Make a copy of our grabber pixels in the colorImage.
		colorImage.setFromPixels(grabber.getPixelsRef());

		// When we assign a color image to a grayscale image, it is converted automatically.
		grayscaleImage = colorImage;

		// If we set learnBackground to true using the keyboard, we'll take a snapshot of
		// the background and use it to create a clean foreground image.
		if (learnBackground == true) {
			// We assign the grayscaleImage to the grayscaleBackgroundImage.
			grayscaleBackgroundImage = grayscaleImage;

			// Now we set learnBackground so we won't set a background unless
			// explicitly directed to with a keyboard command.
			learnBackground = false;
		}

		// Create a difference image by comparing the background and the current grayscale images.
		grayscaleAbsoluteDifference.absDiff(grayscaleBackgroundImage, grayscaleImage);

		// Assign grayscaleAbsoluteDifference to the grayscaleBinary image.
		grayscaleBinary = grayscaleAbsoluteDifference;

		// Then threshold the grayscale image to create a binary image.
		grayscaleBinary.threshold(threshold, invert);

		// Find contours (blobs) between 100 pixels and 1/3 * (width * height)
		// of the camera, considering at most 10 blobs; also find holes.
		// (The old comment said "20 pixels", but the code passes 100.)
		contourFinder.findContours(grayscaleBinary, 100, (width * height) / 3.0, 10, true);

		// Track the center of the biggest blob while one is present;
		// otherwise clear the trail.
		if (contourFinder.nBlobs > 0) {
			holePositions.addVertex(contourFinder.blobs[0].boundingRect.getCenter());
		} else {
			holePositions.clear();
		}
	}
}
//-------------------------------------------------------------------------------- void ofxCvGrayscaleImage::absDiff( ofxCvGrayscaleImage& mom ) { if( matchingROI(getROI(), mom.getROI()) ) { cvAbsDiff( cvImage, mom.getCvImage(), cvImageTemp ); swapTemp(); flagImageChanged(); } else { ofLog(OF_LOG_ERROR, "in *=, ROI mismatch"); } }
void ofCvOpticalFlowBM::calc( ofxCvGrayscaleImage & pastImage, ofxCvGrayscaleImage & currentImage, int size ) {
	// Block-matching optical flow between the two frames; velocities are
	// written into the vel_x / vel_y member images.
	// NOTE(review): the `size` parameter is unused here — block size, shift
	// and search range come from the `block`, `shift` and `max_range`
	// members. Confirm whether callers expect `size` to have an effect.
	cvCalcOpticalFlowBM(pastImage.getCvImage(), currentImage.getCvImage(), block, shift, max_range, 0, vel_x, vel_y);
}
//-------------------------------------------------------------------------------- void ofxCvFloatImage::addWeighted( ofxCvGrayscaleImage& mom, float f ) { if( matchingROI(getROI(), mom.getROI()) ) { convertGrayToFloat(mom.getCvImage(), cvImageTemp); cvAddWeighted( cvImageTemp, f, cvImage, 1.0f-f,0, cvImage ); flagImageChanged(); } else { ofLog(OF_LOG_ERROR, "in addWeighted, ROI mismatch"); } }
//-------------------------------------------------------------------------------- void ofxCvColorImage::convertToGrayscalePlanarImages(ofxCvGrayscaleImage& red, ofxCvGrayscaleImage& green, ofxCvGrayscaleImage& blue){ if( red.width == width && red.height == height && green.width == width && green.height == height && blue.width == width && blue.height == height ) { cvCvtPixToPlane(cvImage, red.getCvImage(), green.getCvImage(), blue.getCvImage(), NULL); } else { ofLog(OF_LOG_ERROR, "in convertToGrayscalePlanarImages, images are different sizes"); } }
void testApp::resetRoiMask(ofxCvGrayscaleImage img){
	// Reset the four-point ROI mask to the full extent of img
	// (corners listed clockwise from top-left, inclusive bounds).
	// NOTE(review): img is taken by value, copying the whole image just to
	// read its dimensions — consider a reference (requires updating the
	// declaration in the header as well).
	int w = img.getWidth();
	int h = img.getHeight();
	roiMask[0] = ofPoint(0,0);
	roiMask[1] = ofPoint(w-1,0);
	roiMask[2] = ofPoint(w-1,h-1);
	roiMask[3] = ofPoint(0,h-1);
}
//-------------------------------------------------------------------------------- void ofxCvFloatImage::addWeighted( ofxCvGrayscaleImage& mom, float f ) { if( pushSetBothToTheirIntersectionROI(*this,mom) ) { convertGrayToFloat(mom.getCvImage(), cvImageTemp); cvAddWeighted( cvImageTemp, f, cvImage, 1.0f-f,0, cvImage ); popROI(); //restore prevoius ROI mom.popROI(); //restore prevoius ROI flagImageChanged(); } else { ofLog(OF_LOG_ERROR, "in addWeighted, ROI mismatch"); } }
//-------------------------------------------------------------------------------- void ofxCvColorImage::setFromGrayscalePlanarImages( ofxCvGrayscaleImage& red, ofxCvGrayscaleImage& green, ofxCvGrayscaleImage& blue){ if( red.width == width && red.height == height && green.width == width && green.height == height && blue.width == width && blue.height == height ) { cvCvtPlaneToPix(red.getCvImage(), green.getCvImage(), blue.getCvImage(),NULL, cvImage); flagImageChanged(); } else { ofLog(OF_LOG_ERROR, "in setFromGrayscalePlanarImages, images are different sizes"); } }
//-------------------------------------------------------------- void DepthHoleFiller::performMorphologicalOpen ( ofxCvGrayscaleImage &input, int nTimes){ // Clean up the holes using morphological close. // http://homepages.inf.ed.ac.uk/rbf/HIPR2/open.htm for (int i=0; i<nTimes; i++){ input.erode(); } for (int i=0; i<nTimes; i++){ input.dilate(); } }
//-------------------------------------------------------------- void DepthHoleFiller::performMorphologicalClose ( ofxCvGrayscaleImage &input, int diameter){ // Clean up the holes using morphological close. // use a "larger structural element" by repeated passes. // http://homepages.inf.ed.ac.uk/rbf/HIPR2/close.htm for (int i=0; i<diameter; i++){ input.dilate(); } for (int i=0; i<diameter; i++){ input.erode(); } }
void setup() {
	// One-time initialization: size the window and camera grabber, then
	// allocate every helper image at the same dimensions so later
	// per-pixel operations (absDiff, threshold, assignment) line up.
	ofSetWindowShape(width, height);    // Set the window size.
	grabber.initGrabber(width, height); // Set the grabber size.
	// Allocate each of our helper images.
	colorImage.allocate(width, height);
	grayscaleImage.allocate(width, height);
	grayscaleBackgroundImage.allocate(width, height);
	grayscaleAbsoluteDifference.allocate(width, height);
	grayscaleBinary.allocate(width, height);
}
//-------------------------------------------------------------------------------- void ofxCvColorImage::convertToGrayscalePlanarImage (ofxCvGrayscaleImage& grayImage, int whichPlane){ if( !bAllocated ){ ofLogError("ofxCvColorImage") << "convertToGrayscalePlanarImage(): image not allocated"; return; } if( !grayImage.bAllocated ){ grayImage.allocate(width, height); } ofRectangle roi = getROI(); ofRectangle grayRoi = grayImage.getROI(); if( grayRoi.width == roi.width && grayRoi.height == roi.height ){ switch (whichPlane){ case 0: cvCvtPixToPlane(cvImage, grayImage.getCvImage(), NULL, NULL, NULL); grayImage.flagImageChanged(); break; case 1: cvCvtPixToPlane(cvImage, NULL, grayImage.getCvImage(), NULL, NULL); grayImage.flagImageChanged(); break; case 2: cvCvtPixToPlane(cvImage, NULL, NULL, grayImage.getCvImage(), NULL); grayImage.flagImageChanged(); break; } } else { ofLogError("ofxCvColorImage") << "convertToGrayscalePlanarImages(): image size or region of interest mismatch"; } }
//-------------------------------------------------------------------------------- void ofxCvShortImage::addWeighted( ofxCvGrayscaleImage& mom, float f ) { if( !bAllocated ){ ofLog(OF_LOG_ERROR, "in addWeighted, image is not allocated"); return; } if( matchingROI(getROI(), mom.getROI()) ) { convertGrayToShort(mom.getCvImage(), cvImageTemp); cvAddWeighted( cvImageTemp, f, cvImage, 1.0f-f,0, cvImage ); flagImageChanged(); } else { ofLog(OF_LOG_ERROR, "in addWeighted, ROI mismatch"); } }
//-------------------------------------------------------------------------------- void ofxCvGrayscaleImage::absDiff( ofxCvGrayscaleImage& mom, ofxCvGrayscaleImage& dad ) { if( !mom.bAllocated ){ ofLogError("ofxCvGrayscaleImage") << "absDiff(): first source image (mom) not allocated"; return; } if( !dad.bAllocated ){ ofLogError("ofxCvGrayscaleImage") << "absDiff(): second source image (dad) not allocated"; return; } if( !bAllocated ){ ofLogNotice("ofxCvGrayscaleImage") << "absDiff(): allocating to match dimensions: " << mom.getWidth() << " " << mom.getHeight(); allocate(mom.getWidth(), mom.getHeight()); } ofRectangle roi = getROI(); ofRectangle momRoi = mom.getROI(); ofRectangle dadRoi = dad.getROI(); if( (momRoi.width == roi.width && momRoi.height == roi.height ) && (dadRoi.width == roi.width && dadRoi.height == roi.height ) ) { cvAbsDiff( mom.getCvImage(), dad.getCvImage(), cvImage ); flagImageChanged(); } else { ofLogError("ofxCvGrayscaleImage") << "absDiff(): source image size mismatch between first (mom) & second (dad) image"; } }
//-------------------------------------------------------------- void testApp::setup(){ vidGrabber.setVerbose(true); vidGrabber.initGrabber(640, 480); colorImg.allocate(vidGrabber.getWidth(), vidGrabber.getHeight()); greyImage.allocate(vidGrabber.getWidth(), vidGrabber.getHeight()); greyImageSmall.allocate(120, 90); haarFinder.setup("haarcascade_frontalface_alt2.xml"); img.loadImage("stevejobs.png"); img.setAnchorPercent(0.5, 0.5); ofEnableAlphaBlending(); }
//-------------------------------------------------------------------------------- void ofxCvGrayscaleImage::absDiff( ofxCvGrayscaleImage& mom, ofxCvGrayscaleImage& dad ) { ofRectangle roi = getROI(); ofRectangle momRoi = mom.getROI(); ofRectangle dadRoi = dad.getROI(); if( (momRoi.width == roi.width && momRoi.height == roi.height ) && (dadRoi.width == roi.width && dadRoi.height == roi.height ) ) { cvAbsDiff( mom.getCvImage(), dad.getCvImage(), cvImage ); flagImageChanged(); } else { ofLog(OF_LOG_ERROR, "in absDiff, images are different sizes"); } }
//-------------------------------------------------------------- void DepthHoleFiller::performProperClose ( ofxCvGrayscaleImage &input, int diameter){ // http://homepages.inf.ed.ac.uk/rbf/HIPR2/close.htm // Defined as Max(f, O(C(O(f)))) ofxCv8uC1_Temp1 = input; // temp copy of original performMorphologicalOpen (input, diameter); performMorphologicalClose (input, diameter); performMorphologicalOpen (input, diameter); cvMax(input.getCvImage(), ofxCv8uC1_Temp1.getCvImage(), input.getCvImage()); }
void thresholdCalculator::drawPupilImageWithScanLine(int x, int y, int w, int h, ofxCvGrayscaleImage & img) {
	// Draw img at (x, y) scaled to w x h, overlaid with a translucent
	// cross-hair at the current scan position and a translucent red disc
	// at whiteLocMin (rescaled from ROI coordinates to the drawn size).
	ofEnableAlphaBlending();
	ofPushMatrix();
	ofTranslate(x, y, 0);
	ofSetColor(255, 255, 255);
	img.draw(0, 0, w, h);
	// Semi-transparent cross-hair at the current scan lines.
	ofSetColor(255, 255, 255,80);
	ofLine(0, scanY, w, scanY);
	ofLine(scanX, 0, scanX, h);
	// Marker scaled from ROI coordinates into the drawn image's size.
	ofSetColor(255, 0, 0, 50);
	ofFill();
	ofCircle(whiteLocMin.x * (img.width / roi.width), whiteLocMin.y * (img.height / roi.height), 10);
	ofDisableAlphaBlending();
	ofSetColor(255, 255, 255);
	ofDrawBitmapString("imgBeforeThreshold", 1, h + 12);
	ofPopMatrix();
}
// Set Pixel Data - Arrays //-------------------------------------------------------------------------------- void ofxCvFloatImage::operator = ( ofxCvGrayscaleImage& mom ) { if( mom.width == width && mom.height == height ) { cvConvert( mom.getCvImage(), cvImage ); } else { cout << "error in =, images are different sizes" << endl; } }
//---------------------------------------------------------------------------------- void ofxCvBlobFinder::findBlobs(ofxCvGrayscaleImage image, bool find_holes) { CvMemStorage *stor = cvCreateMemStorage(); IplImage *img = image.getCvImage(); CvSeq *contours; _width = img->width; _height = img->height; // CV_RETR_EXTERNAL to not find holes int mode = (find_holes)?CV_RETR_LIST:CV_RETR_EXTERNAL; cvFindContours(img, stor, &contours, sizeof(CvContour), mode, CV_CHAIN_APPROX_SIMPLE); blobz.clear(); while (contours) { ofxCvComplexBlob b = ofxCvComplexBlob(contours); b.setApproxFactor(approxFactor); b.getApproxPoints(); b.getHullPoints(); blobz.push_back( b ); contours = contours->h_next; } // sort blobs sort(blobz.begin(), blobz.end(), sort_blob_func); }
void draw(){
	// Draw the Canny edge image and its inverted version side by side.
	// sampleImg.draw(0, 0, 450, 450);
	cannyImg.draw(0, 0, 450, 450);
	cannyInvertImg.draw(450, 0, 450, 450);
}
//-------------------------------------------------------------------------------- void ofxCvColorImage::operator = ( ofxCvGrayscaleImage& mom ) { if( mom.width == width && mom.height == height ) { cvCvtColor( mom.getCvImage(), cvImage, CV_GRAY2RGB ); } else { cout << "error in =, images are different sizes" << endl; } }
//-------------------------------------------------------------- void testApp::draw(){ ofSetColor(255, 255, 255); colorImg.draw(0, 0, ofGetWidth(), ofGetHeight()); glPushMatrix(); glScalef(ofGetWidth() / (float)greyImageSmall.getWidth(), ofGetHeight() / (float)greyImageSmall.getHeight(), 1); // haarTracker.draw(0, 0); ofNoFill(); for(int i = 0; i < haarFinder.blobs.size(); i++) { ofRectangle cur = haarFinder.blobs[i].boundingRect; // ofRect(cur.x, cur.y, cur.width, cur.height); int iw = cur.width * 1.4; img.draw(haarFinder.blobs[i].centroid, iw, iw * img.getHeight() / img.getWidth()); } glPopMatrix(); }
//-------------------------------------------------------------------------------- void ofxCvColorImage::convertToGrayscalePlanarImage (ofxCvGrayscaleImage& grayImage, int whichPlane){ ofRectangle roi = getROI(); ofRectangle grayRoi = grayImage.getROI(); if( grayRoi.width == roi.width && grayRoi.height == roi.height ){ switch (whichPlane){ case 0: cvCvtPixToPlane(cvImage, grayImage.getCvImage(), NULL, NULL, NULL); grayImage.flagImageChanged(); break; case 1: cvCvtPixToPlane(cvImage, NULL, grayImage.getCvImage(), NULL, NULL); grayImage.flagImageChanged(); break; case 2: cvCvtPixToPlane(cvImage, NULL, NULL, grayImage.getCvImage(), NULL); grayImage.flagImageChanged(); break; } } else { ofLog(OF_LOG_ERROR, "in convertToGrayscalePlanarImages, ROI/size mismatch"); } }
//-------------------------------------------------------------------------------- void ofxCvGrayscaleImage::absDiff( ofxCvGrayscaleImage& mom ){ if( !mom.bAllocated ){ ofLog(OF_LOG_ERROR, "in absDiff, mom needs to be allocated"); return; } if( !bAllocated ){ ofLog(OF_LOG_NOTICE, "in absDiff, allocating to match dimensions"); allocate(mom.getWidth(), mom.getHeight()); } if( matchingROI(getROI(), mom.getROI()) ) { cvAbsDiff( cvImage, mom.getCvImage(), cvImageTemp ); swapTemp(); flagImageChanged(); } else { ofLog(OF_LOG_ERROR, "in *=, ROI mismatch"); } }
void margDisplay::feedImg(ofxCvGrayscaleImage& _source) {
	// Copy _source into the member image and apply the perspective warp
	// defined by `translate` into `image`.
	// Fixed two defects: the reallocation test compared widths only (a
	// height-only change left `image` at the wrong size), and the new size
	// was read from the stale member `source` instead of the incoming
	// `_source`.
	if (image.getWidth() != _source.getWidth() || image.getHeight() != _source.getHeight()) {
		image.clear();
		image.allocate(_source.getWidth(), _source.getHeight());
	}
	source = _source;
	cvWarpPerspective(source.getCvImage(), image.getCvImage(), translate);
	image.flagImageChanged();
}
void convert(ofxCvGrayscaleImage &src, ofImage &dst) {
	// Threshold-copy src into the first channel of dst: any non-zero source
	// pixel becomes 255, zero stays 0.
	// NOTE(review): only byte i*4 (the first channel of a 4-byte pixel) is
	// written; the remaining bytes keep whatever dst already holds — confirm
	// that is what callers expect.
	int noPixels = src.width * src.height;   // hoisted out of the loop condition
	for (int i = 0; i < noPixels; i += 1) {
		// Fixed: the "on" value was 0xffffffff, which silently truncates to
		// 0xff when stored into an unsigned char; write 255 explicitly.
		dst.getPixels()[i * 4] = (src.getPixels()[i] == 0) ? 0 : 255;
	}
	dst.update();
}