/* Function getLargestConnectedComps
   Finds the largest connected component in an image.
   Input:  image (IplImage *): binary input image
   Output: the largest connected component (CvConnectedComp *) */
CvConnectedComp* LocalisationPupil::getLargestConnectedComps(IplImage *image)
{
    CvScalar pixval;
    IplImage* im2 = cvCloneImage(image);
    CvConnectedComp *retComps, *comps;
    comps = new CvConnectedComp;
    retComps = new CvConnectedComp;
    retComps->area = 0;

    for (int j = 0; j < im2->width * im2->height; j++)
    {
        pixval = cvGet2D(im2, j / im2->width, j % im2->width);
        // a non-zero pixel belongs to a component that has not yet been recorded
        if (pixval.val[0] > 0)
        {
            // flood-fill the component with 0 so it is not visited again
            cvFloodFill(im2, cvPoint(j % im2->width, j / im2->width),
                        cvScalar(0), cvScalar(0), cvScalar(0), comps, 4);
            // if this component is larger, keep it
            if (comps->area > retComps->area)
            {
                retComps->area = comps->area;
                retComps->rect = comps->rect;
            }
        }
    }
    delete comps;
    cvReleaseImage(&im2);
    return retComps;
}
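For comparison, the same "largest component" query can be answered without repeated flood fills by using the C++ connected-components API. A minimal sketch, assuming a CV_8UC1 input called `binary`; the helper name `largestComponentRect` is illustrative only:

```cpp
#include <opencv2/imgproc.hpp>

// Sketch: bounding box of the largest connected component in a binary image,
// using cv::connectedComponentsWithStats instead of repeated flood fills.
cv::Rect largestComponentRect(const cv::Mat& binary)
{
    cv::Mat labels, stats, centroids;
    int n = cv::connectedComponentsWithStats(binary, labels, stats, centroids, 4);

    int bestLabel = 0, bestArea = 0;
    for (int i = 1; i < n; i++)   // label 0 is the background
    {
        int area = stats.at<int>(i, cv::CC_STAT_AREA);
        if (area > bestArea) { bestArea = area; bestLabel = i; }
    }
    if (bestLabel == 0)
        return cv::Rect();        // no foreground component found

    return cv::Rect(stats.at<int>(bestLabel, cv::CC_STAT_LEFT),
                    stats.at<int>(bestLabel, cv::CC_STAT_TOP),
                    stats.at<int>(bestLabel, cv::CC_STAT_WIDTH),
                    stats.at<int>(bestLabel, cv::CC_STAT_HEIGHT));
}
```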
// Flood-fill the image in red from the global seed coordinates (g, h).
void flood(IplImage *img)
{
    CvPoint seed = cvPoint(g, h);            // g, h are set elsewhere (e.g. by a mouse callback)
    CvScalar color = CV_RGB(250, 0, 0);
    cvFloodFill(img, seed, color, cvScalarAll(200.0), cvScalarAll(200.0),
                NULL, CV_FLOODFILL_FIXED_RANGE, NULL);
    printf("ab %d %d\n", g, h);
}
bool CAutoDetect4Pts::ConnectdCorner(IplImage *edge, CvPoint2D32f *pCorner, int &n)
{
    CvPoint2D32f *tmpcorner = new CvPoint2D32f[n];
    int *pflag = new int[n];
    memset(pflag, 0, sizeof(int) * n);
    int m = 0;
    for (int i = 0; i < n; i++)                 // examine each corner in turn
    {
        if (pflag[i] != 0)                      // already handled, try the next one
            continue;
        m = 0;
        pflag[i] = 1;
        tmpcorner[m] = pCorner[i];
        // mark the connected edge containing this corner with the value 100
        cvFloodFill(m_Edge, cvPoint(pCorner[i].x, pCorner[i].y), cvScalar(100, 100, 100));
        for (int j = i + 1; j < n; j++)
        {
            // any other corner lying on the marked edge belongs to the same contour
            if (edge->imageData[int(pCorner[j].y) * m_Edge->widthStep + int(pCorner[j].x)] == 100)
            {
                m++;
                pflag[j] = 1;
                tmpcorner[m] = pCorner[j];
            }
        }
        if (m == 3)                             // four connected corners found
            break;
        // restore the edge before trying the next seed corner
        cvFloodFill(m_Edge, cvPoint(pCorner[i].x, pCorner[i].y), cvScalar(255, 255, 255));
    }
    if (m == 3)
    {
        for (int i = 0; i < 4; i++)
            pCorner[i] = tmpcorner[i];
        delete [] tmpcorner;
        delete [] pflag;
        n = 4;
        return true;
    }
    else
    {
        delete [] tmpcorner;
        delete [] pflag;
        return false;
    }
}
void on_mouse( int event, int x, int y, int flags, void* param )
{
    if( !color_img )
        return;

    switch( event )
    {
    case CV_EVENT_LBUTTONDOWN:
        {
            CvPoint seed = cvPoint(x,y);
            int lo = ffill_case == 0 ? 0 : lo_diff;
            int up = ffill_case == 0 ? 0 : up_diff;
            int flags = connectivity + (new_mask_val << 8) +
                        (ffill_case == 1 ? CV_FLOODFILL_FIXED_RANGE : 0);
            int b = rand() & 255, g = rand() & 255, r = rand() & 255;
            CvConnectedComp comp;

            if( is_mask )
                cvThreshold( mask, mask, 1, 128, CV_THRESH_BINARY );

            if( is_color )
            {
                CvScalar color = CV_RGB( r, g, b );
                cvFloodFill( color_img, seed, color, CV_RGB( lo, lo, lo ),
                             CV_RGB( up, up, up ), &comp, flags, is_mask ? mask : NULL );
                cvShowImage( "image", color_img );
            }
            else
            {
                CvScalar brightness = cvRealScalar((r*2 + g*7 + b + 5)/10);
                cvFloodFill( gray_img, seed, brightness, cvRealScalar(lo),
                             cvRealScalar(up), &comp, flags, is_mask ? mask : NULL );
                cvShowImage( "image", gray_img );
            }

            printf("%g pixels were repainted\n", comp.area );
            if( is_mask )
                cvShowImage( "mask", mask );
        }
        break;
    }
}
int cv::floodFill( InputOutputArray _image, InputOutputArray _mask,
                   Point seedPoint, Scalar newVal, Rect* rect,
                   Scalar loDiff, Scalar upDiff, int flags )
{
    CvConnectedComp ccomp;
    CvMat c_image = _image.getMat(), c_mask = _mask.getMat();
    cvFloodFill(&c_image, seedPoint, newVal, loDiff, upDiff, &ccomp, flags,
                c_mask.data.ptr ? &c_mask : 0);
    if( rect )
        *rect = ccomp.rect;
    return cvRound(ccomp.area);
}
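The wrapper above is the C++ entry point around the legacy cvFloodFill. A minimal usage sketch of that C++ API with a mask follows; the file name, seed point, and tolerance of 20 are placeholders:

```cpp
#include <cstdio>
#include <opencv2/imgcodecs.hpp>
#include <opencv2/imgproc.hpp>

int main()
{
    // Placeholder input; any 8-bit 3-channel image works.
    cv::Mat img = cv::imread("input.jpg");
    if (img.empty()) return 1;

    // The mask must be 2 pixels wider and taller than the image and zeroed.
    cv::Mat mask = cv::Mat::zeros(img.rows + 2, img.cols + 2, CV_8UC1);

    cv::Rect filled;
    int area = cv::floodFill(img, mask, cv::Point(img.cols / 2, img.rows / 2),
                             cv::Scalar(0, 0, 255), &filled,
                             cv::Scalar(20, 20, 20), cv::Scalar(20, 20, 20),
                             4 | cv::FLOODFILL_FIXED_RANGE);
    printf("%d pixels repainted inside %dx%d\n", area, filled.width, filled.height);
    return 0;
}
```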
int main(int argc, char** argv)
{
    IplImage *img = NULL;
    CvCapture* capture = cvCaptureFromCAM(0);
    cvNamedWindow("floodfill", CV_WINDOW_AUTOSIZE);
    //img=cvLoadImage("img.jpg");
    //cvCvtColor(src,img,CV_RGB2GRAY);
    //CvMouseCallback(7,x,y,16,NULL);
    cvSetMouseCallback("floodfill", my_mouse_callback, (void*)img);
    while (1)
    {
        img = cvQueryFrame(capture);
        if (l == 7)    // mouse callback has recorded a click at (g, h)
        {
            flag = 1;
            seed = cvPoint(g, h);
            cvFloodFill(img, seed, color, cvScalarAll(50.0), cvScalarAll(10.0),
                        &comp, CV_FLOODFILL_FIXED_RANGE, NULL);
            printf("ab %d %d\n", g, h);
        }
        if (flag == 1)
        {
            cvFloodFill(img, seed, color, cvScalarAll(50.0), cvScalarAll(10.0),
                        &comp, CV_FLOODFILL_FIXED_RANGE, NULL);
            printf("contour total %f %f \n", comp.area, comp.value.val[1]);
            //contour=comp->contour;
            //cvDrawContours(img,comp.contour,cvScalarAll(255),cvScalarAll(0),-1,1,8,cvPoint(0,0));
        }
        cvShowImage("floodfill", img);
        if (cvWaitKey(33) == 27)
            break;
    }
    // frames returned by cvQueryFrame are owned by the capture and must not be
    // released individually; release the capture instead
    cvReleaseCapture(&capture);
    cvDestroyWindow("floodfill");
    return 0;
}
void SetImageFloodFill(IplImage *img)
{
    // seed at the image centre and fill with a fixed colour, 4-connected,
    // with a tolerance of 7 in each channel
    CvPoint imgPoint = cvPoint(img->width / 2, img->height / 2);
    CvScalar imgScalar = cvScalar(215, 59, 62, 0);
    cvFloodFill(img, imgPoint, imgScalar, cvScalarAll(7.0), cvScalarAll(7.0),
                NULL, 4, NULL);
}
static void findCComp( IplImage* img )
{
    int x, y, cidx = 1;
    IplImage* mask = cvCreateImage( cvSize(img->width+2, img->height+2), 8, 1 );
    cvZero(mask);
    cvRectangle( mask, cvPoint(0, 0), cvPoint(mask->width-1, mask->height-1),
                 cvScalarAll(1), 1, 8, 0 );

    for( y = 0; y < img->height; y++ )
        for( x = 0; x < img->width; x++ )
        {
            if( CV_IMAGE_ELEM(mask, uchar, y+1, x+1) != 0 )
                continue;
            cvFloodFill(img, cvPoint(x,y), cvScalarAll(cidx),
                        cvScalarAll(0), cvScalarAll(0), 0, 4, mask);
            cidx++;
        }
}
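findCComp works because the flood fill records every visited pixel in the mask (which must be 2 pixels larger than the image), so each component is labelled exactly once. A minimal sketch of the same labelling idea with the C++ API; the helper name `labelComponents` and the `binary` input are illustrative only, and only 255 labels fit in an 8-bit mask:

```cpp
#include <opencv2/imgproc.hpp>

// Sketch: label connected regions into the mask by packing the label into the
// high byte of `flags`; FLOODFILL_MASK_ONLY leaves the input image untouched.
void labelComponents(cv::Mat& binary, cv::Mat& labels)
{
    labels = cv::Mat::zeros(binary.rows + 2, binary.cols + 2, CV_8UC1);
    int cidx = 1;
    for (int y = 0; y < binary.rows; y++)
        for (int x = 0; x < binary.cols; x++)
        {
            if (labels.at<uchar>(y + 1, x + 1) != 0)
                continue;   // already labelled by an earlier fill
            cv::floodFill(binary, labels, cv::Point(x, y), cv::Scalar(0), 0,
                          cv::Scalar(0), cv::Scalar(0),
                          4 | (cidx << 8) | cv::FLOODFILL_MASK_ONLY);
            cidx++;         // caution: only labels 1..255 fit in the 8-bit mask
        }
}
```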
void CV_FloodFillTest::run_func()
{
    int flags = connectivity + (mask_only ? CV_FLOODFILL_MASK_ONLY : 0) +
                (range_type == 1 ? CV_FLOODFILL_FIXED_RANGE : 0) + (new_mask_val << 8);
    double* odata = test_mat[OUTPUT][0].ptr<double>();

    if(!test_cpp)
    {
        CvConnectedComp comp;
        cvFloodFill( test_array[INPUT_OUTPUT][0], seed_pt, new_val, l_diff, u_diff,
                     &comp, flags, test_array[INPUT_OUTPUT][1] );
        odata[0] = comp.area;
        odata[1] = comp.rect.x;
        odata[2] = comp.rect.y;
        odata[3] = comp.rect.width;
        odata[4] = comp.rect.height;
        odata[5] = comp.value.val[0];
        odata[6] = comp.value.val[1];
        odata[7] = comp.value.val[2];
        odata[8] = comp.value.val[3];
    }
    else
    {
        cv::Mat img = cv::cvarrToMat(test_array[INPUT_OUTPUT][0]),
                mask = test_array[INPUT_OUTPUT][1] ?
                       cv::cvarrToMat(test_array[INPUT_OUTPUT][1]) : cv::Mat();
        cv::Rect rect;
        int area;
        if( mask.empty() )
            area = cv::floodFill( img, seed_pt, new_val, &rect, l_diff, u_diff, flags );
        else
            area = cv::floodFill( img, mask, seed_pt, new_val, &rect, l_diff, u_diff, flags );
        odata[0] = area;
        odata[1] = rect.x;
        odata[2] = rect.y;
        odata[3] = rect.width;
        odata[4] = rect.height;
        odata[5] = odata[6] = odata[7] = odata[8] = 0;
    }
}
int colorize_map()
{
    int i;
    char str[4];
    IplImage *img = cvLoadImage("chinamap.jpg", CV_LOAD_IMAGE_COLOR);
    CvPoint pt;
    CvFont font;
    cvInitFont(&font, CV_FONT_HERSHEY_PLAIN, .8, .8, 0, 1, 8);
    CvScalar color[5] =
    {
        CV_RGB(255,0,0), CV_RGB(0,255,0),
        CV_RGB(0,0,255), CV_RGB(0,0,0), CV_RGB(99,76,23)
    };
    if (!img)
    {
        printf("open file chinamap.jpg failed\n");
        return 0;
    }
    //cvNamedWindow("map of china",CV_WINDOW_AUTOSIZE);
    for (i = 0; i < V; i++)
    {
        // seed the fill at the province capital and paint it with its assigned colour
        memcpy(&pt, &vertex[i].capcity, sizeof(CvPoint));
        cvFloodFill(img, pt, color[vertex[i].color-1],
                    CV_RGB(25,25,25), CV_RGB(25,25,25), NULL, 4, NULL);
        sprintf(str, "%d", vertex[i].color);
        //cvPutText(img,vertex[i].proname,pt,&font,CV_RGB(25,55,255));
        //cvShowImage("map of china",img);
        //cvWaitKey(0);
    }
    //cvShowImage("map of china",img);
    cvSaveImage("colorized_map.jpg", img);
    //cvWaitKey(0);
    cvReleaseImage(&img);
    //cvDestroyWindow("chinamap.jpg");
    return 1;
}
// to be revised later
void FkPaperKeyboard_TypeA::cornerVerification(IplImage* srcImage)
{
    CvSize size = cvGetSize(srcImage);
    IplImage* eigImage = cvCreateImage(size, IPL_DEPTH_8U, 1);
    IplImage* tempImage = cvCreateImage(size, IPL_DEPTH_8U, 1);
    IplImage* grayImage = cvCreateImage(size, IPL_DEPTH_8U, 1);
    IplImage* veriImage = cvCreateImage(size, IPL_DEPTH_8U, 1);
    IplImage* dstImage = cvCreateImage(size, IPL_DEPTH_8U, 1);
    IplImage* mask = cvCreateImage(size, IPL_DEPTH_8U, 1);
    IplImage* mask2 = cvCreateImage(size, IPL_DEPTH_8U, 1);
    CvRect rect = cvRect(10, 10, 640 - 20, 480 - 20);
    CvPoint2D32f srcQuad[4], dstQuad[4];
    CvMat* warp_matrix = cvCreateMat(3, 3, CV_32FC1);
    CvMat* warp_matrix_invert = cvCreateMat(3, 3, CV_32FC1);
    CvMat* result = cvCreateMat(3, 1, CV_32FC1);
    CvMat* dst = cvCreateMat(3, 1, CV_32FC1);
    int keyButtonCornerCount = 316;

    cvCvtColor(srcImage, grayImage, CV_BGR2GRAY);
    cvSetImageROI(grayImage, rect);
    cvSetImageROI(mask, rect);
    cvSetImageROI(dstImage, rect);
    cvSetImageROI(mask2, rect);

    // keep only the values in the 100-255 range in the mask
    cvInRangeS(grayImage, cvScalar(100, 100, 100), cvScalar(255, 255, 255), mask);
    cvCopy(mask, mask2);
    //cvShowImage("mask", mask);
    //cvShowImage("mask2", mask2);

    // flood-fill from (10, 10) to zero the connected background in mask,
    // then XOR with mask2 so that only the key buttons remain
    cvFloodFill(mask, cvPoint(10, 10), cvScalar(0, 0, 0));
    cvXor(mask2, mask, dstImage);
    //cvShowImage("mask3", mask);
    //cvShowImage("mask4", mask2);
    //cvShowImage("dstImage", dstImage);

    // extract the corners of each key button from the resulting image
    cvGoodFeaturesToTrack(dstImage, eigImage, tempImage, keyButtonCorner,
                          &keyButtonCornerCount, 0.01, 7, NULL, 7, 0);
    cvFindCornerSubPix(dstImage, keyButtonCorner, keyButtonCornerCount, cvSize(3, 3),
                       cvSize(-1, -1),
                       cvTermCriteria(CV_TERMCRIT_ITER | CV_TERMCRIT_EPS, 20, 0.03));
    cvResetImageROI(dstImage);
    for (int i = 0; i < 316; i++)
    {
        keyButtonCorner[i].x += rect.x;
        keyButtonCorner[i].y += rect.y;
    }
    initKeyButtonCorner();

    srcQuad[CLOCKWISE_1].x = keyButtonCorner[315].x + 10;
    srcQuad[CLOCKWISE_1].y = keyButtonCorner[315].y - 10;
    srcQuad[CLOCKWISE_5].x = keyButtonCorner[31].x + 10;
    srcQuad[CLOCKWISE_5].y = keyButtonCorner[31].y + 10;
    srcQuad[CLOCKWISE_7].x = keyButtonCorner[0].x - 10;
    srcQuad[CLOCKWISE_7].y = keyButtonCorner[0].y + 10;
    srcQuad[CLOCKWISE_11].x = keyButtonCorner[290].x - 10;
    srcQuad[CLOCKWISE_11].y = keyButtonCorner[290].y - 10;
    dstQuad[CLOCKWISE_1].x = 640;
    dstQuad[CLOCKWISE_1].y = 0;
    dstQuad[CLOCKWISE_5].x = 640;
    dstQuad[CLOCKWISE_5].y = 480;
    dstQuad[CLOCKWISE_7].x = 0;
    dstQuad[CLOCKWISE_7].y = 480;
    dstQuad[CLOCKWISE_11].x = 0;
    dstQuad[CLOCKWISE_11].y = 0;

    // warp the key layout to a canonical 640x480 view, detect corners there,
    // then map them back with the inverse transform
    cvGetPerspectiveTransform(srcQuad, dstQuad, warp_matrix);
    cvWarpPerspective(dstImage, veriImage, warp_matrix);
    detectKeyButtonCorner(veriImage);
    cvInvert(warp_matrix, warp_matrix_invert);
    for (int i = 0; i < 316; i++)
    {
        cvmSet(dst, 0, 0, keyButtonCorner[i].x);
        cvmSet(dst, 1, 0, keyButtonCorner[i].y);
        cvmSet(dst, 2, 0, 1);
        cvMatMul(warp_matrix_invert, dst, result);
        float t = cvmGet(result, 2, 0);
        keyButtonCorner[i].x = cvmGet(result, 0, 0) / t;
        keyButtonCorner[i].y = cvmGet(result, 1, 0) / t;
    }

    cvResetImageROI(srcImage);
    cvResetImageROI(mask);
    cvReleaseImage(&eigImage);
    cvReleaseImage(&tempImage);
    cvReleaseImage(&grayImage);
    cvReleaseImage(&veriImage);
    cvReleaseImage(&dstImage);
    cvReleaseImage(&mask);
    cvReleaseImage(&mask2);
    cvReleaseMat(&warp_matrix);
    cvReleaseMat(&warp_matrix_invert);
    cvReleaseMat(&result);
    cvReleaseMat(&dst);
}
FConnectedDomains* FConnectedDomains::FromImage(IplImage* image)
{
    IplImage* image1 = cvCreateImage(cvGetSize(image), image->depth, image->nChannels);
    cvCopy(image, image1);
    FConnectedDomains* ret = new FConnectedDomains();
    if (image1->nChannels == 1)
    {
        for (int i = 0; i < image1->height; i++)
        {
            for (int j = 0; j < image1->width; j++)
            {
                int index = i * image1->widthStep + j;
                // 255 is background; 100/101 mark pixels already assigned to a domain
                if (((unsigned char)(image1->imageData[index]) == 255) ||
                    (image1->imageData[index] == 100) ||
                    (image1->imageData[index] == 101))
                    continue;
                CvConnectedComp ccc;
                cvFloodFill(image1, cvPoint(j, i), cvScalarAll(100),
                            cvScalarAll(0), cvScalarAll(0), &ccc, 8);
                FConnectedDomain* fcd = new FConnectedDomain();
                fcd->rect = ccc.rect;
                /*
                for(int k = ccc.rect.y; k < ccc.rect.y + ccc.rect.height; k++)
                {
                    for(int l = ccc.rect.x; l < ccc.rect.x + ccc.rect.width; l++)
                    {
                        index = k * image1->widthStep + l;
                        if(image1->imageData[index] == 100)
                        {
                            CvPoint* cp = new CvPoint();
                            cp->x = l;
                            cp->y = k;
                            fcd->points.push_back(cp);
                            image1->imageData[index] = 101;
                        }
                    }
                }*/
                // collect the filled pixels column by column and mark them as visited (101)
                for (int l = ccc.rect.x; l < ccc.rect.x + ccc.rect.width; l++)
                {
                    for (int k = ccc.rect.y; k < ccc.rect.y + ccc.rect.height; k++)
                    {
                        index = k * image1->widthStep + l;
                        if (image1->imageData[index] == 100)
                        {
                            CvPoint* cp = new CvPoint();
                            cp->x = l;
                            cp->y = k;
                            fcd->Xpoints.push_back(cp);
                            image1->imageData[index] = 101;
                        }
                    }
                }
                ret->ConnectedDomains.push_back(fcd);
            }
        }
    }
    cvReleaseImage(&image1);
    return ret;
}
void imageCallback(const sensor_msgs::ImageConstPtr& msg) { //bridge that will transform the message (image) from ROS code back to "image" code sensor_msgs::CvBridge bridge; fprintf(stderr, "\n call Back funtion \n"); //publish data (obstacle waypoints) back to the boat //ros::NodeHandle n; //std_msgs::Float32 xWaypoint_msg; // X coordinate obstacle message //std_msgs::Float32 yWaypoint_msg; // Y coordinate obstacle message //publish the waypoint data //ros::Publisher waypoint_info_pub = n.advertise<std_msgs::Float32>("waypoint_info", 1000); //ros::Publisher Ywaypoint_info_pub = n.advertise<std_msgs::Float32>("waypoint_info", 1000); //std::stringstream ss; /***********************************************************************/ //live image coming streamed straight from the boat's camera IplImage* boatFront = bridge.imgMsgToCv(msg, "bgr8"); //The boat takes flipped images, so you need to flip them back to normal cvFlip(boatFront, boatFront, 0); IplImage* backUpImage = cvCloneImage(boatFront); boatFront->origin = IPL_ORIGIN_TL; //sets image origin to top left corner int X = boatFront->height; int Y = boatFront->width; //cout << "height " << X << endl; //cout << "width " << Y << endl; /*********************Image Filtering variables****************************/ //these images are used for segmenting objects from the overall background //create a one channel image to convert from RGB to GRAY IplImage* grayImage = cvCreateImage(cvGetSize(boatFront),IPL_DEPTH_8U,1); //convert grayImage to binary (final step after converting from GRAY) IplImage* bwImage = cvCreateImage(cvGetSize(grayImage),IPL_DEPTH_8U,1); //variables used for the flood fill segmentation CvPoint seed_point = cvPoint(boatFront->height/1.45,0); //not sure how this variable works CvScalar color = CV_RGB(250,0,0); CvMemStorage* grayStorage = NULL; //memory storage for contour sequence CvSeq* contours = 0; // get blobs and filter them using their area //IplConvKernel* morphKernel = cvCreateStructuringElementEx(5, 5, 1, 1, CV_SHAPE_RECT, NULL); //IplImage* original, *originalThr; //IplImage* segmentated = cvCreateImage(cvGetSize(boatFront), 8, 1); //unsigned int blobNumber = 0; //IplImage* labelImg = cvCreateImage(cvGetSize(boatFront), IPL_DEPTH_LABEL, 1); CvMoments moment; /***********************************************************************/ //boat's edge distance from the camera. This is used for visual calibration //to know the distance from the boat to the nearest obstacles. //With respect to the mounted camera, distance is 21 inches (0.5334 m) side to side //and 15 inches (0.381 m). //float boatFrontDistance = 0.381; //distance in meters //float boatSideDistance = 0.5334; //distance in meters // These variables tell the distance from the center bottom of the image // (the camera) to the square surrounding a the obstacle float xObstacleDistance = 0.0; float yObstacleDistance = 0.0; float obstacleDistance = 0.0; float obstacleHeading = 0.0; int pixelsNumber = 50; //number of pixels for an n x n matrix and # of neighbors const int arraySize = pixelsNumber; const int threeArraySize = pixelsNumber; //if n gets changed, then the algorithm might have to be //recalibrated. 
Try to keep it constant //these variables are used for the k nearest neighbors //int accuracy; //reponses for each of the classifications float responseWaterH, responseWaterS, responseWaterV; float responseGroundH, responseGroundS, responseGroundV; float responseSkyH, responseSkyS, responseSkyV; float averageHue = 0.0; float averageSat = 0.0; float averageVal = 0.0; CvMat* trainClasses = cvCreateMat( pixelsNumber, 1, CV_32FC1 ); CvMat* trainClasses2 = cvCreateMat( pixelsNumber, 1, CV_32FC1 ); //CvMat sample = cvMat( 1, 2, CV_32FC1, _sample ); //used with the classifier CvMat* trainClassesH = cvCreateMat( pixelsNumber, 1, CV_32FC1 ); CvMat* trainClassesS = cvCreateMat( pixelsNumber, 1, CV_32FC1 ); CvMat* trainClassesV = cvCreateMat( pixelsNumber, 1, CV_32FC1 ); //CvMat* trainClasses2 = cvCreateMat( pixelsNumber, 1, CV_32FC1 ); //CvMat sample = cvMat( 1, 2, CV_32FC1, _sample ); //used with the classifier /*CvMat* nearestWaterH = cvCreateMat(1, pixelsNumber, CV_32FC1); CvMat* nearestWaterS = cvCreateMat(1, pixelsNumber, CV_32FC1); CvMat* nearestWaterV = cvCreateMat(1, pixelsNumber, CV_32FC1); CvMat* nearestGroundH = cvCreateMat(1, pixelsNumber, CV_32FC1); CvMat* nearestGroundS = cvCreateMat(1, pixelsNumber, CV_32FC1); CvMat* nearestGroundV = cvCreateMat(1, pixelsNumber, CV_32FC1); CvMat* nearestSkyH = cvCreateMat(1, pixelsNumber, CV_32FC1); CvMat* nearestSkyS = cvCreateMat(1, pixelsNumber, CV_32FC1); CvMat* nearestSkyV = cvCreateMat(1, pixelsNumber, CV_32FC1); //Distance CvMat* distanceWaterH = cvCreateMat(1, pixelsNumber, CV_32FC1); CvMat* distanceWaterS = cvCreateMat(1, pixelsNumber, CV_32FC1); CvMat* distanceWaterV = cvCreateMat(1, pixelsNumber, CV_32FC1); CvMat* distanceGroundH = cvCreateMat(1, pixelsNumber, CV_32FC1); CvMat* distanceGroundS = cvCreateMat(1, pixelsNumber, CV_32FC1); CvMat* distanceGroundV = cvCreateMat(1, pixelsNumber, CV_32FC1); CvMat* distanceSkyH = cvCreateMat(1, pixelsNumber, CV_32FC1); CvMat* distanceSkyS = cvCreateMat(1, pixelsNumber, CV_32FC1); CvMat* distanceSkyV = cvCreateMat(1, pixelsNumber, CV_32FC1); */ //these variables are use to traverse the picture by blocks of n x n pixels at //a time. 
//Index(0,0) does not exist, so make sure kj and ki start from 1 (in the //right way, of course) //x and y are the dimensions of the local patch of pixels int x = (boatFront->height)/1.45;//(boatFront->height)/2.5 + 105; int y = 0; int skyX = 0; int skyY = 0; int row1 = 0; int column1 = 0; //these two variables are used in order to divide the grid in the //resample segmentation part int xDivisor = 200; int yDivisor = 200; //ground sample //CvMat* groundTrainingHue = cvCreateMat(threeArraySize,arraySize,CV_32FC1); //CvMat* groundTrainingSat = cvCreateMat(threeArraySize,arraySize,CV_32FC1); //CvMat* groundTrainingVal = cvCreateMat(threeArraySize,arraySize,CV_32FC1); //water sample CvMat* waterTrainingHue = cvCreateMat(threeArraySize,arraySize,CV_32FC1); CvMat* waterTrainingSat = cvCreateMat(threeArraySize,arraySize,CV_32FC1); CvMat* waterTrainingVal = cvCreateMat(threeArraySize,arraySize,CV_32FC1); //n x n sample patch taken from the picture CvMat* sampleHue = cvCreateMat(1,arraySize,CV_32FC1); CvMat* sampleSat = cvCreateMat(1,arraySize,CV_32FC1); CvMat* sampleVal = cvCreateMat(1,arraySize,CV_32FC1); CvMat* resampleHue = cvCreateMat(boatFront->height/xDivisor,boatFront->width/yDivisor,CV_32FC1); CvMat* resampleSat = cvCreateMat(boatFront->height/xDivisor,boatFront->width/yDivisor,CV_32FC1); CvMat* resampleVal = cvCreateMat(boatFront->height/xDivisor,boatFront->width/yDivisor,CV_32FC1); int xDiv = 20; int yDiv = 20; CvMat* resampleHue2 = cvCreateMat(boatFront->height/xDiv,boatFront->width/yDiv,CV_32FC1); CvMat* resampleSat2 = cvCreateMat(boatFront->height/xDiv,boatFront->width/yDiv,CV_32FC1); CvMat* resampleVal2 = cvCreateMat(boatFront->height/xDiv,boatFront->width/yDiv,CV_32FC1); //sky training sample CvMat* skyTrainingHue = cvCreateMat(arraySize,arraySize,CV_32FC1); CvMat* skyTrainingSat = cvCreateMat(arraySize,arraySize,CV_32FC1); CvMat* skyTrainingVal = cvCreateMat(arraySize,arraySize,CV_32FC1); //initialize each matrix element to zero for ease of use //cvZero(groundTrainingHue); //cvZero(groundTrainingSat); //cvZero(groundTrainingVal); cvZero(waterTrainingHue); cvZero(waterTrainingSat); cvZero(waterTrainingVal); cvZero(sampleHue); cvZero(sampleSat); cvZero(sampleVal); cvZero(resampleHue); cvZero(resampleSat); cvZero(resampleVal); cvZero(skyTrainingHue); cvZero(skyTrainingSat); cvZero(skyTrainingVal); //Stores the votes for each channel (whether it belongs to water or not //1 is part of water, 0 not part of water //if sum of votes is bigger than 1/2 the number of elements, then it belongs to water int votesSum = 0; int comparator[3]; //used when only three votes are needed //int comparatorTwo [3][3]; //used when six votes are needed //initial sum of votes is zero //Error if initialize both matrices inside a single for loop. Dont know why for(int i = 0; i < 3; i++) { comparator[i] = 0; } /***********************************************************************/ //Convert from RGB to HSV to control the brightness of the objects. //work with reflexion /*Sky recognition. Might be useful for detecting reflexion on the water. If the sky is detected, and the reflection has the same characteristics of something below the horizon, that "something" might be water. 
Assume sky wont go below the horizon */ //convert from RGB to HSV cvCvtColor(boatFront, boatFront, CV_BGR2HSV); cvCvtColor(backUpImage, backUpImage, CV_BGR2HSV); HsvImage I(boatFront); HsvImage IBackUp(backUpImage); //Sky detection /* for (int i=0; i<boatFront->height;i++) { for (int j=0; j<boatFront->width;j++) { //if something is bright enough, consider it sky and store the //value. HSV values go from 0 to 180 ... RGB goes from 0 to 255 if (((I[i][j].v >= 180) && (I[i][j].s <= 16))) // && ((I[i][j].h >=10)))) //&& (I[i][j].h <= 144)))) { //The HSV values vary between 0 and 1 cvmSet(skyTrainingHue,skyX,skyY,I[i][j].h); cvmSet(skyTrainingSat,skyX,skyY,I[i][j].s); cvmSet(skyTrainingVal,skyX,skyY,I[i][j].v); //I[i][j].h = 0.3*180; //H (color) //I[i][j].s = 0.3*180; //S (color intensity) //I[i][j].v = 0.6*180; //V (brightness) if (skyY == pixelsNumber-1) { if (skyX == pixelsNumber-1) skyX = 1; else skyX = skyX + 1; skyY = 1; } else skyY = skyY + 1; } } } /***********************************************************************/ //offline input pictures. Samples of water properties are taken from these //pictures to get a range of values for H, S, V that will be stored into a //pre-defined classifier IplImage* imageSample1 = cvLoadImage("20110805_032255.jpg"); cvSetImageROI(imageSample1, cvRect(0,0,imageSample1->height/0.5,imageSample1->width/1.83)); cvCvtColor(imageSample1, imageSample1, CV_BGR2HSV); HsvImage I1(imageSample1); IplImage* imageSample2 = cvLoadImage("20110805_032257.jpg"); cvCvtColor(imageSample2, imageSample2, CV_BGR2HSV); HsvImage I2(imageSample2); IplImage* imageSample3 = cvLoadImage("20110805_032259.jpg"); cvCvtColor(imageSample3, imageSample3, CV_BGR2HSV); HsvImage I3(imageSample3); IplImage* imageSample4 = cvLoadImage("20110805_032301.jpg"); cvCvtColor(imageSample4, imageSample4, CV_BGR2HSV); HsvImage I4(imageSample4); IplImage* imageSample5 = cvLoadImage("20110805_032303.jpg"); cvCvtColor(imageSample5, imageSample5, CV_BGR2HSV); HsvImage I5(imageSample5); IplImage* imageSample6 = cvLoadImage("20110805_032953.jpg"); cvCvtColor(imageSample6, imageSample6, CV_BGR2HSV); HsvImage I6(imageSample6); IplImage* imageSample7 = cvLoadImage("20110805_032955.jpg"); cvCvtColor(imageSample7, imageSample7, CV_BGR2HSV); HsvImage I7(imageSample7); IplImage* imageSample8 = cvLoadImage("20110805_032957.jpg"); cvCvtColor(imageSample8, imageSample8, CV_BGR2HSV); HsvImage I8(imageSample8); IplImage* imageSample9 = cvLoadImage("20110805_032959.jpg"); cvCvtColor(imageSample9, imageSample9, CV_BGR2HSV); HsvImage I9(imageSample9); IplImage* imageSample10 = cvLoadImage("20110805_033001.jpg"); cvCvtColor(imageSample10, imageSample10, CV_BGR2HSV); HsvImage I10(imageSample10); IplImage* imageSample11 = cvLoadImage("20110805_033009.jpg"); cvCvtColor(imageSample11, imageSample11, CV_BGR2HSV); HsvImage I11(imageSample11); IplImage* imageSample12 = cvLoadImage("20110805_033011.jpg"); cvCvtColor(imageSample12, imageSample12, CV_BGR2HSV); HsvImage I12(imageSample12); for (int i=0; i < threeArraySize; i++) { for (int j=0; j < arraySize; j++) { row1 = ceil(X/1.2866)+ceil(X/5.237)+i+ceil(-X/3.534545455) + ceil(X/4.8); column1 = ceil(Y/7.0755)+ceil(Y/21.01622)+j+ceil(X/1.495384615); averageHue = (I1[row1][column1].h + I2[row1][column1].h + I3[row1][column1].h + I4[row1][column1].h + I5[row1][column1].h + I6[row1][column1].h + I7[row1][column1].h + I8[row1][column1].h + I9[row1][column1].h + I10[row1][column1].h + I11[row1][column1].h + I12[row1][column1].h) / 12; averageSat = (I1[row1][column1].s + 
I2[row1][column1].s + I3[row1][column1].s + I4[row1][column1].s + I5[row1][column1].s + I6[row1][column1].s + I7[row1][column1].s + I8[row1][column1].s + I9[row1][column1].s + I10[row1][column1].s + I11[row1][column1].s + I12[row1][column1].s) / 12; averageVal = (I1[row1][column1].v + I2[row1][column1].v + I3[row1][column1].v + I4[row1][column1].v + I5[row1][column1].v + I6[row1][column1].v + I7[row1][column1].v + I8[row1][column1].v + I9[row1][column1].v + I10[row1][column1].v + I11[row1][column1].v + I12[row1][column1].v) / 12; //water patch sample (n X n matrix) cvmSet(waterTrainingHue,i,j,averageHue); cvmSet(waterTrainingSat,i,j,averageSat); cvmSet(waterTrainingVal,i,j,averageVal); //patch is red (this is for me to know where the ground patch sample is) //I[row1][column1].h = 0; //I[row1][column1].s = 255; //I[row1][column1].v = 255; } } //creating a training sample from the an image taken on the fly row1 = 0; column1 = 0; for (int i=0; i<pixelsNumber; i++) { for (int j=0; j<pixelsNumber; j++) { row1 = ceil(X/1.2866)+ceil(X/5.237)+i+ceil(-X/3.534545455) + ceil(X/4.8); column1 = ceil(Y/7.0755)+ceil(Y/21.01622)+j+ceil(X/1.495384615); cvmSet(trainClassesH,i,0,I[row1][column1].h); cvmSet(trainClassesS,i,0,I[row1][column1].s); cvmSet(trainClassesV,i,0,I[row1][column1].v); } } //order the water samples in ascending order on order to know a range cvSort(waterTrainingHue, waterTrainingHue, CV_SORT_ASCENDING); cvSort(waterTrainingSat, waterTrainingSat, CV_SORT_ASCENDING); cvSort(waterTrainingVal, waterTrainingVal, CV_SORT_ASCENDING); // find the maximum and minimum values in the array to create a range int maxH = cvmGet(waterTrainingHue,0,0); int maxS = cvmGet(waterTrainingSat,0,0); int maxV = cvmGet(waterTrainingVal,0,0); int minH = cvmGet(waterTrainingHue,0,0); int minS = cvmGet(waterTrainingSat,0,0); int minV = cvmGet(waterTrainingVal,0,0); for (int i=0; i < threeArraySize; i++) { for (int j=0; j < arraySize; j++) { if (cvmGet(waterTrainingHue,i,j) > maxH) maxH = cvmGet(waterTrainingHue,i,j); if (cvmGet(waterTrainingSat,i,j) > maxS) maxS = cvmGet(waterTrainingHue,i,j); if (cvmGet(waterTrainingVal,i,j) > maxV) maxV = cvmGet(waterTrainingVal,i,j); if (cvmGet(waterTrainingHue,i,j) < minH) minH = cvmGet(waterTrainingHue,i,j); if (cvmGet(waterTrainingSat,i,j) < minS) minS = cvmGet(waterTrainingSat,i,j); if (cvmGet(waterTrainingVal,i,j) < minV) minV = cvmGet(waterTrainingVal,i,j); } } //cout << "Min Value in the range: " << endl; //cout << minH << endl; //cout << minS << endl; //cout << minV << endl; //cout << "Max Value in the range: " << endl; //cout << maxH << endl; //cout << maxS << endl; //cout << maxV << endl << endl; /*********** Main loop. It traverses through the picture**********/ /**********************************************************************/ //Ignore unused parts of the image and convert them to black // for (int i=0; i<boatFront->height/1.45 - 1;i++) //{ // for (int j=0; j<Y-1;j++) // { // I[i][j].h = 0; // I[i][j].s = 0; // I[i][j].v = 0; // } // } /********************************************************************* // Use nearest neighbors to increase accuracy skyX = 0; skyY = 0; while (x < X-1) { //get a random sample taken from the picture. 
Must be determined whether //it is water or ground for (int i = 0; i<6;i++) { column1 = y+i; if (column1 > Y-1) column1 = Y-1; cvmSet(sampleHue,0,i,I[x][column1].h); cvmSet(sampleSat,0,i,I[x][column1].s); cvmSet(sampleVal,0,i,I[x][column1].v); } //Find the shortest distance between a pixel and the neighbors from each of //the training samples (sort of inefficient, but might do the job...sometimes) //HSV for water sample // learn classifier //CvKNearest knn(trainData, trainClasses, 0, false, itemsNumber); CvKNearest knnWaterHue(waterTrainingHue, trainClassesH, 0, false, pixelsNumber); CvKNearest knnWaterSat(waterTrainingSat, trainClassesS, 0, false, pixelsNumber); CvKNearest knnWaterVal(waterTrainingVal, trainClassesV, 0, false, pixelsNumber); //HSV for ground sample //CvKNearest knnGroundHue(groundTrainingHue, trainClasses2, 0, false, pixelsNumber); //CvKNearest knnGroundSat(groundTrainingSat, trainClasses2, 0, false, pixelsNumber); //CvKNearest knnGroundVal(groundTrainingVal, trainClasses2, 0, false, pixelsNumber); //HSV for sky sample //if (cvmGet(skyTrainingHue,0,0)!=0.0 && cvmGet(skyTrainingSat,0,0)!=0.0 && cvmGet(skyTrainingVal,0,0)!=0.0) //{ // CvKNearest knnSkyHue(skyTrainingHue, trainClasses, 0, false, pixelsNumber); // CvKNearest knnSkySat(skyTrainingSat, trainClasses, 0, false, pixelsNumber); // CvKNearest knnSkyVal(skyTrainingVal, trainClasses, 0, false, pixelsNumber); //} //scan nearest neighbors to each pixel responseWaterH = knnWaterHue.find_nearest(sampleHue,pixelsNumber,0,0,nearestWaterH,0); responseWaterS = knnWaterSat.find_nearest(sampleSat,pixelsNumber,0,0,nearestWaterS,0); responseWaterV = knnWaterVal.find_nearest(sampleVal,pixelsNumber,0,0,nearestWaterV,0); //responseGroundH = knnGroundHue.find_nearest(sampleHue,pixelsNumber,0,0,nearestGroundH,0); //responseGroundS = knnGroundSat.find_nearest(sampleSat,pixelsNumber,0,0,nearestGroundS,0); //responseGroundV = knnGroundVal.find_nearest(sampleVal,pixelsNumber,0,0,nearestGroundV,0); //for (int i=0;i<pixelsNumber;i++) //{ for (int j=0;j<pixelsNumber;j++) { if ((nearestWaterH->data.fl[j] == responseWaterH) )//&& (nearestWaterH->data.fl[j] == responseWaterH + 5)) // mark water samples as green comparator[0] = 1; else comparator[0] = 0; if ((nearestWaterS->data.fl[j] == responseWaterS) )//&& (nearestWaterS->data.fl[j] < responseWaterS + 5)) //mark water samples as green comparator[1] = 1; else comparator[1] = 0; if ((nearestWaterV->data.fl[j] == responseWaterV) )//&& (nearestWaterV->data.fl[j] < responseWaterV + 5)) //mark water samples as green comparator[2] = 1; else comparator[2] = 0; // similar sky pixels on the water //count votes for (int i3=0; i3 < 3; i3++) votesSum = votesSum + comparator[i3]; if (votesSum > 1) { I[x][y-6+j].h = 0; I[x][y-6+j].s = 255; I[x][y-6+j].v = 255; } votesSum = 0; } } if (y < Y-1) //5 use to be equal to pixelsNumber-1. y = y + 5; if (y > Y-1) y = Y-1; else if (y == Y-1) { //5 use to be equal to pixelsNumber-1 x = x + 1; y = 0; } // ix = 0; } /*********************************************************************/ for(int i = 0; i < 3; i++) { comparator[i] = 0; } //int counter = 0; column1 = 0; row1 = 0; x = boatFront->height/1.45; y = 0; while (x < X-1) { //get a random sample taken from the picture. 
Must be determined whether //it is water or ground for (int i = 0; i<6;i++) { column1 = y+i; if (column1 > Y-1) column1 = Y-1; cvmSet(sampleHue,0,i,I[x][column1].h); cvmSet(sampleSat,0,i,I[x][column1].s); cvmSet(sampleVal,0,i,I[x][column1].v); } for (int i=0;i<6;i++) { for (int j=0;j<6;j++) { if ((minH < cvmGet(sampleHue,0,j)) && (maxH > cvmGet(sampleHue,0,j))) //mark water samples as green comparator[0] = 1; else comparator[0] = 0; if ((minS < cvmGet(sampleSat,0,j)) && (maxS > cvmGet(sampleSat,0,j))) //mark water samples as green comparator[1] = 1; else comparator[1] = 0; if ((minV < cvmGet(sampleVal,0,j)) && (maxV > cvmGet(sampleVal,0,j))) //mark water samples as red comparator[2] = 1; else comparator[2] = 0; //count votes for (int i3=0; i3 < 3; i3++) votesSum = votesSum + comparator[i3]; if (votesSum > 1) { //use the known water samples as new training data if((i<boatFront->height/xDivisor) && (j<boatFront->width/yDivisor)) { cvmSet(resampleHue,i,j,cvmGet(sampleHue,0,j)); cvmSet(resampleSat,i,j,cvmGet(sampleSat,0,j)); cvmSet(resampleVal,i,j,cvmGet(sampleVal,0,j)); } //6 use to be equal to pixelsNumber. I[x][y-6+j].h = 0; I[x][y-6+j].s = 255; I[x][y-6+j].v = 255; } votesSum = 0; } } if (y < Y-1) //5 use to be equal to pixelsNumber-1. y = y + 5; if (y > Y-1) y = Y-1; else if (y == Y-1) { //5 use to be equal to pixelsNumber-1 x = x + 1; y = 0; } //ix = 0; } /***************Deal with reflection*****************/ for(int i = 0; i < 3; i++) { comparator[i] = 0; } //int counter = 0; votesSum = 0; column1 = 0; row1 = 0; x = boatFront->height/1.45; y = 0; while (x < X-1) { //get a random sample taken from the picture. Must be determined whether //it is water or ground for (int i = 0; i<6;i++) { column1 = y+i; if (column1 > Y-1) column1 = Y-1; cvmSet(sampleHue,0,i,I[x][column1].h); cvmSet(sampleSat,0,i,I[x][column1].s); cvmSet(sampleVal,0,i,I[x][column1].v); } for (int i=0;i<6;i++) { for (int j=0;j<6;j++) { if ((minH < cvmGet(sampleHue,0,j)) && (maxH > cvmGet(sampleHue,0,j))) //mark water samples as green comparator[0] = 1; else comparator[0] = 0; if ((0.8*255 > cvmGet(sampleSat,0,j)))// && (maxS < cvmGet(sampleSat,0,j))) //mark water samples as green comparator[1] = 1; else comparator[1] = 0; if ((0.6*255 < cvmGet(sampleVal,0,j)))// || (maxV < cvmGet(sampleVal,0,j))) //mark water samples as green comparator[2] = 1; else comparator[2] = 0; //count votes for (int i3=0; i3 < 3; i3++) votesSum = votesSum + comparator[i3]; if (votesSum > 1) { //use the known water samples as new training data if((i<boatFront->height/xDivisor) && (j<boatFront->width/yDivisor)) { cvmSet(resampleHue,i,j,cvmGet(sampleHue,0,j)); cvmSet(resampleSat,i,j,cvmGet(sampleSat,0,j)); cvmSet(resampleVal,i,j,cvmGet(sampleVal,0,j)); } //6 use to be equal to pixelsNumber. I[x][y-6+j].h = 0; I[x][y-6+j].s = 255; I[x][y-6+j].v = 255; } votesSum = 0; } } if (y < Y-1) //5 use to be equal to pixelsNumber-1. 
y = y + 5; if (y > Y-1) y = Y-1; else if (y == Y-1) { //5 use to be equal to pixelsNumber-1 x = x + 1; y = 0; } //ix = 0; } /**********Resample the entire patch**********/ /*********find a new min and max for a new sample range*************/ for(int i = 0; i < 3; i++) { comparator[i] = 0; } //int counter = 0; votesSum = 0; column1 = 0; row1 = 0; x = boatFront->height/1.45; y = 0; maxH = cvmGet(resampleHue,0,0); maxS = cvmGet(resampleSat,0,0); maxV = cvmGet(resampleVal,0,0); minH = cvmGet(resampleHue,0,0); minS = cvmGet(resampleSat,0,0); minV = cvmGet(resampleVal,0,0); for (int i=0; i < boatFront->height/xDivisor; i++) { for (int j=0; j < boatFront->width/yDivisor; j++) { if (cvmGet(resampleHue,i,j) > maxH) maxH = cvmGet(resampleHue,i,j); if (cvmGet(resampleSat,i,j) > maxS) maxS = cvmGet(resampleSat,i,j); if (cvmGet(resampleVal,i,j) > maxV) maxV = cvmGet(resampleVal,i,j); if (cvmGet(resampleHue,i,j) < minH) minH = cvmGet(resampleHue,i,j); if (cvmGet(resampleSat,i,j) < minS) minS = cvmGet(resampleSat,i,j); if (cvmGet(resampleVal,i,j) < minV) minV = cvmGet(resampleVal,i,j); } } while (x < X-1) { for (int i=0;i<6;i++) { for (int j=0;j<6;j++) { if ((minH < I[x][y-6+j].h) && (maxH > I[x][y-6+j].h)) //mark water samples as red I[x][y-6+j].h = 0; else comparator[0] = 0; if ((minS < I[x][y-6+j].s) && (maxS > I[x][y-6+j].s)) //mark water samples as red I[x][y-6+j].s = 255; else comparator[1] = 0; if ((minV < I[x][y-6+j].v) && (maxV > I[x][y-6+j].v)) //mark water samples as red I[x][y-6+j].v = 255; } } if (y < Y-1) //5 use to be equal to pixelsNumber-1. y = y + 5; if (y > Y-1) y = Y-1; else if (y == Y-1) { //5 use to be equal to pixelsNumber-1 x = x + 1; y = 0; } } //cout << "Sample data from current images" << endl; //for (int i = 0; i<20;i++) //{ // cout << "HUE: " << cvmGet(sampleHue,0,i) << endl; // cout << "Saturation: " << cvmGet(sampleSat,0,i) << endl; // cout << "Value: " << cvmGet(sampleVal,0,i) << endl; //} //traverse through the image one more time, divide the image in grids of // 500x500 pixels, and see how many pixels of water are in each grid. If // most of the pixels are labeled water, then mark all the other pixels // as water as well //int counter = 0; votesSum = 0; column1 = 0; row1 = 0; x = boatFront->height/1.45; y = 0; /***************Divide the picture in cells for filtering**********/ while (x < X-1) { //get a random sample taken from the picture. Must be determined whether //it is water or ground for (int i = 0; i < boatFront->height/xDivisor; i++) { for(int j = 0; j < boatFront->width/yDivisor; j++) { cvmSet(resampleHue2,i,j,I[x+i][y+j].h); cvmSet(resampleSat2,i,j,I[x+i][y+j].s); cvmSet(resampleVal2,i,j,I[x+i][y+j].v); if(cvmGet(resampleHue2,i,j)==0 && cvmGet(resampleSat2,i,j)==255 && cvmGet(resampleVal2,i,j)==255) { votesSum++; } } } if (votesSum > (((boatFront->height/xDivisor)*(boatFront->width/yDivisor))*(4/5))) { // if bigger than 4/5 the total number of pixels in a square, then consider the entire thing as water // We might need to use other smaller quantities (like 5/6 maybe?) 
for (int i = 0; i < boatFront->height/xDivisor;i++) { for (int j = 0; j < boatFront->width/yDivisor; j++) { row1 = x + i; if (row1 > X-1) row1 = X-1; column1 = y+j; if (column1 > Y-1) column1 = Y-1; I[row1][column1].h = 0; I[row1][column1].s = 255; I[row1][column1].v = 255; } } } else { // If not water, eliminate all red pixels and turn those pixels // back to the original color for (int i = 0; i < boatFront->height/xDivisor;i++) { for (int j = 0; j < boatFront->width/yDivisor; j++) { row1 = x + i; if (row1 > X-1) row1 = X-1; column1 = y+j; if (column1 > Y-1) column1 = Y-1; I[row1][column1].h = IBackUp[row1][column1].h;//255;//IBackUp[row1][column1].h; I[row1][column1].s = IBackUp[row1][column1].s;//255;//IBackUp[row1][column1].s; I[row1][column1].v = IBackUp[row1][column1].v;//255;//IBackUp[row1][column1].v; } } } y = y + boatFront->width/xDivisor; if (y > Y-1) { x = x + boatFront->height/yDivisor; y = 0; } votesSum = 0; } /********************Isolate obstacles************************/ votesSum = 0; int paint = 0; column1 = 0; row1 = 0; x = boatFront->height/1.45; y = 0; xDiv = 20; yDiv = 20; /***************Divide the picture in cells for filtering**********/ // Small pixel areas (noise) are going to be eliminated from the picture // living only the big obstacles while (x < X-2) { //get a random sample taken from the picture. Must be determined whether //it is water or ground for (int i = 0; i < boatFront->height/xDiv; i++) { for(int j = 0; j < boatFront->width/yDiv; j++) { row1 = x + i; if (row1 > X-2) row1 = X-2; column1 = y+j; if (column1 > Y-1) column1 = Y-1; cvmSet(resampleHue2,i,j,I[row1][column1].h); cvmSet(resampleSat2,i,j,I[row1][column1].s); cvmSet(resampleVal2,i,j,I[row1][column1].v); if(cvmGet(resampleHue2,i,j)==0 && cvmGet(resampleSat2,i,j)==255 && cvmGet(resampleVal2,i,j)==255) { votesSum++; } } } if (votesSum > (((boatFront->height/xDiv)*(boatFront->width/yDiv))*(4.5/5))) { // if bigger than 4/5 the total number of pixels in a square, then consider the entire thing as water // We might need to use other smaller quantities (like 5/6 maybe?) 
for (int i = 0; i < boatFront->height/xDiv;i++) { for (int j = 0; j < boatFront->width/yDiv; j++) { row1 = x + i; if (row1 > X-2) row1 = X-2; column1 = y+j; if (column1 > Y-1) column1 = Y-1; I[row1][column1].h = 0; I[row1][column1].s = 255; I[row1][column1].v = 255; } } } else { int count = 0; // If not water, eliminate all red pixels and turn those pixels // back to the original color for (int i = 0; i < boatFront->height/xDiv;i++) { for (int j = 0; j < boatFront->width/yDiv; j++) { row1 = x + i; if (row1 > X-2) row1 = X-2; column1 = y+j; if (column1 > Y-1) column1 = Y-1; I[row1][column1].h = IBackUp[row1][column1].h;//255; I[row1][column1].s = IBackUp[row1][column1].s;//255; I[row1][column1].v = IBackUp[row1][column1].v;//255; // count++; } } } y = y + boatFront->width/yDiv; if (y > Y-1) { x = x + boatFront->height/xDiv; if (x > X-2) x = X-2; y = 0; } votesSum = 0; } /****************Find Obstacles boundaries*********************************/ if( grayStorage == NULL ) { grayStorage = cvCreateMemStorage(0); } else { cvClearMemStorage(grayStorage); } backUpImage = cvCloneImage(boatFront); //convert from HSV to RGB cvCvtColor(boatFront, boatFront, CV_HSV2BGR); cvCvtColor(backUpImage, backUpImage, CV_HSV2BGR); //do flood fill for obstacles cvFloodFill( backUpImage, seed_point, color, cvScalarAll(255), cvScalarAll(2), NULL, 8, NULL); //convert to to gray to do more obstacle segmentation cvCvtColor(backUpImage, grayImage, CV_BGR2GRAY); //convert to binary cvThreshold(grayImage, bwImage, 100, 255, CV_THRESH_BINARY | CV_THRESH_OTSU); //eliminate small unnecessary pixel areas //bwImage is a pointer, so no need to reuse findCountours int findCountours = bwareaopen_(bwImage, 100); //find contours of obstacles in image cvFindContours(bwImage, grayStorage, &contours); cvZero( bwImage ); //redraw clean contours for( CvSeq* c=contours; c!=NULL; c=c->h_next) { cvDrawContours(bwImage, c, cvScalarAll(255), cvScalarAll(255), 8); cout << "Contour area: " << cvContourArea(c, CV_WHOLE_SEQ) << endl; //area in pixels //find the x,y coordinate of the center of a contour cvMoments(c, &moment, 0); //centroid/moment of the contour/obstacle //cout << "Contour center: " << moment.m10/moment.m00 << ", " << moment.m01/moment.m00 << endl; //The distance formula calculated by plotting points is given by: /*********** distance = 0.1622208546*pow(1.0186851612,pixels) *****************/ /*********** pixel = 87.0413255*pow(distance,0.4062956891) *****************/ //These formulas only work for 640X480 images // x,y coordinates of the obstacle from the bottom center of the image //Ignore everything less than 0.3 meters apart (anything too close to the boat) //if ((X - (row1 -(boatFront->height/xDiv)/2)) > (87.0413255*pow(0.3,0.4062956891))) //{ xObstacleDistance = 0.1622208546*pow(1.0186851612,X - (moment.m10/moment.m00)); if (xObstacleDistance == 0.0) //try to ignore obstacle that are too close xObstacleDistance = 0.01; //robot shall tell operator if there is //a problem with a close by obstacle yObstacleDistance = 0.1622208546*pow(1.0186851612,Y/2 - (moment.m01/moment.m00)); //obstacle distance obstacleDistance = sqrt(pow(xObstacleDistance,2) + pow(yObstacleDistance,2)); //obstacle heading obstacleHeading = tan((yObstacleDistance/xObstacleDistance)*PI/180); cout << "Obstacle polar coordinates: " << endl; cout << "x: " << xObstacleDistance << " Y: " << yObstacleDistance << endl; cout << "Distance (meters) " << obstacleDistance << endl; cout << "Direction (degrees): " << obstacleHeading << endl << endl; //} } 
/**************************************************************************/
    try
    {
        //fprintf(stderr,"\n boatFront\n");
        cvShowImage("Boat Front", boatFront);
        //cvShowImage("Color Segment", backUpImage);
        //cvShowImage("Obstacles", bwImage);
    }
    catch (sensor_msgs::CvBridgeException& e)
    {
        ROS_ERROR("Could not convert from '%s' to 'bgr8'.", msg->encoding.c_str());
    }
}
bool potential_cloud_shadow_snow_mask ( Input_t *input, float cloud_prob_threshold, float *ptm, float *t_templ, float *t_temph, unsigned char **cloud_mask, unsigned char **shadow_mask, unsigned char **snow_mask, unsigned char **water_mask, unsigned char **final_mask ) { char errstr[MAX_STR_LEN]; int nrows = input->size.l; int ncols = input->size.s; int ib = 0; int row =0; int col = 0; int mask_counter = 0; int clear_pixel_counter = 0; int clear_land_pixel_counter = 0; float ndvi, ndsi; int16 *f_temp = NULL; int16 *f_wtemp = NULL; float visi_mean; float whiteness = 0; float hot; float lndptm; float l_pt; float h_pt; int mask; float t_wtemp; float **wfinal_prob; float **final_prob; float wtemp_prob; int t_bright; float brightness_prob; int t_buffer; float temp_l; float temp_prob; float vari_prob; float max_value; float *prob = NULL; float clr_mask; float wclr_mask; int16 *nir = NULL; int16 *swir = NULL; float backg_b4; float backg_b5; float shadow_prob; int status; /* Dynamic memory allocation */ unsigned char **clear_mask; unsigned char **clear_land_mask; clear_mask = (unsigned char **)ias_misc_allocate_2d_array(input->size.l, input->size.s, sizeof(unsigned char)); clear_land_mask = (unsigned char **)ias_misc_allocate_2d_array( input->size.l, input->size.s, sizeof(unsigned char)); if (clear_mask == NULL || clear_land_mask ==NULL) { sprintf (errstr, "Allocating mask memory"); ERROR (errstr, "pcloud"); } printf("The first pass\n"); /* Loop through each line in the image */ for (row = 0; row < nrows; row++) { /* Print status on every 100 lines */ if (!(row%1000)) { printf ("Processing line %d\r",row); fflush (stdout); } /* For each of the image bands */ for (ib = 0; ib < input->nband; ib++) { /* Read each input reflective band -- data is read into input->buf[ib] */ if (!GetInputLine(input, ib, row)) { sprintf (errstr, "Reading input image data for line %d, " "band %d", row, ib); ERROR (errstr, "pcloud"); } } /* For the thermal band */ /* Read the input thermal band -- data is read into input->therm_buf */ if (!GetInputThermLine(input, row)) { sprintf (errstr, "Reading input thermal data for line %d", row); ERROR (errstr, "pcloud"); } for (col = 0; col < ncols; col++) { if ((input->buf[2][col] + input->buf[3][col]) != 0) ndvi = (input->buf[3][col] - input->buf[2][col]) / (input->buf[3][col] + input->buf[2][col]); else ndvi = 0.01; if ((input->buf[1][col] + input->buf[4][col]) != 0) ndsi = (input->buf[1][col] - input->buf[4][col]) / (input->buf[1][col] + input->buf[4][col]); else ndsi = 0.01; /* process non-fill pixels only */ if (input->therm_buf[col] > -9999) { mask = 1; mask_counter++; } else mask = 0; /* Basic cloud test */ if ((ndsi < 0.8) && (ndvi < 0.8) && (input->buf[5][col] > 300) && (input->therm_buf[col] < 2700)) cloud_mask[row][col] = 1; else cloud_mask[row][col] = 0; /* It takes every snow pixels including snow pixel under thin clouds or icy clouds */ if ((ndsi > 0.15) && (input->therm_buf[col] < 380) && (input->buf[3][col] > 1100) && (input->buf[1][col] > 1000)) snow_mask[row][col] = 1; else snow_mask[row][col] = 0; /* Zhe's water test (works over thin cloud) */ if (((ndvi < 0.01) && (input->buf[3][col] < 1100)) || ((ndvi < 0.1) && (ndvi > 0.0) && (input->buf[3][col] < 500))) water_mask[row][col] = 1; else water_mask[row][col] = 0; if (mask == 0) water_mask[row][col] = 255; /* visible bands flatness (sum(abs)/mean < 0.6 => brigt and dark cloud) */ visi_mean = (input->buf[0][col] + input->buf[1][col] + input->buf[2][col]) / 3.0; whiteness = ((abs(input->buf[0][col] - visi_mean) + 
abs(input->buf[1][col] - visi_mean) + abs(input->buf[2][col] - visi_mean)))/ visi_mean; /* Update cloud_mask, if one visible band is saturated, whiteness = 0 */ if (input->buf[0][col] > input->meta.therm_satu_value_ref || input->buf[1][col] > input->meta.therm_satu_value_ref || input->buf[2][col] > input->meta.therm_satu_value_ref) whiteness = 0; if (cloud_mask[row][col] == 1 && whiteness < 0.7) cloud_mask[row][col] = 1; else cloud_mask[row][col] = 0; /* Haze test */ hot = input->buf[0][col] - 0.5 *input->buf[2][col] - 800; if (cloud_mask[row][col] == 1 && (hot > 0.0 || abs(whiteness) < MINSIGMA)) cloud_mask[row][col] = 1; else cloud_mask[row][col] = 0; /* Ratio 4/5 > 0.75 test */ if (cloud_mask[row][col] == 1 && (input->buf[3][col] / input->buf[4][col] > 0.75)) cloud_mask[row][col] = 1; else cloud_mask[row][col] = 0; /* Test whether use thermal band or not */ if (cloud_mask[row][col] == 0 && mask == 1) { clear_mask[row][col] = 1; clear_pixel_counter++; } else clear_mask[row][col] = 0; if (water_mask[row][col] != 1 && clear_mask[row][col] == 1) { clear_land_mask[row][col] = 1; clear_land_pixel_counter++; } else clear_land_mask[row][col] = 0; } } *ptm = 100. * ((float)clear_pixel_counter / (float)mask_counter); lndptm = 100. * ((float)clear_land_pixel_counter / (float)mask_counter); if (*ptm <= 0.1) majority_filter(cloud_mask, nrows, ncols); else { f_temp = malloc(input->size.l * input->size.s * sizeof(int16)); f_wtemp = malloc(input->size.l * input->size.s * sizeof(int16)); if (f_temp == NULL || f_wtemp == NULL) { sprintf (errstr, "Allocating temp memory"); ERROR (errstr, "pcloud"); } } printf("The second pass\n"); int16 f_temp_max = 0; int16 f_temp_min = 0; int16 f_wtemp_max = 0; int16 f_wtemp_min = 0; int index = 0; int index2 = 0; /* Loop through each line in the image */ for (row = 0; row < nrows; row++) { /* Print status on every 100 lines */ if (!(row%1000)) { printf ("Processing line %d\r", row); fflush (stdout); } /* For each of the image bands */ for (ib = 0; ib < input->nband; ib++) { /* Read each input reflective band -- data is read into input->buf[ib] */ if (!GetInputLine(input, ib, row)) { sprintf (errstr, "Reading input image data for line %d, " "band %d", row, ib); ERROR (errstr, "pcloud"); } } /* For the thermal band */ /* Read the input thermal band -- data is read into input->therm_buf */ if (!GetInputThermLine(input, row)) { sprintf (errstr, "Reading input thermal data for line %d", row); ERROR (errstr, "pcloud"); } for (col =0; col < ncols; col++) { if (*ptm <= 0.1) /* No thermal test, meaningless for snow detection */ { /* All cloud */ if (cloud_mask[row][col] != 1) shadow_mask[row][col] = 1; else shadow_mask[row][col] = 0; #if 0 /* Tempoarary outpouts */ if (water_mask[row][col] == 1) final_mask[row][col] = 1; if (shadow_mask[row][col] == 1) final_mask[row][col] = 2; if (cloud_mask[row][col] == 1) final_mask[row][col] = 4; if (input->therm_buf[col] = -9999) final_mask[row][col] = 255; #endif } else { if (lndptm >= 0.1) { /* get clear land temperature */ if (clear_land_mask[row][col] == 1 && input->therm_buf[col] != -9999) { f_temp[index] = input->therm_buf[col]; if (f_temp_max < f_temp[index]) f_temp_max = f_temp[index]; if (f_temp_min > f_temp[index]) f_temp_min = f_temp[index]; index++; } } else { /*get clear water temperature */ if (clear_mask[row][col] == 1 && input->therm_buf[col] != -9999) { f_temp[index] = input->therm_buf[col]; if (f_temp_max < f_temp[index]) f_temp_max = f_temp[index]; if (f_temp_min > f_temp[index]) f_temp_min = f_temp[index]; index++; } } 
if (water_mask[row][col] == 1 && input->therm_buf[col] <= 300 && input->therm_buf[col] != -9999) { f_wtemp[index2] = input->therm_buf[col]; if (f_wtemp[index2] > f_wtemp_max) f_wtemp_max = f_wtemp[index2]; if (f_wtemp[index2] < f_wtemp_max) f_wtemp_min = f_wtemp[index2]; index2++; } } } } printf("Clear sky pixel percentage in this scene = %.2f\n", *ptm); if (*ptm <= 0.1) { *t_templ = -1.0; *t_temph = -1.0; return 0; } else { /* Tempearture for snow test */ l_pt = 0.175; h_pt = 1 - l_pt; printf("====%d,%d,%d,%d\n",f_wtemp_max,f_wtemp_min,f_temp_max,f_temp_min); #if 0 prctile(f_wtemp, index2 + 1, 100*h_pt, &t_wtemp); /* 0.175 percentile background temperature (low) */ prctile(f_temp, index + 1, 100*l_pt, t_templ); /* 0.825 percentile background temperature (high) */ prctile (f_temp, index + 1, 100*h_pt, t_temph); #endif t_wtemp = h_pt * (float)(f_wtemp_max-f_wtemp_min) + (float)f_wtemp_min; *t_templ = l_pt * (float)(f_temp_max-f_temp_min) + (float)f_temp_min; *t_temph = h_pt * (float)(f_temp_max-f_temp_min) + (float)f_temp_min; int f_temp_length; int f_wtemp_length; f_temp_length = f_temp_max - f_temp_min + 1; f_wtemp_length = f_wtemp_max - f_wtemp_min + 1; #if 0 prctile(f_wtemp, index2 + 1, f_wtemp_length, 100*h_pt, &t_wtemp); /* 0.175 percentile background temperature (low) */ prctile(f_temp, index + 1, f_temp_length, 100*l_pt, t_templ); /* 0.825 percentile background temperature (high) */ prctile (f_temp, index + 1, f_temp_length, 100*h_pt, t_temph); #endif printf("index, index2 = %d,%d\n",index,index2); /* Temperature test */ t_buffer = 4*100; *t_templ -= t_buffer; *t_temph += t_buffer; temp_l=*t_temph-*t_templ; printf("t_wtemp,t_templ,t_temph = %f,%f,%f\n",t_wtemp,*t_templ,*t_temph); /* Relase f_temp memory */ free(f_wtemp); free(f_temp); wfinal_prob = (float **)ias_misc_allocate_2d_array(input->size.l, input->size.s, sizeof(float)); final_prob = (float **)ias_misc_allocate_2d_array(input->size.l, input->size.s, sizeof(float)); if (wfinal_prob == NULL || final_prob == NULL) { sprintf (errstr, "Allocating prob memory"); ERROR (errstr, "pcloud"); } printf("The third pass\n"); /* Loop through each line in the image */ for (row = 0; row < nrows; row++) { /* Print status on every 100 lines */ if (!(row%1000)) { printf ("Processing line %d\r",row); fflush (stdout); } /* For each of the image bands */ for (ib = 0; ib < input->nband; ib++) { /* Read each input reflective band -- data is read into input->buf[ib] */ if (!GetInputLine(input, ib, row)) { sprintf (errstr, "Reading input image data for line %d, " "band %d", row, ib); ERROR (errstr, "pcloud"); } } /* For the thermal band */ /* Read the input thermal band -- data is read into input->therm_buf */ if (!GetInputThermLine(input, row)) { sprintf (errstr, "Reading input thermal data for line %d", row); ERROR (errstr, "pcloud"); } for (col = 0; col <ncols; col++) { /* Get cloud prob over water */ /* Temperature test over water */ wtemp_prob = (t_wtemp - input->therm_buf[col]) / 400.0; /* Brightness test (over water) */ t_bright = 1100; brightness_prob = input->buf[4][col] / t_bright; if (brightness_prob > 1) brightness_prob = 1; /*Final prob mask (water), cloud over water probability */ wfinal_prob[row][col] =100 * wtemp_prob * brightness_prob; temp_prob=(*t_temph-input->therm_buf[col]) / temp_l; /* Temperature can have prob > 1 */ if (temp_prob < 0) temp_prob = 0; if ((input->buf[2][col] + input->buf[3][col]) != 0) ndvi = (input->buf[3][col] - input->buf[2][col]) / (input->buf[3][col] + input->buf[2][col]); else ndvi = 0.01; if 
((input->buf[1][col] + input->buf[4][col]) != 0) ndsi = (input->buf[1][col] - input->buf[4][col]) / (input->buf[1][col] + input->buf[4][col]); else ndsi = 0.01; /* NDVI and NDSI should not be negative */ if (input->buf[2][col] >= input->meta.therm_satu_value_ref && ndsi < 0) ndsi = 0; if (input->buf[3][col] >= input->meta.therm_satu_value_ref && ndvi < 0) ndvi = 0; /* Vari_prob=1-max(max(abs(NDSI),abs(NDVI)),whiteness); */ if (abs(ndsi) > abs(ndvi)) max_value = abs(ndsi); else max_value = abs(ndvi); if (whiteness > max_value) max_value = whiteness; vari_prob = 1 - max_value; /*Final prob mask (land) */ final_prob[row][col] = 100 * (temp_prob * vari_prob); } } prob = malloc(input->size.l * input->size.s * sizeof(float)); if(prob == NULL) { sprintf (errstr, "Allocating prob memory"); ERROR (errstr, "pcloud"); } float prob_max = 0.0; float prob_min = 0.0; int index3 = 0; for (row = 0; row < nrows; row++) { for (col = 0; col <ncols; col++) { if (clear_land_mask[row][col] == 1); { prob[index3] = final_prob[row][col]; if ((prob[index3] - prob_max) > MINSIGMA) prob_max = prob[index3]; if ((prob_min - prob[index3]) > MINSIGMA) prob_min = prob[index3]; index3++; } } } /*Dynamic threshold for land */ // prctile2(prob, index3+1, 100*h_pt, &clr_mask); printf("index3,prob_max,prob_min =%d, %f, %f\n",index3,prob_max,prob_min); clr_mask = h_pt * (prob_max - prob_min) + prob_min; printf("clr_mask =%d, %f\n",index3,clr_mask); #if 0 prctile2(prob, index3+1, 100*h_pt, &clr_mask); printf("clr_mask = %f\n",clr_mask); #endif clr_mask += cloud_prob_threshold; printf("clr_mask = %f\n",clr_mask); /* Relase memory for prob */ free(prob); /* Fixed threshold for water */ wclr_mask = 50.0; printf("pcloud probability threshold (land) = %.2f\n", clr_mask); printf("The fourth pass\n"); /* Loop through each line in the image */ for (row = 0; row < nrows; row++) { /* Print status on every 100 lines */ if (!(row%1000)) { printf ("Processing line %d\r",row); fflush (stdout); } /* For each of the image bands */ for (ib = 0; ib < input->nband; ib++) { /* Read each input reflective band -- data is read into input->buf[ib] */ if (!GetInputLine(input, ib, row)) { sprintf (errstr, "Reading input image data for line %d, " "band %d", row, ib); ERROR (errstr, "pcloud"); } } /* For the thermal band */ /* Read the input thermal band -- data is read into input->therm_buf */ if (!GetInputThermLine(input, row)) { sprintf (errstr, "Reading input thermal data for line %d", row); ERROR (errstr, "pcloud"); } for (col =0; col < ncols; col++) { if ((cloud_mask[row][col] == 1 && final_prob[row][col] > clr_mask && water_mask[row][col] == 0) || (cloud_mask[row][col] == 1 && wfinal_prob[row][col] > wclr_mask && water_mask[row][col] == 1) || (final_prob[row][col] > 99 && water_mask[row][col] == 0) || (input->therm_buf[col] < *t_templ - 3500)) cloud_mask[row][col] = 1; } } /* Free the memory */ status = ias_misc_free_2d_array((void **)wfinal_prob); status = ias_misc_free_2d_array((void **)final_prob); /* Band 4 flood fill */ nir = malloc(input->size.l * input->size.s * sizeof(int16)); swir = malloc(input->size.l * input->size.s * sizeof(int16)); if (nir == NULL || swir == NULL) { sprintf(errstr, "Allocating nir and swir memory"); ERROR (errstr, "pcloud"); } int16 nir_max = 0; int16 swir_max = 0; index = 0; index2 = 0; for (row = 0; row < nrows; row++) { for (col = 0; col < ncols; col++) { if (clear_land_mask[row][col] == 1) { nir[index] = input->buf[3][col]; if (nir[index] > nir_max) nir_max = nir[index]; index++; } if (clear_land_mask[row][col] == 
1) { nir[index2] = input->buf[4][col]; if (swir[index2] > swir_max) swir_max = swir[index2]; index2++; } } } status = ias_misc_free_2d_array((void **)clear_mask); status = ias_misc_free_2d_array((void **)clear_land_mask); /* Improve cloud mask by majority filtering */ majority_filter(cloud_mask, nrows, ncols); /* Estimating background (land) Band 4 Ref */ backg_b4 = l_pt * nir_max; backg_b5 = h_pt * swir_max; /* Release the memory */ free(nir); free(swir); /* May need allocate two memory for new band 4 and 5 after imfill (flood filling), also may need read in whole scene of bands 4 and 5 for flood filling purpose */ int16 **new_nir; int16 **new_swir; new_nir = (int16 **)ias_misc_allocate_2d_array(input->size.l, input->size.s, sizeof(int16)); new_swir = (int16 **)ias_misc_allocate_2d_array(input->size.l, input->size.s, sizeof(int16)); if (wfinal_prob == NULL || final_prob == NULL) { sprintf (errstr, "Allocating prob memory"); ERROR (errstr, "pcloud"); } printf("The fifth pass\n"); /* Loop through each line in the image */ for (row = 0; row < nrows; row++) { /* Print status on every 100 lines */ if (!(row%1000)) { printf ("Processing line %d\r",row); fflush (stdout); } /* For each of the image bands */ for (ib = 0; ib < input->nband; ib++) { /* Read each input reflective band -- data is read into input->buf[ib] */ if (!GetInputLine(input, ib, row)) { sprintf (errstr, "Reading input image data for line %d, " "band %d", row, ib); ERROR (errstr, "pcloud"); } } /* For the thermal band */ /* Read the input thermal band -- data is read into input->therm_buf */ if (!GetInputThermLine(input, row)) { sprintf (errstr, "Reading input thermal data for line %d", row); ERROR (errstr, "pcloud"); } for (col = 0; col < ncols; col++) { if (input->therm_buf[col] == -9999) { new_nir[row][col] = backg_b4; new_swir[row][col] = backg_b5; } else { new_nir[row][col] = input->buf[3][col]; new_swir[row][col] = input->buf[4][col]; } } } /* TODO: Fill in regional minimum band 4 ref*/ IplImage* img = cvCreateImage(cvSize(ncols, nrows), IPL_DEPTH_8U, 1); if (!img) { sprintf (errstr, "Reading input image data for line %d, " "band %d", row, ib); ERROR (errstr, "pcloud"); } // cvSet2D(img, ncols, nrows, new_nir); for (row = 0; row < nrows; row++) { for (col = 0; col < ncols; col++) { img->imageData[row*ncols+col] = new_nir[row][col]; // cvSet2D(img, col, row, new_nir[row][col]); } } CvPoint seed_point = cvPoint(3,3); CvScalar color = CV_RGB(1,0,0); cvFloodFill(img, seed_point, color, cvScalarAll(5.0), cvScalarAll(5.0), NULL, 4, NULL ); // cvGet2D(img, ncols, nrows, new_nir); for (row = 0; row < nrows; row++) { for (col = 0; col < ncols; col++) { new_nir[row][col] = img->imageData[row*ncols+col]; // new_nir[row][col] = cvGet2D(img, col, row); } } /* Release image memory */ cvReleaseImage(&img); /* TODO: Fill in regional minimum band 5 ref*/ IplImage* img2 = cvCreateImage(cvSize(ncols, nrows), IPL_DEPTH_8U, 1); if (!img2) { sprintf (errstr, "Reading input image data for line %d, " "band %d", row, ib); ERROR (errstr, "pcloud"); } // cvSet2D(img, ncols, nrows, new_swir); for (row = 0; row < nrows; row++) { for (col = 0; col < ncols; col++) { img2->imageData[row*ncols+col] = new_swir[row][col]; // cvSet2D(img2, col, row, new_swir[row][col]); } } cvFloodFill(img2, seed_point, color, cvScalarAll(5.0), cvScalarAll(5.0), NULL, 4, NULL ); // cvGet2D(img, ncols, nrows, new_swir); for (row = 0; row < nrows; row++) { for (col = 0; col < ncols; col++) { new_swir[row][col] = img2->imageData[row*ncols+col]; // new_swir[row][col] = 
cvGet2D(img2, col, row); } } /* Release image memory */ cvReleaseImage(&img2); for (row = 0; row < nrows; row++) { for (col = 0; col < ncols; col++) { if (new_nir[row][col] < new_swir[row][col]) shadow_prob = new_nir[row][col]; else shadow_prob = new_swir[row][col]; if (shadow_prob > 200) shadow_mask[row][col] = 1; else shadow_mask[row][col] = 0; } } status = ias_misc_free_2d_array((void **)new_nir); status = ias_misc_free_2d_array((void **)new_swir); } printf("The sixth pass\n"); /* Loop through each line in the image */ for (row = 0; row < nrows; row++) { /* Print status on every 100 lines */ if (!(row%1000)) { printf ("Processing line %d\r",row); fflush (stdout); } /* For each of the image bands */ for (ib = 0; ib < input->nband; ib++) { /* Read each input reflective band -- data is read into input->buf[ib] */ if (!GetInputLine(input, ib, row)) { sprintf (errstr, "Reading input image data for line %d, " "band %d", row, ib); ERROR (errstr, "pcloud"); } } /* For the thermal band */ /* Read the input thermal band -- data is read into input->therm_buf */ if (!GetInputThermLine(input, row)) { sprintf (errstr, "Reading input thermal data for line %d", row); ERROR (errstr, "pcloud"); } for (col = 0; col < ncols; col++) { /* refine Water mask - Zhe's water mask (no confusion water/cloud) */ if (water_mask[row][col] == 1 && cloud_mask[row][col] == 0) water_mask[row][col] = 1; else water_mask[row][col] = 0; if (input->therm_buf[col]==-9999) { cloud_mask[row][col] = 255; shadow_mask[row][col] = 255; // final_mask[row][col] = 255; } #if 0 /* Temporary outputs */ if (water_mask[row][col] == 1) final_mask[row][col] = 1; if (snow_mask[row][col] == 1) final_mask[row][col] = 3; if (shadow_mask[row][col] == 1) final_mask[row][col] = 2; if (cloud_mask[row][col] == 1) final_mask[row][col] = 4; if ((water_mask[row][col] != 1) && (snow_mask[row][col] != 1) && (shadow_cal[row][col] != 1) && (cloud_cal[row][col] != 1) && boundary_test[row][col] != 255) final_mask[row][col] = 0; #endif } } printf("t_wtemp,t_templ,t_temph = %f,%f,%f\n",t_wtemp,*t_templ,*t_temph); return 0; }
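A note on the regional-minimum (imfill) step above: the loops copy int16 reflectance into an 8-bit, single-channel IplImage, so any value outside 0..255 is silently truncated, and imageData must be addressed through widthStep because OpenCV may pad each row. A minimal sketch of a padding-aware, clamped copy follows; the helper name, the use of short in place of int16, and the clamping policy are assumptions for illustration rather than part of the original cfmask code.

#include <opencv/cv.h>

/* Hypothetical helper: copy a short (int16) 2D array into an 8-bit,
 * single-channel IplImage, honouring widthStep and clamping to 0..255. */
static void copy_int16_to_8u(IplImage *img, short **src, int nrows, int ncols)
{
    int row, col;
    for (row = 0; row < nrows; row++)
    {
        uchar *line = (uchar *)(img->imageData + row * img->widthStep);
        for (col = 0; col < ncols; col++)
        {
            int v = src[row][col];
            if (v < 0) v = 0;         /* an 8-bit image cannot hold the full int16 range, */
            if (v > 255) v = 255;     /* so out-of-range values are clamped (data is lost) */
            line[col] = (uchar)v;
        }
    }
}

/* Assumed usage in the spirit of the snippet above:
 *   copy_int16_to_8u(img, new_nir, nrows, ncols);
 *   cvFloodFill(img, cvPoint(3,3), cvScalarAll(1), cvScalarAll(5.0),
 *               cvScalarAll(5.0), NULL, 4, NULL);
 */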
int main( int argc, char** argv ) { if (argc>=2){ //Load image IplImage*src = cvLoadImage( argv[1]); //Create Windows and Position cvNamedWindow("Input",0); cvResizeWindow("Input",500,350); cvMoveWindow("Input", 0, 0); cvNamedWindow("Output",0); cvResizeWindow("Output",500,350); cvMoveWindow("Output", 0, 600); cvNamedWindow( "Hough", 0 ); cvResizeWindow("Hough",500,350); cvMoveWindow("Hough",700,0); cvNamedWindow( "FloodFill", 0 ); cvResizeWindow("FloodFill",500,350); cvMoveWindow("FloodFill",700,600); //Display Original Image cvShowImage( "Input", src ); IplImage*srcCopy = cvCloneImage(src); //Flood Fill CvPoint seedPoint= cvPoint((srcCopy->width)/2,(srcCopy->height)/2); CvScalar pixColor=avgpixelcolor(srcCopy,seedPoint,5); //Takes avg pixel color value (5x5 grid) cvLine( srcCopy, cvPoint(seedPoint.x,srcCopy->height*.9), cvPoint(seedPoint.x,srcCopy->height*.1), pixColor, 3, 8 ); cvFloodFill(srcCopy,seedPoint,cvScalarAll(255),cvScalarAll(50),cvScalarAll(50),NULL,8|CV_FLOODFILL_FIXED_RANGE, NULL); cvShowImage("FloodFill",srcCopy); //Convert to Grayscale IplImage*dst = cvCreateImage( cvGetSize(src), IPL_DEPTH_8U, 1 ); cvCvtColor( srcCopy, dst , CV_BGR2GRAY ); //Display Flood Fill Results cvCircle(srcCopy,seedPoint,5,cvScalarAll(0),3,8,0); //Threshold IplImage*thresh = cvCreateImage(cvGetSize(dst), IPL_DEPTH_8U,1); cvThreshold(dst,thresh, 230, //Set Manually during Initialization 255, //Max Pixel Intensity Value (Do not Change) CV_THRESH_TOZERO ); //Canny Edge Detection cvCanny( thresh, dst, 0, //Low Threshold 255, //High Threshold 3 ); //Storage for Hough Line Endpoints CvMemStorage* storage = cvCreateMemStorage(0); //Hough CvSeq* lines = cvHoughLines2( dst,storage,CV_HOUGH_PROBABILISTIC, 1, //rho 1*CV_PI/180, //theta 150, //Accumulator threshold 500, //Min Line Length 200 //Max Gap Between Collinear Segments ); //Draw Vertical Lines on src image for(int i = 0; i < lines->total; i++ ) { CvPoint* Point = (CvPoint*)cvGetSeqElem(lines,i); cvLine( src, Point[0], Point[1], CV_RGB(0,220,20), 3, 8 ); //Reject Horizontal lines (slope computed in floating point; see the sketch after this example) float slope=(float)(Point[0].y-Point[1].y)/(float)(Point[0].x-Point[1].x); } //Create a Trapezoidal Mask //Detect Horizontal Lines //Isolate 4 points //Display Image cvShowImage( "Output", src); //For Calibration Purposes "what the Hough transform sees" cvShowImage( "Hough", dst ); //Wait for User 10sec cvWaitKey(10000); //Deallocate Memory cvReleaseImage( &src ); cvReleaseImage( &dst ); cvReleaseImage( &thresh ); cvReleaseImage( &srcCopy ); cvReleaseMemStorage( &storage ); cvDestroyWindow( "Input" ); cvDestroyWindow( "Output" ); cvDestroyWindow("Hough"); cvDestroyWindow("FloodFill"); } else{ printf("Hough Transform Code requires an input image filename\n"); return 0; } }
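The loop above computes a slope that is never used; a sketch of the hinted-at "Reject Horizontal lines" step is below. The function name and the 0.5 slope cut-off are assumptions, not part of the original program.

#include <math.h>
#include <float.h>
#include <opencv/cv.h>

/* Sketch: draw only the near-vertical segments returned by cvHoughLines2,
 * rejecting near-horizontal ones by their floating-point slope. */
static void draw_steep_lines(IplImage *dst, CvSeq *lines)
{
    int i;
    for (i = 0; i < lines->total; i++)
    {
        CvPoint *pt = (CvPoint *)cvGetSeqElem(lines, i);
        float dx = (float)(pt[0].x - pt[1].x);
        float dy = (float)(pt[0].y - pt[1].y);
        float slope = (dx != 0.0f) ? dy / dx : FLT_MAX;  /* vertical segment: "infinite" slope */
        if (fabsf(slope) >= 0.5f)                        /* assumed cut-off */
            cvLine(dst, pt[0], pt[1], CV_RGB(0, 220, 20), 3, 8, 0);
    }
}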
int main (int argc, const char * argv[]) { if ( argc != 4 ) { fprintf(stderr, "Expected 3 image filenames <no_cup> <with_cup> <outdoor_scene>.\n"); exit(1); } IplImage* src1 = cvLoadImage(argv[1], CV_LOAD_IMAGE_COLOR); if ( src1 == NULL ) { fprintf(stderr, "Couldn't load file 1 %s\n", argv[1]); exit(1); } IplImage* src2 = cvLoadImage(argv[2], CV_LOAD_IMAGE_COLOR); if ( src2 == NULL ) { fprintf(stderr, "Couldn't load file 2 %s\n", argv[2]); exit(1); } if ( src1->width != src2->width || src1->height != src2->height ) { fprintf(stderr, "2 images should have the same width and height but they don't.\n"); exit(1); } IplImage* grayscale1 = cvCreateImage(cvSize(src1->width, src1->height), src1->depth, 1); IplImage* grayscale2 = cvCreateImage(cvSize(src2->width, src2->height), src2->depth, 1); cvCvtColor(src1, grayscale1, CV_RGB2GRAY); cvCvtColor(src2, grayscale2, CV_RGB2GRAY); IplImage* diff = cvCreateImage(cvSize(src2->width, src2->height), src2->depth, 1); cvAbsDiff(grayscale1, grayscale2, diff); IplImage* result = cvCreateImage(cvSize(src2->width, src2->height), src2->depth, 1); cvThreshold(diff, result, 40, 255, CV_THRESH_BINARY); IplImage* mopResult = cvCreateImage(cvSize(diff->width, diff->height), diff->depth, 1); cvMorphologyEx(result, mopResult, NULL, NULL, CV_MOP_OPEN, 1); // start of exercise 5.6 specific code (the above is from 5.5). CvConnectedComp largestComponent; bzero(&largestComponent, sizeof(largestComponent)); CvPoint largestRegionPoint = cvPoint(-1, -1); int x, y; for(y = 0; y < mopResult->height; ++y) { unsigned char* row = (unsigned char*)(mopResult->imageData + mopResult->widthStep * y); for(x = 0; x < mopResult->width; ++x) { if ( row[x] == 255 ) { CvConnectedComp region; cvFloodFill(mopResult, cvPoint(x, y), cvScalarAll(100), cvScalarAll(0), cvScalarAll(0), &region, 4, NULL); if ( largestRegionPoint.x == -1 ) { // first region found largestRegionPoint = cvPoint(x, y); largestComponent = region; } else if ( largestComponent.area < region.area ) { // Clear the last area. cvFloodFill(mopResult, largestRegionPoint, cvScalarAll(0), cvScalarAll(0), cvScalarAll(0), NULL, 4, NULL); largestRegionPoint = cvPoint(x, y); largestComponent = region; } else { cvFloodFill(mopResult, cvPoint(x, y), cvScalarAll(0), cvScalarAll(0), cvScalarAll(0), NULL, 4, NULL); } } } } if ( largestRegionPoint.x != -1 ) { cvFloodFill(mopResult, largestRegionPoint, cvScalarAll(255), cvScalarAll(0), cvScalarAll(0), NULL, 4, NULL); } // Start of Exercise 5.7. Before this is the same as 5.6 IplImage* outdoorScene = cvLoadImage(argv[3], CV_LOAD_IMAGE_COLOR); if ( outdoorScene == NULL ) { fprintf(stderr, "Couldn't load file 3 %s\n", argv[3]); exit(1); } cvCopy(src2, outdoorScene, mopResult); cvNamedWindow("outdoorScene", CV_WINDOW_NORMAL); cvShowImage("outdoorScene", outdoorScene); cvWaitKey(0); return 0; }
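The scan above repaints every non-largest region back to 0 with additional flood fills. An alternative, sketched below, is to fill only a mask (CV_FLOODFILL_MASK_ONLY) so the thresholded image is never modified while scanning and already-visited pixels are simply skipped. The function name, 4-connectivity, and returning the component by value are assumptions rather than part of the exercise solution.

#include <opencv/cv.h>

/* Sketch: find the largest connected 255-region of an 8-bit binary image
 * without repainting it, by flood-filling into a separate mask. */
static CvConnectedComp find_largest_region(IplImage *binary)
{
    CvConnectedComp largest = {0};
    IplImage *mask = cvCreateImage(cvSize(binary->width + 2, binary->height + 2),
                                   IPL_DEPTH_8U, 1);
    int x, y;
    cvZero(mask);
    for (y = 0; y < binary->height; y++)
    {
        const uchar *row = (const uchar *)(binary->imageData + y * binary->widthStep);
        const uchar *mrow = (const uchar *)(mask->imageData + (y + 1) * mask->widthStep) + 1;
        for (x = 0; x < binary->width; x++)
        {
            if (row[x] == 255 && mrow[x] == 0)   /* foreground pixel not yet visited */
            {
                CvConnectedComp region;
                cvFloodFill(binary, cvPoint(x, y), cvScalarAll(255),
                            cvScalarAll(0), cvScalarAll(0), &region,
                            4 | CV_FLOODFILL_MASK_ONLY, mask);  /* marks the mask only */
                if (region.area > largest.area)
                    largest = region;
            }
        }
    }
    cvReleaseImage(&mask);
    return largest;
}

Filling only the mask avoids the repaint-to-zero passes of the original, at the cost of one extra (width+2) x (height+2) buffer.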
CV_IMPL CvSeq* cvSegmentMotion( const CvArr* mhiimg, CvArr* segmask, CvMemStorage* storage, double timestamp, double seg_thresh ) { CvSeq* components = 0; cv::Ptr<CvMat> mask8u; CvMat mhistub, *mhi = cvGetMat(mhiimg, &mhistub); CvMat maskstub, *mask = cvGetMat(segmask, &maskstub); Cv32suf v, comp_idx; int stub_val, ts; int x, y; if( !storage ) CV_Error( CV_StsNullPtr, "NULL memory storage" ); mhi = cvGetMat( mhi, &mhistub ); mask = cvGetMat( mask, &maskstub ); if( CV_MAT_TYPE( mhi->type ) != CV_32FC1 || CV_MAT_TYPE( mask->type ) != CV_32FC1 ) CV_Error( CV_BadDepth, "Both MHI and the destination mask" ); if( !CV_ARE_SIZES_EQ( mhi, mask )) CV_Error( CV_StsUnmatchedSizes, "" ); mask8u = cvCreateMat( mhi->rows + 2, mhi->cols + 2, CV_8UC1 ); cvZero( mask8u ); cvZero( mask ); components = cvCreateSeq( CV_SEQ_KIND_GENERIC, sizeof(CvSeq), sizeof(CvConnectedComp), storage ); v.f = (float)timestamp; ts = v.i; v.f = FLT_MAX*0.1f; stub_val = v.i; comp_idx.f = 1; for( y = 0; y < mhi->rows; y++ ) { int* mhi_row = (int*)(mhi->data.ptr + y*mhi->step); for( x = 0; x < mhi->cols; x++ ) { if( mhi_row[x] == 0 ) mhi_row[x] = stub_val; } } for( y = 0; y < mhi->rows; y++ ) { int* mhi_row = (int*)(mhi->data.ptr + y*mhi->step); uchar* mask8u_row = mask8u->data.ptr + (y+1)*mask8u->step + 1; for( x = 0; x < mhi->cols; x++ ) { if( mhi_row[x] == ts && mask8u_row[x] == 0 ) { CvConnectedComp comp; int x1, y1; CvScalar _seg_thresh = cvRealScalar(seg_thresh); CvPoint seed = cvPoint(x,y); cvFloodFill( mhi, seed, cvRealScalar(0), _seg_thresh, _seg_thresh, &comp, CV_FLOODFILL_MASK_ONLY + 2*256 + 4, mask8u ); for( y1 = 0; y1 < comp.rect.height; y1++ ) { int* mask_row1 = (int*)(mask->data.ptr + (comp.rect.y + y1)*mask->step) + comp.rect.x; uchar* mask8u_row1 = mask8u->data.ptr + (comp.rect.y + y1+1)*mask8u->step + comp.rect.x+1; for( x1 = 0; x1 < comp.rect.width; x1++ ) { if( mask8u_row1[x1] > 1 ) { mask8u_row1[x1] = 1; mask_row1[x1] = comp_idx.i; } } } comp_idx.f++; cvSeqPush( components, &comp ); } } } for( y = 0; y < mhi->rows; y++ ) { int* mhi_row = (int*)(mhi->data.ptr + y*mhi->step); for( x = 0; x < mhi->cols; x++ ) { if( mhi_row[x] == stub_val ) mhi_row[x] = 0; } } return components; }
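For context, here is a minimal sketch of how cvSegmentMotion is typically driven inside a motion-template pipeline: update the MHI from a motion silhouette, segment it, then walk the returned sequence of CvConnectedComp. The MHI_DURATION and SEG_THRESH values and the helper name are assumptions; mhi and segmask are assumed to be single-channel 32-bit float images of the same size, as the checks above require.

#include <stdio.h>
#include <opencv/cv.h>   /* motion-template C API; the exact header varies across OpenCV versions */

#define MHI_DURATION 1.0   /* assumed history length, in the same units as timestamp */
#define SEG_THRESH   0.5   /* assumed segmentation threshold */

static void segment_and_print(IplImage *silhouette, IplImage *mhi,
                              IplImage *segmask, double timestamp)
{
    CvMemStorage *storage = cvCreateMemStorage(0);
    CvSeq *comps;
    int i;

    cvUpdateMotionHistory(silhouette, mhi, timestamp, MHI_DURATION);
    comps = cvSegmentMotion(mhi, segmask, storage, timestamp, SEG_THRESH);

    for (i = 0; i < comps->total; i++)
    {
        CvConnectedComp *comp = (CvConnectedComp *)cvGetSeqElem(comps, i);
        printf("component %d: area=%.0f rect=(%d,%d %dx%d)\n", i, comp->area,
               comp->rect.x, comp->rect.y, comp->rect.width, comp->rect.height);
    }
    cvReleaseMemStorage(&storage);   /* also releases the returned sequence */
}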
/*=================================================== Test body ========================== */ static int fmaFloodFill( void ) { /* Some Variables */ int nx, ny, stepX, stepY, numTest, ROI_offset, mp=0, mp4=0; int i, j, k, ij, it, n, n4, ov, d1, d2, ntest2, nerr, nerr1=0, nerr2=0, nerr3=0, nerr4=0; int step, step4; uchar *pI0, *pI1, *pI2; float* pI3, *pI4; IplImage *I0, *I1, *I2, *I3, *I4; CvSize size; CvPoint seed; CvConnectedComp Comp; CvStatus r; /* Reading test parameters */ trsiRead( &nx, "64", "Image width" ); trsiRead( &stepX, "16", "Seed point horizontal step" ); trsiRead( &numTest, "32", "Number of each seed point tests" ); trsiRead( &ROI_offset, "0", "ROI offset" ); ny = nx; stepY = stepX; n = nx*ny; ntest2 = numTest/2; size.width = nx; size.height = ny; I0 = cvCreateImage( size, IPL_DEPTH_8U, 1 ); I1 = cvCreateImage( size, IPL_DEPTH_8U, 1 ); I2 = cvCreateImage( size, IPL_DEPTH_8U, 1 ); I3 = cvCreateImage( size, IPL_DEPTH_32F,1 ); I4 = cvCreateImage( size, IPL_DEPTH_32F,1 ); pI0 = (uchar*)I0->imageData; pI1 = (uchar*)I1->imageData; pI2 = (uchar*)I2->imageData; pI3 = (float*)I3->imageData; pI4 = (float*)I4->imageData; step = I1->widthStep; step4 = I3->widthStep; n = step*ny; n4 = (step4/4)*ny; if(ROI_offset) { mp = ROI_offset + ROI_offset*step; mp4= ROI_offset + ROI_offset*step4; size.width = nx - 2*ROI_offset; size.height = ny - 2*ROI_offset; I1->roi->xOffset = I1->roi->yOffset = ROI_offset; I1->roi->height = size.height; I1->roi->width = size.width; I3->roi->xOffset = I3->roi->yOffset = ROI_offset; I3->roi->height = size.height; I3->roi->width = size.width; } /* T E S T I N G */ /* Zero interval */ d1 = d2 = 0; ats1bInitRandom ( 0, 1.5, pI0, n ); /*for(i=0;i<n;i++)printf(" %d",pI0[i]);getchar(); */ for(j=0; j<size.height; j=j+stepY) { seed.y = j; for(i=0; i<size.width; i=i+stepX) { seed.x = i; for(k=0; k<n; k++) pI1[k]=pI2[k]=pI0[k]; for(k=0; k<n4; k++) pI3[k]=pI4[k]=(float)pI0[k]; /* 8U */ Counter = 0; X1 = X2 = i; Y1 = Y2 = j; /* Run CVL function */ cvFloodFill ( I1, seed, 10.0, 0.0, 0.0, &Comp ); /* Run test function */ r = _cvFloodFill8uC1R_slow (pI2+mp, step, size, seed, 10, 0, 0 ); /* Comparison */ for(k=0; k<n; k++) if( (pI1[k]-pI2[k]) ) nerr1++; if( Comp.area!=Counter ) nerr1++; if(X1!=Comp.rect.x) nerr1++; if(Y1!=Comp.rect.y) nerr1++; if((X2-X1+1)!=Comp.rect.width) nerr1++; if((Y2-Y1+1)!=Comp.rect.height) nerr1++; /* 32F */ Counter = 0; X1 = X2 = i; Y1 = Y2 = j; /* Run CVL function */ cvFloodFill ( I3, seed, 10.0, 0.0, 0.0, &Comp ); /* Run test function */ r = _cvFloodFill32fC1R_slow (pI4+mp4, step4, size, seed, 10.0, 0.0f, 0.0f ); /* Comparison */ for(k=0; k<n4; k++) if( (pI3[k]-pI4[k]) ) nerr2++; if( Comp.area!=Counter ) nerr2++; if(X1!=Comp.rect.x) nerr2++; if(Y1!=Comp.rect.y) nerr2++; if((X2-X1+1)!=Comp.rect.width) nerr2++; if((Y2-Y1+1)!=Comp.rect.height) nerr2++; if( nerr1 != 0 || nerr2 != 0 ) goto test_end; } } /* Non-zero interval */ ats1bInitRandom ( 0, 254.99, pI0, n ); for(j=1; j<size.height; j=j+stepY) { seed.y = j; for(i=1; i<size.width; i=i+stepX) { ij=i+step*j; ov=pI0[ij+mp]; seed.x = i; for(it=0; it<numTest; it++) { for(k=0; k<n; k++) pI1[k]=pI2[k]=pI0[k]; for(k=0; k<n4; k++) pI3[k]=pI4[k]=(float)pI0[k]; if(it<ntest2) /* sequential increase interval */ { d1=(ov*(it+1))/ntest2; d2=((255-ov)*(it+1))/ntest2; } else /* random interval */ { d1 = (int)atsInitRandom(1.0, 127); d2 = (int)atsInitRandom(1.0, 127); if(it>(3*numTest)/4){d1/=2; d2/=2;} } /* 8U */ Counter = 0; X1 = X2 = i; Y1 = Y2 = j; /* Run CVL function */ cvFloodFill ( I1, seed, 255.0, (double)d1, 
(double)d2, &Comp ); /* Run test function */ r = _cvFloodFill8uC1R_slow (pI2+mp, step, size, seed, 255, d1, d2 ); /* Comparison */ for(k=0; k<n; k++) if( (pI1[k]-pI2[k]) ) nerr3++; if( Comp.area!=Counter ) nerr3++; if(X1!=Comp.rect.x) nerr3++; if(Y1!=Comp.rect.y) nerr3++; if((X2-X1+1)!=Comp.rect.width) nerr3++; if((Y2-Y1+1)!=Comp.rect.height) nerr3++; /* 32F */ Counter = 0; X1 = X2 = i; Y1 = Y2 = j; /* Run CVL function */ cvFloodFill ( I3, seed, 255.0, (double)d1, (double)d2, &Comp ); /* Run test function */ r = _cvFloodFill32fC1R_slow (pI4+mp4, step4, size, seed, 255.0, (float)d1, (float)d2 ); /* Comparison */ for(k=0; k<n4; k++) if( (pI3[k]-pI4[k]) ) nerr4++; if( Comp.area!=Counter ) nerr4++; if(X1!=Comp.rect.x) nerr4++; if(Y1!=Comp.rect.y) nerr4++; if((X2-X1+1)!=Comp.rect.width) nerr4++; if((Y2-Y1+1)!=Comp.rect.height) nerr4++; if( nerr3 != 0 || nerr4 != 0 ) goto test_end; } } trsWrite(TW_RUN|TW_CON, " %d%% ", ((j+stepY)*100)/size.height); } test_end: cvReleaseImage( &I0 ); cvReleaseImage( &I1 ); cvReleaseImage( &I2 ); cvReleaseImage( &I3 ); cvReleaseImage( &I4 ); nerr = nerr1 + nerr2 + nerr3 + nerr4; printf( "\n zero: %d %d non-zero: %d %d\n", nerr1, nerr2, nerr3, nerr4 ); trsWrite(TW_RUN|TW_CON|TW_SUM, " Nerr = %d\n", nerr); if( nerr == 0 ) return trsResult( TRS_OK, "No errors fixed by this test" ); else return trsResult( TRS_FAIL, "Total fixed %d errors", nerr ); } /*fma*/
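The test above checks cvFloodFill against slow reference routines (_cvFloodFill8uC1R_slow and _cvFloodFill32fC1R_slow) that are not shown in this collection. As an illustration only, a minimal stack-based, 4-connected, fixed-range reference fill over an 8-bit single-channel buffer might look like the sketch below; the real reference routines additionally track the bounding rectangle (X1..Y2) and the global Counter that the comparisons verify, and may use different acceptance rules.

#include <stdlib.h>

/* Sketch of a slow reference flood fill: 4-connected, fixed range relative to
 * the seed value, explicit stack, visited map.  Returns the number of filled
 * pixels (analogous to Comp.area), or -1 on allocation failure. */
static long slow_fill_8u(unsigned char *img, int step, int width, int height,
                         int seed_x, int seed_y, unsigned char new_val,
                         int d_lw, int d_up)
{
    int seed_val = img[seed_y * step + seed_x];
    int lo = seed_val - d_lw, hi = seed_val + d_up;
    long cap = 4L * width * height + 1, top = 0, filled = 0;
    int *stack = (int *)malloc(sizeof(int) * (size_t)cap);
    unsigned char *seen = (unsigned char *)calloc((size_t)width * height, 1);

    if (stack == NULL || seen == NULL) { free(stack); free(seen); return -1; }
    stack[top++] = seed_y * width + seed_x;
    while (top > 0)
    {
        int idx = stack[--top];
        int x = idx % width, y = idx / width, v;
        if (seen[idx]) continue;
        v = img[y * step + x];
        if (v < lo || v > hi) continue;      /* outside [seed-d_lw, seed+d_up] */
        seen[idx] = 1;
        img[y * step + x] = new_val;
        filled++;
        if (x + 1 < width)  stack[top++] = idx + 1;
        if (x - 1 >= 0)     stack[top++] = idx - 1;
        if (y + 1 < height) stack[top++] = idx + width;
        if (y - 1 >= 0)     stack[top++] = idx - width;
    }
    free(stack);
    free(seen);
    return filled;
}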