void* FundusSegmentServer(char* receiveImage, size_t length, int *sendImageSize)
{
	//std::string directory("d:/");

	std::string directory("Data//");

	printf("Start startup func.....\n");

	startup(directory.c_str());

	printf("success startup\n");


	printf("Start segmentImage func.....\n");
	void *outputImage; size_t bytes;
	auto error = segmentImage(receiveImage, length, &outputImage, &bytes);
	if (error) {
		delete[] receiveImage;	// avoid leaking the input buffer on failure
		return NULL;
	}
	printf("success segmentImage\n");

	*sendImageSize = static_cast<int>(bytes);

	delete[] receiveImage;

	//shutdown1();

	return outputImage;
}
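A minimal caller sketch for the routine above, assuming the image is read from disk into a heap buffer that FundusSegmentServer then owns; the file name, buffer handling, and error codes here are hypothetical:

// Hypothetical caller: reads an image file into a buffer that FundusSegmentServer deletes.
#include <cstdio>

int main()
{
	FILE* f = std::fopen("fundus.jpg", "rb");            // assumed input file
	if (!f) return 1;
	std::fseek(f, 0, SEEK_END);
	long size = std::ftell(f);
	std::fseek(f, 0, SEEK_SET);
	char* buffer = new char[size];                        // freed inside FundusSegmentServer
	std::fread(buffer, 1, static_cast<size_t>(size), f);
	std::fclose(f);

	int sendImageSize = 0;
	void* segmented = FundusSegmentServer(buffer, static_cast<size_t>(size), &sendImageSize);
	if (!segmented) return 1;
	// ... transmit sendImageSize bytes starting at segmented, then release the buffer
	//     with whatever deallocator matches segmentImage's allocation (not shown here).
	return 0;
}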
/*
 * Creates a pattern out of an image, or adds the image to an existing pattern
 */
void PatternRecognitioner::addToBrain(string _id, Mat _img, int minObjSize) {

    PR::Pattern *pattern = NULL;

    // Check if pattern is already in database
    for(int i = 0; i < m_Patterns.size(); i++){
        if(m_Patterns[i].getID() == _id){
            pattern = &m_Patterns[i];
            break;
        }
    }

    // If not, create new pattern
    if(pattern == NULL){
        m_Patterns.push_back(PR::Pattern(_id));
        pattern = &m_Patterns.back();
    }

    // Create objects out of the image
    vector<PR::Object> objects = segmentImage(_img, minObjSize);
    if (objects.empty()) return;   // nothing was segmented, nothing to learn

    // Scale to the standard size
    Mat scaled;
    cv::resize(objects[0].img(),scaled,cv::Size(100,100),0,0,INTER_LINEAR);

    // Accumulate foreground hits into the pattern's point-value grid
    for(int i = 0; i < scaled.cols; i++){
        for(int j = 0; j < scaled.rows; j++){
            // cv::Point takes (x, y) = (column, row), so the column index i goes first
            if(scaled.at<uchar>(cv::Point(i,j)) != 255){
                pattern->getPointValues()[i][j]++;
            }
        }
    }
}
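checkObject is not shown in this listing, so as an illustration only, here is one way the accumulated point-value grid above could be scored against a new object; the matchScore helper and the [column][row] layout it assumes are hypothetical:

// Hypothetical scorer: sums the pattern's point-value counts over the candidate's
// foreground pixels, after scaling the candidate to the same 100x100 grid.
double matchScore(PR::Pattern &pattern, const Mat &candidate) {
    Mat scaled;
    cv::resize(candidate, scaled, cv::Size(100,100), 0, 0, INTER_LINEAR);

    double score = 0.0;
    for(int i = 0; i < scaled.cols; i++){
        for(int j = 0; j < scaled.rows; j++){
            if(scaled.at<uchar>(cv::Point(i,j)) != 255){          // foreground pixel
                score += pattern.getPointValues()[i][j];          // assumed [col][row] layout
            }
        }
    }
    return score;
}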
Example #3
int main (int argc, const char* argv[]) {
	char inputFileName[64], outputFileName[64];
	int nClusters;
	RgbImage srcImage;
	Clusters clusters;

	if (argc < 4) {
		printf("Error: Oops! Too few parameters!\n");
		printf("Usage: %s <INPUT_RGB_FILE> <N_CLUSTERS> <OUTPUT_RGB_FILE>\n", argv[0]);

		return -1;
	}

	strncpy(inputFileName, argv[1], sizeof(inputFileName) - 1);
	inputFileName[sizeof(inputFileName) - 1] = '\0';
	nClusters = atoi(argv[2]);
	strncpy(outputFileName, argv[3], sizeof(outputFileName) - 1);
	outputFileName[sizeof(outputFileName) - 1] = '\0';

	initRgbImage(&srcImage);
	if (loadRgbImage(inputFileName, &srcImage, 256) == 0) {
		printf("Error! Oops: Cannot load the input image: %s!\n", inputFileName);

		return -1;
	}

	initClusters(&clusters, nClusters, 1);
	segmentImage(&srcImage, &clusters, 1);

	saveRgbImage(&srcImage, outputFileName, 255);

	freeRgbImage(&srcImage);
	freeClusters(&clusters);

	return 0;
}
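The RgbImage and Clusters internals are not part of this listing; purely as an illustration of the kind of assignment step a k-means style segmentImage performs, here is a generic, self-contained sketch that does not use the library's actual API:

// Generic k-means assignment step over RGB pixels (illustrative only, not the library API).
#include <climits>

struct RgbPixel { unsigned char r, g, b; };

// Returns the index of the centroid closest to pixel p in RGB space.
static int nearestCentroid(const RgbPixel &p, const RgbPixel *centroids, int nClusters) {
	int best = 0;
	long bestDist = LONG_MAX;
	for (int k = 0; k < nClusters; k++) {
		long dr = (long)p.r - centroids[k].r;
		long dg = (long)p.g - centroids[k].g;
		long db = (long)p.b - centroids[k].b;
		long d = dr * dr + dg * dg + db * db;
		if (d < bestDist) {
			bestDist = d;
			best = k;
		}
	}
	return best;
}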
Example #4
/**
 * @brief RetrImage::reComputeSuperPixels Segments the image into superpixels and
 *        assigns each superpixel the majority label from the labeled ground-truth image.
 */
void RetrImage::reComputeSuperPixels(){
    IplImage iplimg = *(image.getImage());
    vector<vector<Pixel> > superPixels;
    segmentImage(&iplimg, superPixels);

    //cv::Mat segmented(image.getImage()->size(), CV_8UC1);
    int nSp = superPixels.size();
    bool computeSIFT = true;
    vector<int> labelsFreq(GeoLabel::getLabelsNumber(), 0);
    cv::Mat *labeledMat = labeledImg.getLabeledImg();
    for(int i=0; i<nSp; ++i){
        SuperPixel *toAdd = new SuperPixel(superPixels[i], *image.getImage(), computeSIFT);
        labelsFreq = vector<int>(GeoLabel::getLabelsNumber(), 0);
        for(uint j=0; j<superPixels[i].size(); ++j){
            short currentLabel = labeledMat->at<short>(superPixels[i][j].y, superPixels[i][j].x);
            labelsFreq[currentLabel]++;
            //assert(superPixels[i][j].y<segmented.rows);
            //assert(superPixels[i][j].x<segmented.cols);
            //segmented.at<uchar>(superPixels[i][j].y, superPixels[i][j].x) = (uchar)255*i/nSp;
        }
        vector<int>::iterator iter = std::max_element(labelsFreq.begin(), labelsFreq.end());
        int toAssignLabel = iter-labelsFreq.begin();
        int freq = *iter;
        if(freq>(superPixels[i].size()/2)){
            superPixelList.push_back(toAdd);
            toAdd->setLabel(toAssignLabel);
        }else{
            delete toAdd;
        }
    }
    //SuperPixel::computeAdiacents(superPixelList, image.getImage()->rows, image.getImage()->cols);
    //SuperPixel::computeWeight(superPixelList);

}
string PatternRecognitioner::recognize(Mat _img, int minObjSize) {

    vector<PR::Object> objects = segmentImage(_img, minObjSize);

    string outString = "";

    //Check with existing patterns
    for(int i = 0; i < objects.size(); i++){
        outString += checkObject(objects[i]);
        imwrite("Object"+to_string(i)+".png",objects[i].img());
    }

    return outString;
}
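A sketch of how addToBrain and recognize might be combined; the default constructor, file names, and minObjSize value are assumptions not taken from this listing:

// Hypothetical training/recognition round trip.
PatternRecognitioner pr;
pr.addToBrain("A", imread("train_A.png", IMREAD_GRAYSCALE), 50);
pr.addToBrain("B", imread("train_B.png", IMREAD_GRAYSCALE), 50);

Mat query = imread("query.png", IMREAD_GRAYSCALE);
string result = pr.recognize(query, 50);
cout << "Recognized: " << result << endl;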
Example #6
/**
 * @brief QueryImage::QueryImage Constructor of the class that generates the features for the query image
 * @param filename Name of the query image
 */
QueryImage::QueryImage(string filename, bool computeSIFT): image(filename), imgName(filename) {
    IplImage iplimg = *(image.getImage());
    vector<vector<Pixel> > superPixels;
    segmentImage(&iplimg, superPixels);

    cv::Mat segmented(image.getImage()->size(), CV_8UC1);
    int nSp = superPixels.size();
    for(int i=0; i<nSp; ++i){
        superPixelList.push_back(new SuperPixel(superPixels[i], *image.getImage(), computeSIFT));
        for(uint j=0; j<superPixels[i].size(); ++j){
            assert(superPixels[i][j].y<segmented.rows);
            assert(superPixels[i][j].x<segmented.cols);
            segmented.at<uchar>(superPixels[i][j].y, superPixels[i][j].x) = (uchar)(255 * i / nSp);
        }
    }
    SuperPixel::computeAdiacents(superPixelList, image.getImage()->rows, image.getImage()->cols);
    SuperPixel::computeWeight(superPixelList);

    cvNamedWindow("superPixels",2);
    imshow("superPixels",segmented);
}
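A trivial usage sketch for the constructor above; the file name and the computeSIFT flag value are assumptions:

// Hypothetical construction: loads the image, builds the superpixels and their features,
// and shows the "superPixels" debug window opened by the constructor.
QueryImage query("scene.jpg", true);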
//image topic callback hook
void  VesselnessNodeBase::imgTopicCallback(const sensor_msgs::ImageConstPtr& msg) {


    std::cout << "Processing an image" << std::endl;

    cv_bridge::CvImagePtr cv_ptrIn;
    cv_bridge::CvImage    cv_Out;

    //Attempt to pull the image into an opencv form.
    try
    {
        cv_ptrIn = cv_bridge::toCvCopy(msg, sensor_msgs::image_encodings::BGR8);
    }

    catch (cv_bridge::Exception& e)
    {
        ROS_ERROR("cv_bridge exception: %s", e.what());
        return;
    }

    std::cout << "converted image to opencv" << std::endl;
    //Actual process segmentation code:

    segmentImage(cv_ptrIn->image,outputImage);


    //The result is outputImage.
    //Publish this output
    //Fill in the headers and encoding type
    cv_Out.image = outputImage;
    cv_Out.header =  cv_ptrIn->header;
    cv_Out.encoding = std::string("32FC2");

    //publish the output data now.
    image_pub_.publish(cv_Out.toImageMsg());
    /*Mat outputImageDisp,preOutputImageDisp; */

    std::cout << "Finished an image" << std::endl;
}
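The class declaration for VesselnessNodeBase is not shown; the sketch below is one plausible way the image subscription and image_pub_ could be wired up with image_transport, with the node and topic names being assumptions:

// Hypothetical node wiring for the callback above (topic and node names are assumed).
#include <ros/ros.h>
#include <image_transport/image_transport.h>

int main(int argc, char** argv) {
    ros::init(argc, argv, "vesselness_node");
    ros::NodeHandle nh;
    image_transport::ImageTransport it(nh);

    // In the real class these are presumably members initialized in its constructor.
    image_transport::Publisher image_pub_ = it.advertise("vesselness/output", 1);
    // image_transport::Subscriber sub =
    //     it.subscribe("camera/image_raw", 1,
    //                  &VesselnessNodeBase::imgTopicCallback, &nodeInstance);

    ros::spin();
    return 0;
}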
Example #8
int main(int argc, char* argv[]) {
	//Load Matrix Q
	cv::FileStorage fs("/home/bodereau/Bureau/OpenCVReprojectImageToPointCloud/Q.xml", cv::FileStorage::READ);
	cv::Mat Q;

	pcl::visualization::CloudViewer viewer ("3D Viewer");

	fs["Q"] >> Q;

	//If size of Q is not 4x4 exit
	if (Q.cols != 4 || Q.rows != 4)
	{
		std::cerr << "ERROR: Could not read matrix Q (doesn't exist or size is not 4x4)" << std::endl;
		return 1;
	}


	//Get the interesting parameters from Q
	double Q03, Q13, Q23, Q32, Q33;
	Q03 = Q.at<double>(0,3);
	Q13 = Q.at<double>(1,3);
	Q23 = Q.at<double>(2,3);
	Q32 = Q.at<double>(3,2);
	Q33 = Q.at<double>(3,3);

	std::cout << "Q(0,3) = "<< Q03 <<"; Q(1,3) = "<< Q13 <<"; Q(2,3) = "<< Q23 <<"; Q(3,2) = "<< Q32 <<"; Q(3,3) = "<< Q33 <<";" << std::endl;

	cv::Size size(752, 480);
	vision::StereoRectifier rectifier(size);
	rectifier.load_intrinsic_params("/home/bodereau/Bureau/intrinsics.yml");
	rectifier.load_extrinsic_params("/home/bodereau/Bureau/extrinsics.yml");
	rectifier.stereo_rectify();
	rectifier.dump_rectification_info();
	int idx = 220;
	 //time
	clock_t start,other_clock,clock2,clock4;
	float time,time2,timtotal,time3,time4;
	timtotal = 0.0;
	start = clock();
	//~time
	cv::namedWindow("vigne");
	cv::namedWindow("bord");
	while(idx++ < 500) {
		//printf("loop : %d \n",idx);
		std::string ref = std::to_string(idx);


		std::string leftImageFilename = "/home/bodereau/Bureau/tiff2/" + ref + "_l.tiff";

		std::string rightImageFilename = "/home/bodereau/Bureau/tiff2/" + ref + "_r.tiff";
		cv::Mat g1, g2;
		clock2 = clock();
		cv::Size size_rectified = rectifier.get_recified_size();
		cv::Mat3b mat_r = cv::imread(rightImageFilename);
		cv::Mat3b mat_l = cv::imread(leftImageFilename);
		cv::Mat3b rectified_l = cv::Mat3b::zeros(size_rectified);
		cv::Mat3b rectified_r = cv::Mat3b::zeros(size_rectified);
		rectifier.generate_rectified_mat(mat_l, mat_r, rectified_l, rectified_r);

		DTSStereo dtsStereo;
		/*int test = dtsStereo.computeLab(rectified_l.data, size_rectified.area(), size_rectified.width);
			int test2 = dtsStereo.compute(rectified_l.data, size_rectified.area(), size_rectified.width);*/
		int height_without_sky = dtsStereo.computeHisto(rectified_l, size_rectified.area(), size_rectified.width);

		cv::namedWindow("img test1", CV_WINDOW_AUTOSIZE);

		printf("height without sky %d \n", height_without_sky);
		clock2 = clock() - clock2;
		time3 = ((float) (clock2)) / CLOCKS_PER_SEC;
		printf("time to rectify : %f  \n", time3);
		other_clock = clock();

		cv::cvtColor(rectified_l, g1, CV_BGR2GRAY);
		cv::cvtColor(rectified_r, g2, CV_BGR2GRAY);

		cv::Mat left_image_rectified_crop, right_image_rectified_crop;
		/*cv::Rect win(1, 339-test, 560, test);
		cv::Rect win2(1,339-test2,560,test2);*/
		cv::Rect win3(1, 339 - height_without_sky, 560, height_without_sky);
		rectified_l(win3).copyTo(left_image_rectified_crop);
		rectified_r(win3).copyTo(right_image_rectified_crop);



		//printf("convert to png::image left done \n");
		//png::image<png::rgb_pixel> rightImage("right.png");
		other_clock = clock() - other_clock;
		time2 = ((float) (other_clock)) / CLOCKS_PER_SEC;
		timtotal = timtotal + time2;

		printf("time lost in png conversion : %f \n", time2);
		//~time
		/*idxVect = 0;

		rightImage.write("rgb2.png");*/
		//printf("convert to png::image right done \n");
		SPSStereo sps;
		sps.setIterationTotal(outerIterationTotal, innerIterationTotal);
		sps.setWeightParameter(lambda_pos, lambda_depth, lambda_bou, lambda_smo);
		sps.setInlierThreshold(lambda_d);
		sps.setPenaltyParameter(lambda_hinge, lambda_occ, lambda_pen);

		cv::Mat segmentImage(600, 600, CV_16UC1);

		std::vector <std::vector<double>> disparityPlaneParameters;
		std::vector <std::vector<int>> boundaryLabels;
		//printf("go to compute \n");
		other_clock = clock();
		cv::Mat disparity(600, 600, CV_16UC1);
		sps.compute(superpixelTotal, left_image_rectified_crop, right_image_rectified_crop, segmentImage, disparity, disparityPlaneParameters, boundaryLabels);
		/*cv::StereoSGBM sgbm;
		sgbm.SADWindowSize = 5;
		sgbm.numberOfDisparities = 256;
		sgbm.preFilterCap = 0;
		sgbm.minDisparity = 0;
		sgbm.uniquenessRatio = 1;
		sgbm.speckleWindowSize = 150;
		sgbm.speckleRange = 2;
		sgbm.disp12MaxDiff = 10;
		sgbm.fullDP = false;
		sgbm.P1 = 1000;
		sgbm.P2 = 2400;
		cv::Mat disper;
		sgbm(dst, dst2, disper);
		normalize(disper, disparity, 0, 255, CV_MINMAX, CV_8U);*/
		other_clock = clock() - other_clock;
		time2 = ((float) (other_clock)) / CLOCKS_PER_SEC;
		printf("time to compute  : %f \n", time2);
		//printf("compute done \n");
		other_clock = clock();
		/*png::image<png::rgb_pixel> segmentBoundaryImage;
					makeSegmentBoundaryImage(dst, segmentImage, boundaryLabels, segmentBoundaryImage);
					segmentBoundaryImage.write(ref + "__bound.png");*/
		//disparityImage.write(ref + "__disparity.png");
		other_clock = clock() - other_clock;

		cv::imshow("vigne", disparity);
		/*cv::Mat copy;
		disparity.copyTo(copy);
		STVFlow stvflow;
		stvflow.compute(disparity,dst);*/
		//copy.convertTo(copy, CV_8U,1/255.0,1/255.0);
		/*cv::Mat dst455;
		cv::threshold(disparity,dst455,50, 255, cv::THRESH_BINARY);*/

		/*cv::imshow("bord",disparity);
		cv::threshold(copy,copy,100, 255, cv::THRESH_BINARY);
		cv::imshow("img test1",dst);*/
		cv::waitKey(100);
		cv::Mat img_disparity, img_rgb;
		img_disparity = disparity;
		img_rgb = left_image_rectified_crop;
		img_disparity.convertTo(img_disparity, CV_8U, 1 / 255.0, 1 / 255.0); // to put back for SPS
		//Create matrix that will contain the 3D coordinates of each pixel
		cv::Mat recons3D(img_disparity.size(), CV_32FC3);

		//Reproject image to 3D
		std::cout << "Reprojecting image to 3D..." << std::endl;
		cv::reprojectImageTo3D(img_disparity, recons3D, Q, false, CV_32F);

		std::cout << "Creating Point Cloud..." << std::endl;
		pcl::PointCloud<pcl::PointXYZRGB>::Ptr point_cloud_all_the_image_rgb(new pcl::PointCloud<pcl::PointXYZRGB>);
		pcl::PointCloud<pcl::PointXYZ>::Ptr cloud(new pcl::PointCloud<pcl::PointXYZ>);

		double px, py, pz;
		uchar pr, pg, pb;
		clock4 = clock();

		for (int i = 0; i < img_rgb.rows; i++) {
			uchar *rgb_ptr = img_rgb.ptr<uchar>(i);

			uchar *disp_ptr = img_disparity.ptr<uchar>(i);

			//double* recons_ptr = recons3D.ptr<double>(i);
			for (int j = 0; j < img_rgb.cols; j++) {
				//Get 3D coordinates

				uchar d = disp_ptr[j];
				if (d == 0) continue; //Discard bad pixels
				double pw = -1.0 * static_cast<double>(d) * Q32 + Q33;
				px = static_cast<double>(j) + Q03;
				py = static_cast<double>(i) + Q13;
				pz = Q23;

				px = px / pw;
				py = py / pw;
				pz = pz / pw;

				//Get RGB info
				pb = rgb_ptr[3 * j];
				pg = rgb_ptr[3 * j + 1];
				pr = rgb_ptr[3 * j + 2];

				//Insert info into point cloud structure
				pcl::PointXYZRGB point;
				point.x = px;
				point.y = py;

				point.z =-1* pz;
				uint32_t rgb = (static_cast<uint32_t>(pr) << 16 |
						static_cast<uint32_t>(pg) << 8 | static_cast<uint32_t>(pb));
				point.rgb = *reinterpret_cast<float *>(&rgb);
				//if(-1*pz < 90){
					point_cloud_all_the_image_rgb->points.push_back(point);//}
				pcl::PointXYZ pointx;
				pointx.x = px;
				pointx.y = py;
				pointx.z = -1*pz;
				if(-1*pz <90)
					cloud->points.push_back(pointx);
			}
		}

		point_cloud_all_the_image_rgb->width = (int) point_cloud_all_the_image_rgb->points.size();
		point_cloud_all_the_image_rgb->height = 1;
		cloud->width = (int) cloud->points.size();
		cloud->height = 1;
		clock4 = clock() - clock4;
		time4 = ((float) (clock4)) / CLOCKS_PER_SEC;
		printf("time to build the point clouds : %f \n", time4);

		clock4 = clock();
		// Read in the cloud data
		pcl::PCDReader reader;
		pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_f(new pcl::PointCloud<pcl::PointXYZ>);


		//cloud = cloud_ptr;
		std::cout << "PointCloud before filtering has: " << cloud->points.size() << " data points." << std::endl; //*
		pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_filtered(new pcl::PointCloud<pcl::PointXYZ>);
		pcl::PointCloud<pcl::PointXYZ>::Ptr cloud_ground(new pcl::PointCloud<pcl::PointXYZ>);
		pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud_rgbseg(new pcl::PointCloud<pcl::PointXYZRGB>);

		ExtractTheGround extractTheGround;
		extractTheGround.compute(cloud, cloud_filtered, cloud_ground);
		clock4 = clock() - clock4;
		time4 = ((float) (clock4)) / CLOCKS_PER_SEC;
		printf("time to extract the ground : %f \n", time4);
		clock4 = clock();

		std::cout << "ytyt" << std::endl;

		std::vector<pcl::PointCloud<pcl::PointXYZRGB>::Ptr> clusters;

		EuclidianClusters euclidianClusters;
		euclidianClusters.compute(cloud_filtered, clusters);

		clock4 = clock() - clock4;
		time4 = ((float) (clock4)) / CLOCKS_PER_SEC;
		printf("time for the Euclidean clustering : %f \n", time4);
		clock4 = clock();

		/*pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud_rgbseg_less(new pcl::PointCloud<pcl::PointXYZRGB>);
		for(int i=0;i < cloud_filtered->points.size();i++){
			for(int j=0;j < point_cloud_ptr->points.size();j++){
				if(cloud_filtered->points[i].x == point_cloud_ptr->points[j].x && cloud_filtered->points[i].y == point_cloud_ptr->points[j].y &&
						cloud_filtered->points[i].z == point_cloud_ptr->points[j].z){
					cloud_rgbseg_less->points.push_back(point_cloud_ptr->points[j]);
					break;
				}
			}
		}
		RGBSegmentation rgbseg;
		rgbseg.compute(cloud_rgbseg_less, cloud_rgbseg);*/

		//viewer.removeVisualizationCallable("jhjh");
		viewer.removeVisualizationCallable("jhjha");
		viewer.showCloud(point_cloud_all_the_image_rgb,"jhjha");
		clock4 = clock() - clock4;
		time4 = ((float) (clock4)) / CLOCKS_PER_SEC;
		printf("time to display the image : %f \n", time4);
		clock4 = clock();
		//viewer.showCloud(cloud_rgbseg,"jhjh");

		/*ClustersFilter clustersFilter;
		clustersFilter.compute(clusters);*/

		pcl::PointCloud<pcl::PointXYZ>::Ptr cube(new pcl::PointCloud<pcl::PointXYZ>);
		for(int i=0; i< clusters.size();i++) {
			float minX, minY, minZ, maxZ, maxX, maxY, memY, memZ;
			int nbrZ = 0;	// run length of identical z values; must start at zero
			bool dontgo = false;
			bool zminbool = false;

			for(int j=0; j < clusters.at(i)->points.size();j++) {
				//for each point of the cluster in question
				float xpoint = clusters.at(i)->points[j].x;
				float ypoint = clusters.at(i)->points[j].y;
				float zpoint = clusters.at(i)->points[j].z;
				if(j==0){
					minX = xpoint; minY = ypoint; minZ = zpoint;
					maxX = xpoint; maxY = ypoint; maxZ= zpoint;
					memY = ypoint; memZ = zpoint;
				}

				if(memZ == zpoint ){
					nbrZ++;
				}else {

					if (nbrZ >= 50) {
						maxZ = zpoint;
						if (!zminbool) {
							zminbool = true;
							minZ = zpoint;
						}
					}
					nbrZ = 1;
				}

				if(memY != ypoint)
					memY = ypoint;
				if(memZ != zpoint) {
					memZ = zpoint;
				}

				/*if(zpoint > maxZ)
					maxZ = zpoint;*/
				/*if(zpoint < minZ)
					minZ = zpoint;*/
				if(xpoint < minX)
					minX= xpoint;
				if(xpoint > maxX)
					maxX = xpoint;
				if( ypoint < minY)
					minY = ypoint;
				if( ypoint > maxY)
					maxY = ypoint;
				if(maxZ > (minZ +15))
					break;

			}
			if (maxX - minX > 3 * (maxY - minY))
				dontgo = true;
			if(maxZ - minZ < 3)
				dontgo = true;
			if(dontgo)
				continue;

			float xdebut = minX, ydebut = minY, zdebut = minZ, xfin = maxX, yfin = maxY, zfin = maxZ;
			//float xdebut = -10.20, ydebut = 0, zdebut = 12.20, xfin = -50.40, yfin = 15.24, zfin = -13.56;
			if (ydebut > yfin) {
				float tmp = ydebut;
				ydebut = yfin;
				yfin = tmp;
			}
			if (xdebut > xfin) {
				float tmp = xdebut;
				xdebut = xfin;
				xfin = tmp;
			}
			if (zdebut > zfin) {
				float tmp = zdebut;
				zdebut = zfin;
				zfin = tmp;
			}

			for (float i = xdebut; i <= xfin; i+=0.3) {
				for (float j = ydebut; j <= yfin; j+=0.3) {

					pcl::PointXYZ p;
					pcl::PointXYZ p2;
					p.x = i;
					if (i == xdebut || (i >= xfin - 0.3 && i <= xfin + 0.3)) {
						p.y = j;

					}
					else {
						p.y = ydebut;

						p2.y = yfin;
					}
					p.z = zfin;

					p2.x = p.x;
					p2.z = p.z;
					cube->points.push_back(p);
					cube->points.push_back(p2);
					p.z = zdebut;
					p2.z = zdebut;
					cube->points.push_back(p);
					cube->points.push_back(p2);
				}
			}
			for(float j = ydebut; j <= yfin; j += 0.3){
				for(float z = zdebut; z <= zfin; z += 0.3){
					pcl::PointXYZ p;
					pcl::PointXYZ p2;
					p.x = xdebut;
					p2.x = xfin;
					p.y = j;
					p.z = z;
					p2.y = p.y;
					p2.z = p.z;
					if (j == ydebut || (j >= yfin - 0.3 && j <= yfin + 0.3)) {
						cube->points.push_back(p);
						cube->points.push_back(p2);
					}
				}
			}
		}

		viewer.showCloud (cube,"oh");

		/*pcl::PointCloud<pcl::PointXYZRGB>::Ptr cloud_rgb_memory2 (new pcl::PointCloud<pcl::PointXYZRGB>);
		int r2=15,b2=255,g3r=125;
		for(int i = 0; i < clusters.size();i++){
			uint32_t rgb = (static_cast<uint32_t>(r2) << 16 |
					static_cast<uint32_t>(g3r) << 8 | static_cast<uint32_t>(b2));
			for(int j=0;j< clusters.at(i)->points.size();j++){
				clusters.at(i)->points[j].rgb = *reinterpret_cast<float*>(&rgb);
			}
			r2+=20;
			b2+=100;
			g3r+=5;
			if(r2 >252)
				r2=0;
			if(g3r>252)
				g3r=0;
			if(b2>253)
				b2=0;
			*cloud_rgb_memory2 += *clusters.at(i);
		}
		clock4 = clock() - clock4;
		time4 = ((float) (clock4)) / CLOCKS_PER_SEC;
		printf("time to build the clusters (before) : %f \n", time4);
		clock4 = clock();
		std::cout<<"clusters size :"<<clusters.size() <<std::endl;
		//viewer.showCloud (cloud_rgb_memory2,"eh eh eh");
		clock4 = clock() - clock4;
		time4 = ((float) (clock4)) / CLOCKS_PER_SEC;
		printf("time to build the clusters (after) : %f \n", time4);
		clock4 = clock();*/
	}

	//time
	start = clock() - start;
	time = ((float) ( start))/ CLOCKS_PER_SEC;
	printf("time for all : %f and time without loss :%f \n",time,time - timtotal);
	return 0;
}
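For reference, the per-pixel arithmetic in the point-cloud loop above is the standard sparse-Q reprojection [X Y Z W]^T = Q [x y d 1]^T; the helper below restates it in isolation (the sign on Q32 simply mirrors the loop above and depends on how Q was produced during stereo calibration):

// Reprojects one pixel (x, y) with disparity d to 3D using the five non-zero Q entries
// extracted above: Q(0,3)=Q03, Q(1,3)=Q13, Q(2,3)=Q23, Q(3,2)=Q32, Q(3,3)=Q33.
inline void reprojectPixel(double x, double y, double d,
                           double Q03, double Q13, double Q23, double Q32, double Q33,
                           double &X, double &Y, double &Z) {
	double W = -1.0 * d * Q32 + Q33;   // homogeneous scale; sign follows the loop above
	X = (x + Q03) / W;
	Y = (y + Q13) / W;
	Z = Q23 / W;
}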
int blobDetectionOmp(frame_t *frame){
    //detect blobs in the current frame and fill out the box struct
    //      --- look into segmentation of images (blur the image first then segment)
    // don't add a blob smaller than a certain size.
    unsigned long largestLabel;
    
    printf("at blobDetection, about to segment Image\n");
    if (segmentImage(frame, frame, &largestLabel) != 0) {
        printf("blobDetection: segmentImage failed\n");
        return 1;
    }
    printf("at blobDetection, segmentImage finished\n");

    box_t *box = frame->boxes;

    if (box != NULL) {
        printf("blobDetection: frame already has bounding boxes filled in!!\n");
        printf("                free boxes before calling this function\n");
        return 1;
    }

    int i, j;
    int left, right, up, down; 
    int x=0,y=0,count=0;
    int tag=1;
    int w, h, cx, cy;
    int centerx,centery;
    int done = 0;
    //detect blobs based on size - mean of pixels connected together
    // Check the segmented pixels and create a bounding box for each segment
    while(done == 0) {
        // Add blobs based on the segment - we're done when we've looked 
        //  through the whole list of segmentations
        // Based on segmentation, decide what the centroid, width, height        
        //      We're going to go through the image by each tag to find the 
        //          corresponding left, right, up, and down of the box.
        //      THIS IS COMPUTATIONALLY EXPENSIVE
        // TODO: change this operation to look around the area instead of
        //          the entire image.

        left = frame->image->width;
        right = 0;
        up = frame->image->height;
        down = 0;
        tag++;       
        count = 0;
        x = 0;
        y = 0;

        if (frame->image->data == NULL) {
            printf("ERROR: data is null\n");
        }

        // Reductions give each thread private copies of the extrema and the sums;
        // 'j' must be declared private because it is defined outside the loop.
        #pragma omp parallel for private(j) reduction(min:left,up) reduction(max:right,down) reduction(+:x,y,count)
        for (i = 0; i < frame->image->height; i++){
            for (j = 0; j < frame->image->width; j++){
                pixel_t p = frame->image->data[i*frame->image->width+j];

                if (p.label == tag) {
                    // The pixel has the label we're looking for, so include it:
                    //  track the left-, right-, up- and down-most coordinates
                    if (j < left)  left = j;
                    if (j > right) right = j;
                    if (i < up)    up = i;
                    if (i > down)  down = i;
                    x += j;
                    y += i;
                    count++;
                }
            }
        }
        // The parallel for ends with an implicit barrier.

        if (tag > largestLabel) {
            // Once we have passed the largest label value, this is the last pass
            done = 1;
        }

        if (count == 0) {
            continue;
        }        
        printf("loop done, tag = %d, count = %d\n",tag, count);

        // update the corresponding values for the blob
        cx = x/count;
        cy = y/count;
        centerx = (right-left)/2 + left;
        centery = (up - down)/2 + down;
        w = abs(right - left);
        h = abs(up - down);

/*
        // Remove all blobs which do not fit within the constraints. 
        // Update the centroid and get min and max width and height of blob
        if (minBlob(w,h,centerx,centery) != 0) {
            LOG_ERR("Blob too small, Removing blob at (cx, cy) -> (%d, %d)\n", centerx, centery);
        } else if (maxBlob(w,h,centerx,centery) != 0) {
            LOG_ERR("Blob too large, Splitting blob at (cx,cy) -> (%d, %d)\n", centerx, centery);
            // TODO: Check if we can split the blob into multiple boxes or not
            //  for now, we'll just add the box to the list
            createNewBox(frame, cx, cy, centerx, centery, h, w);         
        } else {
            createNewBox(frame, cx, cy, centerx, centery, h, w);
        }
*/        
        if (count > 30) {
            printf("adding new box at (%d,%d) centroid = (%d,%d) (w,h) = (%d,%d)\n",
                left,up, centerx,centery, w, h);
            createNewBox(frame, cx, cy, left, up, w, h);
        }
    }

    return 0;
}
int blobDetection(frame_t *frame){
    //detect blobs in the current frame and fill out the box struct
    //      --- look into segmentation of images (blur the image first then segment)
    // don't add a blob smaller than a certain size.
    int largestLabel;
    if (segmentImage(frame, frame, &largestLabel) != 0) {
        printf("blobDetection: segmentImage failed\n");
        return 1;
    }

    box_t *box = frame->boxes;

    if (box != NULL) {
        printf("blobDetection: frame already has bounding boxes filled in!!\n");
        printf("                free boxes before calling this function\n");
        return 1;
    }

    int i, j;
    int left, right, up, down; 
    int tag=1;
    pixel_t p;
    int w, h, cx, cy;
    int done = 0;
    //detect blobs based on size - mean of pixels connected together
    // Check the segmented pixels and create a bounding box for each segment
    while(done == 0) {
        // Add blobs based on the segment - we're done when we've looked 
        //  through the whole list of segmentations
        // Based on segmentation, decide what the centroid, width, height        
        //      We're going to go through the image by each tag to find the 
        //          corresponding left, right, up, and down of the box.
        //      THIS IS COMPUTATIONALLY EXPENSIVE
        // TODO: change this operation to look around the area instead of
        //          the entire image.

        left = frame->image->width;
        right = 0;
        up = frame->image->height;
        down = 0;
        tag++;       
 
        for (i = 0; i < frame->image->height; i++){
            for (j = 0; j < frame->image->width; j++){
                p = frame->image->data[i*frame->image->width+j];
                if (p.L == tag) {
                    // The pixel has label we're looking for, so we include it
                    //  Find the left, right, up, and down most values for the label
                    if (j < left) {
                        left = j;
                    }
                    if (j > right) {
                        right = j;
                    }
                    if (i < up) {
                        up = i;
                    }
                    if (i > down) {
                        down = i;
                    }
                }
                if (tag > largestLabel) {
                    // If we looked through the largest tag value, then we're done
                    done = 1;
                }
            }
        }

        // update the corresponding values for the blob
        cx = (right - left)/2 + left;
        cy = (down - up)/2 + up;    // midpoint between the top and bottom rows
        w = abs(right - left);
        h = abs(up - down);

        // Remove all blobs which do not fit within the constraints. 
        // Update the centroid and get min and max width and height of blob
        if (minBlob(w,h,cx,cy) != 0) {
            LOG_ERR("Blob too small, Removing blob at (cx, cy) -> (%d, %d)\n", cx, cy);
        } else if (maxBlob(w,h,cx,cy) != 0) {
            LOG_ERR("Blob too large, Splitting blob at (cx,cy) -> (%d, %d)\n", cx, cy);
            // TODO: Check if we can split the blob into multiple boxes or not
            //  for now, we'll just add the box to the list
            createNewBox(frame, cx, cy, h, w);         
        } else {
            createNewBox(frame, cx, cy, h, w);
        }
    }

    return 0;
}