void Cluster::kMeans(const Points &pts, int clusterNum, vector<Points> &clusteredPts) {
    int length = pts.size();

    // Prepare the input data format
    CvMat* points = cvCreateMat(length, 1, CV_32FC2);
    CvMat* clusters = cvCreateMat(length, 1, CV_32SC1);

    for (int row = 0; row < points->rows; row++) {
        // each row of the single-column, 2-channel matrix holds one (x, y) pair
        float* ptr = (float*) (points->data.ptr + row * points->step);
        ptr[0] = static_cast<float>(pts[row].x);
        ptr[1] = static_cast<float>(pts[row].y);
    }

    // The k-means algorithm (OpenCV function)
    cvKMeans2(points, clusterNum, clusters,
              cvTermCriteria(CV_TERMCRIT_EPS + CV_TERMCRIT_ITER, 1, 2));

    // Pack the result into 'clusteredPts': each element of 'clusteredPts' is one cluster,
    // i.e. a vector<CvPoint> containing all points that belong to that cluster.
    packIntoClusteredPts(clusterNum, points, clusters, clusteredPts);
    removeEmptyCluster(clusteredPts);

    cvReleaseMat(&points);
    cvReleaseMat(&clusters);
}
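// packIntoClusteredPts() is referenced above but not shown. A minimal sketch of what it
// presumably does, assuming Points is a std::vector<CvPoint>: read each sample's label from
// 'clusters' and append the corresponding point to that cluster's bucket. This is an
// illustrative reconstruction, not the original implementation.
void packIntoClusteredPts(int clusterNum, const CvMat* points, const CvMat* clusters,
                          vector<Points> &clusteredPts) {
    clusteredPts.assign(clusterNum, Points());
    for (int i = 0; i < clusters->rows; i++) {
        const float* p = (const float*) (points->data.ptr + i * points->step);
        int label = clusters->data.i[i]; // cluster index of sample i
        clusteredPts[label].push_back(cvPoint(cvRound(p[0]), cvRound(p[1])));
    }
}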
void get_hand_interval_2(IplImage *body, int *interval) {
    CvMat *data, *labels, *means;
    int count;
#define CLUSTERS 2

    count = cvCountNonZero(body);
    data = cvCreateMat(count, 1, CV_32FC1);
    labels = cvCreateMat(count, 1, CV_32SC1);
    means = cvCreateMat(CLUSTERS, 1, CV_32FC1);

    fill_mat(body, data);
    cvKMeans2(data, CLUSTERS, labels,
              cvTermCriteria(CV_TERMCRIT_EPS + CV_TERMCRIT_ITER, 10, 10.0),
              1, 0, 0, means, 0);

    double tmp;
    // lower bound: minimum pixel value of the body image
    cvMinMaxLoc(body, &tmp, NULL, NULL, NULL, NULL);
    interval[0] = tmp;
    // upper bound: the smaller of the two cluster means
    cvMinMaxLoc(means, &tmp, NULL, NULL, NULL, NULL);
    interval[1] = tmp;

    cvReleaseMat(&data);
    cvReleaseMat(&labels);
    cvReleaseMat(&means);
}
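// fill_mat() is referenced above but not shown. A minimal sketch, assuming 'body' is a
// single-channel 8-bit image and 'data' is the count x 1 CV_32FC1 matrix sized with
// cvCountNonZero(): copy every non-zero pixel value into the sample matrix in scan order.
// This is an illustrative reconstruction, not the original implementation.
static void fill_mat(const IplImage *body, CvMat *data)
{
    int k = 0;
    for (int y = 0; y < body->height; y++) {
        const uchar *row = (const uchar *) (body->imageData + y * body->widthStep);
        for (int x = 0; x < body->width; x++) {
            if (row[x] != 0)
                data->data.fl[k++] = (float) row[x]; // one sample per non-zero pixel
        }
    }
}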
void KMeans::startClustering(const ClusterMethodParameters* pParameters, Document *pDocument, ClusteringResult *pClusteringResult) {
    mpDocument = pDocument;
    mpClusteringResult = pClusteringResult;

    this->mpDocument->setDistanceType(pParameters->dataType);
    if (pParameters->dataType == DISTANCE_BASED) {
        throw Exception("Distance based clustering not possible for KMeans!");
    }

    std::cout << "computing features..." << std::endl;
    this->mpDocument->computeFeatures();
    std::cout << "finished..." << std::endl;

#if 1 // old code
    const KMeansParameters *pParams = (const KMeansParameters*)(pParameters);

    std::cout << "starting kmeans clustering..." << std::endl;
    std::cout << "nr of clusters: " << pParams->nClusters << std::endl;
    std::cout << "stopping parameters (max its, eps): " << pParams->maxIts << ", " << pParams->eps << std::endl;

    StopWatch watch;
    watch.start();

    ublas::matrix<float>& dataMatrix = this->mpDocument->getCharFeatureCollectionPointer()->dataMatrixRef();
    // std::cout << dataMatrix << std::endl;
    // std::cout << dataMatrix.size1() << ", " << dataMatrix.size2() << std::endl;
    const int nSamples = dataMatrix.size1();
    if (dataMatrix.size1() * dataMatrix.size2() <= 0) {
        throw NoDataException("No features found for clustering with kmeans!");
    }

    CvMat *dataMatrixOCV = OpenCV::cvMatFromBoostMat<float>(dataMatrix);
    // std::cout << "dataMatrixOCV, rows = " << dataMatrixOCV->rows << ", cols = " << dataMatrixOCV->cols << std::endl;
    // std::cout << "computed data matrix opencv!" << std::endl;

    CvMat* clusters = cvCreateMat(nSamples, 1, CV_32SC1);
    cvKMeans2(dataMatrixOCV, pParams->nClusters, clusters,
              cvTermCriteria(CV_TERMCRIT_EPS + CV_TERMCRIT_ITER, pParams->maxIts, pParams->eps));
    // std::cout << "cvmat rows = " << clusters->rows << ", cols = " << clusters->cols << std::endl;
    std::cout << "finished k-means clustering using opencv!" << std::endl;
    watch.stop();

    // save cluster label vector to cluster result:
    std::vector<int> labels;
    for (int i = 0; i < nSamples; ++i) {
        labels.push_back(clusters->data.i[i]);
    }
    mpClusteringResult->createClusteringResultFromLabelVector(labels, mpDocument);

    pDocument->clearAllPreprocessing();
    pDocument->clearFeatures();
    pClusteringResult->computePrototypeFeatures();
    pClusteringResult->updatePrototypes(true);

    cvReleaseMat(&clusters);
#endif
} // end startClustering
// use k-means to reduce the number of colors
MyQuantifiedImage* kmeansQuantification(IplImage* img, int tableSize) {
    // step 1: convert the image into k-means samples (one CV_32FC3 sample per pixel)
    int sample_count = img->height * img->width;
    CvMat* samples = cvCreateMat(sample_count, 1, CV_32FC3);
    CvRNG rng = cvRNG(0xffffffff);
    int idx = 0;
    for (int i = 0; i < img->height; i++) {
        for (int j = 0; j < img->width; j++) {
            cvSet1D(samples, idx++, cvGet2D(img, i, j));
        }
    }

    // step 2: apply k-means
    CvMat* labels = cvCreateMat(sample_count, 1, CV_32SC1);
    CvMat* centers = cvCreateMat(tableSize, 1, CV_32FC3);
    cvSetZero(labels);
    cvSetZero(centers);
    cvKMeans2(samples, tableSize, labels,
              cvTermCriteria(CV_TERMCRIT_ITER + CV_TERMCRIT_EPS, 10, CV_KMEANS_ACC),
              CV_KMEANS_ATTEMPTS, &rng, CV_KMEANS_PP_CENTERS, centers, 0); // flag = KMEANS_PP_CENTERS

    // step 3: rebuild the image from the cluster centers
    IplImage* quantImg = cvCreateImage(cvGetSize(img), IPL_DEPTH_32F, 3);
    CvMat* labelImg = cvCreateMat(img->height, img->width, CV_32SC1);
    cvSetZero(quantImg);
    cvSetZero(labelImg);
    idx = 0;
    for (int i = 0; i < img->height; i++) {
        for (int j = 0; j < img->width; j++) {
            int cluster_idx = labels->data.i[idx++];
            CvScalar color = cvGet1D(centers, cluster_idx);
            cvSet2D(quantImg, i, j, color);
            cvSetReal2D(labelImg, i, j, (double) cluster_idx);
        }
    }

    MyQuantifiedImage* re = malloc(sizeof(MyQuantifiedImage));
    re->labelMat = labelImg;
    re->qImg = quantImg;
    re->tableSize = tableSize;
    CvScalar* colorTable = calloc(tableSize, sizeof(CvScalar));
    for (int i = 0; i < tableSize; i++) {
        colorTable[i] = cvGet1D(centers, i);
    }
    re->colorTable = colorTable;

    // the intermediate matrices are no longer needed once copied into the result
    cvReleaseMat(&samples);
    cvReleaseMat(&labels);
    cvReleaseMat(&centers);
    return re;
}
bool Classifier::kmeans(DataSet *data) {
    cout << "------------------------------------------" << endl;
    cout << "\t\tK-Means" << endl;

    if (kmeans_load) {
        cout << "Loading..." << endl;
        centers = (CvMat *)cvLoad("centers.dat");
        data->samples = (CvMat *)cvLoad("samples.dat");
        data->responses = (CvMat *)cvLoad("responses.dat");
        data->centers = centers;
        cout << "Loaded Successfully" << endl;
        return true;
    }

    CvMat *desc = data->kmeans_input();
    data->clusters = cvCreateMat(data->num_samples, 1, CV_32SC1);
    centers = cvCreateMat(num_clusters, SURF_SIZE, CV_32FC1);
    data->centers = centers;

    cout << "Running with k = " << num_clusters << endl;
    flush(cout);

    cvKMeans2(
        desc,            // samples
        num_clusters,    // clusters
        data->clusters,  // labels
        cvTermCriteria(
            CV_TERMCRIT_EPS | CV_TERMCRIT_ITER, // end criteria
            10,          // max iterations
            0.1),        // accuracy
        1,               // attempts
        &rng,            // rng
        0,               // flags
        centers,         // centers
        NULL             // compactness
    );

    if (kmeans_save) {
        cvSave("centers.dat", centers);
        cvSave("samples.dat", data->cluster_samples());
        cvSave("responses.dat", data->cluster_responses());
        cout << "Saved!" << endl;
    }

    cvReleaseMat(&desc);
    data->to_kmeans = NULL;
    return true;
}
void cvKMeans( int num_clusters, float** samples, int num_samples, int vec_size,
               CvTermCriteria termcrit, int* cluster_idx )
{
    CvMat* samples_mat = cvCreateMat( num_samples, vec_size, CV_32FC1 );
    CvMat cluster_idx_mat = cvMat( num_samples, 1, CV_32SC1, cluster_idx );
    int i;
    for( i = 0; i < num_samples; i++ )
        memcpy( samples_mat->data.fl + i*vec_size, samples[i], vec_size*sizeof(float) );
    cvKMeans2( samples_mat, num_clusters, &cluster_idx_mat, termcrit, 1, 0, 0, 0, 0 );
    cvReleaseMat( &samples_mat );
}
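/* A minimal, hypothetical usage sketch of the cvKMeans() wrapper above: cluster four 2-D
   feature vectors into two groups. The data values, the cluster count, and the demo
   function itself are made up for illustration (assumes <stdio.h> is included). */
void cvKMeansDemo(void)
{
    float v0[] = { 0.0f, 0.0f }, v1[] = { 0.2f, 0.1f };
    float v2[] = { 5.0f, 5.0f }, v3[] = { 5.1f, 4.9f };
    float* samples[] = { v0, v1, v2, v3 };
    int labels[4];

    cvKMeans( 2, samples, 4, 2,
              cvTermCriteria( CV_TERMCRIT_EPS + CV_TERMCRIT_ITER, 10, 1.0 ),
              labels );

    for( int i = 0; i < 4; i++ )
        printf( "sample %d -> cluster %d\n", i, labels[i] );
}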
void color_similarity_init(string comparer, CvScalar center_values[]) {
    const char* path = "/Users/Rango/Documents/coding/XCode/TextureSimilarity_data/";
    char buff[512];
    sprintf(buff, "%s%s.jpg", path, comparer.c_str());
    IplImage* src1 = cvLoadImage(buff, CV_LOAD_IMAGE_COLOR);

    // k-means on randomly sampled pixels
    CvMat *samples = cvCreateMat(NSAMPLE, 1, CV_32FC3);
    CvMat *clusters = cvCreateMat(NSAMPLE, 1, CV_32SC1);
    uchar* data = (uchar*)src1->imageData;
    int step = src1->widthStep / sizeof(uchar);
    int channels = src1->nChannels;
    CvMat *m_centers = cvCreateMat(NCLUSTER, 1, CV_32FC3);
    CvRNG random = cvRNG(cvGetTickCount());
    //printf("%d", cvRandInt(&random)%src1->height);

    for (int k = 0; k < NSAMPLE; k++) {
        int i = cvRandInt(&random) % src1->height;
        int j = cvRandInt(&random) % src1->width;
        CvScalar s;
        s.val[0] = (double)data[i*step + j*channels + 0];
        s.val[1] = (double)data[i*step + j*channels + 1];
        s.val[2] = (double)data[i*step + j*channels + 2];
        cvSet2D(samples, k, 0, s);
    }

    //CvRNG* rng;
    cout << "Kmeans..." << endl;
    cvKMeans2(samples, NCLUSTER, clusters,
              cvTermCriteria(CV_TERMCRIT_EPS + CV_TERMCRIT_ITER, 10, 1.0),
              1, &random, KMEANS_PP_CENTERS, m_centers);

    for (int i = 0; i < NCLUSTER; i++) {
        center_values[i].val[0] = (float)cvGet2D(m_centers, i, 0).val[0];
        center_values[i].val[1] = (float)cvGet2D(m_centers, i, 0).val[1];
        center_values[i].val[2] = (float)cvGet2D(m_centers, i, 0).val[2];
    }
    for (int i = 0; i < NCLUSTER; i++) {
        cout << center_values[i].val[0] << " "
             << center_values[i].val[1] << " "
             << center_values[i].val[2] << endl;
    }

    cvReleaseImage(&src1);
    cvReleaseMat(&samples);
    cvReleaseMat(&clusters);
    cvReleaseMat(&m_centers);
}
void create_kmeans_clusters(IplImage* in, CvMat* points, CvMat* cluster, int numclusters, int numsamples) {
    float weight = 0.25;
    CvScalar colour;
    int x, y, i = 0;
    cvZero( points );
    cvZero( cluster );

    for( x = 0; x < in->width; x++ ) {
        for( y = 0; y < in->height; y++ ) {
            colour = cvGet2D( in, y, x );
            //printf(" %d,%d, %d\n", x, y, i);
            CV_MAT_ELEM( *points, float, i, 0 ) = (1 - weight) * colour.val[0]; // R or B
            CV_MAT_ELEM( *points, float, i, 1 ) = (1 - weight) * colour.val[1]; // G
            CV_MAT_ELEM( *points, float, i, 2 ) = (1 - weight) * colour.val[2]; // B or R
            CV_MAT_ELEM( *points, float, i, 3 ) = weight * (x * 255 / in->width);  // x in [0,255]
            CV_MAT_ELEM( *points, float, i, 4 ) = weight * (y * 255 / in->height); // y in [0,255]
            i++; // don't put the increment inside CV_MAT_ELEM!
        }
    }

    // 'points' is a matrix of N rows where each row holds one sample to cluster, here: RGBXY
    cvKMeans2( points, numclusters, cluster,
               cvTermCriteria( CV_TERMCRIT_EPS + CV_TERMCRIT_ITER, 10, 1.0 ) );

    // back-propagate colours to clusters
    cvZero( in );
    i = 0;
    for( x = 0; x < in->width; x++ ) {
        for( y = 0; y < in->height; y++ ) {
            // careful to put the varying value in the 2nd coordinate; it is used in adjust_bodybbox_w_clusters()
            cvSet2D( in, y, x, CV_RGB( 127, (cluster->data.i[i++] + 1) * 255 / numclusters, 127 ) );
        }
    }
}
void floatDoKmeans(int nClusters, int nPoints, float **vetPoints, int *vetPointsClusterIdx) {
    int k;
    CvMat* points = cvCreateMat( nPoints, 1, CV_32FC2 );
    CvMat* clusters = cvCreateMat( nPoints, 1, CV_32SC1 );

    for( k = 0; k < nPoints; k++ ) {
        points->data.fl[k*2]     = vetPoints[k][0];
        points->data.fl[k*2 + 1] = vetPoints[k][1];
    }

    cvKMeans2( points, nClusters, clusters,
               cvTermCriteria( CV_TERMCRIT_EPS + CV_TERMCRIT_ITER, 10, 1.0 ),
               5, 0, 0, 0, 0 );

    for( k = 0; k < nPoints; k++ ) {
        vetPointsClusterIdx[k] = clusters->data.i[k];
    }

    cvReleaseMat( &points );
    cvReleaseMat( &clusters );
    return;
}
int main(int argc, char** argv) {
    // Load and display original image
    puts("Loading image...");
    CvMat* img = cvLoadImageM(PATH, CV_LOAD_IMAGE_COLOR);
    CvMat* orig = cvCloneMat(img);
    cvCvtColor(img, img, CV_BGR2Lab);
    if (SMOOTH_ORIGINAL) {
        cvSmooth(img, img, CV_GAUSSIAN, SMOOTH_ORIGINAL, 0, 0, 0);
    }
    //chromacity(img);
    //show(ORIGINAL_IMAGE_WINDOW_NAME, orig);
    //show(PRETREATED_IMAGE_WINDOW_NAME, img);

    // Generate a Gabor filter bank
    puts("Generating Gabor filter bank...");
    FilterBank filter_bank;
    generate_gabor_filter_bank(&filter_bank, N_BANDWIDTHS, bandwidths,
                               N_FREQS, spatial_frequencies,
                               N_ORIENTATIONS, orientations);

    // Separate each channel
    puts("Separating channels...");
    CvMat *ch1 = cvCreateMat(img->rows, img->cols, CV_8UC1);
    CvMat *ch2 = cvCreateMat(img->rows, img->cols, CV_8UC1);
    CvMat *ch3 = cvCreateMat(img->rows, img->cols, CV_8UC1);
    cvSplit(img, ch1, ch2, ch3, NULL);

    // Apply the filter bank to each of them
    puts("Applying filters...");
    CvMat **results = (CvMat**) malloc(3 * filter_bank.size * sizeof(CvMat*));
    CvMat **filtered_channel_1 = results;
    apply_filter_bank(&filter_bank, ch1, filtered_channel_1);
    CvMat **filtered_channel_2 = results + filter_bank.size;
    apply_filter_bank(&filter_bank, ch2, filtered_channel_2);
    CvMat **filtered_channel_3 = results + 2 * filter_bank.size;
    apply_filter_bank(&filter_bank, ch3, filtered_channel_3);

    // Now sort the samples
    puts("Sorting...");
    int n_channels = (IGNORAR_L ? 2 : 3);
    results = (IGNORAR_L ? filtered_channel_2 : results);
    CvMat *samples;
    sort_samples(n_channels * filter_bank.size, results, &samples);
    printf("Samples: %d(x%d)", samples->rows, samples->cols);
    fflush(stdout);

    // And cluster them
    printf("Clustering... ");
    CvScalar color_tab[8];
    color_tab[0] = CV_RGB(255, 0, 0);
    color_tab[1] = CV_RGB(0, 255, 0);
    color_tab[2] = CV_RGB(0, 0, 255);
    color_tab[3] = CV_RGB(0, 255, 255);
    color_tab[4] = CV_RGB(255, 0, 255);
    color_tab[5] = CV_RGB(255, 255, 0);
    color_tab[6] = CV_RGB(255, 255, 255);
    color_tab[7] = CV_RGB(0, 0, 0);
    CvMat *labels = cvCreateMat(samples->rows, 1, CV_32SC1);
    cvKMeans2(samples, K_CLUSTERS, labels,
              cvTermCriteria(CV_TERMCRIT_EPS + CV_TERMCRIT_ITER, 10, 1.0),
              10, NULL, 0, NULL, NULL);
    puts("done");
    fflush(stdout);

    CvMat *color_labels = cvCreateMat(img->rows, img->cols, CV_8UC3);
    CvMat **classes = malloc(K_CLUSTERS * sizeof(CvMat*));
    for (int i = 0; i < K_CLUSTERS; i++) {
        classes[i] = cvCreateMat(img->rows, img->cols, CV_8UC1);
        cvZero(classes[i]);
    }
    img_from_labels(labels, classes, color_labels, color_tab);
    //show("Labels", labeled_img);
    CvMat *mix = cvCloneMat(img);
    cvAddWeighted(orig, 0.7, color_labels, 0.3, 0, mix);

    //puts("Outputting...");
    char out_file_name[256];
    sprintf(out_file_name, "%s/%s.png", OUTPUT_PATH, "original");
    cvSaveImage(out_file_name, orig, NULL);
    output_base_channels(img);
    if (!IGNORAR_L) {
        output_filtered_images("CH1", filter_bank.size, filtered_channel_1);
    }
    output_filtered_images("CH2", filter_bank.size, filtered_channel_2);
    output_filtered_images("CH3", filter_bank.size, filtered_channel_3);
    output_filter_bank(&filter_bank);

    // output labels
    output_classes(classes, orig);

    // output colored and mix
    sprintf(out_file_name, "%s/%s.png", OUTPUT_PATH, "coloured");
    cvSaveImage(out_file_name, color_labels, NULL);
    sprintf(out_file_name, "%s/%s.png", OUTPUT_PATH, "mix");
    cvSaveImage(out_file_name, mix, NULL);
    //show("Mix", mix);
    //cvWaitKey(0);

    // Should do some cleanup here... :_(
    return (EXIT_SUCCESS);
}
// ######################################################################
// img : input image in RGB space
// K   : number of groups created by k-means
// dist: distance weight used for the segmentation; a higher weight makes
//       each region group more strongly with its neighboring pixels
// ######################################################################
Image<PixRGB<byte> > getKMeans(Image<PixRGB<byte> > img, int K, float dist)
{
    // convert iNVT image to cvImage
    IplImage *cvImg = img2ipl(img);
    int h = img.getHeight();
    int w = img.getWidth();
    LINFO("Image Width %d Height %d cv W %d,H %d", w, h, cvImg->width, cvImg->height);

    CvMat *sample = cvCreateMat(w*h, 1, CV_32FC(5));
    //CvMat *sample = cvCreateMat(w*h, 1, CV_32FC(3));
    CvMat *cluster = cvCreateMat(w*h, 1, CV_32SC1);

    for (int y = 0; y < h; y++) {
        for (int x = 0; x < w; x++) {
            int idx = y*w + x;
            int idxpix = y*w*3 + x*3;
            MAT_ELEM(sample, idx, 0, 0, x*dist);
            MAT_ELEM(sample, idx, 0, 1, y*dist);
            MAT_ELEM(sample, idx, 0, 2, *(cvImg->imageData + idxpix + 0));
            MAT_ELEM(sample, idx, 0, 3, *(cvImg->imageData + idxpix + 1));
            MAT_ELEM(sample, idx, 0, 4, *(cvImg->imageData + idxpix + 2));
        }
    }

    // run cvKMeans2
    cvKMeans2(sample, K, cluster,
              cvTermCriteria(CV_TERMCRIT_EPS + CV_TERMCRIT_ITER,
                             TT_KMEANS_ITERATIONS, TT_KMEANS_PRECISION));

    IplImage *dst = cvCreateImage(cvGetSize(cvImg), 8, 3);
    cvZero(dst);

    std::vector<std::vector<Point2D<int> > > groups;
    groups.resize(K);

    // put each pixel into its labeled bin
    for (int y = 0; y < h; y++) {
        for (int x = 0; x < w; x++) {
            int idx = cluster->data.i[y*w + x];
            groups[idx].push_back(Point2D<int>(x, y));
        }
    }

    // Given the integer label map, create an average-color map
    Image<PixRGB<byte> > output(img.getDims(), ZEROS);

    // compute the average color of each region
    for (size_t grpIdx = 0; grpIdx < groups.size(); grpIdx++) {
        PixRGB<long> avgColor(0, 0, 0);
        for (size_t pntIdx = 0; pntIdx < groups[grpIdx].size(); pntIdx++)
            avgColor += img.getVal(groups[grpIdx][pntIdx]);
        if (groups[grpIdx].size() != 0)
            avgColor /= groups[grpIdx].size();

        // assign the average color to the region's pixels
        for (size_t pntIdx = 0; pntIdx < groups[grpIdx].size(); pntIdx++)
            output.setVal(groups[grpIdx][pntIdx], avgColor);
    }

    cvReleaseMat(&sample);
    cvReleaseMat(&cluster);
    cvReleaseImage(&cvImg);
    return output;
}
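// A hypothetical call of getKMeans(); the cluster count (8) and spatial weight (0.5) are
// illustrative values only, not taken from the original code. A larger 'dist' weights the
// (x, y) coordinates more strongly, producing spatially tighter regions.
Image<PixRGB<byte> > segmentDemo(const Image<PixRGB<byte> > &input)
{
    return getKMeans(input, 8, 0.5F);
}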
int main(int argc, char **argv) {
    float priors[] = { 1.0, 10.0 }; // Edible vs poisonous weights

    CvMat *var_type;
    CvMat *data;                               // jmh add
    data = cvCreateMat(20, 30, CV_8U);         // jmh add

    var_type = cvCreateMat(data->cols + 1, 1, CV_8U);
    cvSet(var_type, cvScalarAll(CV_VAR_CATEGORICAL)); // all these vars are categorical

    CvDTree *dtree;
    dtree = new CvDTree;

    // NOTE: 'responses' and 'missing' are assumed to be defined elsewhere in the original program.
    dtree->train(data, CV_ROW_SAMPLE, responses, 0, 0, var_type, missing,
                 CvDTreeParams(8,     // max depth
                               10,    // min sample count
                               0,     // regression accuracy: N/A here
                               true,  // compute surrogate split, as we have missing data
                               15,    // max number of categories (use sub-optimal algorithm for larger numbers)
                               10,    // cross-validations
                               true,  // use 1SE rule => smaller tree
                               true,  // throw away the pruned tree branches
                               priors // the array of priors; the bigger p_weight, the more attention to the poisonous mushrooms
                               ));
    dtree->save("tree.xml", "MyTree");
    dtree->clear();
    dtree->load("tree.xml", "MyTree");

#define MAX_CLUSTERS 5
    CvScalar color_tab[MAX_CLUSTERS];
    IplImage *img = cvCreateImage(cvSize(500, 500), 8, 3);
    CvRNG rng = cvRNG(0xffffffff);
    color_tab[0] = CV_RGB(255, 0, 0);
    color_tab[1] = CV_RGB(0, 255, 0);
    color_tab[2] = CV_RGB(100, 100, 255);
    color_tab[3] = CV_RGB(255, 0, 255);
    color_tab[4] = CV_RGB(255, 255, 0);
    cvNamedWindow("clusters", 1);

    for (;;) {
        int k, cluster_count = cvRandInt(&rng) % MAX_CLUSTERS + 1;
        int i, sample_count = cvRandInt(&rng) % 1000 + 1;
        CvMat *points = cvCreateMat(sample_count, 1, CV_32FC2);
        CvMat *clusters = cvCreateMat(sample_count, 1, CV_32SC1);

        /* generate a random sample from a multivariate Gaussian distribution */
        for (k = 0; k < cluster_count; k++) {
            CvPoint center;
            CvMat point_chunk;
            center.x = cvRandInt(&rng) % img->width;
            center.y = cvRandInt(&rng) % img->height;
            cvGetRows(points, &point_chunk, k * sample_count / cluster_count,
                      k == cluster_count - 1 ? sample_count
                                             : (k + 1) * sample_count / cluster_count);
            cvRandArr(&rng, &point_chunk, CV_RAND_NORMAL,
                      cvScalar(center.x, center.y, 0, 0),
                      cvScalar(img->width / 6, img->height / 6, 0, 0));
        }

        /* shuffle samples */
        for (i = 0; i < sample_count / 2; i++) {
            CvPoint2D32f *pt1 = (CvPoint2D32f *) points->data.fl + cvRandInt(&rng) % sample_count;
            CvPoint2D32f *pt2 = (CvPoint2D32f *) points->data.fl + cvRandInt(&rng) % sample_count;
            CvPoint2D32f temp;
            CV_SWAP(*pt1, *pt2, temp);
        }

        cvKMeans2(points, cluster_count, clusters,
                  cvTermCriteria(CV_TERMCRIT_EPS + CV_TERMCRIT_ITER, 10, 1.0));

        cvZero(img);
        for (i = 0; i < sample_count; i++) {
            CvPoint2D32f pt = ((CvPoint2D32f *) points->data.fl)[i];
            int cluster_idx = clusters->data.i[i];
            cvCircle(img, cvPointFrom32f(pt), 2, color_tab[cluster_idx], CV_FILLED);
        }

        cvReleaseMat(&points);
        cvReleaseMat(&clusters);
        cvShowImage("clusters", img);

        int key = cvWaitKey(0);
        if (key == 27) // 'ESC'
            break;
    }
}
int main() {
#define MAX_CLUSTER 5
    CvScalar color_tab[MAX_CLUSTER];
    IplImage* img = cvCreateImage(cvSize(500, 500), 8, 3);
    CvRNG rng = cvRNG(0xffffffff);

    color_tab[0] = CV_RGB(255, 0, 0);
    color_tab[1] = CV_RGB(0, 255, 0);
    color_tab[2] = CV_RGB(100, 100, 255);
    color_tab[3] = CV_RGB(255, 0, 255);
    color_tab[4] = CV_RGB(255, 255, 0);
    cvNamedWindow("clusters", 1);

    while (1) {
        int cluster_count = cvRandInt(&rng) % MAX_CLUSTER + 1;
        int sample_count = cvRandInt(&rng) % 1000 + 1;
        CvMat* points = cvCreateMat(sample_count, 1, CV_32FC2);
        CvMat* clusters = cvCreateMat(sample_count, 1, CV_32SC1);

        int k;
        for (k = 0; k < cluster_count; k++) {
            CvPoint center;
            CvMat point_chunk;
            center.x = cvRandInt(&rng) % (img->width);
            center.y = cvRandInt(&rng) % (img->height);
            cvGetRows(points, &point_chunk,
                      k * sample_count / cluster_count,
                      (k + 1) * sample_count / cluster_count, 1);
            cvRandArr(&rng, &point_chunk, CV_RAND_NORMAL,
                      cvScalar(center.x, center.y, 0, 0),
                      cvScalar(img->width / 6, img->height / 6, 0, 0));
        }

        int i;
        for (i = 0; i < sample_count / 2; i++) { // randomly pick two samples and swap them
            CvPoint2D32f* pt1 = (CvPoint2D32f*)points->data.fl + cvRandInt(&rng) % sample_count;
            CvPoint2D32f* pt2 = (CvPoint2D32f*)points->data.fl + cvRandInt(&rng) % sample_count;
            CvPoint2D32f temp;
            CV_SWAP(*pt1, *pt2, temp);
        }

        cvKMeans2(points, cluster_count, clusters,
                  cvTermCriteria(CV_TERMCRIT_EPS + CV_TERMCRIT_ITER, 10, 1.0),
                  1, 0, 0, 0, 0);

        cvZero(img);
        for (i = 0; i < sample_count; i++) { // draw every sample in its cluster's colour
            CvPoint2D32f pt = ((CvPoint2D32f*)points->data.fl)[i];
            int cluster_idx = clusters->data.i[i];
            cvCircle(img, cvPointFrom32f(pt), 2, color_tab[cluster_idx], CV_FILLED, 8, 0);
        }

        cvReleaseMat(&points);
        cvReleaseMat(&clusters);
        cvShowImage("clusters", img);

        int key = cvWaitKey(0);
        if (key == 27)
            break;
    } // while (1)
    return 0;
}
bool BagOfFeatures::buildKMeans(int numClusters,
                                CvTermCriteria criteria = cvTermCriteria(CV_TERMCRIT_EPS + CV_TERMCRIT_ITER, 5, 1.0),
                                int repeat = 5)
{
    if (numFeatures == 0 || trainObject == NULL)
        return false;
    if (dictionary != NULL)
        cvReleaseMat(&dictionary);

    int i, j, k = 0, l = 0, m = 0;
    int size, index;
    int totalImages;
    int emptyClusters = 0;
    float *ptrRaw = NULL, *ptrCenter = NULL;
    int *ptrIndex = NULL;
    int *indexCount = NULL;

    // create a matrix that will contain all the features
    CvMat* feature_mat = cvCreateMat(numFeatures, descrSize, CV_32FC1);
    CvMat* descriptor_clusters = cvCreateMat(numFeatures, 1, CV_32SC1);

    // keep track of how many descriptors there are in each cluster
    indexCount = new int[numClusters];
    // initialize the count to zero
    for (i = 0; i < numClusters; i++)
        indexCount[i] = 0;

    // For each class...
    for (m = 0; m < numClasses; m++) {
        totalImages = data[m].getTrainSize();
        // For each image in that class...
        for (l = 0; l < totalImages; l++) {
            size = trainObject[m].featureSet[l].size;
            // for each feature in that image...
            for (i = 0; i < size; i++) {
                ptrRaw = (float *)(feature_mat->data.ptr + k * feature_mat->step);
                // put them in the raw descriptors matrix
                for (j = 0; j < descrSize; j++) {
                    ptrRaw[j] = trainObject[m].featureSet[l].descriptors[i][j];
                }
                k++;
            }
        }
    }

    // Cluster the raw matrix with the number of clusters found previously.
    // The clustering is repeated 'repeat' times to get the best result.
    cvKMeans2(feature_mat, numClusters, descriptor_clusters, criteria, repeat);
    cout << "Done clustering... \nRejecting empty clusters..." << endl;

    // Figure out how many descriptors fall into each cluster
    for (i = 0; i < numFeatures; i++) {
        ptrIndex = (int *)(descriptor_clusters->data.ptr + i * descriptor_clusters->step);
        index = *ptrIndex;
        // increment the number of vectors found in that cluster
        indexCount[index]++;
    }

    // Find how many empty clusters there are
    for (i = 0; i < numClusters; i++) {
        if (indexCount[i] == 0) {
            emptyClusters++;
        }
    }

    // Descriptor cluster centers: this considers all the clusters, even the empty ones
    CvMat* raw_cluster_centers = cvCreateMat(numClusters, descrSize, CV_32FC1);
    for (i = 0; i < numClusters; i++) {
        ptrCenter = (float *)(raw_cluster_centers->data.ptr + i * raw_cluster_centers->step);
        for (j = 0; j < descrSize; j++) {
            ptrCenter[j] = 0;
        }
    }
    cout << "Total Empty clusters found: " << emptyClusters
         << " out of " << numClusters << " total clusters" << endl;

    // Calculate the cluster center for the descriptors
    for (i = 0; i < numFeatures; i++) {
        ptrRaw = (float *)(feature_mat->data.ptr + i * feature_mat->step);
        // This gives the cluster index number for each descriptor
        ptrIndex = (int *)(descriptor_clusters->data.ptr + i * descriptor_clusters->step);
        index = *ptrIndex;
        ptrCenter = (float *)(raw_cluster_centers->data.ptr + index * raw_cluster_centers->step);
        // Sum up the vectors of each cluster
        for (j = 0; j < descrSize; j++) {
            ptrCenter[j] += ptrRaw[j];
        }
    }

    dictionary = cvCreateMat(numClusters - emptyClusters, descrSize, CV_32FC1);
    cvSetZero(dictionary);
    k = 0;
    // Copy all the non-empty clusters to the cluster-center matrix
    // and output the clusters to the file
    for (i = 0; i < numClusters; i++) {
        ptrRaw = (float *)(raw_cluster_centers->data.ptr + i * raw_cluster_centers->step);
        if (indexCount[i] > 0) {
            ptrCenter = (float *)(dictionary->data.ptr + k * dictionary->step);
            //cout << i << " \t\t\t" << indexCount[i] << endl << endl;
            for (j = 0; j < descrSize; j++) {
                // Calculate the average by dividing by how many descriptors are in that cluster
                ptrCenter[j] = (ptrRaw[j] / indexCount[i]);
            }
            k++;
        }
    }

    // Release all the matrices allocated
    cvReleaseMat(&feature_mat);
    cvReleaseMat(&descriptor_clusters);
    cvReleaseMat(&raw_cluster_centers);
    // Release the index count
    delete[] indexCount;

    return true;
}
IplImage * find_macbeth( const char *img )
{
    IplImage * macbeth_img = cvLoadImage( img, CV_LOAD_IMAGE_ANYCOLOR | CV_LOAD_IMAGE_ANYDEPTH );

    IplImage * macbeth_original = cvCreateImage( cvSize(macbeth_img->width, macbeth_img->height),
                                                 macbeth_img->depth, macbeth_img->nChannels );
    cvCopy(macbeth_img, macbeth_original);

    IplImage * macbeth_split[3];
    IplImage * macbeth_split_thresh[3];
    for(int i = 0; i < 3; i++) {
        macbeth_split[i] = cvCreateImage( cvSize(macbeth_img->width, macbeth_img->height),
                                          macbeth_img->depth, 1 );
        macbeth_split_thresh[i] = cvCreateImage( cvSize(macbeth_img->width, macbeth_img->height),
                                                 macbeth_img->depth, 1 );
    }
    cvSplit(macbeth_img, macbeth_split[0], macbeth_split[1], macbeth_split[2], NULL);

    if( macbeth_img )
    {
        int adaptive_method = CV_ADAPTIVE_THRESH_MEAN_C;
        int threshold_type = CV_THRESH_BINARY_INV;
        int block_size = cvRound( MIN(macbeth_img->width,macbeth_img->height)*0.02)|1;
        fprintf(stderr,"Using %d as block size\n", block_size);

        double offset = 6;

        // do an adaptive threshold on each channel
        for(int i = 0; i < 3; i++) {
            cvAdaptiveThreshold(macbeth_split[i], macbeth_split_thresh[i], 255,
                                adaptive_method, threshold_type, block_size, offset);
        }

        IplImage * adaptive = cvCreateImage( cvSize(macbeth_img->width, macbeth_img->height), IPL_DEPTH_8U, 1 );

        // OR the binary threshold results together
        cvOr(macbeth_split_thresh[0], macbeth_split_thresh[1], adaptive);
        cvOr(macbeth_split_thresh[2], adaptive, adaptive);

        for(int i = 0; i < 3; i++) {
            cvReleaseImage( &(macbeth_split[i]) );
            cvReleaseImage( &(macbeth_split_thresh[i]) );
        }

        int element_size = (block_size/10)+2;
        fprintf(stderr,"Using %d as element size\n", element_size);

        // do an opening on the threshold image
        IplConvKernel * element = cvCreateStructuringElementEx(element_size, element_size,
                                                               element_size/2, element_size/2, CV_SHAPE_RECT);
        cvMorphologyEx(adaptive, adaptive, NULL, element, CV_MOP_OPEN);
        cvReleaseStructuringElement(&element);

        CvMemStorage* storage = cvCreateMemStorage(0);

        CvSeq* initial_quads = cvCreateSeq( 0, sizeof(*initial_quads), sizeof(void*), storage );
        CvSeq* initial_boxes = cvCreateSeq( 0, sizeof(*initial_boxes), sizeof(CvBox2D), storage );

        // find contours in the threshold image
        CvSeq * contours = NULL;
        cvFindContours(adaptive, storage, &contours);

        int min_size = (macbeth_img->width*macbeth_img->height)/(MACBETH_SQUARES*100);

        if(contours) {
            int count = 0;
            for( CvSeq* c = contours; c != NULL; c = c->h_next) {
                CvRect rect = ((CvContour*)c)->rect;
                // only interested in contours with these restrictions
                if(CV_IS_SEQ_HOLE(c) && rect.width*rect.height >= min_size) {
                    // only interested in quad-like contours
                    CvSeq * quad_contour = find_quad(c, storage, min_size);
                    if(quad_contour) {
                        cvSeqPush( initial_quads, &quad_contour );
                        count++;
                        rect = ((CvContour*)quad_contour)->rect;

                        CvScalar average = contour_average((CvContour*)quad_contour, macbeth_img);

                        CvBox2D box = cvMinAreaRect2(quad_contour, storage);
                        cvSeqPush( initial_boxes, &box );

                        // fprintf(stderr,"Center: %f %f\n", box.center.x, box.center.y);

                        double min_distance = MAX_RGB_DISTANCE;
                        CvPoint closest_color_idx = cvPoint(-1,-1);
                        for(int y = 0; y < MACBETH_HEIGHT; y++) {
                            for(int x = 0; x < MACBETH_WIDTH; x++) {
                                double distance = euclidean_distance_lab(average, colorchecker_srgb[y][x]);
                                if(distance < min_distance) {
                                    closest_color_idx.x = x;
                                    closest_color_idx.y = y;
                                    min_distance = distance;
                                }
                            }
                        }

                        CvScalar closest_color = colorchecker_srgb[closest_color_idx.y][closest_color_idx.x];
                        // fprintf(stderr,"Closest color: %f %f %f (%d %d)\n",
                        //     closest_color.val[2], closest_color.val[1], closest_color.val[0],
                        //     closest_color_idx.x, closest_color_idx.y);

                        // cvDrawContours( macbeth_img, quad_contour, cvScalar(255,0,0), cvScalar(0,0,255), 0, element_size );
                        // cvCircle( macbeth_img, cvPointFrom32f(box.center), element_size*6, cvScalarAll(255), -1 );
                        // cvCircle( macbeth_img, cvPointFrom32f(box.center), element_size*6, closest_color, -1 );
                        // cvCircle( macbeth_img, cvPointFrom32f(box.center), element_size*4, average, -1 );
                        // CvRect rect = contained_rectangle(box);
                        // cvRectangle( macbeth_img, cvPoint(rect.x,rect.y),
                        //              cvPoint(rect.x+rect.width, rect.y+rect.height),
                        //              cvScalarAll(0), element_size );
                    }
                }
            }

            ColorChecker found_colorchecker;

            fprintf(stderr,"%d initial quads found", initial_quads->total);
            if(count > MACBETH_SQUARES) {
                fprintf(stderr," (probably a Passport)\n");

                CvMat* points = cvCreateMat( initial_quads->total, 1, CV_32FC2 );
                CvMat* clusters = cvCreateMat( initial_quads->total, 1, CV_32SC1 );

                CvSeq* partitioned_quads[2];
                CvSeq* partitioned_boxes[2];
                for(int i = 0; i < 2; i++) {
                    partitioned_quads[i] = cvCreateSeq( 0, sizeof(**partitioned_quads), sizeof(void*), storage );
                    partitioned_boxes[i] = cvCreateSeq( 0, sizeof(**partitioned_boxes), sizeof(CvBox2D), storage );
                }

                // set up the points sequence for cvKMeans2, using the box centers
                for(int i = 0; i < initial_quads->total; i++) {
                    CvBox2D box = (*(CvBox2D*)cvGetSeqElem(initial_boxes, i));
                    cvSet1D(points, i, cvScalar(box.center.x, box.center.y));
                }

                // partition into two clusters: passport and colorchecker
                cvKMeans2( points, 2, clusters,
                           cvTermCriteria( CV_TERMCRIT_EPS + CV_TERMCRIT_ITER, 10, 1.0 ) );

                for(int i = 0; i < initial_quads->total; i++) {
                    CvPoint2D32f pt = ((CvPoint2D32f*)points->data.fl)[i];
                    int cluster_idx = clusters->data.i[i];
                    cvSeqPush( partitioned_quads[cluster_idx], cvGetSeqElem(initial_quads, i) );
                    cvSeqPush( partitioned_boxes[cluster_idx], cvGetSeqElem(initial_boxes, i) );
                    // cvCircle( macbeth_img, cvPointFrom32f(pt), element_size*2,
                    //           cvScalar(255*cluster_idx,0,255-(255*cluster_idx)), -1 );
                }

                ColorChecker partitioned_checkers[2];

                // check each of the two partitioned sets for the best colorchecker
                for(int i = 0; i < 2; i++) {
                    partitioned_checkers[i] = find_colorchecker(partitioned_quads[i], partitioned_boxes[i],
                                                                storage, macbeth_img, macbeth_original);
                }

                // use the colorchecker with the lowest error
                found_colorchecker = partitioned_checkers[0].error < partitioned_checkers[1].error ?
                                     partitioned_checkers[0] : partitioned_checkers[1];

                cvReleaseMat( &points );
                cvReleaseMat( &clusters );
            }
            else { // just one colorchecker to test
                fprintf(stderr,"\n");
                found_colorchecker = find_colorchecker(initial_quads, initial_boxes,
                                                       storage, macbeth_img, macbeth_original);
            }

            // render the found colorchecker
            draw_colorchecker(found_colorchecker.values, found_colorchecker.points,
                              macbeth_img, found_colorchecker.size);

            // print out the colorchecker info
            for(int y = 0; y < MACBETH_HEIGHT; y++) {
                for(int x = 0; x < MACBETH_WIDTH; x++) {
                    CvScalar this_value = cvGet2D(found_colorchecker.values, y, x);
                    CvScalar this_point = cvGet2D(found_colorchecker.points, y, x);
                    printf("%.0f,%.0f,%.0f,%.0f,%.0f\n",
                           this_point.val[0], this_point.val[1],
                           this_value.val[2], this_value.val[1], this_value.val[0]);
                }
            }
            printf("%0.f\n%f\n", found_colorchecker.size, found_colorchecker.error);
        }

        cvReleaseMemStorage( &storage );

        if( macbeth_original ) cvReleaseImage( &macbeth_original );
        if( adaptive ) cvReleaseImage( &adaptive );

        return macbeth_img;
    }

    if( macbeth_img ) cvReleaseImage( &macbeth_img );

    return NULL;
}
// Parameters: nCuster is the number of clusters for k-means
int color_cluster(char *filename, int nCuster)
{
    IplImage* img = cvLoadImage(filename);
    int i, j;
    // sample matrix: CV_32FC3 = 32-bit float, 3 channels (one row per pixel of the color image)
    CvMat *samples = cvCreateMat((img->width) * (img->height), 1, CV_32FC3);
    // label matrix: CV_32SC1 = 32-bit integer, 1 channel
    CvMat *clusters = cvCreateMat((img->width) * (img->height), 1, CV_32SC1);

    int k = 0;
    for (i = 0; i < img->width; i++) {
        for (j = 0; j < img->height; j++) {
            CvScalar s;
            // read the three channel values (BGR) of each pixel
            s.val[0] = (float)cvGet2D(img, j, i).val[0]; // B
            s.val[1] = (float)cvGet2D(img, j, i).val[1]; // G
            s.val[2] = (float)cvGet2D(img, j, i).val[2]; // R
            cvSet2D(samples, k++, 0, s); // append the pixel's channel values to the sample matrix
        }
    }

    // Run the clustering: at most 100 iterations, termination accuracy 1.0.
    // The number of clusters could later be learned instead of being fixed.
    cvKMeans2(samples, nCuster, clusters, cvTermCriteria(CV_TERMCRIT_ITER, 100, 1.0));

    // image used to display the clustering result
    IplImage *binimg = cvCreateImage(cvSize(img->width, img->height), IPL_DEPTH_8U, 1);
    // one image per cluster, to show each cluster separately
    IplImage *cluster_img0 = cvCreateImage(cvSize(img->width, img->height), IPL_DEPTH_8U, 1);
    IplImage *cluster_img1 = cvCreateImage(cvSize(img->width, img->height), IPL_DEPTH_8U, 1);
    IplImage *cluster_img2 = cvCreateImage(cvSize(img->width, img->height), IPL_DEPTH_8U, 1);

    k = 0;
    int val = 0;
    float step = 255 / (nCuster - 1);
    CvScalar bg = {255, 0, 0, 0}; // white background

    for (i = 0; i < img->width; i++) {
        for (j = 0; j < img->height; j++) {
            cvSet2D(cluster_img0, j, i, bg);
            cvSet2D(cluster_img1, j, i, bg);
            cvSet2D(cluster_img2, j, i, bg);
        }
    }

    for (i = 0; i < img->width; i++) {
        for (j = 0; j < img->height; j++) {
            val = (int)clusters->data.i[k++];
            CvScalar s;
            s.val[0] = 255 - val * step; // map each cluster label to a different gray value
            cvSet2D(binimg, j, i, s);
            // split the clusters into separate images
            switch (val) {
            case 0: cvSet2D(cluster_img0, j, i, s); break; // "white" cluster
            case 1: cvSet2D(cluster_img1, j, i, s); break; // "gray" cluster
            case 2: cvSet2D(cluster_img2, j, i, s); break; // "black" cluster
            default: break;
            }
        }
    }

    cvSaveImage("PicVideo//cluster_img0.png", cluster_img0);
    cvSaveImage("PicVideo//cluster_img1.png", cluster_img1);
    cvSaveImage("PicVideo//cluster_img2.png", cluster_img2);

    cvNamedWindow("original image", 1);
    cvShowImage("original image", img);
    cvNamedWindow("clustered image", 1);
    cvShowImage("clustered image", binimg);
    cvSaveImage("PicVideo//clusterimg.png", binimg);
    cvWaitKey(0); // wait for a key press

    cvDestroyWindow("original image");
    cvDestroyWindow("clustered image");
    cvReleaseMat(&samples);
    cvReleaseMat(&clusters);
    cvReleaseImage(&img);
    cvReleaseImage(&binimg);
    cvReleaseImage(&cluster_img0);
    cvReleaseImage(&cluster_img1);
    cvReleaseImage(&cluster_img2);
    return 0;
}
bool ColorImageSegmentByKMeans2(const IplImage *img, IplImage *pResult, int nClusters, int sortFlag)
{
    assert(img != NULL && pResult != NULL);
    assert(img->nChannels == 3 && pResult->nChannels == 1);

    int i, j;
    // sample matrix: CV_32FC3 = 32-bit float, 3 channels (color image)
    CvMat *samples = cvCreateMat((img->width) * (img->height), 1, CV_32FC3);
    // label matrix: CV_32SC1 = 32-bit integer, 1 channel
    CvMat *clusters = cvCreateMat((img->width) * (img->height), 1, CV_32SC1);

    int k = 0;
    for (i = 0; i < img->width; i++) {
        for (j = 0; j < img->height; j++) {
            CvScalar s;
            // read the three channel values (RGB) of each pixel
            s.val[0] = (float)cvGet2D(img, j, i).val[0];
            s.val[1] = (float)cvGet2D(img, j, i).val[1];
            s.val[2] = (float)cvGet2D(img, j, i).val[2];
            cvSet2D(samples, k++, 0, s); // append the pixel's channel values to the sample matrix
        }
    }

    // run the clustering: at most 50 iterations, termination accuracy 1.0
    cvKMeans2(samples, nClusters, clusters, cvTermCriteria(CV_TERMCRIT_ITER, 50, 1.0));

    k = 0;
    int val = 0;
    float step = 255 / (nClusters - 1);
    for (i = 0; i < img->width; i++) {
        for (j = 0; j < img->height; j++) {
            val = (int)clusters->data.i[k++];
            CvScalar s;
            s.val[0] = 255 - val * step; // map each cluster label to a different gray value
            cvSet2D(pResult, j, i, s);   // write it to the output pixel
        }
    }

    cvReleaseMat(&samples);
    cvReleaseMat(&clusters);
    return true;
}
bool GrayImageSegmentByKMeans2(const IplImage *pImg, IplImage *pResult, int nClusters, int sortFlag)
{
    assert(pImg != NULL && pImg->nChannels == 1);

    // sample matrix: CV_32FC1 = 32-bit float, 1 channel (gray image)
    CvMat *samples = cvCreateMat((pImg->width) * (pImg->height), 1, CV_32FC1);
    // label matrix: CV_32SC1 = 32-bit integer, 1 channel
    CvMat *clusters = cvCreateMat((pImg->width) * (pImg->height), 1, CV_32SC1);
    // cluster-center matrix
    CvMat *centers = cvCreateMat(nClusters, 1, CV_32FC1);

    // copy the source image into the sample matrix
    {
        int k = 0;
        CvScalar s;
        for (int i = 0; i < pImg->width; i++) {
            for (int j = 0; j < pImg->height; j++) {
                s.val[0] = (float)cvGet2D(pImg, j, i).val[0];
                cvSet2D(samples, k++, 0, s);
            }
        }
    }

    // run the clustering: at most 50 iterations, termination accuracy 1.0
    cvKMeans2(samples, nClusters, clusters,
              cvTermCriteria(CV_TERMCRIT_ITER + CV_TERMCRIT_EPS, 50, 1.0),
              1, 0, 0, centers);

    // output directly, without sorting the clusters
    if (sortFlag == 0) {
        int k = 0;
        int val = 0;
        float step = 255 / ((float)nClusters - 1);
        CvScalar s;
        for (int i = 0; i < pImg->width; i++) {
            for (int j = 0; j < pImg->height; j++) {
                val = (int)clusters->data.i[k++];
                s.val[0] = 255 - val * step; // map each cluster label to a different gray value
                cvSet2D(pResult, j, i, s);   // write it to the output pixel
            }
        }
        cvReleaseMat(&samples);
        cvReleaseMat(&clusters);
        cvReleaseMat(&centers);
        return true;
    }

    // the sorted-output branch is not implemented in this snippet
    cvReleaseMat(&samples);
    cvReleaseMat(&clusters);
    cvReleaseMat(&centers);
    return false;
}
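// The 'sortFlag' parameter in the two functions above is accepted but its sorted path is not
// implemented. A minimal sketch (an assumption, not the original author's code) of how the
// labels could be remapped so that output gray levels follow ascending cluster-center
// intensity; it expects the nClusters x 1 CV_32FC1 'centers' matrix produced by cvKMeans2.
static void RemapLabelsByCenter(const CvMat *centers, CvMat *clusters, int nClusters)
{
    // rank[c] = number of centers with a smaller value than center c
    int *rank = new int[nClusters];
    for (int c = 0; c < nClusters; c++) {
        rank[c] = 0;
        for (int d = 0; d < nClusters; d++) {
            if (centers->data.fl[d] < centers->data.fl[c])
                rank[c]++;
        }
    }
    // relabel every sample with the rank of its cluster center
    for (int k = 0; k < clusters->rows; k++)
        clusters->data.i[k] = rank[clusters->data.i[k]];
    delete[] rank;
}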