void clusclus::ComputeSampleDistances() { #pragma omp parallel for for(int i=0; i<num_samples; i++) { for(int j=0; j<i+1; j++) { sample_distances[i*(i+1)/2+j]= ComputeDistance(features[i],features[j]); } } }
void CRegionalMetaModel::Train( const string& strTrainData, const string& strValidData ) { int nInputs = GetInputs(); int nOutputs = GetOutputs(); vector< vector<REAL> > vcInputs; vector< vector<REAL> > vcOutputs; vector< REAL > vcPtCen = m_vcPtCen; dist_pair_vector vcDists; vector< vector<int> > vcIdSet; Release(); //read all the data into vcInputs and vcOutputs. ReadTrainingData( strTrainData, nInputs, nOutputs, vcInputs, vcOutputs ); //compute the distance to the center point. ComputeDistance( vcInputs, vcPtCen, vcDists ); //subdivid the training data into clusters. SubdividTrainingData( vcDists, vcIdSet, m_nMinCutPts ); //create the training set for each hierarch. for( int i=0; i<vcIdSet.size(); i++ ){ sort( vcIdSet[i].begin(), vcIdSet[i].end() ); //write a training set to files and run the matlab trainer WriteTrainingFile( REG_TRAIN_FILE, REG_VALID_FILE, vcInputs, vcOutputs, vcIdSet[i] ); CMetaModel* pModel = CreateMetaModel(); pModel->Train( REG_TRAIN_FILE, REG_VALID_FILE ); CTrustRegion* pRegion = new CTrustRegion(); ComputeTrustRegion( pRegion, vcInputs, vcIdSet[i] ); m_vcMetaModels.push_back( pModel ); pRegion->SetModelId( m_vcMetaModels.size()-1 ); m_vcRegions.push_back( pRegion ); } cdump<<"training finsihed:"<<vcIdSet.size()<<" nets were trained!"<<endl; }
// Runs the full skeleton-extraction pipeline on the volume currently held by
// this object, writing intermediate volumes to disk at each stage.
// MatMin/MatMax bound the intensity range used for the initial binary
// segmentation. NOTE(review): OutFileName is not used in this body —
// the stage helpers appear to derive their own output names; confirm.
void cSkeleton<_DataType>::Skeletonize(char *OutFileName, _DataType MatMin, _DataType MatMax)
{
	// Stage 1: binary segmentation using only the Min & Max intensity values.
	BinarySegment(MatMin, MatMax); // Using only Min & Max intensity values
	// BinarySegment2(MatMin, MatMax); // Removing the zero 2nd D values
	SaveInitVolume(MatMin, MatMax);

	// Stage 2: distance field from the boundary, then its gradient vector flow.
	ComputeDistance(); // Compute the distance from bondary field
	SaveDistanceVolume();
	ComputeGVF(); // Allocate memory to "GVFDistance_mf" and Compute GVF

	// Stage 3: flag voxels with non-uniform gradient (skeleton candidates).
	FlagNonUniformGradient(); // Marking nonuniform voxels
	SaveVolume(VoxelFlags_muc, (float)0.0, (float)255.0, "Flagged");
	printf ("The flagged voxel volume is saved\n"); fflush(stdout);

	// Stage 4: connect the flagged voxels into contiguous structures.
	ConnectingFlaggedVoxels();
	SaveVolume(VoxelFlags_muc, (float)0.0, (float)255.0, "Connected");
	printf ("The Connected voxel volume is saved\n"); fflush(stdout);

	// Stage 5: connected-component labeling; MaxCCLoc_Ret receives a location
	// in the largest component.
	int MaxCCLoc_Ret;
	ConnectedComponents("CC", MaxCCLoc_Ret);
	SaveVolume(CCVolume_muc, (float)0.0, (float)255.0, "CCVolume");
	printf ("The Connected Component volume is saved\n"); fflush(stdout);

	// Stage 6: pick root/end points in the largest component and trace
	// skeletons between them. Threshold_Distance = 70 is a hard-coded
	// pruning threshold — presumably in voxels; TODO confirm units.
	int MaxCCLoc = MaxCCLoc_Ret, RootLoc_Ret, EndLoc_Ret;
	FindingRootAndEndLoc(MaxCCLoc, RootLoc_Ret, EndLoc_Ret);
	int Threshold_Distance = 70;
	ComputeSkeletons(RootLoc_Ret, EndLoc_Ret, Threshold_Distance);
	SaveVolume(Skeletons_muc, (float)0.0, (float)255.0, "Skeletons");
	printf ("The skeleton volume dataset is saved\n"); fflush(stdout);
}
int RTreeOverlap(struct Point *P, struct Rect *R, float max_distance, int dist_func) { register struct Point *p = P; register struct Rect *r = R; float dist_array[NUMDIMS]; float distance; register int i/*, j*/; assert(p && r); for (i=0; i<NUMDIMS; i++) { if (p->position[i] < r->boundary[i]) dist_array[i]=r->boundary[i]-p->position[i]; else if (p->position[i] > r->boundary[i+NUMDIMS]) dist_array[i]=p->position[i]-r->boundary[i+NUMDIMS]; else /* r->boundary[i] <= p->position[i] <= r->boundary[i+NUMDIMS]*/ dist_array[i]=0; } distance=ComputeDistance(dist_array, dist_func); if (distance > max_distance) return FALSE; else return TRUE; }
/*
 * Exhaustively enumerates every permutation of arr[curr..size-1] (keeping
 * arr[0..curr-1] fixed) and returns the largest tour distance found, as
 * scored by ComputeDistance over the whole array of size elements.
 * The array is restored to its input order before returning.
 */
unsigned long permutation(struct city **arr, int curr, int size)
{
	unsigned long best = 0;
	int i;

	/* Base case: the permutation is fully determined — score it. */
	if (curr == size - 1)
		return ComputeDistance(arr, size);

	for (i = curr; i < size; i++)
	{
		unsigned long candidate;

		/* Choose element i for position curr (no-op when i == curr). */
		if (i != curr)
			swap(&arr[curr], &arr[i], sizeof(struct city *));

		candidate = permutation(arr, curr + 1, size);

		/* Undo the swap so the next iteration starts from the same order. */
		if (i != curr)
			swap(&arr[curr], &arr[i], sizeof(struct city *));

		if (candidate > best)
			best = candidate;
	}

	return best;
}
/*------------------------------------------------------------------------*/
// Merges each insignificant, not-yet-merged prototype into its nearest
// surviving neighbor when that neighbor is within a fixed distance
// threshold (0.125). After merging, any remaining insignificant prototype
// that has accumulated enough samples is promoted to significant.
// Debug tracing is enabled when `label` equals the global test_ch.
void MergeInsignificantProtos(LIST ProtoList, const char* label,
                              CLUSTERER *Clusterer, CLUSTERCONFIG *Config) {
  PROTOTYPE *Prototype;
  bool debug = strcmp(test_ch, label) == 0;

  LIST pProtoList = ProtoList;
  iterate(pProtoList) {
    Prototype = (PROTOTYPE *) first_node (pProtoList);
    // Skip prototypes that are already significant or already merged away.
    if (Prototype->Significant || Prototype->Merged)
      continue;
    // Only neighbors closer than this threshold qualify as a match.
    FLOAT32 best_dist = 0.125;
    PROTOTYPE* best_match = NULL;
    // Find the nearest alive prototype.
    LIST list_it = ProtoList;
    iterate(list_it) {
      PROTOTYPE* test_p = (PROTOTYPE *) first_node (list_it);
      if (test_p != Prototype && !test_p->Merged) {
        FLOAT32 dist = ComputeDistance(Clusterer->SampleSize,
                                       Clusterer->ParamDesc,
                                       Prototype->Mean, test_p->Mean);
        if (dist < best_dist) {
          best_match = test_p;
          best_dist = dist;
        }
      }
    }
    if (best_match != NULL && !best_match->Significant) {
      // Both prototypes are insignificant ("red"): merge this one into the
      // match, transferring its samples and marking it dead.
      if (debug)
        tprintf("Merging red clusters (%d+%d) at %g,%g and %g,%g\n",
                best_match->NumSamples, Prototype->NumSamples,
                best_match->Mean[0], best_match->Mean[1],
                Prototype->Mean[0], Prototype->Mean[1]);
      best_match->NumSamples =
          MergeClusters(Clusterer->SampleSize, Clusterer->ParamDesc,
                        best_match->NumSamples, Prototype->NumSamples,
                        best_match->Mean, best_match->Mean, Prototype->Mean);
      Prototype->NumSamples = 0;
      Prototype->Merged = 1;
    } else if (best_match != NULL) {
      // Matched a significant ("green") prototype: no sample transfer —
      // this prototype is simply retired as redundant.
      if (debug)
        tprintf("Red proto at %g,%g matched a green one at %g,%g\n",
                Prototype->Mean[0], Prototype->Mean[1],
                best_match->Mean[0], best_match->Mean[1]);
      Prototype->Merged = 1;
    }
  }
  // Mark significant those that now have enough samples.
  int min_samples = (inT32) (Config->MinSamples * Clusterer->NumChar);
  pProtoList = ProtoList;
  iterate(pProtoList) {
    Prototype = (PROTOTYPE *) first_node (pProtoList);
    // Process insignificant protos that do not match a green one
    if (!Prototype->Significant &&
        Prototype->NumSamples >= min_samples &&
        !Prototype->Merged) {
      if (debug)
        tprintf("Red proto at %g,%g becoming green\n",
                Prototype->Mean[0], Prototype->Mean[1]);
      Prototype->Significant = true;
    }
  }
} /* MergeInsignificantProtos */
// Searches table szTable for rows whose key value is within a size-derived
// tolerance of `row`'s key, then reports rows whose sum-table distance
// (ComputeDistance) is also within tolerance. Results are written to fp.
// When bFindForward is true only keys >= this row's key are scanned
// (avoids double-reporting each pair when callers sweep all rows).
// Relies on globals: g_i (current key index), g_fSumToler, g_pdbcon2,
// g_vStrKeys, g_pnKeyHits, g_pnCollisions, g_pnSumHits, g_nIdentical,
// g_bReportAll.
void FindSimilarities(FILE* fp, char* szTable, mysqlpp::Row& row, bool bFindForward)
{
	int nKeyIndex = COL_KEY1+g_i;
	DWORD dwKeyVal = (DWORD) atoll(row.raw_data(nKeyIndex)); //offset for multikeys
	DWORD dwFileSize = (DWORD) atoll(row.raw_data(COL_SIZE));

	// Key tolerance scales with file size but never drops below MIN_KEY_TOL.
	DWORD dwKeyTolerance = (int)( ((float)dwFileSize) * g_fSumToler / 100.0f );
	dwKeyTolerance = max(dwKeyTolerance, MIN_KEY_TOL);
	DWORD ub = dwKeyVal + dwKeyTolerance;
	DWORD lb = bFindForward ? dwKeyVal : dwKeyVal - dwKeyTolerance;
	// NOTE(review): when !bFindForward and dwKeyVal < dwKeyTolerance, the
	// unsigned subtraction wraps around — confirm keys can never be that small.

	// Streaming ("use") query: rows are fetched one at a time below.
	mysqlpp::Query query = g_pdbcon2->query();
	query.reset();
	query << "SELECT * FROM " << szTable << " WHERE ";
	query << g_vStrKeys[g_i].c_str() << " BETWEEN " << lb << " AND " << ub;
	// printf(query.str().c_str());
	mysqlpp::ResUse res = query.use();
	if (!res)
	{
		// NOTE(review): message has no trailing '\n' — verify intentional.
		printf("FindSimilarities: Failed to find similar rows");
		return; //error with query
	}

	mysqlpp::Row rowsim;
	char szFilePath[MAX_PATH];
	char szSimPath[MAX_PATH];
	DWORD dwSimKeyVal;
	sprintf(szFilePath, "%s%s", row.raw_data(COL_PATH), row.raw_data(COL_NAME));
	bool bAddedHeader = false;  // header for this row printed yet?
	try
	{
		// fetch_row() returns a false-y row at end (and EndOfResults may throw).
		while (rowsim = res.fetch_row())
		{
			sprintf(szSimPath, "%s%s", rowsim.raw_data(COL_PATH), rowsim.raw_data(COL_NAME));
			dwSimKeyVal = (DWORD) atoll(rowsim.raw_data(nKeyIndex));

			// NOTE: Our SQL query doesn't filter out the same file.
			// It also double-counts rows with same key. This weeds those out.
			// Hopefully, this works.
			if ( (dwKeyVal == dwSimKeyVal) && (strcmp(szSimPath, szFilePath) <= 0) )
				continue;

			g_pnKeyHits[g_i]++; // Count hash key collisions
			// TODO: Do this BEFORE or AFTER other weed-out checks?
			if (dwKeyVal == dwSimKeyVal)
				g_pnCollisions[g_i]++;

			// Discard files that differ more than 50% in size
			// Perhaps this should be tolerance% of the file?
			// ***** Add a global to count this
			DWORD dwSimSize = (DWORD) atoll(rowsim.raw_data(COL_SIZE));
			if ( (dwSimSize > 3*dwFileSize/2) || (dwFileSize > 3*dwSimSize/2) )
				continue;

			// Discard files with too large a sum table distance
			DWORD dwDist = ComputeDistance(row, rowsim);
			DWORD dwMaxSumDist = (int) ( ((float)(dwFileSize+dwSimSize) * g_fSumToler / 200.0f) );
			if (dwDist > max(dwMaxSumDist, MIN_SUM_TOL))
				continue;

			// Files are similar... report
			g_pnSumHits[g_i]++;

			// Print current "row" for the first time
			if (!bAddedHeader && g_bReportAll)
			{
				fprintf(fp, "%s\n", szFilePath);
				bAddedHeader = true;
			}

			// Print found similarity
			// NOTE(review): branch deliberately disabled with `false &&` —
			// identical-file handling is skipped; every hit takes the else path.
			if ( false && (dwDist == 0) && AreRowsSameFile(row, rowsim) ) //don't print identical
			{
				if (g_bReportAll)
					fprintf(fp, " <same> %s\n", szFilePath);
				/* // This code deletes identical files, should that be desired
				char szFilePath[MAX_PATH];
				sprintf(szFilePath, "%s%s", row.raw_data(1), row.raw_data(0));
				remove(szFilePath);
				*/
				g_nIdentical++;
			}
			else if (g_bReportAll)
				fprintf(fp, " %6d %s\n", dwDist, szSimPath);
		}
	}
	catch (const mysqlpp::EndOfResults& ){} // thrown when no more rows

	if (bAddedHeader)
		fprintf(fp, "\n");
} // FindSimilarities
// Returns the squared point-to-triangle distance by reusing the plain
// distance computation and squaring its result (avoids duplicating the
// distance logic; the sqrt inside ComputeDistance is not elided —
// presumably acceptable here, verify if this is hot).
T CDistancePointTriangle<T>::ComputeDistanceSqr()
{
	T dist = ComputeDistance();
	return dist * dist;
}
// Grabs the latest camera frame, isolates bright rectangular targets via
// HSV threshold -> convex hull -> area filter, and for the particle aligned
// with the image center classifies it as a 2pt or 3pt goal by aspect ratio,
// updating m_distance/m_elevation/m_relativeAzimuth.
// Returns 1 when a fresh image was processed, 0 otherwise.
int VisionControl::ProcessImage()
{
	if(m_camera->IsFreshImage())
	{
		// NOTE(review): image is NULL here and GetImage receives the pointer
		// by value, so image is still NULL at ThresholdHSV below — likely a
		// null dereference. Verify the camera API contract; the usual pattern
		// allocates a ColorImage and passes its address.
		ColorImage *image = NULL;
		m_camera->GetImage(image);

		// HSV window for target green; area filter keeps the largest blobs.
		Threshold threshold(60,100,90,255,20,255);
		ParticleFilterCriteria2 criteria[] = {IMAQ_MT_AREA,AREA_MINIMUM,65535,false,false};
		BinaryImage *thresholdImage = image->ThresholdHSV(threshold);
		BinaryImage *convexHullImage = thresholdImage->ConvexHull(false);
		BinaryImage *filteredImage = convexHullImage->ParticleFilter(criteria, 1);
		vector<ParticleAnalysisReport> *reports = filteredImage->GetOrderedParticleAnalysisReports();

		for (unsigned int i = 0; i < reports->size(); i++)
		{
			ParticleAnalysisReport *report = &(reports->at(i));

			// first, determine if this is a particle we are looking at.
			// 320/2: horizontal center of the (assumed 320px-wide) frame —
			// skip particles whose bounding box does not straddle it.
			if(report->boundingRect.left > 320/2 ||
			   report->boundingRect.left + report->boundingRect.width < 320/2)
			{
				// particle is not lined up with center of vision
				// note: may not want to do this for autonomous!
				continue;
			}

			// Expected aspect ratios: goal opening plus 4" tape border on
			// each side (54x12 for 3pt, 54x21 for 2pt); the difference*
			// values measure relative deviation from each expectation.
			double aspectRatio = AspectRatio(filteredImage, report);
			double difference3ptGoal = fabs(1-(aspectRatio / ((54.f+4+4)/(12.f+4+4))));
			double difference2ptGoal = fabs(1-(aspectRatio / ((54.f+4+4)/(21.f+4+4))));

			if(difference2ptGoal < 0.25 && difference2ptGoal < difference3ptGoal)
			{
				// Best match: 2-point (middle) goal.
				m_elevation = 0;
				m_distance = ComputeDistance(thresholdImage, report, true);
				m_relativeAzimuth = 0;
			}
			else if(difference3ptGoal < 0.25 && difference3ptGoal < difference2ptGoal)
			{
				// Best match: 3-point (high) goal.
				m_elevation = 0;
				m_distance = ComputeDistance(thresholdImage, report, false);
				m_relativeAzimuth = 0;
			}
			else
			{
				// didn't sufficiently match a target!
			}
		}

		/* Scores *scores = new Scores[reports->size()];
		//Iterate through each particle, scoring it and determining whether it is a target or not
		for (unsigned i = 0; i < reports->size(); i++) {
			ParticleAnalysisReport *report = &(reports->at(i));
			scores[i].rectangularity = ScoreRectangularity(report);
			scores[i].aspectRatioOuter = ScoreAspectRatio(filteredImage, report, true);
			scores[i].aspectRatioInner = ScoreAspectRatio(filteredImage, report, false);
			scores[i].xEdge = ScoreXEdge(thresholdImage, report);
			scores[i].yEdge = ScoreYEdge(thresholdImage, report);
			if(ScoreCompare(scores[i], false)) {
				printf("particle: %d is a High Goal centerX: %f centerY: %f \n", i, report->center_mass_x_normalized, report->center_mass_y_normalized);
				printf("Distance: %f \n", ComputeDistance(thresholdImage, report, false));
			} else if (ScoreCompare(scores[i], true)) {
				printf("particle: %d is a Middle Goal centerX: %f centerY: %f \n", i, report->center_mass_x_normalized, report->center_mass_y_normalized);
				printf("Distance: %f \n", ComputeDistance(thresholdImage, report, true));
			} else {
				printf("particle: %d is not a goal centerX: %f centerY: %f \n", i, report->center_mass_x_normalized, report->center_mass_y_normalized);
			}
			printf("rect: %f ARinner: %f \n", scores[i].rectangularity, scores[i].aspectRatioInner);
			printf("ARouter: %f xEdge: %f yEdge: %f \n", scores[i].aspectRatioOuter, scores[i].xEdge, scores[i].yEdge);
		} */

		// Release every image in the pipeline plus the report list.
		delete image;
		delete thresholdImage;
		delete convexHullImage;
		delete filteredImage;
		delete reports;
		//delete scores;
		return 1;
	}
	return 0;
}