void LNLP::prepare_forecast() { if(remake_vectors) { make_vectors(); init_distances(); } if(remake_targets) make_targets(); if(remake_ranges) { set_indices_from_range(lib_indices, lib_ranges, (E-1)*tau, -std::max(0, tp), true); set_indices_from_range(pred_indices, pred_ranges, (E-1)*tau, -std::max(0, tp), false); check_cross_validation(); which_lib = which_indices_true(lib_indices); which_pred = which_indices_true(pred_indices); remake_ranges = false; } compute_distances(); return; }
/*
 * Leave-one-out classification over the whole dataset.
 *
 * For each series t1, distances to every series t2 are computed and
 * normalized, then each enabled criterion predicts a class for t1;
 * a criterion's score counts its misclassifications.  Per-criterion
 * error rates are printed when done.
 */
void classifyDatasetLOO(dataset *current, distances *distanceSet, criteria *criterias)
{
    int t1, t2, i;
    float **distSet;
    float *scores;
    int classFound;
    series *ts1, *ts2;

    distSet = malloc(current->cardinality * sizeof(float *));
    scores = calloc(criterias->nb_criteria, sizeof(float));
    if (distSet == NULL || scores == NULL) {
        /* allocation failure: report and bail out instead of dereferencing NULL */
        fprintf(stderr, "classifyDatasetLOO: out of memory\n");
        free(distSet);
        free(scores);
        return;
    }

    for (t1 = 0; t1 < current->cardinality; t1++) {
        ts1 = current->data[t1]; /* loop-invariant: hoisted out of the inner loop */
        for (t2 = 0; t2 < current->cardinality; t2++) {
            ts2 = current->data[t2];
            /* NOTE(review): the rows stored here are never freed; if
             * compute_distances allocates per call this leaks one row
             * per pair — verify ownership and free after use if safe. */
            distSet[t2] = compute_distances(ts1, ts2, distanceSet);
        }

        normalize_distance(distSet, distanceSet->nb_compute, current->cardinality);

        for (i = 0; i < criterias->nb_criteria; i++) {
            if (criterias->compute[i]) {
                classFound = criterias->functions[i](distSet, current->classes,
                                                     current->cardinality,
                                                     distanceSet->nb_compute,
                                                     t1, current->nb_classes);
                if (classFound != current->classes[t1])
                    scores[i]++; /* one more miss for this criterion */
            }
        }
    }

    printf("Global results :\n");
    for (i = 0; i < criterias->nb_criteria; i++)
        printf("%s \t : %f\n", criterias->name[i],
               (scores[i] / (float)current->cardinality));

    free(scores);
    free(distSet);
    return;
}
/*}}}*/
/* Build per-tower chart data from the node tree, then compute
 * inter-tower distances.  The 'all' parameter is accepted for
 * interface compatibility but is not used in this function. */
void tcs(struct node *top, struct tower_table *table, int all)/*{{{*/
{
    struct tower_data *data;

    data = malloc(table->n_towers * sizeof(struct tower_data));
    if (data == NULL) {
        /* allocation failure: nothing sensible to chart, so return early */
        fprintf(stderr, "tcs: out of memory\n");
        return;
    }
    make_charts(top, table, data);
    compute_distances(table, data);
    /* NOTE(review): 'data' is never freed; if neither make_charts nor
     * compute_distances retains the pointer this leaks one array per
     * call — verify and free here if safe. */
}
void WorkerStemFit::run()
{
    // Fit stem cylinders to the cloud using this worker's parameter trial.
    Stem_fit fit(_cloud, _min_height, _bin_width, _epsilon);
    std::vector<pcl::ModelCoefficients> cylinders = fit.getCylinders();

    // Score the fit: point-to-cylinder distances, restricted to the
    // points the fit actually used, summarized as a squared mean.
    std::vector<float> all_dists = compute_distances(cylinders);
    std::vector<float> fitted_dists = reduce_distances(all_dists, cylinders);
    float score = squared_mean(fitted_dists);

    // Report this (bin_width, epsilon) trial's score to the optimizer.
    get_optimization()->updateCoeff(score, _bin_width, _epsilon);
}
/*
 * Usage: prog <root> [subset ...]
 *
 * Loads vectors/<root>.ds, computes pairwise distances, and clusters the
 * full set; then, for each named subset, loads the subset files and
 * re-clusters in three ranking modes (normal / baseline / best).
 *
 * Fix: sprintf -> snprintf.  The name buffers are fixed-size and the
 * components come from untrusted argv, so unbounded sprintf could
 * overflow them.
 */
int main (int argc, char *argv[])
{
    if (argc < 2)
        usage();

    char *root = argv[1];

    char vecfn[255];
    snprintf(vecfn, sizeof vecfn, "vectors/%s.ds", root);
    printf("vector file '%s'\n", vecfn);
    load_vectors(vecfn);
    compute_distances();

    char outfn[255];
    snprintf(outfn, sizeof outfn, "results-gonzalez/%s.rank", root);
    cluster(outfn, 0);

    int index = 2;
    while (index < argc) {
        char subsetfn[255];
        snprintf(subsetfn, sizeof subsetfn, "vectors/%s", argv[index]);
        printf("subset file '%s'\n", subsetfn);

        char subsetfn2[255];
        snprintf(subsetfn2, sizeof subsetfn2, "vectors/%s.all", argv[index]);
        printf("subset file '%s'\n", subsetfn2);

        load_subsets(subsetfn, subsetfn2);

        /* one clustering run per ranking mode (0=normal, 1=baseline, 2=best) */
        char outfn2[255];
        snprintf(outfn2, sizeof outfn2, "results-gonzalez/%s.%s.rank", root, argv[index]);
        cluster(outfn2, 0);
        snprintf(outfn2, sizeof outfn2, "results-gonzalez/%s.%s.baseline.rank", root, argv[index]);
        cluster(outfn2, 1);
        snprintf(outfn2, sizeof outfn2, "results-gonzalez/%s.%s.best.rank", root, argv[index]);
        cluster(outfn2, 2);

        index++;
    }
    return 0;
}
/*
 * Entry point: treat every command-line argument as a file path, build
 * the file-info table, and run the compression-based distance
 * computation over it.
 *
 * Fix: malloc results are now checked; on failure we clean up what was
 * already allocated and exit with a non-zero status instead of
 * dereferencing NULL.
 */
int main(int argc, char *argv[])
{
    t_file_info *files_info; /* array holding the list of files */
    int nbr;                 /* number of files given on the command line */
    int i;

    nbr = argc - 1;
    files_info = malloc(nbr*sizeof(*files_info));
    if (files_info == NULL && nbr > 0) {
        fprintf(stderr, "out of memory\n");
        return 1;
    }

    for (i = 0; i < nbr; i++) {
        /* +1 for the terminating NUL */
        files_info[i].filepath = malloc(strlen(argv[i+1])*sizeof(char)+1);
        if (files_info[i].filepath == NULL) {
            fprintf(stderr, "out of memory\n");
            while (--i >= 0)
                free(files_info[i].filepath);
            free(files_info);
            return 1;
        }
        files_info[i].compressed_size = -1; /* "not yet compressed" sentinel */
        strcpy(files_info[i].filepath, argv[i+1]);
    }

    compute_distances(files_info, nbr, distance_compression);
    /*compute_distances(files_info, nbr, distance_lancaster);*/

    for (i = 0; i < nbr; i++) {
        free(files_info[i].filepath);
    }
    free(files_info);
    return 0;
}
/*
 * treedist main: read tree file(s), build grouping pattern arrays, and
 * compute tree-distance output according to the selected tree pairing
 * mode.  Sizing of the group hash (maxgrp) is bounded carefully to
 * avoid overflow — see the long EWFIX.BUG.756 discussion below.
 */
int main(int argc, Char *argv[])
{
  pattern_elm ***pattern_array; /* per-tree grouping/order/timesseen patterns */
  long tip_count = 0;           /* number of tips in the input trees */
  double ln_maxgrp;             /* log of the current maxgrp candidate */
  double ln_maxgrp1;            /* log of the total-tree-size bound */
  double ln_maxgrp2;            /* log of the tip-count (power-of-two) bound */
  node * p;                     /* cursor for freeing the grbg list at the end */

#ifdef MAC
  argc = 1;                /* macsetup("Treedist", ""); */
  argv[0] = "Treedist";
#endif
  init(argc, argv);
  emboss_getoptions("ftreedist",argc,argv);
  /* Initialize option-based variables, then ask for changes regarding their values. */
  ntrees = 0.0;
  lasti = -1;

  /* read files to determine size of structures we'll be working with */
  countcomma(ajStrGetuniquePtr(&phylotrees[0]->Tree),&tip_count);
  tip_count++; /* countcomma does a raw comma count, tips is one greater */

  /*
   * EWFIX.BUG.756 -- this section may be killed if a good solution
   * to bug 756 is found
   *
   * inside cons.c there are several arrays which are allocated
   * to size "maxgrp", the maximum number of groups (sets of
   * tips more closely connected than the rest of the tree) we
   * can see as the code executes.
   *
   * We have two measures we use to determine how much space to
   * allot:
   *   (1) based on the tip count of the trees in the infile
   *   (2) based on total number of trees in infile, and
   *
   * (1) -- Tip Count Method
   * Since each group is a subset of the set of tips we must
   * represent at most pow(2,tips) different groups.  (Technically
   * two fewer since we don't store the empty or complete subsets,
   * but let's keep this simple.
   *
   * (2) -- Total Tree Size Method
   * Each tree we read results in
   *   singleton groups for each tip, plus
   *   a group for each interior node except the root
   * Since the singleton tips are identical for each tree, this gives
   * a bound of  #tips + ( #trees * (# tips - 2 ) )
   *
   * Ignoring small terms where expedient, either of the following should
   * result in an adequate allocation:
   *   pow(2,#tips)
   *   (#trees + 1) * #tips
   *
   * Since "maxgrp" is a limit on the number of items we'll need to put
   * in a hash, we double it to make space for quick hashing
   *
   * BUT -- all of this has the possibility for overflow, so -- let's
   * make the initial calculations with doubles and then convert
   */

  /* limit chosen to make hash arithmetic work */
  maxgrp = LONG_MAX / 2;
  ln_maxgrp = log((double)maxgrp);

  /* 2 * (#trees + 1) * #tips
   * NOTE(review): this log omits the "+1" that the later maxgrp
   * assignment includes; the bound is still conservative enough, but
   * the comment and the two expressions disagree slightly — verify. */
  ln_maxgrp1 = log(2.0 * (double)tip_count * ((double)trees_in_1 + (double)trees_in_2));

  /* ln only for 2 * pow(2,#tips) */
  ln_maxgrp2 = (double)(1 + tip_count) * log(2.0);

  /* now -- find the smallest of the three */
  if(ln_maxgrp1 < ln_maxgrp)
  {
      maxgrp = 2 * (trees_in_1 + trees_in_2 + 1) * tip_count;
      ln_maxgrp = ln_maxgrp1;
  }
  if(ln_maxgrp2 < ln_maxgrp)
  {
      maxgrp = pow(2,tip_count+1);
  }

  /* Read the (first) tree file and put together grouping, order, and timesseen */
  read_groups (&pattern_array, trees_in_1 + trees_in_2, tip_count, phylotrees);

  if ((tree_pairing == ADJACENT_PAIRS) || (tree_pairing == ALL_IN_FIRST))
  {
    /* Here deal with the adjacent or all-in-first pairing difference computation */
    compute_distances (pattern_array, trees_in_1, 0);
  }
  else if (tree_pairing == NO_PAIRING)
  {
    /* Compute the consensus tree. */
    putc('\n', outfile);
    /* consensus(); Reserved for future development */
  }

  if (progress)
    printf("\nOutput written to file \"%s\"\n\n", outfilename);

  /* close all output/input streams before exiting */
  FClose(outtree);
  FClose(intree);
  FClose(outfile);
  if ((tree_pairing == ALL_IN_1_AND_2) || (tree_pairing == CORR_IN_1_AND_2))
    FClose(intree2);

#ifdef MAC
  fixmacfile(outfilename);
  fixmacfile(outtreename);
#endif

  free_patterns (pattern_array, trees_in_1 + trees_in_2);
  clean_up_final();

  /* clean up grbg: walk the garbage node list and release each node's
   * owned buffers before the node itself */
  p = grbg;
  while (p != NULL)
  {
      node * r = p;
      p = p->next;
      free(r->nodeset);
      free(r->view);
      free(r);
  }
  printf("Done.\n\n");

  embExit();
  return 0;
}  /* main */