/* Manual smoke test: shuffle a 3-element array and print the result. */
int main() {
    int values[3] = { 4, 5, 7 };

    /* NOTE(review): this 3-argument call disagrees with the 2-argument
       shuffle_ints calls elsewhere in this file — confirm the intended
       signature of shuffle_ints. */
    shuffle_ints(3, values, 20);
    printf("%d %d %d\n", values[0], values[1], values[2]);
}
/* Manual smoke test: print seven ints, shuffle them, then print them again
 * so the two listings can be compared by eye. */
int main() {
    int nums[] = { 0, 1, 2, 3, 4, 5, 6 };

    for (int i = 0; i < 7; i++)
        printf("%d\n", nums[i]);
    printf("\n");

    /* NOTE(review): argument order differs from the (count, array) style
       of the shuffle_ints calls elsewhere in this file — confirm the
       intended signature of shuffle_ints. */
    shuffle_ints(nums, 7);

    for (int i = 0; i < 7; i++)
        printf("%d\n", nums[i]);

    return EXIT_SUCCESS;
}
/*
 * Condensed Nearest Neighbor (CNN) instance reduction.
 *
 * Builds a subset S of ds such that every instance outside S is classified
 * correctly by a k-NN vote (k = n_neighbors) over the members of S.
 * S is seeded with one random instance per class, then instances that the
 * current S misclassifies are added to S; this repeats until a full pass
 * over the non-S instances produces no misclassification.
 *
 * Parameters:
 *   ds          - the dataset to condense (not modified).
 *   n_neighbors - number of neighbors used in the classification vote.
 *
 * Returns a newly allocated Dataset containing only the selected instances;
 * the caller owns it and is responsible for freeing it.
 *
 * NOTE(review): srand(time(NULL)) is called here on every invocation, which
 * is unusual for a library routine — consider seeding once at program start.
 * NOTE(review): allocation results are not checked; on OOM behavior is
 * undefined. Consider an xmalloc-style wrapper if the project has one.
 */
Dataset cnn_reduce(Dataset ds, int n_neighbors) {
    int i, j, k, l;
    int n_classes;
    int* class_labels = NULL;
    int* S = malloc(sizeof(int) * ds.n_instances);        /* selected instance indices */
    int* S_copy = malloc(sizeof(int) * ds.n_instances);   /* sorted copy for set-difference */
    int* non_S = malloc(sizeof(int) * ds.n_instances);    /* instances currently outside S */
    /* For each instance: the S_size that was current when it was last
     * classified correctly, so later passes only test the S members added
     * since then. calloc zero-fills, so every instance starts at 0. */
    int* last_train_S_size = calloc(ds.n_instances, sizeof(int));
    int S_size = 0;
    int non_S_size = 0;
    int S_index;
    /* Per instance: its n_neighbors nearest S members, sorted by distance,
     * stored as dataset instance indices; -1 marks an empty slot. */
    int* nearest = malloc(sizeof(int) * ds.n_instances * n_neighbors);
    int* votes = NULL;
    int neighbor_majority_class;
    int neighbor_majority_class_count;
    bool whole_non_S_classified_correctly = FALSE;
    Dataset ds_reduced;

    fill_int_array(nearest, ds.n_instances * n_neighbors, -1);
    count_classes(ds, &n_classes, &class_labels);
    votes = malloc(sizeof(int) * n_classes);

    /* Seed S with one random instance from each class. */
    srand(time(NULL));
    for (i = 0; i < n_classes; i++)
        while (1) {
            int idx = rand() % ds.n_instances;
            if (ds.y[idx] == class_labels[i]) {
                S[S_size++] = idx;
                break;
            }
        }

    while (!whole_non_S_classified_correctly) {
        whole_non_S_classified_correctly = TRUE;

        /* Copy S to an auxiliary array and sort it so the set difference
         * below can walk both sequences in order. */
        memcpy(S_copy, S, sizeof(int) * S_size);
        qsort(S_copy, S_size, sizeof(int), compare_ints);

        /* Find all instances not in S. */
        S_index = 0;
        non_S_size = 0;
        for (i = 0; i < ds.n_instances; i++)
            if (S_index == S_size || i < S_copy[S_index])
                non_S[non_S_size++] = i;
            else
                S_index++;

        shuffle_ints(non_S_size, non_S);

        for (i = 0; i < non_S_size; i++) {
            /* Update the nearest-neighbor list of non_S[i], testing only
             * the S members added since it was last classified. */
            for (j = last_train_S_size[non_S[i]]; j < S_size; j++) {
                /* BUG FIX: j is a position within S, not a dataset index.
                 * The original code used j directly for both the distance
                 * computation and the stored neighbor, so distances and
                 * votes were taken against arbitrary instances 0..S_size-1
                 * instead of the actual members of S. */
                int candidate = S[j];
                int* nearest_for_i = nearest + non_S[i] * n_neighbors;
                for (k = 0; k < n_neighbors; k++) {
                    if (nearest_for_i[k] < 0) {
                        /* Empty slot: every filled slot before it is
                         * closer, so the candidate belongs here. */
                        nearest_for_i[k] = candidate;
                        break;
                    }
                    if (squared_dist(ds.n_features,
                                     ds.X + ds.n_features * nearest_for_i[k],
                                     ds.X + ds.n_features * non_S[i])
                        > squared_dist(ds.n_features,
                                       ds.X + ds.n_features * non_S[i],
                                       ds.X + ds.n_features * candidate)) {
                        /* Insert at k, shifting farther neighbors down and
                         * dropping the farthest. */
                        for (l = n_neighbors - 1; l >= k + 1; l--)
                            nearest_for_i[l] = nearest_for_i[l - 1];
                        nearest_for_i[k] = candidate;
                        break;
                    }
                }
            }

            /* Count votes for non_S[i]. Slots are filled from the front,
             * so the first -1 means no further neighbors exist. */
            memset(votes, 0, n_classes * sizeof(int));
            for (j = 0; j < n_neighbors; j++) {
                int current_neighbor = nearest[non_S[i] * n_neighbors + j];
                if (current_neighbor >= 0) {
                    int current_class = -1;
                    for (k = 0; k < n_classes; k++)
                        if (ds.y[current_neighbor] == class_labels[k]) {
                            current_class = k;
                            break;
                        }
                    votes[current_class]++;
                } else
                    break;
            }

            /* Find the majority class among the neighbors of non_S[i]. */
            neighbor_majority_class = class_labels[0];
            neighbor_majority_class_count = votes[0];
            for (j = 1; j < n_classes; j++)
                if (votes[j] > neighbor_majority_class_count) {
                    neighbor_majority_class_count = votes[j];
                    neighbor_majority_class = class_labels[j];
                }

            /* Misclassified: add to S and force another pass. Otherwise
             * remember the S size used, so the next pass only has to test
             * newer S members against this instance. */
            if (ds.y[non_S[i]] != neighbor_majority_class) {
                S[S_size++] = non_S[i];
                whole_non_S_classified_correctly = FALSE;
            } else
                last_train_S_size[non_S[i]] = S_size;
        }
    }

    /* Form a new dataset with only the selected instances. */
    ds_reduced = alloc_dataset(ds.n_features, S_size);
    for (i = 0; i < S_size; i++) {
        memcpy(ds_reduced.X + ds.n_features * i,
               ds.X + ds.n_features * S[i],
               sizeof(flpoint) * ds.n_features);
        ds_reduced.y[i] = ds.y[S[i]];
    }

    free(class_labels);
    free(S);
    free(S_copy);
    free(non_S);
    free(nearest);
    free(last_train_S_size);
    free(votes);

    return ds_reduced;
}