void denormalize_net(char *cfgfile, char *weightfile, char *outfile)
{
    gpu_index = -1;
    network *net = load_network(cfgfile, weightfile, 0);
    int i;
    for (i = 0; i < net->n; ++i) {
        layer l = net->layers[i];
        if ((l.type == DECONVOLUTIONAL || l.type == CONVOLUTIONAL) && l.batch_normalize) {
            denormalize_convolutional_layer(l);
            net->layers[i].batch_normalize = 0;
        }
        if (l.type == CONNECTED && l.batch_normalize) {
            denormalize_connected_layer(l);
            net->layers[i].batch_normalize = 0;
        }
        if (l.type == GRU && l.batch_normalize) {
            denormalize_connected_layer(*l.input_z_layer);
            denormalize_connected_layer(*l.input_r_layer);
            denormalize_connected_layer(*l.input_h_layer);
            denormalize_connected_layer(*l.state_z_layer);
            denormalize_connected_layer(*l.state_r_layer);
            denormalize_connected_layer(*l.state_h_layer);
            l.input_z_layer->batch_normalize = 0;
            l.input_r_layer->batch_normalize = 0;
            l.input_h_layer->batch_normalize = 0;
            l.state_z_layer->batch_normalize = 0;
            l.state_r_layer->batch_normalize = 0;
            l.state_h_layer->batch_normalize = 0;
            net->layers[i].batch_normalize = 0;
        }
    }
    save_weights(net, outfile);
    free_network(net);
}
void normalize_net(char *cfgfile, char *weightfile, char *outfile)
{
    gpu_index = -1;
    network *net = load_network(cfgfile, weightfile, 0);
    int i;
    for(i = 0; i < net->n; ++i){
        layer l = net->layers[i];
        if(l.type == CONVOLUTIONAL && !l.batch_normalize){
            net->layers[i] = normalize_layer(l, l.n);
        }
        if (l.type == CONNECTED && !l.batch_normalize) {
            net->layers[i] = normalize_layer(l, l.outputs);
        }
        if (l.type == GRU && l.batch_normalize) {
            *l.input_z_layer = normalize_layer(*l.input_z_layer, l.input_z_layer->outputs);
            *l.input_r_layer = normalize_layer(*l.input_r_layer, l.input_r_layer->outputs);
            *l.input_h_layer = normalize_layer(*l.input_h_layer, l.input_h_layer->outputs);
            *l.state_z_layer = normalize_layer(*l.state_z_layer, l.state_z_layer->outputs);
            *l.state_r_layer = normalize_layer(*l.state_r_layer, l.state_r_layer->outputs);
            *l.state_h_layer = normalize_layer(*l.state_h_layer, l.state_h_layer->outputs);
            net->layers[i].batch_normalize = 1;
        }
    }
    save_weights(net, outfile);
    free_network(net);
}
void mkimg(char *cfgfile, char *weightfile, int h, int w, int num, char *prefix)
{
    network *net = load_network(cfgfile, weightfile, 0);
    image *ims = get_weights(net->layers[0]);
    int n = net->layers[0].n;
    int z;
    for(z = 0; z < num; ++z){
        image im = make_image(h, w, 3);
        fill_image(im, .5);
        int i;
        for(i = 0; i < 100; ++i){
            image r = copy_image(ims[rand()%n]);
            rotate_image_cw(r, rand()%4);
            random_distort_image(r, 1, 1.5, 1.5);
            int dx = rand()%(w-r.w);
            int dy = rand()%(h-r.h);
            ghost_image(r, im, dx, dy);
            free_image(r);
        }
        char buff[256];
        sprintf(buff, "%s/gen_%d", prefix, z);
        save_image(im, buff);
        free_image(im);
    }
    free_network(net);
}
void partial(char *cfgfile, char *weightfile, char *outfile, int max)
{
    gpu_index = -1;
    network *net = load_network(cfgfile, weightfile, 1);
    save_weights_upto(net, outfile, max);
    free_network(net);
}
void predict_regressor(char *cfgfile, char *weightfile, char *filename)
{
    network *net = load_network(cfgfile, weightfile, 0);
    set_batch_network(net, 1);
    srand(2222222);

    clock_t time;
    char buff[256];
    char *input = buff;
    while(1){
        if(filename){
            strncpy(input, filename, 256);
        }else{
            printf("Enter Image Path: ");
            fflush(stdout);
            input = fgets(input, 256, stdin);
            if(!input) return;
            strtok(input, "\n");
        }
        image im = load_image_color(input, 0, 0);
        image sized = letterbox_image(im, net->w, net->h);

        float *X = sized.data;
        time = clock();
        float *predictions = network_predict(net, X);
        printf("Predicted: %f\n", predictions[0]);
        printf("%s: Predicted in %f seconds.\n", input, sec(clock()-time));
        free_image(im);
        free_image(sized);
        if (filename) break;
    }
    free_network(net);
}
void oneoff(char *cfgfile, char *weightfile, char *outfile)
{
    gpu_index = -1;
    network *net = parse_network_cfg(cfgfile);
    int oldn = net->layers[net->n - 2].n;
    int c = net->layers[net->n - 2].c;
    scal_cpu(oldn*c, .1, net->layers[net->n - 2].weights, 1);
    scal_cpu(oldn, 0, net->layers[net->n - 2].biases, 1);
    net->layers[net->n - 2].n = 11921;
    net->layers[net->n - 2].biases += 5;
    net->layers[net->n - 2].weights += 5*c;
    if(weightfile){
        load_weights(net, weightfile);
    }
    net->layers[net->n - 2].biases -= 5;
    net->layers[net->n - 2].weights -= 5*c;
    net->layers[net->n - 2].n = oldn;
    printf("%d\n", oldn);
    layer l = net->layers[net->n - 2];
    copy_cpu(l.n/3, l.biases, 1, l.biases + l.n/3, 1);
    copy_cpu(l.n/3, l.biases, 1, l.biases + 2*l.n/3, 1);
    copy_cpu(l.n/3*l.c, l.weights, 1, l.weights + l.n/3*l.c, 1);
    copy_cpu(l.n/3*l.c, l.weights, 1, l.weights + 2*l.n/3*l.c, 1);
    *net->seen = 0;
    save_weights(net, outfile);
    free_network(net);
}
void test_cgm(char *cfgfile, char *weightfile, char *session)
{
    char *base = basecfg(cfgfile);
    printf("%s\n", base);
    network net = parse_network_cfg(cfgfile);
    load_weights(&net, weightfile);
    freopen("out.txt", "w", stdout);

    int input_len = net.w;
    int stride = net.w/4;
    int i,j,k;

    data train;
    train.shallow = 0;
    train.X = make_matrix(128, net.w*net.h*net.c);
    train.y = make_matrix(128, 1);

    float cbuf[4096*16];
    int cidx = 0;
    int cnt = 0;
    int cntstride = 0;
    int freq[16] = {0};

    FILE *fp = fopen(session, "rb");
    if(!fp) file_error(session);
    while (!feof(fp)) {
        unsigned short bytes[11];
        fread(bytes, 2, 11, fp);
        float fbytes[2];
        fread(fbytes, 4, 2, fp);
        // put into circular buffer
        for (j = 0; j < 10; j++) cbuf[cidx+(j<<12)] = (float)(bytes[j])/65536.f;
        cidx = (1+cidx)&4095;
        cnt++;
        cntstride++;
        if (fbytes[0] > 54 && cnt > input_len && cntstride > stride) {
            cntstride = 0;
            train.y.vals[0][0] = (fbytes[0]-50)/200.0;
            for (k = 0; k < net.w; k++)
                for (j = 0; j < 10; j++) {
                    train.X.vals[0][(j*net.w)+k] = cbuf[(j<<12)+(((cidx-1-net.w+k)+4096)&4095)];
                }
            float *p = network_predict(net, train.X.vals[0]);
            // fprintf(stderr, "%f, %f\n", train.y.vals[0][0], p[0]);
            p[0] = (p[0]*200.0)+50.0;
            fprintf(stdout, "%f, %f\n", fbytes[0], p[0]);
            fprintf(stderr, "%f, %f\n", fbytes[0], p[0]);
        }
    }
    fclose(fp);
    free_network(net);
    free_data(train);
}
void statistics_net(char *cfgfile, char *weightfile)
{
    gpu_index = -1;
    network *net = load_network(cfgfile, weightfile, 0);
    int i;
    for (i = 0; i < net->n; ++i) {
        layer l = net->layers[i];
        if (l.type == CONNECTED && l.batch_normalize) {
            printf("Connected Layer %d\n", i);
            statistics_connected_layer(l);
        }
        if (l.type == GRU && l.batch_normalize) {
            printf("GRU Layer %d\n", i);
            printf("Input Z\n");
            statistics_connected_layer(*l.input_z_layer);
            printf("Input R\n");
            statistics_connected_layer(*l.input_r_layer);
            printf("Input H\n");
            statistics_connected_layer(*l.input_h_layer);
            printf("State Z\n");
            statistics_connected_layer(*l.state_z_layer);
            printf("State R\n");
            statistics_connected_layer(*l.state_r_layer);
            printf("State H\n");
            statistics_connected_layer(*l.state_h_layer);
        }
        printf("\n");
    }
    free_network(net);
}
int main(int argc, char** argv)
{
    int NUM_PASSES = 100;
    if (argc < 2) {
        fprintf(stderr, "Usage: ./mnist <test|inference> [NUMBER_PASSES]\n");
        return 2;
    }
    if (argc >= 3) {
        NUM_PASSES = atoi(argv[2]);
    }
    (void)NUM_PASSES; // only used by the disabled net_test call below

    Network* net = construct_scene_labeling_net();
    initialize_network(net, 1);

    int valid = 0;
    if (!strcmp(argv[1], "inference")) {
        //net_measure_inference_time(net);
        valid = 1;
    }
    if (!strcmp(argv[1], "test")) {
        // net_test(net, input, labels, NUM_PASSES);
        valid = 1;
    }

    free_network(net);
    if (valid) return 0;
    fprintf(stderr, "unrecognised command\n");
    return 2;
}
void operations(char *cfgfile)
{
    gpu_index = -1;
    network *net = parse_network_cfg(cfgfile);
    long ops = numops(net);
    printf("Floating Point Operations: %ld\n", ops);
    printf("Floating Point Operations: %.2f Bn\n", (float)ops/1000000000.);
    free_network(net);
}
void train_mnist_distill(char *cfgfile, char *weightfile)
{
    data_seed = time(0);
    srand(time(0));
    float avg_loss = -1;
    char *base = basecfg(cfgfile);
    printf("%s\n", base);

    network net = parse_network_cfg(cfgfile);
    if(weightfile){
        load_weights(&net, weightfile);
    }
    printf("Learning Rate: %g, Momentum: %g, Decay: %g\n", net.learning_rate, net.momentum, net.decay);

    char *backup_directory = "backup";
    int classes = 10;
    int N = 50000;

    int epoch = (*net.seen)/N;
    data train = load_all_mnist10();
    matrix soft = csv_to_matrix("results/ensemble.csv");

    float weight = .9;
    scale_matrix(soft, weight);
    scale_matrix(train.y, 1. - weight);
    matrix_add_matrix(soft, train.y);

    while(get_current_batch(net) < net.max_batches || net.max_batches == 0){
        clock_t time = clock();

        float loss = train_network_sgd(net, train, 1);
        if(avg_loss == -1) avg_loss = loss;
        avg_loss = avg_loss*.95 + loss*.05;
        if(get_current_batch(net)%100 == 0) {
            printf("%d, %.3f: %f, %f avg, %f rate, %lf seconds, %d images\n", get_current_batch(net), (float)(*net.seen)/N, loss, avg_loss, get_current_rate(net), sec(clock()-time), *net.seen);
        }
        if(*net.seen/N > epoch){
            epoch = *net.seen/N;
            char buff[256];
            sprintf(buff, "%s/%s_%d.weights", backup_directory, base, epoch);
            save_weights(net, buff);
        }
        if(get_current_batch(net)%100 == 0){
            char buff[256];
            sprintf(buff, "%s/%s.backup", backup_directory, base);
            save_weights(net, buff);
        }
    }
    char buff[256];
    sprintf(buff, "%s/%s.weights", backup_directory, base);
    save_weights(net, buff);

    free_network(net);
    free(base);
    free_data(train);
}
void train_go(char *cfgfile, char *weightfile)
{
    data_seed = time(0);
    srand(time(0));
    float avg_loss = -1;
    char *base = basecfg(cfgfile);
    printf("%s\n", base);

    network net = parse_network_cfg(cfgfile);
    if(weightfile){
        load_weights(&net, weightfile);
    }
    printf("Learning Rate: %g, Momentum: %g, Decay: %g\n", net.learning_rate, net.momentum, net.decay);

    char *backup_directory = "/home/pjreddie/backup/";

    char buff[256];
    float *board = calloc(19*19*net.batch, sizeof(float));
    float *move = calloc(19*19*net.batch, sizeof(float));
    moves m = load_go_moves("/home/pjreddie/go.train");
    //moves m = load_go_moves("games.txt");

    int N = m.n;
    int epoch = (*net.seen)/N;
    while(get_current_batch(net) < net.max_batches || net.max_batches == 0){
        clock_t time = clock();

        random_go_moves(m, board, move, net.batch);
        float loss = train_network_datum(net, board, move) / net.batch;
        if(avg_loss == -1) avg_loss = loss;
        avg_loss = avg_loss*.95 + loss*.05;
        printf("%d, %.3f: %f, %f avg, %f rate, %lf seconds, %d images\n", get_current_batch(net), (float)(*net.seen)/N, loss, avg_loss, get_current_rate(net), sec(clock()-time), *net.seen);
        if(*net.seen/N > epoch){
            epoch = *net.seen/N;
            char buff[256];
            sprintf(buff, "%s/%s_%d.weights", backup_directory, base, epoch);
            save_weights(net, buff);
        }
        if(get_current_batch(net)%100 == 0){
            char buff[256];
            sprintf(buff, "%s/%s.backup", backup_directory, base);
            save_weights(net, buff);
        }
        if(get_current_batch(net)%10000 == 0){
            char buff[256];
            sprintf(buff, "%s/%s_%d.backup", backup_directory, base, get_current_batch(net));
            save_weights(net, buff);
        }
    }
    sprintf(buff, "%s/%s.weights", backup_directory, base);
    save_weights(net, buff);

    free_network(net);
    free(base);
}
void oneoff2(char *cfgfile, char *weightfile, char *outfile, int l)
{
    gpu_index = -1;
    network *net = parse_network_cfg(cfgfile);
    if(weightfile){
        load_weights_upto(net, weightfile, 0, net->n);
        load_weights_upto(net, weightfile, l, net->n);
    }
    *net->seen = 0;
    save_weights_upto(net, outfile, net->n);
    free_network(net);
}
void test_lsd(char *cfg, char *weights, char *filename, int gray)
{
    network *net = load_network(cfg, weights, 0);
    set_batch_network(net, 1);
    srand(2222222);

    clock_t time;
    char buff[256];
    char *input = buff;
    int i, imlayer = 0;

    for (i = 0; i < net->n; ++i) {
        if (net->layers[i].out_c == 3) {
            imlayer = i;
            printf("%d\n", i);
            break;
        }
    }

    while(1){
        if(filename){
            strncpy(input, filename, 256);
        }else{
            printf("Enter Image Path: ");
            fflush(stdout);
            input = fgets(input, 256, stdin);
            if(!input) return;
            strtok(input, "\n");
        }
        image im = load_image_color(input, 0, 0);
        image resized = resize_min(im, net->w);
        image crop = crop_image(resized, (resized.w - net->w)/2, (resized.h - net->h)/2, net->w, net->h);
        if(gray) grayscale_image_3c(crop);

        float *X = crop.data;
        time = clock();
        network_predict(net, X);
        image out = get_network_image_layer(net, imlayer);
        //yuv_to_rgb(out);
        constrain_image(out);
        printf("%s: Predicted in %f seconds.\n", input, sec(clock()-time));
        save_image(out, "out");
        show_image(out, "out", 1);
        show_image(crop, "crop", 0);

        free_image(im);
        free_image(resized);
        free_image(crop);
        if (filename) break;
    }
    free_network(net);
}
void rgbgr_net(char *cfgfile, char *weightfile, char *outfile)
{
    gpu_index = -1;
    network *net = load_network(cfgfile, weightfile, 0);
    int i;
    for(i = 0; i < net->n; ++i){
        layer l = net->layers[i];
        if(l.type == CONVOLUTIONAL){
            rgbgr_weights(l);
            break;
        }
    }
    save_weights(net, outfile);
    free_network(net);
}
void demo_regressor(char *datacfg, char *cfgfile, char *weightfile, int cam_index, const char *filename)
{
#ifdef OPENCV
    printf("Regressor Demo\n");
    network *net = load_network(cfgfile, weightfile, 0);
    set_batch_network(net, 1);
    srand(2222222);

    list *options = read_data_cfg(datacfg);
    int classes = option_find_int(options, "classes", 1);
    char *name_list = option_find_str(options, "names", 0);
    char **names = get_labels(name_list);

    void *cap = open_video_stream(filename, cam_index, 0, 0, 0);
    if(!cap) error("Couldn't connect to webcam.\n");
    float fps = 0;

    while(1){
        struct timeval tval_before, tval_after, tval_result;
        gettimeofday(&tval_before, NULL);

        image in = get_image_from_stream(cap);
        image crop = center_crop_image(in, net->w, net->h);
        grayscale_image_3c(crop);

        float *predictions = network_predict(net, crop.data);

        printf("\033[2J");
        printf("\033[1;1H");
        printf("\nFPS:%.0f\n", fps);

        int i;
        for(i = 0; i < classes; ++i){
            printf("%s: %f\n", names[i], predictions[i]);
        }

        show_image(crop, "Regressor", 10);
        free_image(in);
        free_image(crop);

        gettimeofday(&tval_after, NULL);
        timersub(&tval_after, &tval_before, &tval_result);
        float curr = 1000000.f/((long int)tval_result.tv_usec);
        fps = .9*fps + .1*curr;
    }
    free_network(net);
#endif
}
void train_cifar(char *cfgfile, char *weightfile)
{
    srand(time(0));
    float avg_loss = -1;
    char *base = basecfg(cfgfile);
    printf("%s\n", base);

    network net = parse_network_cfg(cfgfile);
    if(weightfile){
        load_weights(&net, weightfile);
    }
    printf("Learning Rate: %g, Momentum: %g, Decay: %g\n", net.learning_rate, net.momentum, net.decay);

    char *backup_directory = "/home/pjreddie/backup/";
    int classes = 10;
    int N = 50000;

    char **labels = get_labels("data/cifar/labels.txt");
    int epoch = (*net.seen)/N;
    data train = load_all_cifar10();
    while(get_current_batch(net) < net.max_batches || net.max_batches == 0){
        clock_t time = clock();

        float loss = train_network_sgd(net, train, 1);
        if(avg_loss == -1) avg_loss = loss;
        avg_loss = avg_loss*.95 + loss*.05;
        printf("%d, %.3f: %f, %f avg, %f rate, %lf seconds, %d images\n", get_current_batch(net), (float)(*net.seen)/N, loss, avg_loss, get_current_rate(net), sec(clock()-time), *net.seen);
        if(*net.seen/N > epoch){
            epoch = *net.seen/N;
            char buff[256];
            sprintf(buff, "%s/%s_%d.weights", backup_directory, base, epoch);
            save_weights(net, buff);
        }
        if(get_current_batch(net)%100 == 0){
            char buff[256];
            sprintf(buff, "%s/%s.backup", backup_directory, base);
            save_weights(net, buff);
        }
    }
    char buff[256];
    sprintf(buff, "%s/%s.weights", backup_directory, base);
    save_weights(net, buff);

    free_network(net);
    free_ptrs((void**)labels, classes);
    free(base);
    free_data(train);
}
// Perform the classification (this calls into the functions from cnn.c)
double run_classification(int* samples, int n, double** keep_output) {
    fprintf(stderr, "Making network...\n");
    network_t* net = load_cnn_snapshot();

    fprintf(stderr, "Loading batches...\n");
    for (int i = 0; i < n; i++) {
        int batch = samples[i]/10000;
        if (batches[batch] == NULL) {
            batches[batch] = load_batch(batch);
        }
    }

    vol_t** input = (vol_t**)malloc(sizeof(vol_t*)*n);
    double* output = (double*)malloc(sizeof(double)*n);
    for (int i = 0; i < n; i++) {
        input[i] = batches[samples[i]/10000][samples[i]%10000];
    }

    fprintf(stderr, "Running classification...\n");
    uint64_t start_time = timestamp_us();
    net_classify_cats(net, input, output, n);
    uint64_t end_time = timestamp_us();

    for (int i = 0; i < n; i++) {
        samples[i] = (output[i] > 0.5) ? 0 : -1;
    }

    double dt = (double)(end_time-start_time) / 1000.0;
    fprintf(stderr, "TIME: %lf ms\n", dt);

    free_network(net);
    free(input);
    if (keep_output == NULL)
        free(output);
    else
        *keep_output = output;

    return dt;
}
int main(int argc, char** argv)
{
    int NUM_PASSES = 100;
    if (argc < 2) {
        fprintf(stderr, "Usage: ./gtsrb <test|inference> [NUMBER_PASSES]\n");
        return 2;
    }
    if (argc >= 3) {
        NUM_PASSES = atoi(argv[2]);
    }

    Network* net = construct_gtsrb_net();
    initialize_network(net, 1);

    vol_t** input = (vol_t**)malloc(sizeof(vol_t*)*NUM_PASSES);
    label_t* labels = (label_t*)malloc(sizeof(label_t)*NUM_PASSES);
    load_gtsrb_data(input, labels, NUM_PASSES);

    int valid = 0;
    if (!strcmp(argv[1], "inference")) {
        net_predict_Multiple(net, input, NUM_PASSES);
        valid = 1;
    }
    if (!strcmp(argv[1], "test")) {
        net_test(net, input, labels, NUM_PASSES);
        valid = 1;
    }

#ifdef DEBUGG
    print_vol(net->buffer[4][0], 0);
#endif

    free_network(net);
    for(int i = 0; i < NUM_PASSES; i++) free(input[i]);
    free(input);
    free(labels);

    if(valid) return 0;
    fprintf(stderr, "unrecognised command\n");
    return 2;
}
void test_yolo(char *cfgfile, char *weightfile, char *filename, float thresh)
{
    image **alphabet = load_alphabet();
    network *net = load_network(cfgfile, weightfile, 0);
    layer l = net->layers[net->n-1];
    set_batch_network(net, 1);
    srand(2222222);
    clock_t time;
    char buff[256];
    char *input = buff;
    float nms = .4;
    while(1){
        if(filename){
            strncpy(input, filename, 256);
        } else {
            printf("Enter Image Path: ");
            fflush(stdout);
            input = fgets(input, 256, stdin);
            if(!input) return;
            strtok(input, "\n");
        }
        image im = load_image_color(input, 0, 0);
        image sized = resize_image(im, net->w, net->h);
        float *X = sized.data;
        time = clock();
        network_predict(net, X);
        printf("%s: Predicted in %f seconds.\n", input, sec(clock()-time));

        int nboxes = 0;
        detection *dets = get_network_boxes(net, 1, 1, thresh, 0, 0, 0, &nboxes);
        if (nms) do_nms_sort(dets, l.side*l.side*l.n, l.classes, nms);

        draw_detections(im, dets, l.side*l.side*l.n, thresh, voc_names, alphabet, 20);
        save_image(im, "predictions");
        show_image(im, "predictions", 0);
        free_detections(dets, nboxes);
        free_image(im);
        free_image(sized);
        if (filename) break;
    }
    free_network(net);
}
void demo_segmenter(char *datacfg, char *cfg, char *weights, int cam_index, const char *filename)
{
#ifdef OPENCV
    printf("Segmenter Demo\n");
    network *net = load_network(cfg, weights, 0);
    set_batch_network(net, 1);
    srand(2222222);

    void *cap = open_video_stream(filename, cam_index, 0, 0, 0);
    if(!cap) error("Couldn't connect to webcam.\n");
    float fps = 0;

    while(1){
        struct timeval tval_before, tval_after, tval_result;
        gettimeofday(&tval_before, NULL);

        image in = get_image_from_stream(cap);
        image in_s = letterbox_image(in, net->w, net->h);

        network_predict(net, in_s.data);

        printf("\033[2J");
        printf("\033[1;1H");
        printf("\nFPS:%.0f\n", fps);

        image pred = get_network_image(net);
        image prmask = mask_to_rgb(pred);
        show_image(prmask, "Segmenter", 10);

        free_image(in_s);
        free_image(in);
        free_image(prmask);

        gettimeofday(&tval_after, NULL);
        timersub(&tval_after, &tval_before, &tval_result);
        float curr = 1000000.f/((long int)tval_result.tv_usec);
        fps = .9*fps + .1*curr;
    }
    free_network(net);
#endif
}
void test_dcgan(char *cfgfile, char *weightfile)
{
    network *net = load_network(cfgfile, weightfile, 0);
    set_batch_network(net, 1);
    srand(2222222);

    clock_t time;
    char buff[256];
    char *input = buff;
    int imlayer = 0;

    imlayer = net->n-1;

    while(1){
        image im = make_image(net->w, net->h, net->c);
        int i;
        for(i = 0; i < im.w*im.h*im.c; ++i){
            im.data[i] = rand_normal();
        }
        //float mag = mag_array(im.data, im.w*im.h*im.c);
        //scale_array(im.data, im.w*im.h*im.c, 1./mag);

        float *X = im.data;
        time = clock();
        network_predict(net, X);
        image out = get_network_image_layer(net, imlayer);
        //yuv_to_rgb(out);
        normalize_image(out);
        printf("%s: Predicted in %f seconds.\n", input, sec(clock()-time));
        save_image(out, "out");
        show_image(out, "out", 0);

        free_image(im);
    }
    free_network(net);
}
void train_compare(char *cfgfile, char *weightfile)
{
    data_seed = time(0);
    srand(time(0));
    float avg_loss = -1;
    char *base = basecfg(cfgfile);
    char *backup_directory = "/home/pjreddie/backup/";
    printf("%s\n", base);

    network net = parse_network_cfg(cfgfile);
    if(weightfile){
        load_weights(&net, weightfile);
    }
    printf("Learning Rate: %g, Momentum: %g, Decay: %g\n", net.learning_rate, net.momentum, net.decay);

    int imgs = 1024;
    list *plist = get_paths("data/compare.train.list");
    char **paths = (char **)list_to_array(plist);
    int N = plist->size;
    printf("%d\n", N);
    clock_t time;

#ifndef _MSC_VER
    pthread_t load_thread;
#endif
    data train;
    data buffer;

    load_args args = {0};
    args.w = net.w;
    args.h = net.h;
    args.paths = paths;
    args.classes = 20;
    args.n = imgs;
    args.m = N;
    args.d = &buffer;
    args.type = COMPARE_DATA;

#ifndef _MSC_VER
    load_thread = load_data_in_thread(args);
#endif
    int epoch = *net.seen/N;
    int i = 0;
    while(1){
        ++i;
        time = clock();
#ifndef _MSC_VER
        pthread_join(load_thread, 0);
#else
        load_data_in_thread(args);
#endif
        train = buffer;
#ifndef _MSC_VER
        load_thread = load_data_in_thread(args);
#endif
        printf("Loaded: %lf seconds\n", sec(clock()-time));
        time = clock();

        float loss = train_network(net, train);
        if(avg_loss == -1) avg_loss = loss;
        avg_loss = avg_loss*.9 + loss*.1;
        printf("%.3f: %f, %f avg, %lf seconds, %d images\n", (float)*net.seen/N, loss, avg_loss, sec(clock()-time), *net.seen);
        free_data(train);

        if(i%100 == 0){
            char buff[256];
            sprintf(buff, "%s/%s_%d_minor_%d.weights", backup_directory, base, epoch, i);
            save_weights(net, buff);
        }
        if(*net.seen/N > epoch){
            epoch = *net.seen/N;
            i = 0;
            char buff[256];
            sprintf(buff, "%s/%s_%d.weights", backup_directory, base, epoch);
            save_weights(net, buff);
            if(epoch%22 == 0) net.learning_rate *= .1;
        }
    }
#ifndef _MSC_VER
    pthread_join(load_thread, 0);
#endif
    free_data(buffer);
    free_network(net);
    free_ptrs((void**)paths, plist->size);
    free_list(plist);
    free(base);
}
void free_state(struct State *S)
{
    free_network(&S->ntw);
    free_simulation(&S->sim);
}
void validate_yolo(char *cfg, char *weights)
{
    network *net = load_network(cfg, weights, 0);
    set_batch_network(net, 1);
    fprintf(stderr, "Learning Rate: %g, Momentum: %g, Decay: %g\n", net->learning_rate, net->momentum, net->decay);
    srand(time(0));

    char *base = "results/comp4_det_test_";
    //list *plist = get_paths("data/voc.2007.test");
    list *plist = get_paths("/home/pjreddie/data/voc/2007_test.txt");
    //list *plist = get_paths("data/voc.2012.test");
    char **paths = (char **)list_to_array(plist);

    layer l = net->layers[net->n-1];
    int classes = l.classes;

    int j;
    FILE **fps = calloc(classes, sizeof(FILE *));
    for(j = 0; j < classes; ++j){
        char buff[1024];
        snprintf(buff, 1024, "%s%s.txt", base, voc_names[j]);
        fps[j] = fopen(buff, "w");
    }

    int m = plist->size;
    int i = 0;
    int t;

    float thresh = .001;
    int nms = 1;
    float iou_thresh = .5;

    int nthreads = 8;
    image *val = calloc(nthreads, sizeof(image));
    image *val_resized = calloc(nthreads, sizeof(image));
    image *buf = calloc(nthreads, sizeof(image));
    image *buf_resized = calloc(nthreads, sizeof(image));
    pthread_t *thr = calloc(nthreads, sizeof(pthread_t));

    load_args args = {0};
    args.w = net->w;
    args.h = net->h;
    args.type = IMAGE_DATA;

    for(t = 0; t < nthreads; ++t){
        args.path = paths[i+t];
        args.im = &buf[t];
        args.resized = &buf_resized[t];
        thr[t] = load_data_in_thread(args);
    }
    time_t start = time(0);
    for(i = nthreads; i < m+nthreads; i += nthreads){
        fprintf(stderr, "%d\n", i);
        for(t = 0; t < nthreads && i+t-nthreads < m; ++t){
            pthread_join(thr[t], 0);
            val[t] = buf[t];
            val_resized[t] = buf_resized[t];
        }
        for(t = 0; t < nthreads && i+t < m; ++t){
            args.path = paths[i+t];
            args.im = &buf[t];
            args.resized = &buf_resized[t];
            thr[t] = load_data_in_thread(args);
        }
        for(t = 0; t < nthreads && i+t-nthreads < m; ++t){
            char *path = paths[i+t-nthreads];
            char *id = basecfg(path);
            float *X = val_resized[t].data;
            network_predict(net, X);
            int w = val[t].w;
            int h = val[t].h;
            int nboxes = 0;
            detection *dets = get_network_boxes(net, w, h, thresh, 0, 0, 0, &nboxes);
            if (nms) do_nms_sort(dets, l.side*l.side*l.n, classes, iou_thresh);

            print_yolo_detections(fps, id, l.side*l.side*l.n, classes, w, h, dets);
            free_detections(dets, nboxes);
            free(id);
            free_image(val[t]);
            free_image(val_resized[t]);
        }
    }
    fprintf(stderr, "Total Detection Time: %f Seconds\n", (double)(time(0) - start));
    free_network(net);
}
void train_yolo(char *cfgfile, char *weightfile)
{
    char *train_images = "/data/voc/train.txt";
    char *backup_directory = "/home/kunle12/backup/";
    srand(time(0));
    char *base = basecfg(cfgfile);
    printf("%s\n", base);
    float avg_loss = -1;
    network *net = load_network(cfgfile, weightfile, 0);
    printf("Learning Rate: %g, Momentum: %g, Decay: %g\n", net->learning_rate, net->momentum, net->decay);
    int imgs = net->batch*net->subdivisions;
    int i = *net->seen/imgs;
    data train, buffer;

    layer l = net->layers[net->n - 1];

    int side = l.side;
    int classes = l.classes;
    float jitter = l.jitter;

    list *plist = get_paths(train_images);
    //int N = plist->size;
    char **paths = (char **)list_to_array(plist);

    load_args args = {0};
    args.w = net->w;
    args.h = net->h;
    args.paths = paths;
    args.n = imgs;
    args.m = plist->size;
    args.classes = classes;
    args.jitter = jitter;
    args.num_boxes = side;
    args.d = &buffer;
    args.type = REGION_DATA;

    args.angle = net->angle;
    args.exposure = net->exposure;
    args.saturation = net->saturation;
    args.hue = net->hue;

    pthread_t load_thread = load_data_in_thread(args);
    clock_t time;
    //while(i*imgs < N*120){
    while(get_current_batch(net) < net->max_batches){
        i += 1;
        time = clock();
        pthread_join(load_thread, 0);
        train = buffer;
        load_thread = load_data_in_thread(args);

        printf("Loaded: %lf seconds\n", sec(clock()-time));

        time = clock();
        float loss = train_network(net, train);
        if (avg_loss < 0) avg_loss = loss;
        avg_loss = avg_loss*.9 + loss*.1;

        printf("%d: %f, %f avg, %f rate, %lf seconds, %d images\n", i, loss, avg_loss, get_current_rate(net), sec(clock()-time), i*imgs);
        if(i%1000 == 0 || (i < 1000 && i%100 == 0)){
            char buff[256];
            sprintf(buff, "%s/%s_%d.weights", backup_directory, base, i);
            save_weights(net, buff);
        }
        free_data(train);
    }
    char buff[256];
    sprintf(buff, "%s/%s_final.weights", backup_directory, base);
    save_weights(net, buff);
    free_network(net);
}
void validate_yolo_recall(char *cfg, char *weights)
{
    network *net = load_network(cfg, weights, 0);
    set_batch_network(net, 1);
    fprintf(stderr, "Learning Rate: %g, Momentum: %g, Decay: %g\n", net->learning_rate, net->momentum, net->decay);
    srand(time(0));

    char *base = "results/comp4_det_test_";
    list *plist = get_paths("data/voc.2007.test");
    char **paths = (char **)list_to_array(plist);

    layer l = net->layers[net->n-1];
    int classes = l.classes;
    int side = l.side;

    int j, k;
    FILE **fps = calloc(classes, sizeof(FILE *));
    for(j = 0; j < classes; ++j){
        char buff[1024];
        snprintf(buff, 1024, "%s%s.txt", base, voc_names[j]);
        fps[j] = fopen(buff, "w");
    }

    int m = plist->size;
    int i = 0;

    float thresh = .001;
    float iou_thresh = .5;
    float nms = 0;

    int total = 0;
    int correct = 0;
    int proposals = 0;
    float avg_iou = 0;

    for(i = 0; i < m; ++i){
        char *path = paths[i];
        image orig = load_image_color(path, 0, 0);
        image sized = resize_image(orig, net->w, net->h);
        char *id = basecfg(path);
        network_predict(net, sized.data);

        int nboxes = 0;
        detection *dets = get_network_boxes(net, orig.w, orig.h, thresh, 0, 0, 1, &nboxes);
        if (nms) do_nms_obj(dets, side*side*l.n, 1, nms);

        char labelpath[4096];
        find_replace(path, "images", "labels", labelpath);
        find_replace(labelpath, "JPEGImages", "labels", labelpath);
        find_replace(labelpath, ".jpg", ".txt", labelpath);
        find_replace(labelpath, ".JPEG", ".txt", labelpath);

        int num_labels = 0;
        box_label *truth = read_boxes(labelpath, &num_labels);
        for(k = 0; k < side*side*l.n; ++k){
            if(dets[k].objectness > thresh){
                ++proposals;
            }
        }
        for (j = 0; j < num_labels; ++j) {
            ++total;
            box t = {truth[j].x, truth[j].y, truth[j].w, truth[j].h};
            float best_iou = 0;
            for(k = 0; k < side*side*l.n; ++k){
                float iou = box_iou(dets[k].bbox, t);
                if(dets[k].objectness > thresh && iou > best_iou){
                    best_iou = iou;
                }
            }
            avg_iou += best_iou;
            if(best_iou > iou_thresh){
                ++correct;
            }
        }

        fprintf(stderr, "%5d %5d %5d\tRPs/Img: %.2f\tIOU: %.2f%%\tRecall:%.2f%%\n", i, correct, total, (float)proposals/(i+1), avg_iou*100/total, 100.*correct/total);
        free_detections(dets, nboxes);
        free(id);
        free_image(orig);
        free_image(sized);
    }
    free_network(net);
}
void train_regressor(char *datacfg, char *cfgfile, char *weightfile, int *gpus, int ngpus, int clear)
{
    int i;
    float avg_loss = -1;
    char *base = basecfg(cfgfile);
    printf("%s\n", base);
    printf("%d\n", ngpus);
    network **nets = calloc(ngpus, sizeof(network*));

    srand(time(0));
    int seed = rand();
    for(i = 0; i < ngpus; ++i){
        srand(seed);
#ifdef GPU
        cuda_set_device(gpus[i]);
#endif
        nets[i] = load_network(cfgfile, weightfile, clear);
        nets[i]->learning_rate *= ngpus;
    }
    srand(time(0));
    network *net = nets[0];

    int imgs = net->batch * net->subdivisions * ngpus;

    printf("Learning Rate: %g, Momentum: %g, Decay: %g\n", net->learning_rate, net->momentum, net->decay);
    list *options = read_data_cfg(datacfg);

    char *backup_directory = option_find_str(options, "backup", "/backup/");
    char *train_list = option_find_str(options, "train", "data/train.list");
    int classes = option_find_int(options, "classes", 1);

    list *plist = get_paths(train_list);
    char **paths = (char **)list_to_array(plist);
    printf("%d\n", plist->size);
    int N = plist->size;
    clock_t time;

    load_args args = {0};
    args.w = net->w;
    args.h = net->h;
    args.threads = 32;
    args.classes = classes;

    args.min = net->min_ratio*net->w;
    args.max = net->max_ratio*net->w;
    args.angle = net->angle;
    args.aspect = net->aspect;
    args.exposure = net->exposure;
    args.saturation = net->saturation;
    args.hue = net->hue;
    args.size = net->w;

    args.paths = paths;
    args.n = imgs;
    args.m = N;
    args.type = REGRESSION_DATA;

    data train;
    data buffer;
    pthread_t load_thread;
    args.d = &buffer;
    load_thread = load_data(args);

    int epoch = (*net->seen)/N;
    while(get_current_batch(net) < net->max_batches || net->max_batches == 0){
        time = clock();

        pthread_join(load_thread, 0);
        train = buffer;
        load_thread = load_data(args);

        printf("Loaded: %lf seconds\n", sec(clock()-time));
        time = clock();

        float loss = 0;
#ifdef GPU
        if(ngpus == 1){
            loss = train_network(net, train);
        } else {
            loss = train_networks(nets, ngpus, train, 4);
        }
#else
        loss = train_network(net, train);
#endif
        if(avg_loss == -1) avg_loss = loss;
        avg_loss = avg_loss*.9 + loss*.1;
        printf("%ld, %.3f: %f, %f avg, %f rate, %lf seconds, %ld images\n", get_current_batch(net), (float)(*net->seen)/N, loss, avg_loss, get_current_rate(net), sec(clock()-time), *net->seen);
        free_data(train);
        if(*net->seen/N > epoch){
            epoch = *net->seen/N;
            char buff[256];
            sprintf(buff, "%s/%s_%d.weights", backup_directory, base, epoch);
            save_weights(net, buff);
        }
        if(get_current_batch(net)%100 == 0){
            char buff[256];
            sprintf(buff, "%s/%s.backup", backup_directory, base);
            save_weights(net, buff);
        }
    }
    char buff[256];
    sprintf(buff, "%s/%s.weights", backup_directory, base);
    save_weights(net, buff);

    for(i = 0; i < ngpus; ++i){
        free_network(nets[i]);
    }
    free(nets);
    free_ptrs((void**)paths, plist->size);
    free_list(plist);
    free(base);
}
/* Function to be called from R */
void R_epidemics(int *seqLength, double *mutRate, int *npop, int *nHostPerPop, double *beta, int *nStart, int *t1, int *t2, int *Nsample, int *Tsample, int *duration, int *nbnb, int *listnb, double *pdisp){
    int i, nstep, counter_sample = 0, tabidx;

    /* Initialize random number generator */
    int j;
    time_t t;
    t = time(NULL); // time in seconds, used to change the seed of the random generator
    gsl_rng * rng;
    const gsl_rng_type *typ;
    gsl_rng_env_setup();
    typ = gsl_rng_default;
    rng = gsl_rng_alloc(typ);
    gsl_rng_set(rng, t); // changes the seed of the random generator

    /* transfer simulation parameters */
    struct param * par;
    par = (struct param *) malloc(sizeof(struct param));
    par->L = *seqLength;
    par->mu = *mutRate;
    par->muL = par->mu * par->L;
    par->rng = rng;
    par->npop = *npop;
    par->popsizes = nHostPerPop;
    par->beta = *beta;
    par->nstart = *nStart;
    par->t1 = *t1;
    par->t2 = *t2;
    par->t_sample = Tsample;
    par->n_sample = *Nsample;
    par->duration = *duration;
    par->cn_nb_nb = nbnb;
    par->cn_list_nb = listnb;
    par->cn_weights = pdisp;

    /* check/print parameters */
    check_param(par);
    print_param(par);

    /* dispersal matrix */
    struct network *cn = create_network(par);
    /* print_network(cn, TRUE); */

    /* group sizes */
    struct ts_groupsizes * grpsizes = create_ts_groupsizes(par);

    /* initiate population */
    struct metapopulation * metapop;
    metapop = create_metapopulation(par);

    /* get sampling schemes (timestep+effectives) */
    translate_dates(par);
    struct table_int *tabdates = get_table_int(par->t_sample, par->n_sample);
    printf("\n\nsampling at timesteps:");
    print_table_int(tabdates);

    /* create sample */
    struct sample ** samplist = (struct sample **) malloc(tabdates->n * sizeof(struct sample *));
    struct sample *samp;

    /* MAKE METAPOPULATION EVOLVE */
    nstep = 0;
    while(get_total_nsus(metapop)>0 && (get_total_ninf(metapop)+get_total_nexp(metapop))>0 && nstep<par->duration){
        nstep++;

        /* age metapopulation */
        age_metapopulation(metapop, par);

        /* process infections */
        for(j=0;j<get_npop(metapop);j++){
            process_infections(get_populations(metapop)[j], metapop, cn, par);
        }

        /* draw samples */
        if((tabidx = int_in_vec(nstep, tabdates->items, tabdates->n)) > -1){ /* TRUE if step must be sampled */
            samplist[counter_sample++] = draw_sample(metapop, tabdates->times[tabidx], par);
        }

        fill_ts_groupsizes(grpsizes, metapop, nstep);
    }

    /* we stopped after 'nstep' steps */
    if(nstep < par->duration){
        printf("\nEpidemics ended at time %d, before last sampling time (%d).\n", nstep, par->duration);
    } else {
        /* printf("\n\n-- FINAL METAPOPULATION --"); */
        /* print_metapopulation(metapop, FALSE); */

        /* merge samples */
        samp = merge_samples(samplist, tabdates->n, par);

        /* write sample to file */
        printf("\n\nWriting sample to file 'out-sample.txt'\n");
        write_sample(samp);

        /* free memory */
        free_sample(samp);
    }

    /* write group sizes to file */
    printf("\n\nPrinting group sizes to file 'out-popsize.txt'\n");
    write_ts_groupsizes(grpsizes);

    /* free memory */
    free_metapopulation(metapop);
    free_param(par);
    for(i=0;i<counter_sample;i++) free_sample(samplist[i]);
    free(samplist);
    free_table_int(tabdates);
    free_network(cn);
    free_ts_groupsizes(grpsizes);
}
/* all-in-one function testing epidemics growth, summary statistics, etc. */
void test_epidemics(int seqLength, double mutRate, int npop, int *nHostPerPop, double beta, int nStart, int t1, int t2, int Nsample, int *Tsample, int duration, int *nbnb, int *listnb, double *pdisp){
    int i, j, nstep = 0, tabidx, counter_sample = 0;

    /* Initialize random number generator */
    time_t t;
    t = time(NULL); // time in seconds, used to change the seed of the random generator
    gsl_rng * rng;
    const gsl_rng_type *typ;
    gsl_rng_env_setup();
    typ = gsl_rng_default;
    rng = gsl_rng_alloc(typ);
    gsl_rng_set(rng, t); // changes the seed of the random generator

    /* transfer simulation parameters */
    struct param * par;
    par = (struct param *) malloc(sizeof(struct param));
    par->L = seqLength;
    par->mu = mutRate;
    par->muL = par->mu * par->L;
    par->rng = rng;
    par->npop = npop;
    par->popsizes = nHostPerPop;
    par->beta = beta;
    par->nstart = nStart;
    par->t1 = t1;
    par->t2 = t2;
    par->t_sample = Tsample;
    par->n_sample = Nsample;
    par->duration = duration;
    par->cn_nb_nb = nbnb;
    par->cn_list_nb = listnb;
    par->cn_weights = pdisp;

    /* check/print parameters */
    check_param(par);
    print_param(par);

    /* dispersal matrix */
    struct network *cn = create_network(par);

    /* group sizes */
    struct ts_groupsizes * grpsizes = create_ts_groupsizes(par);

    /* initiate population */
    struct metapopulation * metapop;
    metapop = create_metapopulation(par);

    /* get sampling schemes (timestep+effectives) */
    translate_dates(par);
    struct table_int *tabdates = get_table_int(par->t_sample, par->n_sample);
    printf("\n\nsampling at timesteps:");
    print_table_int(tabdates);

    /* create sample */
    struct sample ** samplist = (struct sample **) malloc(tabdates->n * sizeof(struct sample *));
    struct sample *samp;

    /* MAKE METAPOPULATION EVOLVE */
    nstep = 0;
    while(get_total_nsus(metapop)>0 && (get_total_ninf(metapop)+get_total_nexp(metapop))>0 && nstep<par->duration){
        nstep++;

        /* age metapopulation */
        age_metapopulation(metapop, par);

        /* process infections */
        for(j=0;j<get_npop(metapop);j++){
            process_infections(get_populations(metapop)[j], metapop, cn, par);
        }

        /* draw samples */
        if((tabidx = int_in_vec(nstep, tabdates->items, tabdates->n)) > -1){ /* TRUE if step must be sampled */
            samplist[counter_sample++] = draw_sample(metapop, tabdates->times[tabidx], par);
        }

        fill_ts_groupsizes(grpsizes, metapop, nstep);
    }

    /* we stopped after 'nstep' steps */
    if(nstep < par->duration){
        printf("\nEpidemics ended at time %d, before last sampling time (%d).\n", nstep, par->duration);
    } else {
        printf("\n\n-- FINAL METAPOPULATION --");
        print_metapopulation(metapop, TRUE);

        /* test samples */
        for(i=0;i<tabdates->n;i++) {
            printf("\nsample %d\n", i);
            print_sample(samplist[i], TRUE);
        }
        samp = merge_samples(samplist, tabdates->n, par);
        print_sample(samp, TRUE);

        /* test allele listing */
        struct snplist *snpbilan;
        snpbilan = list_snps(samp, par);
        print_snplist(snpbilan);

        /* test allele frequencies */
        struct allfreq *freq;
        freq = get_frequencies(samp, par);
        print_allfreq(freq);

        /* test Hs */
        double Hs = hs(samp, par);
        printf("\nHs = %0.3f\n", Hs);

        /* test Hs full genome */
        Hs = hs_full_genome(samp, par);
        printf("\nHs (full genome) = %0.5f\n", Hs);

        /* test nb of snps */
        int nball = nb_snps(samp, par);
        printf("\nnumber of SNPs = %d\n", nball);

        /* test mean nb of snps */
        double temp = mean_nb_snps(samp);
        printf("\nmean number of SNPs = %.2f\n", temp);

        /* test var nb of snps */
        temp = var_nb_snps(samp);
        printf("\nvariance of number of alleles = %.2f\n", temp);

        /* test pairwise distances */
        struct distmat_int *mat = pairwise_dist(samp, par);
        print_distmat_int(mat);

        /* test mean pairwise distances */
        temp = mean_pairwise_dist(samp, par);
        printf("\nmean pairwise distance: %.2f", temp);

        /* test variance of pairwise distances */
        temp = var_pairwise_dist(samp, par);
        printf("\nvar pairwise distance: %.2f", temp);

        /* test Fst */
        temp = fst(samp, par);
        printf("\nfst: %.2f", temp);

        printf("\n\n");

        /* free memory */
        free_sample(samp);
        free_snplist(snpbilan);
        free_allfreq(freq);
        free_distmat_int(mat);
    }

    /* write group sizes to file */
    printf("\n\nPrinting group sizes to file 'out-popsize.txt'");
    write_ts_groupsizes(grpsizes);

    /* free memory */
    free_metapopulation(metapop);
    free_param(par);
    for(i=0;i<counter_sample;i++) free_sample(samplist[i]);
    free(samplist);
    free_table_int(tabdates);
    free_network(cn);
    free_ts_groupsizes(grpsizes);
}