Example #1
void save_connected_weights(layer l, FILE *fp)
{
#ifdef GPU
    /* copy the layer's weights from GPU memory back to the host before writing */
    if(gpu_index >= 0){
        pull_connected_layer(l);
    }
#endif
    fwrite(l.biases, sizeof(float), l.outputs, fp);
    fwrite(l.weights, sizeof(float), l.outputs*l.inputs, fp);
    if (l.batch_normalize){
        fwrite(l.scales, sizeof(float), l.outputs, fp);
        fwrite(l.rolling_mean, sizeof(float), l.outputs, fp);
        fwrite(l.rolling_variance, sizeof(float), l.outputs, fp);
    }
}
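The on-disk layout is exactly the write order above: biases, then the dense weight matrix, then the batch-norm statistics when present. As a rough illustration, a matching reader could look like the sketch below; it is not part of the original source, and the load_connected_weights name and the push_connected_layer call (copying host buffers back to the GPU) are assumptions.

void load_connected_weights(layer l, FILE *fp)
{
    /* read fields back in the same order save_connected_weights wrote them */
    fread(l.biases, sizeof(float), l.outputs, fp);
    fread(l.weights, sizeof(float), l.outputs*l.inputs, fp);
    if (l.batch_normalize){
        fread(l.scales, sizeof(float), l.outputs, fp);
        fread(l.rolling_mean, sizeof(float), l.outputs, fp);
        fread(l.rolling_variance, sizeof(float), l.outputs, fp);
    }
#ifdef GPU
    if(gpu_index >= 0){
        push_connected_layer(l);  /* assumed counterpart of pull: host -> GPU copy */
    }
#endif
}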
Example #2
void save_weights_upto(network net, char *filename, int cutoff)
{
    fprintf(stderr, "Saving weights to %s\n", filename);
    FILE *fp = fopen(filename, "wb");  /* binary mode: the file holds raw floats */
    if(!fp) file_error(filename);

    /* header: training hyperparameters and the number of examples seen so far */
    fwrite(&net.learning_rate, sizeof(float), 1, fp);
    fwrite(&net.momentum, sizeof(float), 1, fp);
    fwrite(&net.decay, sizeof(float), 1, fp);
    fwrite(&net.seen, sizeof(int), 1, fp);

    int i;
    for(i = 0; i < net.n && i < cutoff; ++i){
        layer l = net.layers[i];
        if(l.type == CONVOLUTIONAL){
#ifdef GPU
            if(gpu_index >= 0){
                pull_convolutional_layer(l);
            }
#endif
            int num = l.n*l.c*l.size*l.size;  /* n filters, each c x size x size */
            fwrite(l.biases, sizeof(float), l.n, fp);
            fwrite(l.filters, sizeof(float), num, fp);
        }
        if(l.type == DECONVOLUTIONAL){
#ifdef GPU
            if(gpu_index >= 0){
                pull_deconvolutional_layer(l);
            }
#endif
            int num = l.n*l.c*l.size*l.size;
            fwrite(l.biases, sizeof(float), l.n, fp);
            fwrite(l.filters, sizeof(float), num, fp);
        }
        if(l.type == CONNECTED){
#ifdef GPU
            if(gpu_index >= 0){
                pull_connected_layer(l);
            }
#endif
            fwrite(l.biases, sizeof(float), l.outputs, fp);
            fwrite(l.weights, sizeof(float), l.outputs*l.inputs, fp);
        }
    }
    fclose(fp);
}
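A caller that wants the whole network typically passes the layer count as the cutoff, so every layer up to net.n is written; a smaller cutoff truncates the file after that many layers, which is handy for saving only a backbone. The wrapper below is a minimal sketch of such a caller and is not shown in the original source.

void save_weights(network net, char *filename)
{
    /* cutoff = net.n writes every layer */
    save_weights_upto(net, filename, net.n);
}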
Example #3
void pull_rnn_layer(layer l)
{
    /* an RNN layer wraps three connected sub-layers; pull each from the GPU */
    pull_connected_layer(*(l.input_layer));
    pull_connected_layer(*(l.self_layer));
    pull_connected_layer(*(l.output_layer));
}
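pull_rnn_layer just forwards to the connected-layer pull for each of the RNN's three sub-layers (input, self, output), so their host weight buffers are current before they are written to disk. For orientation, a plausible pull_connected_layer is sketched below; the cuda_pull_array helper and the *_gpu field names are assumptions, since that function's body is not shown above.

void pull_connected_layer(layer l)
{
    /* copy each device buffer back into its host mirror */
    cuda_pull_array(l.weights_gpu, l.weights, l.inputs*l.outputs);
    cuda_pull_array(l.biases_gpu, l.biases, l.outputs);
    if (l.batch_normalize){
        cuda_pull_array(l.scales_gpu, l.scales, l.outputs);
        cuda_pull_array(l.rolling_mean_gpu, l.rolling_mean, l.outputs);
        cuda_pull_array(l.rolling_variance_gpu, l.rolling_variance, l.outputs);
    }
}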