Example #1
void update_network(network net)
{
    int i;
    int update_batch = net.batch*net.subdivisions;
    for(i = 0; i < net.n; ++i){
        layer l = net.layers[i];
        if(l.type == CONVOLUTIONAL){
            update_convolutional_layer(l, update_batch, net.learning_rate, net.momentum, net.decay);
        } else if(l.type == DECONVOLUTIONAL){
            update_deconvolutional_layer(l, net.learning_rate, net.momentum, net.decay);
        } else if(l.type == CONNECTED){
            update_connected_layer(l, update_batch, net.learning_rate, net.momentum, net.decay);
        }
    }
}
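The effective batch size net.batch*net.subdivisions reflects how this function is driven: gradients are accumulated over `subdivisions` mini-batches and only then applied in a single step. A minimal sketch of that call site, assuming the usual Darknet convention of a *net.seen sample counter (names and exact signatures here are assumptions, not quoted from this codebase):

/* Sketch of the assumed call site: gradients from `subdivisions`
 * mini-batches are accumulated first, then applied in one step. */
*net.seen += net.batch;   /* running count of processed samples */
if(((*net.seen)/net.batch) % net.subdivisions == 0){
    update_network(net);  /* one weight update per batch*subdivisions samples */
}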
Example #2
void handle_connection(void *pointer)
{
    /* Copy the heap-allocated connection info, then free it. */
    connection_info info = *(connection_info *) pointer;
    free(pointer);
    //printf("New Connection\n");
    /* Periodically checkpoint the shared weights to disk. */
    if(info.counter%100==0){
        char buff[256];
        sprintf(buff, "unikitty/net_%d.part", info.counter);
        save_network(info.net, buff);
    }
    int fd = info.fd;
    network net = info.net;
    int i;
    /* First pass: read the client's accumulated gradients and add them
     * to this server's update buffers. */
    for(i = 0; i < net.n; ++i){
        if(net.types[i] == CONVOLUTIONAL){
            convolutional_layer layer = *(convolutional_layer *) net.layers[i];

            read_and_add_into(fd, layer.bias_updates, layer.n);
            int num = layer.n*layer.c*layer.size*layer.size;
            read_and_add_into(fd, layer.filter_updates, num);
        }
        if(net.types[i] == CONNECTED){
            connected_layer layer = *(connected_layer *) net.layers[i];

            read_and_add_into(fd, layer.bias_updates, layer.outputs);
            read_and_add_into(fd, layer.weight_updates, layer.inputs*layer.outputs);
        }
    }
    /* Second pass: apply the accumulated updates, then send the new
     * weights back to the client. */
    for(i = 0; i < net.n; ++i){
        if(net.types[i] == CONVOLUTIONAL){
            convolutional_layer layer = *(convolutional_layer *) net.layers[i];
            update_convolutional_layer(layer);

            write_all(fd, (char*) layer.biases, layer.n*sizeof(float));
            int num = layer.n*layer.c*layer.size*layer.size;
            write_all(fd, (char*) layer.filters, num*sizeof(float));
        }
        if(net.types[i] == CONNECTED){
            connected_layer layer = *(connected_layer *) net.layers[i];
            update_connected_layer(layer);
            write_all(fd, (char *)layer.biases, layer.outputs*sizeof(float));
            write_all(fd, (char *)layer.weights, layer.outputs*layer.inputs*sizeof(float));
        }
    }
    //printf("Received updates\n");
    close(fd);
}
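The handler depends on two helpers not shown here: read_and_add_into, which reads n floats from the socket and accumulates them into an existing buffer, and write_all/read_all, which loop until the full byte count has been transferred. A minimal sketch of the accumulation helper, assuming a read_all(fd, buf, bytes) that reads exactly that many bytes and Darknet's axpy_cpu (y += alpha*x); treat it as an illustration rather than the exact library code:

#include <stdlib.h>  /* calloc, free */

/* Sketch only: receive n floats of gradient from the client and add them
 * into the server-side buffer a. */
void read_and_add_into(int fd, float *a, int n)
{
    float *buff = calloc(n, sizeof(float));
    read_all(fd, (char *) buff, n*sizeof(float));
    axpy_cpu(n, 1, buff, 1, a, 1);
    free(buff);
}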
Example #3
void update_network(network net)
{
    int i;
    int update_batch = net.batch*net.subdivisions;
    float rate = get_current_rate(net);
    for(i = 0; i < net.n; ++i){
        layer l = net.layers[i];
        if(l.type == CONVOLUTIONAL){
            update_convolutional_layer(l, update_batch, rate, net.momentum, net.decay);
        } else if(l.type == DECONVOLUTIONAL){
            update_deconvolutional_layer(l, rate, net.momentum, net.decay);
        } else if(l.type == CONNECTED){
            update_connected_layer(l, update_batch, rate, net.momentum, net.decay);
        } else if(l.type == RNN){
            update_rnn_layer(l, update_batch, rate, net.momentum, net.decay);
        } else if(l.type == CRNN){
            update_crnn_layer(l, update_batch, rate, net.momentum, net.decay);
        } else if(l.type == LOCAL){
            update_local_layer(l, update_batch, rate, net.momentum, net.decay);
        }
    }
}
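Every per-layer update dispatched here applies the same rule: SGD with momentum and L2 weight decay on the gradient buffers filled during the backward pass. A minimal sketch for a fully connected layer, assuming Darknet's axpy_cpu (y += alpha*x) and scal_cpu (x *= alpha) helpers; the real update_connected_layer may differ in detail:

/* Sketch of the SGD-with-momentum step behind the per-layer update functions. */
void update_connected_layer_sketch(layer l, int batch, float learning_rate,
                                   float momentum, float decay)
{
    /* Biases: step along the accumulated gradient, then scale it by momentum. */
    axpy_cpu(l.outputs, learning_rate/batch, l.bias_updates, 1, l.biases, 1);
    scal_cpu(l.outputs, momentum, l.bias_updates, 1);

    /* Weights: add L2 weight decay to the gradient, step, then apply momentum. */
    axpy_cpu(l.inputs*l.outputs, -decay*batch, l.weights, 1, l.weight_updates, 1);
    axpy_cpu(l.inputs*l.outputs, learning_rate/batch, l.weight_updates, 1, l.weights, 1);
    scal_cpu(l.inputs*l.outputs, momentum, l.weight_updates, 1);
}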
Example #4
void update_crnn_layer(layer_t l, int batch, float learning_rate, float momentum, float decay)
{
    /* A CRNN layer is built from three convolutional sub-layers; update each one. */
    update_convolutional_layer(*(l.input_layer), batch, learning_rate, momentum, decay);
    update_convolutional_layer(*(l.self_layer), batch, learning_rate, momentum, decay);
    update_convolutional_layer(*(l.output_layer), batch, learning_rate, momentum, decay);
}
Example #5
void update_crnn_layer(layer l, update_args a)
{
    update_convolutional_layer(*(l.input_layer),  a);
    update_convolutional_layer(*(l.self_layer),   a);
    update_convolutional_layer(*(l.output_layer), a);
}
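Example #5 comes from a later revision in which the hyperparameters are bundled into an update_args struct instead of being passed individually. A sketch of what that struct typically carries, based on Darknet's public header (treat the exact field list as an assumption for the version at hand):

/* Sketch of the bundled hyperparameters consumed by update_*_layer(l, a). */
typedef struct update_args {
    int   batch;          /* effective batch size (batch * subdivisions) */
    float learning_rate;
    float momentum;
    float decay;
    int   adam;           /* nonzero: use Adam instead of plain SGD      */
    float B1, B2, eps;    /* Adam moment decay rates and epsilon         */
    int   t;              /* Adam time step                              */
} update_args;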