/* Example #1 */
/**
 * @brief Initialise a preprocessing system
 * @param no_of_layers The number of convolutional layers
 * @param inputs_across Number of units across in the input layer or image
 * @param inputs_down The number of units down in the input layer or image
 * @param inputs_depth The depth of the input layer or image
 * @param max_features The maximum number of features per layer
 * @param reduction_factor Reduction factor for successive convolution layers
 * @param pooling_factor The reduction factor used for pooling
 * @param conv Preprocessing object to initialise
 * @param error_threshold Array of per-layer training error thresholds
 *        (must hold at least no_of_layers entries)
 * @param random_seed Random number generator seed (updated on return)
 * @returns zero on success, negative error code on failure
 */
int conv_init(int no_of_layers,
              int inputs_across,
              int inputs_down,
              int inputs_depth,
              int max_features,
              int reduction_factor,
              int pooling_factor,
              deeplearn_conv * conv,
              float error_threshold[],
              unsigned int * random_seed)
{
    int across = inputs_across;
    int down = inputs_down;

    /* the layer array inside the struct has a fixed capacity */
    if (no_of_layers >= PREPROCESS_MAX_LAYERS) {
        return -1;
    }

    rand_num(random_seed);
    conv->random_seed = *random_seed;

    conv->reduction_factor = reduction_factor;
    conv->pooling_factor = pooling_factor;

    /* training-history bookkeeping for plotting */
    conv->history_ctr = 0;
    conv->training_ctr = 0;
    conv->history_index = 0;
    conv->history_step = 1;
    conv->history_plot_interval = 1;
    sprintf(conv->history_plot_filename,"%s","training_conv.png");
    sprintf(conv->history_plot_title,"%s",
            "Convolutional Training History");

    conv->current_layer = 0;
    conv->training_complete = 0;
    conv->itterations = 0;
    conv->BPerror = -1;
    /* one error threshold per layer controls when training stops */
    memcpy((void*)conv->error_threshold,
           (void*)error_threshold, no_of_layers*sizeof(float));
    conv->enable_learning = 0;
    conv->no_of_layers = no_of_layers;
    conv->inputs_across = inputs_across;
    conv->inputs_down = inputs_down;
    conv->inputs_depth = inputs_depth;
    conv->max_features = max_features;

    /* NOTE(review): on a failed allocation below, buffers created on
       earlier iterations are not freed here; the caller is expected to
       dispose of the partially-built object - TODO confirm */
    for (int i = 0; i < no_of_layers; i++) {
        /* reduce the array dimensions */
        across /= reduction_factor;
        down /= reduction_factor;
        if (across < 4) across = 4;
        if (down < 4) down = 4;

        conv->layer[i].units_across = across;
        conv->layer[i].units_down = down;
        conv->layer[i].pooling_factor = pooling_factor;
        /* compute the element count in size_t to avoid int overflow
           for large layer dimensions */
        conv->layer[i].convolution =
            malloc(sizeof(float)*(size_t)across*(size_t)down*
                   (size_t)conv_layer_features(conv, i));
        if (!conv->layer[i].convolution) return -2;

        /* ensure that the random seed is different for each
           convolutional neural net */
        rand_num(random_seed);

        /* create an autocoder for feature learning on this layer */
        conv->layer[i].autocoder = malloc(sizeof(ac));
        /* check the allocation before autocoder_init dereferences it */
        if (!conv->layer[i].autocoder) return -2;

        int depth = conv_layer_features(conv, i);
        if (i == 0) {
            /* on the first layer the depth is the same as the
               inputs or image */
            depth = inputs_depth;
        }

        /* the number of units/pixels within an input patch of
           the previous layer, not including depth */
        int patch_pixels =
            conv_patch_radius(i,conv)*
            conv_patch_radius(i,conv)*4;

        /* initialise the autocoder for this layer */
        if (autocoder_init(conv->layer[i].autocoder,
                           patch_pixels*depth,
                           conv_layer_features(conv, i),
                           *random_seed) != 0) {
            return -3;
        }

        /* reduce the dimensions by the pooling factor */
        across /= pooling_factor;
        down /= pooling_factor;
        if (across < 4) across = 4;
        if (down < 4) down = 4;

        /* create a pooling array */
        conv->layer[i].pooling =
            malloc(sizeof(float)*(size_t)across*(size_t)down*
                   (size_t)conv_layer_features(conv, i));
        if (!conv->layer[i].pooling) return -4;
    }
    return 0;
}
/* Example #2 */
/**
* @brief Load an autocoder from file
* @param fp Pointer to the file to read from
* @param autocoder Autocoder object to populate; when initialise is zero,
*        its weight/bias buffers must already be allocated with the
*        stored dimensions - TODO confirm against callers
* @param initialise Whether to (re)allocate the autocoder's buffers
*        via autocoder_init before loading
* @return zero on success, negative error code identifying the field
*         whose read failed
*/
int autocoder_load(FILE * fp, ac * autocoder, int initialise)
{
    int no_of_inputs = 0;
    int no_of_hiddens = 0;
    unsigned int random_seed = 0;

    /* header: network dimensions and the seed used to create it.
       Compare fread results against the exact item count so that
       partial reads from a truncated file are detected */
    if (fread(&no_of_inputs, sizeof(int), 1, fp) != 1) {
        return -1;
    }
    if (fread(&no_of_hiddens, sizeof(int), 1, fp) != 1) {
        return -2;
    }
    if (fread(&random_seed, sizeof(unsigned int), 1, fp) != 1) {
        return -3;
    }

    /* create the autocoder */
    if (initialise != 0) {
        if (autocoder_init(autocoder,
                           no_of_inputs,
                           no_of_hiddens,
                           random_seed) != 0) {
            return -4;
        }
    }
    else {
        autocoder->NoOfInputs = no_of_inputs;
        autocoder->NoOfHiddens = no_of_hiddens;
        autocoder->random_seed = random_seed;
    }

    /* compute the weight matrix element count in size_t to avoid
       int overflow for large networks */
    size_t no_of_weights =
        (size_t)autocoder->NoOfInputs * (size_t)autocoder->NoOfHiddens;

    if (fread(&autocoder->DropoutPercent, sizeof(float), 1, fp) != 1) {
        return -5;
    }
    if (fread(autocoder->weights, sizeof(float),
              no_of_weights, fp) != no_of_weights) {
        return -6;
    }
    if (fread(autocoder->lastWeightChange, sizeof(float),
              no_of_weights, fp) != no_of_weights) {
        return -7;
    }
    if (fread(autocoder->bias, sizeof(float),
              (size_t)autocoder->NoOfHiddens, fp) !=
        (size_t)autocoder->NoOfHiddens) {
        return -8;
    }
    if (fread(autocoder->lastBiasChange, sizeof(float),
              (size_t)autocoder->NoOfHiddens, fp) !=
        (size_t)autocoder->NoOfHiddens) {
        return -9;
    }
    if (fread(&autocoder->learningRate, sizeof(float), 1, fp) != 1) {
        return -10;
    }
    if (fread(&autocoder->noise, sizeof(float), 1, fp) != 1) {
        return -11;
    }
    if (fread(&autocoder->itterations, sizeof(unsigned int), 1, fp) != 1) {
        return -12;
    }
    return 0;
}