Example #1
ValueListPtr NeuralNet::run(const ValueListPtr input) const
{
    // Feed-forward pass: run the input through the first layer, then feed
    // each layer's output into the next one.
    LayerList::const_iterator it = m_layers.begin();
    ValueListPtr vl = layer_output(*it, input);
    for (++it; it != m_layers.end(); ++it)
        vl = layer_output(*it, vl);
    return vl;
}
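The loop above simply threads each layer's output into the next layer's input. A minimal, self-contained sketch of the same chaining pattern, using stand-in aliases (ValueList, ValueListPtr, Layer) since the project's real LayerList and layer_output definitions are not shown here:

#include <functional>
#include <memory>
#include <vector>

using ValueList    = std::vector<double>;        // stand-in for the real ValueList
using ValueListPtr = std::shared_ptr<ValueList>; // stand-in for the real ValueListPtr
using Layer        = std::function<ValueListPtr(const ValueListPtr&)>;

// Same shape as NeuralNet::run above: chain the layers front to back.
ValueListPtr run_chain(const std::vector<Layer>& layers, ValueListPtr input)
{
    ValueListPtr vl = input;
    for (const Layer& layer : layers)
        vl = layer(vl);
    return vl;
}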
// Elman-style recurrent evaluation: the hidden layer sums the external input
// with the previous step's hidden state (context_), and the new hidden state
// is written back into context_ for the next call.
std::vector<double> SimpleRecurrentNeuralNetwork::Evaluate(const std::vector<double> &input) {
    std::vector<double> layer_hidden(dimension_hidden_layer_, 0.0);
    std::vector<double> layer_output(dimension_output_layer_, 0.0);

    for (unsigned int i = 0; i < dimension_hidden_layer_; ++i) {
        // Start from the recurrent context (the previous hidden state).
        layer_hidden[i] = context_[i];

        // Row i of layer_weights_input_hidden_ holds an optional bias weight
        // in column 0, followed by one weight per input unit.
        if (input_layer_enable_bias_) {
            layer_hidden[i] += layer_weights_input_hidden_[i * (dimension_input_layer_ + input_layer_enable_bias_)];
        }

        for (unsigned int j = 0; j < dimension_input_layer_; ++j) {
            // Skip the bias column only when it is actually present.
            const unsigned int ij = i * (dimension_input_layer_ + input_layer_enable_bias_) + input_layer_enable_bias_ + j;
            layer_hidden[i] += input[j] * layer_weights_input_hidden_[ij];
        }

        // Activation function
        switch (hidden_layer_activation_) {
        case ActivationFunctionType::Linear:
            // do nothing
            break;
        case ActivationFunctionType::Sigmoid:
            layer_hidden[i] = 1.0 / (1.0 + std::exp(-layer_hidden[i]));
            break;
        }
        // Save the activated value as the context for the next evaluation.
        context_[i] = layer_hidden[i];
    }

    for (unsigned int i = 0; i < dimension_output_layer_; ++i) {
        // Same layout as above: optional bias in column 0 of each row, then
        // one weight per hidden unit.
        if (hidden_layer_enable_bias_) {
            layer_output[i] += layer_weights_hidden_output_[i * (dimension_hidden_layer_ + hidden_layer_enable_bias_)];
        }

        for (unsigned int j = 0; j < dimension_hidden_layer_; ++j) {
            const unsigned int ij = i * (dimension_hidden_layer_ + hidden_layer_enable_bias_) + hidden_layer_enable_bias_ + j;
            layer_output[i] += layer_hidden[j] * layer_weights_hidden_output_[ij];
        }

        // Activation function
        switch (output_layer_activation_) {
        case ActivationFunctionType::Linear:
            // do nothing
            break;
        case ActivationFunctionType::Sigmoid:
            layer_output[i] = 1.0 / (1.0 + std::exp(-layer_output[i]));
            break;
        }
    }

    return layer_output;
}
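Both loops in Evaluate share the same flat, row-major weight layout: row i covers output unit i, with the bias weight (when enabled) in column 0 and the per-input weights after it. A self-contained sketch of one such layer under that assumed layout; dense_layer and its parameter names are illustrations, not part of the class:

#include <cmath>
#include <vector>

// One dense layer over a flat, row-major weight vector. Row i holds an
// optional bias weight in column 0, then one weight per input unit:
//   weights[i * (n_in + bias)]                     -> bias for output unit i
//   weights[i * (n_in + bias) + bias + j]          -> weight from input j to unit i
std::vector<double> dense_layer(const std::vector<double>& weights,
                                const std::vector<double>& input,
                                unsigned int n_out, bool bias, bool sigmoid)
{
    const unsigned int n_in   = static_cast<unsigned int>(input.size());
    const unsigned int stride = n_in + (bias ? 1u : 0u);
    std::vector<double> out(n_out, 0.0);
    for (unsigned int i = 0; i < n_out; ++i) {
        if (bias)
            out[i] += weights[i * stride];
        for (unsigned int j = 0; j < n_in; ++j)
            out[i] += input[j] * weights[i * stride + (bias ? 1u : 0u) + j];
        if (sigmoid)
            out[i] = 1.0 / (1.0 + std::exp(-out[i]));
    }
    return out;
}

With bias enabled, each row is one element wider than the input, which is why the stride and the per-weight offset must both track the bias flag.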