Example #1
 void mlp::set_activation_function(int activation_function, mlp_layer layer)
 {
     // Reject out-of-range values before casting to the GRT enum type.
     if (grt_mlp.validateActivationFunction(activation_function) == false)
     {
         flext::error("activation function %d is invalid; valid values are 0-%d", activation_function, GRT::Neuron::NUMBER_OF_ACTIVATION_FUNCTIONS - 1);
         return;
     }
     
     // The value has been validated above, so the cast to the GRT enum is safe.
     GRT::Neuron::ActivationFunctions activation_function_ = (GRT::Neuron::ActivationFunctions)activation_function;
     
     // Store the activation function on the layer it was requested for.
     switch (layer)
     {
         case LAYER_INPUT:
             input_activation_function = activation_function_;
             break;
         case LAYER_HIDDEN:
             hidden_activation_function = activation_function_;
             break;
         case LAYER_OUTPUT:
             output_activation_function = activation_function_;
             break;
         default:
             ml::error("no activation function for layer: " + std::to_string(layer));
             return;
     }
     post("activation function set to " + grt_mlp.activationFunctionToString(activation_function_));
 }
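
Example 1 validates the raw int with validateActivationFunction() and then casts it directly to the GRT enum. A minimal usage sketch (assuming an mlp instance named my_mlp, which is not part of the listing; GRT::Neuron::SIGMOID is one of GRT's built-in activation constants):

 my_mlp.set_activation_function(GRT::Neuron::SIGMOID, LAYER_HIDDEN);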
Example #2
 void ann::set_activation_function(int activation_function, ann_layer layer)
 {
     // Default to LINEAR; overwritten below if the conversion succeeds.
     GRT::Neuron::Type activation_function_ = GRT::Neuron::Type::LINEAR;
     
     // Convert the raw int via a helper that throws on out-of-range input.
     try
     {
         activation_function_ = get_grt_neuron_type(activation_function);
     }
     catch (std::exception& e)
     {
         // Pass the message as an argument rather than as the format string,
         // in case it contains printf-style specifiers.
         flext::error("%s", e.what());
         return;
     }
     
     // Double-check the converted value against GRT's own validator.
     if (grt_ann.validateActivationFunction(activation_function_) == false)
     {
         flext::error("activation function %d is invalid; valid values are 0-%d", activation_function, GRT::Neuron::NUMBER_OF_ACTIVATION_FUNCTIONS - 1);
         return;
     }
     
     // Store the activation function on the layer it was requested for.
     switch (layer)
     {
         case LAYER_INPUT:
             input_activation_function = activation_function_;
             break;
         case LAYER_HIDDEN:
             hidden_activation_function = activation_function_;
             break;
         case LAYER_OUTPUT:
             output_activation_function = activation_function_;
             break;
         default:
             ml::error("no activation function for layer: " + std::to_string(layer));
             return;
     }
     post("activation function set to " + grt_ann.activationFunctionToString(activation_function_));
 }
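
Example 2 differs from Example 1 in delegating the int-to-enum conversion to get_grt_neuron_type(), which is not shown in this listing. A minimal sketch of such a helper, assuming it simply range-checks and casts (hypothetical; the actual implementation may differ):

 GRT::Neuron::Type get_grt_neuron_type(int activation_function)
 {
     // Hypothetical helper: throw on out-of-range input so the caller's
     // try/catch in set_activation_function() reports it. Requires <stdexcept>.
     if (activation_function < 0 || activation_function >= GRT::Neuron::NUMBER_OF_ACTIVATION_FUNCTIONS)
     {
         throw std::invalid_argument("activation function " + std::to_string(activation_function) + " is out of range");
     }
     return static_cast<GRT::Neuron::Type>(activation_function);
 }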