Code example #1
File: ml_mlp.cpp  Project: FredVoisin/ml-lib
 void mlp::set_activation_function(int activation_function, mlp_layer layer)
 {
     // Reject values outside the range of GRT's activation function enum
     if (grt_mlp.validateActivationFunction(activation_function) == false)
     {
         flext::error("activation function %d is invalid, should be between 0 and %d", activation_function, GRT::Neuron::NUMBER_OF_ACTIVATION_FUNCTIONS - 1);
         return;
     }
     
     // The raw int is now known to be in range; cast it to the GRT enum type
     GRT::Neuron::ActivationFunctions activation_function_ = (GRT::Neuron::ActivationFunctions)activation_function;
     
     // Record the activation function against the layer it was requested for
     switch (layer)
     {
         case LAYER_INPUT:
             input_activation_function = activation_function_;
             break;
         case LAYER_HIDDEN:
             hidden_activation_function = activation_function_;
             break;
         case LAYER_OUTPUT:
             output_activation_function = activation_function_;
             break;
         default:
             ml::error("no activation function for layer: " + std::to_string(layer));
             return;
     }
     // Confirm the change to the user, echoing GRT's name for the function
     post("activation function set to " + grt_mlp.activationFunctionToString(activation_function_));
 }
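
For reference, the validate-then-cast pattern above can be shown in isolation. The sketch below is a minimal, self-contained illustration under stated assumptions: ActivationFunctions here is a hypothetical stand-in for GRT::Neuron::ActivationFunctions (the real enum is defined by GRT and likewise ends with a NUMBER_OF_ACTIVATION_FUNCTIONS sentinel), and set_activation is an illustrative name, not ml-lib API.

 #include <cstdio>
 
 // Hypothetical stand-in for GRT::Neuron::ActivationFunctions; the real
 // enum lives in GRT and ends with the same sentinel value.
 enum ActivationFunctions { LINEAR = 0, SIGMOID, BIPOLAR_SIGMOID, NUMBER_OF_ACTIVATION_FUNCTIONS };
 
 // Same validate-then-cast pattern as set_activation_function() above:
 // range-check the raw int against the sentinel before casting to the enum.
 bool set_activation(int raw, ActivationFunctions& out)
 {
     if (raw < 0 || raw >= NUMBER_OF_ACTIVATION_FUNCTIONS)
     {
         std::fprintf(stderr, "activation function %d is invalid, should be between 0 and %d\n",
                      raw, NUMBER_OF_ACTIVATION_FUNCTIONS - 1);
         return false;
     }
     out = static_cast<ActivationFunctions>(raw);
     return true;
 }
 
 int main()
 {
     ActivationFunctions f;
     if (set_activation(1, f))   // valid: SIGMOID
         std::printf("activation function set to %d\n", f);
     set_activation(42, f);      // invalid: rejected with an error message
     return 0;
 }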
Code example #2
File: ml_ann.cpp  Project: cmuartfab/ml-lib
 void ann::set_activation_function(int activation_function, ann_layer layer)
 {
     // Default to LINEAR; overwritten below if the conversion succeeds
     GRT::Neuron::Type activation_function_ = GRT::Neuron::Type::LINEAR;
     
     // Unlike example #1, the int-to-enum conversion is delegated to a
     // helper that throws on unrecognised values
     try
     {
         activation_function_ = get_grt_neuron_type(activation_function);
     }
     catch (std::exception& e)
     {
         // Pass the message through a "%s" format so any '%' in e.what()
         // is printed literally rather than interpreted as a conversion
         flext::error("%s", e.what());
         return;
     }
     
     if (grt_ann.validateActivationFunction(activation_function_) == false)
     {
         flext::error("activation function %d is invalid, should be between 0 and %d", activation_function, GRT::Neuron::NUMBER_OF_ACTIVATION_FUNCTIONS - 1);
         return;
     }
     
     // Same per-layer bookkeeping as example #1
     switch (layer)
     {
         case LAYER_INPUT:
             input_activation_function = activation_function_;
             break;
         case LAYER_HIDDEN:
             hidden_activation_function = activation_function_;
             break;
         case LAYER_OUTPUT:
             output_activation_function = activation_function_;
             break;
         default:
             ml::error("no activation function for layer: " + std::to_string(layer));
             return;
     }
     post("activation function set to " + grt_ann.activationFunctionToString(activation_function_));
 }
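
Example #2 differs from example #1 mainly in the conversion step: instead of a bare C-style cast it delegates to get_grt_neuron_type, a helper that throws on unrecognised input, which the caller then reports from the catch block. The helper's body is not part of this excerpt; the sketch below is a hypothetical reconstruction of that pattern only (NeuronType and to_neuron_type are illustrative names, not ml-lib or GRT API).

 #include <cstdio>
 #include <stdexcept>
 #include <string>
 
 // Hypothetical stand-in for GRT::Neuron::Type; the real enum is defined by GRT.
 enum class NeuronType { LINEAR = 0, SIGMOID, BIPOLAR_SIGMOID, NUMBER_OF_TYPES };
 
 // Illustrative converter in the spirit of get_grt_neuron_type(): map a raw
 // int to the enum, throwing on out-of-range input so the caller can catch
 // and report it, as set_activation_function() does above.
 NeuronType to_neuron_type(int raw)
 {
     if (raw < 0 || raw >= static_cast<int>(NeuronType::NUMBER_OF_TYPES))
         throw std::invalid_argument("invalid activation function: " + std::to_string(raw));
     return static_cast<NeuronType>(raw);
 }
 
 int main()
 {
     try
     {
         NeuronType t = to_neuron_type(7);   // out of range: throws
         std::printf("%d\n", static_cast<int>(t));
     }
     catch (std::exception& e)
     {
         std::fprintf(stderr, "%s\n", e.what());   // mirrors the flext::error path
     }
     return 0;
 }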