// EVALUATE -- RECURSIVE double Neuron::evaluate() { // IF INPUT, RETURN VALUE if( isInput() ) return value; // ELSE EVALUATE // FIND WEIGHTED SUM double sum = 0.; for( int i = 0; i < num_inputs; i++ ) sum += weights[i] * inputs[i]->evaluate(); // ACTIVATION FUNCTION switch( config->activation_function ) { case SIGMOID: return fsigmoid( sum, config->activation_slope ); case BINARY: return binary_activation( sum, threshold ); case HYPERBOLIC_TANGENT: return hyperbolic_tangent( sum, config->activation_slope ); } return 0.; }
/**
 * ANN_activation_function -- dispatch to the selected activation function.
 *
 * @param _input      pre-activation value (the weighted input sum).
 * @param _type       activation selector: HYPERBOLIC_TANGENT, LOGISTIC, LINEAR.
 * @param is_backward nonzero to evaluate the derivative (backward pass)
 *                    instead of the forward activation.
 * @return the activation value (or its derivative); 0 for an unknown _type.
 */
double ANN_activation_function(double _input, uint8_t _type, uint8_t is_backward)
{
    double ret = 0;
    switch (_type) {
    case HYPERBOLIC_TANGENT:
        if (is_backward) {
            ret = diff_hyperbolic_tangent(_input);
        } else {
            ret = hyperbolic_tangent(_input);
        }
        break;
    case LOGISTIC:
        if (is_backward) {
            ret = diff_logistic_function(_input);
        } else {
            /* BUG FIX: the result was previously discarded ("logistic_function(_input);"
             * with no assignment), so the forward LOGISTIC path always returned 0. */
            ret = logistic_function(_input);
        }
        break;
    case LINEAR:
        if (is_backward) {
            ret = diff_linear_function(_input);
        } else {
            /* NOTE(review): "lenear" looks like a typo for "linear", but the
             * callee is declared elsewhere -- rename at the declaration site
             * if it is ever corrected; kept as-is here to stay linkable. */
            ret = lenear_function(_input);
        }
        break;
    default:
        /* Unknown activation selector: leave ret at 0. */
        break;
    }
    return ret;
}
// Hyperbolic cotangent: coth(x) = 1 / tanh(x), in the angle units
// selected by atype.
// NOTE(review): diverges as hyperbolic_tangent() approaches zero
// (i.e. angle == 0) -- callers must avoid that input.
double hyperbolic_cotangent(double angle, enum angle_type atype)
{
    const double tangent_value = hyperbolic_tangent(angle, atype);
    return 1.0 / tangent_value;
}