bool CVarianceDecomposition::trainGP()
{
    bool conv = false;

    // initialize the GP if it has not been initialized yet
    if (this->is_init == 0)
        this->initGP();

    // train the GP
    conv = this->opt->opt();

    // check convergence: require the largest squared scale to stay below 10
    VectorXd scales;
    this->agetScales(&scales);
    conv &= (scales.unaryExpr(std::bind2nd(std::ptr_fun<double, double, double>(pow), 2)).maxCoeff() < (mfloat_t)10.0);

    return conv;
}
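// Note: std::ptr_fun and std::bind2nd were deprecated in C++11 and removed in
// C++17, so the squared-scale check above no longer compiles on newer
// toolchains. A minimal standalone sketch of an equivalent check using a lambda
// (the free function scalesConverged is a hypothetical name; only the
// square-then-maxCoeff-below-10 logic comes from trainGP() above):
#include <Eigen/Dense>
#include <cmath>

bool scalesConverged(const Eigen::VectorXd& scales)
{
    // square every scale element-wise, then require the largest squared value
    // to stay below the same 10.0 threshold used in trainGP()
    return scales.unaryExpr([](double s) { return std::pow(s, 2.0); }).maxCoeff() < 10.0;
}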
VectorXd LayeredFeedForwardNeuralNet::FireSingleLayer(const VectorXd& inputActivations, long layerIndex) const
{
    // get layer input weights (also checks valid layerIndex)
    const MatrixXd& layerInputWeights = GetLayerInputWeights(layerIndex);
    if (layerInputWeights.cols() - 1 != inputActivations.size())
    {
        // input is invalid for this neural net topology
        throw NeuralNetTopologyMismatch("activation input must match number of units in neural network layer");
    }

    // get the activation function
    auto expressionParser = UnaryExpressionParserFactory::CreateParser();
    UnaryFunction activationFunction = expressionParser->GetFunctionForExpression(m_activationFunction);

    // bias activation
    VectorXd bias(1);
    bias << -1.0;

    // calculate layer net inputs
    VectorXd inputPlusBias(layerInputWeights.cols());
    inputPlusBias << inputActivations, bias;
    //std::cout << "layer " << layerIndex << " input activations +bias : " << std::endl << inputPlusBias << std::endl << std::endl;
    //std::cout << "layer " << layerIndex << " input weights : " << std::endl << layerInputWeights << std::endl << std::endl;
    VectorXd layerNetInputs = layerInputWeights * inputPlusBias;
    //std::cout << "layer " << layerIndex << " net inputs : " << std::endl << layerNetInputs << std::endl << std::endl;

    // calculate layer activations
    VectorXd layerActivations = layerNetInputs.unaryExpr(activationFunction);
    //std::cout << "layer " << layerIndex << " output activations : " << layerActivations << std::endl << std::endl;

    return layerActivations;
}
VectorXd D_P_ANN_Controller::sgm(VectorXd x)
{
    // apply the sigmoid activation element-wise
    VectorXd W_in;
    W_in = x.unaryExpr(std::ptr_fun(sigmoid));

    // if a threshold/bias unit is used, force its activation to 1
    if (thresholdNo)
        W_in(0) = 1;

    return W_in;
}
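// Like the bind2nd/ptr_fun usage in trainGP() above, std::ptr_fun here is
// removed in C++17; a lambda is the drop-in replacement. A minimal sketch,
// assuming a free function sigmoid(double) -> double similar to the one used
// by sgm() (the names below are hypothetical stand-ins):
#include <Eigen/Dense>
#include <cmath>

// standalone sigmoid, standing in for the controller's own helper
static double sigmoid(double x) { return 1.0 / (1.0 + std::exp(-x)); }

Eigen::VectorXd applySigmoid(const Eigen::VectorXd& x)
{
    // element-wise application without std::ptr_fun
    return x.unaryExpr([](double v) { return sigmoid(v); });
}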