void registerMapRules( Map& i_map, const std::string& i_inputLayer, const std::string& i_outputLayer, Automap& io_automap ) { if(!i_map.isLayerExist(i_inputLayer)) { AutomapLog::report("No input layer in map", AutomapLog::Type::Warning); return; } if(!i_map.isLayerExist(i_outputLayer)) { AutomapLog::report("No output layer in map", AutomapLog::Type::Warning); return; } Layer& inputLayer = i_map.getLayer(i_inputLayer); Layer& outputLayer = i_map.getLayer(i_outputLayer); RuleSimpleChange rulemakerSimple; RuleCopyAndChange rulemaker; for( size_t y = 0; y<i_map.getHeight(); ++y) { for( size_t x = 0; x<i_map.getWidth(); ++x) { if(inputLayer(x,y).info().path.empty()) continue; if(outputLayer(x,y).info().path.empty()) { auto* rule = rulemakerSimple.makeRule(); rule->setInputLayerName(i_inputLayer); rule->assignIndex(0, i_inputLayer); rule->assignIndex(1, inputLayer(x,y).info()); rule->assignIndex(2, inputLayer(x,y).info()); io_automap.registerRule(rule); } else { auto* rule = rulemaker.makeRule(); rule->setInputLayerName(i_inputLayer); rule->assignIndex(0, i_inputLayer); rule->assignIndex(1, i_outputLayer); rule->assignIndex(1, inputLayer(x,y).info()); rule->assignIndex(2, outputLayer(x,y).info()); io_automap.registerRule(rule); } } } }
// Builds a two-layer (hidden + output) feed-forward network.
//
// @param inputLayerSize   number of input neurons feeding the hidden layer
// @param hiddenLayerSize  number of neurons in the hidden layer
// @param outputLayerSize  number of neurons in the output layer
// @param lRate            learning rate used during weight updates
// @param mse_cutoff       mean-squared-error threshold (training stop criterion)
CNeuralNet::CNeuralNet(uint inputLayerSize, uint hiddenLayerSize, uint outputLayerSize, double lRate, double mse_cutoff)
	: m_inputLayerSize(inputLayerSize),
	  m_hiddenLayerSize(hiddenLayerSize),
	  m_outputLayerSize(outputLayerSize),
	  m_lRate(lRate),
	  m_mse_cutoff(mse_cutoff)
{
	// NOTE: the previous version constructed a local
	// std::vector<double> _outputActivation(m_outputLayerSize) here and
	// immediately discarded it — dead code, removed. If output activations
	// are needed, they must live in a member, not a constructor local.

	// Construct the two neuron layers directly inside the vector: one
	// reserve, no copies of SNeuronLayer from named temporaries.
	m_vecLayer.reserve(2);
	m_vecLayer.emplace_back(m_hiddenLayerSize, m_inputLayerSize);  // hidden layer
	m_vecLayer.emplace_back(m_outputLayerSize, m_hiddenLayerSize); // output layer

	// Seed every neuron's weights with random initial values.
	initWeights();
}