/**
 * Sets the number of inputs of the Adaline.
 *
 * When the requested size differs from the current weight count, both the
 * weight vector and the threshold are re-initialized with fresh random
 * values; otherwise a warning is emitted and nothing changes.
 *
 * @param n Desired number of inputs.
 */
void Adaline::setInputSize(int n)
{
	const int currentSize = (int) weights.size();
	if (n == currentSize) {
		// Spanish message kept verbatim: "no change was made because the
		// number of inputs did not vary".
		qWarning("No se realizo cambio alguno porque el numero de entradas no vario");
		return;
	}
	setWeights(getRandomValues(n));
	setThreshold(getRandomValues(1)[0]);
}
/**
 * Resizes the output layer to @p size neurons.
 *
 * Every output neuron receives a fresh random weight vector whose length is
 * the size of the last hidden layer plus one (bias term).
 *
 * @param size New number of network outputs.
 */
void MultilayerPerceptron::setOutputSize(size_t size)
{
	outputWeights.resize(size);
	// One weight per neuron of the last hidden layer, plus the bias weight.
	// NOTE(review): assumes layerWeights is non-empty — verify against callers.
	const size_t fanIn = layerWeights.back().size() + 1;
	for (size_t i = 0; i < size; ++i) {
		outputWeights[i] = getRandomValues(fanIn);
	}
}
/**
 * Samples a random value for each joint variable and stores it in a
 * name -> value map.
 *
 * Delegates the actual sampling to the vector overload of getRandomValues()
 * and then pairs each sampled value with the corresponding variable name.
 *
 * @param rng    Random number generator used for sampling.
 * @param values Output map, filled with one entry per joint variable.
 * @param bounds Bounds restricting the sampled values.
 */
void planning_models::KinematicModel::JointModel::getRandomValues(random_numbers::RandomNumberGenerator &rng, std::map<std::string, double> &values, const Bounds &bounds) const
{
  const std::size_t n = variable_names_.size();
  std::vector<double> sampled;
  sampled.reserve(n);
  // The vector overload fills `sampled` with one random value per variable.
  getRandomValues(rng, sampled, bounds);
  for (std::size_t i = 0 ; i < n ; ++i)
    values[variable_names_[i]] = sampled[i];
}
/**
 * Sets the number of network inputs.
 *
 * Re-initializes the weights of every neuron in the first hidden layer with
 * random values sized for the new input count (plus one bias weight).
 *
 * @param size New number of inputs.
 */
void MultilayerPerceptron::setInputSize(int size)
{
	nInputs = size;
	// Guard: with no layers there is nothing to re-initialize (the original
	// code indexed layerWeights[0] unconditionally, which is UB when empty).
	if (layerWeights.empty()) {
		return;
	}
	// size_t avoids the narrowing/signed-unsigned mix of the original
	// `int sNeurons = layerWeights[0].size()`.
	const size_t sNeurons = layerWeights[0].size();
	for (size_t i = 0; i < sNeurons; i++) {
		layerWeights[0][i] = getRandomValues(nInputs + 1);
	}
}
void MultilayerPerceptron::setLayerSize(unsigned int layer, int size) { size_t nLayers = layerWeights.size(); layerWeights[layer].resize(size); //Si la capa a redimensionar es anterior a la penultima capa if(layer <= nLayers - 2){ size_t nNeurons = layerWeights[layer+1].size(); for(size_t i = 0; i < nNeurons; i++){ layerWeights[layer+1][i] = getRandomValues(size+1); } }else if(layer == nLayers - 1){ size_t nNeurons = outputWeights.size(); for(size_t i = 0; i < nNeurons; i++){ outputWeights[i] = getRandomValues(size+1); } } }
/**
 * Re-initializes every weight in the network with uniform random values in
 * [min, max].
 *
 * Hidden-layer neurons get fan-in+1 weights (bias included): the first layer
 * is sized by the input count, deeper layers by the previous layer's size.
 * Output neurons are sized by the last hidden layer.
 *
 * @param min Lower bound of the random range.
 * @param max Upper bound of the random range.
 */
void MultilayerPerceptron::randomizeWeights(double min, double max)
{
	// NOTE(review): clock() measures elapsed CPU time and is a weak RNG
	// seed; kept as-is to preserve the original behavior.
	srand(clock());
	const size_t nLayers = layerWeights.size();
	for (size_t layer = 0; layer < nLayers; layer++) {
		const size_t nNeurons = layerWeights[layer].size();
		for (size_t neuron = 0; neuron < nNeurons; neuron++) {
			layerWeights[layer][neuron] = (layer == 0)
					? getRandomValues(getInputSize() + 1, min, max)
					: getRandomValues(layerWeights[layer - 1].size() + 1, min, max);
		}
	}
	// Output neurons take their fan-in from the last hidden layer.
	const size_t nOutputs = outputWeights.size();
	for (size_t i = 0; i < nOutputs; i++) {
		outputWeights[i] = getRandomValues(layerWeights[nLayers - 1].size() + 1, min, max);
	}
}
/**
 * Initializes the Adaline from an explicit weight vector.
 *
 * Assigns the given weights, sets the learning rate (alfa) to 1, draws a
 * random threshold and stores the transfer function. An empty weight vector
 * is rejected with a warning.
 *
 * @param weights Initial weights; must contain at least one element.
 * @param tf      Transfer function to use.
 */
void Adaline::init(const vector<double> &weights, TransferFunctionType tf)
{
	if (weights.empty()) {
		// Spanish message kept verbatim: "a simple perceptron must be given
		// at least one input".
		qWarning() << "Se debe asignar al menos una entrada a un perceptron simple";
		return;
	}
	setWeights(weights);
	setAlfa(1);
	setThreshold(getRandomValues(1)[0]);
	setTransferFunction(tf);
}
void MultilayerPerceptron::setLayerSizes(const vector<int> &sizes) { // layerSizes = sizes; // this->hiddenLayerSizes = sizes; size_t nLayers = sizes.size(); layerWeights = vector<vector<vector<double> > >(nLayers); for(size_t i = 0; i < nLayers; i++){ layerWeights[i] = vector<vector<double > >(sizes[i]); for(int j = 0; j < sizes[i]; j++){ if(i == 0){ layerWeights[i][j] = getRandomValues(nInputs + 1); }else{ layerWeights[i][j] = getRandomValues(sizes[i - 1] + 1); } } // setLayerSize(i, sizes[i]); } // outputWeights.resize(); size_t nOutputWeights = layerWeights[layerWeights.size()-1].size(); size_t sOutputs = outputWeights.size(); for(size_t i = 0; i < sOutputs; i++){ outputWeights[i] = getRandomValues(nOutputWeights + 1); } }
int main() { //testPrint(); //testUtils(); //testMax(); std::vector<double> v; v = getRandomValues(); double tab[3]; tab[0] = min(v); tab[1] = max(v); tab[2] = sumNumbersInVector( v ); for( int i = 0 ; i < 3 ; i++) std::cout << tab[i] << " "; system("pause"); return 0; }
/**
 * Initializes the Adaline from a raw C array of weights.
 *
 * Copies @p ninputs weights, sets the learning rate (alfa) to 1, draws a
 * random threshold and stores the transfer function.
 *
 * @param ninputs Number of entries in @p weights.
 * @param weights Pointer to the initial weight values.
 * @param ft      Transfer function to use.
 */
void Adaline::init(int ninputs, double *weights, TransferFunctionType ft)
{
	setWeights(ninputs, weights);
	setAlfa(1);
	setThreshold(getRandomValues(1)[0]);
	setTransferFunction(ft);
}
void Adaline::randomizeWeights(double min, double max) { setWeights(getRandomValues((int)weights.size(), min, max)); threshold = getRandomValues(1, min, max)[0]; }