// Train on sample x for concept j: load the sample into the input layer,
// run a forward pass, back-propagate, and record per-class accuracy.
void train(int x, int j) {
    for (int i = 0; i < fea_num; ++i)
        mlp[0][i] = neuron(0, i, 0, fea_train[x][i]);
    for (int i = 0; i < tag_num; ++i)
        mlp[0][i + fea_num] = neuron(0, fea_num + i, 0, tag_train[x][i]);
    mlp[0][fea_num + tag_num] = neuron(0, fea_num + tag_num, 0, 1); // constant bias input
    //for (int i = 0; i < 10; ++i)
    //    ans[i] = concept[i][id_train[x]];
    ans[0] = concept[j][id_train[x]]; // target: 1 if concept j applies to this sample
    ans[1] = 1 - ans[0];              // complementary output unit
    calc();   // forward pass
    adjust(); // back-propagation / weight update
    if (concept[j][id_train[x]] == 1) {
        // Positive example: output unit 0 should dominate.
        if (mlp[layers - 1][0].y >= mlp[layers - 1][1].y) ++pgood;
        ++psum;
    } else {
        // Negative example: output unit 1 should dominate.
        if (mlp[layers - 1][0].y <= mlp[layers - 1][1].y) ++ngood;
        ++nsum;
    }
}
// Run a forward pass on sample x from the test set (k == 1) or the database set.
void test(int x, int k) {
    for (int i = 0; i < fea_num; ++i)
        mlp[0][i] = neuron(0, i, 0, (k == 1 ? fea_test : fea_database)[x][i]);
    for (int i = 0; i < tag_num; ++i)
        mlp[0][i + fea_num] = neuron(0, fea_num + i, 0, (k == 1 ? tag_test : tag_database)[x][i]);
    mlp[0][fea_num + tag_num] = neuron(0, fea_num + tag_num, 0, 1); // constant bias input
    calc(); // forward pass only; no weight update
}
// Populate this layer with nNeurons neurons.
void NetworkLayer::init(int nNeurons) {
    N = nNeurons;
    for (int i = 0; i < nNeurons; ++i) {
        Neuron neuron(nNeurons);
        // printf("Generating neurons on layer %d, neuron id is %d\n", layerId, neuron.getID());
        Neurons.push_back(neuron);
    }
}
int main() {
    srand(0); // fixed seed for reproducibility; use (unsigned)time(NULL) for a randomized run
    initialize();
    for (int k = 0; k < 10; ++k) {
        // Re-initialize the hidden and output layers before learning concept k.
        for (int i = 1; i < layers; ++i)
            for (int j = 0; j < neu_num[i]; ++j)
                mlp[i][j] = neuron((i < layers - 1 ? inner : outer), j, i, 0);
        // train() and test() take a sample index as their first argument, so they are
        // called here over every sample; train_num and test_num are assumed names for
        // the dataset sizes (replace with the actual globals).
        for (int x = 0; x < train_num; ++x)
            train(x, k); // train concept k on training sample x
        for (int x = 0; x < test_num; ++x)
            test(x, 1);  // second argument 1 selects the test set (see test())
    }
    output();
    return 0;
}