// Run one feed-forward evaluation of the network.
//
// The input vector is augmented with a constant 1.0f bias term, so the
// network must have exactly input.size() + 1 input neurons (asserted).
// Neurons are assumed to be stored in evaluation order: hidden neurons
// are activated sequentially, so earlier hidden neurons may feed later
// ones. Returns a reference to the member output buffer.
Data const &Phenotype::solve(Data const &input) {
    // Augment the input with the bias term.
    Data biased(input);
    biased.push_back(1.0f);
    assert(inputNeurons.size() == biased.size());

    // Wipe any activation left over from a previous call.
    for (Neuron *const &n : hiddenNeurons) n->state = 0.0f;
    for (Neuron *const &n : outputNeurons) n->state = 0.0f;

    // Input layer: push each (biased) value along its outgoing synapses.
    for (size_t src = 0; src < inputNeurons.size(); ++src) {
        float const value = biased[src];
        for (auto const &link : inputNeurons[src]->synapses)
            link.to->state += value * link.weight;
    }

    // Hidden layer: activate in order, then propagate the result forward.
    for (Neuron *const &n : hiddenNeurons) {
        n->activate();
        for (auto const &link : n->synapses)
            link.to->state += n->state * link.weight;
    }

    // Output layer: activate and copy states into the result buffer.
    for (size_t k = 0; k < outputNeurons.size(); ++k) {
        outputNeurons[k]->activate();
        output[k] = outputNeurons[k]->state;
    }
    return output;
}
void runAnalysis(int *dic, Args args, std::istream *in, profile &pro) { int s, fieldc=0, linec=0; char consensus; std::string name, column, number, line; std::vector <std::string> field; getline(*in, line); field=split(line, args.cdel); count_t samples=(field.size()-3)/3; count_t ** count=new count_t* [samples]; for(count_t i = 0 ; i < samples; ++i) count[i] = new count_t[9]; site_t site(samples); pro.setsamples(samples); pro.setcolumns(args.c); pro.set_delim_column(args.cdel); pro.set_delim_quartet(args.qdel); if (not (args.noheader) ) pro.writeheader(); /* If multiple files are given as arguments, the default behavoir should be to merge the files, * an option should be given to append the files, but in the event that duplicat ids exist in * files being appended the program should throw an error or warning. */ while( std::getline(*in, line)!=NULL){ field=split(line, args.cdel); ++linec; fieldc = field.size(); if(fieldc < 5){ printf("WARNING[mapgd proview]: Skipping line %d with only %d fields.\n", linec, fieldc); continue; } for(count_t i = 0 ; i < samples; ++i) memset(count[i], 0, sizeof(count_t)*9 ); site.id0 = field[0]; consensus = field[2][0]; if (site.extra_ids.size()==0) site.extra_ids.push_back(std::string(1, consensus)); else site.extra_ids[0] = consensus; for (count_t i = 0; i < samples; ++i){ column = field[4+i*3]; scanCol(column, dic, count[i], consensus); } bool run=false; for (count_t x=0 ; x<samples ; ++x){ s=0; for(count_t y=0; y<8; ++y) s += count[x][y]; if (s>=args.min) run=true; } if (!args.notrim){ for (count_t x=0 ; x<samples ; ++x) for (count_t y=0 ; y<4 ; y++) if (biased(count[x][y], count[x][y+4], args.pvalue) ) run=false; } if(run){ site.id1 = field[1]; for(count_t x=0 ; x<samples ; x++){ for (count_t y=0 ; y<4 ; y++){ site.sample[x].base[y]=count[x][y]+count[x][y+4]; } } pro.write(site); } } }