void fill()
{
    order.clear();

    MapPtr nodes = ::root_map->get( "Nodes" );
    Map::iterator i = nodes->begin();
    Map::iterator iend = nodes->end();
    while( i != iend )
    {
        NodeLayerPtr node = nodes->get( i );
        BooleanPtr enabled( node->get( "enabled" ) );

        // Queue every neuron of each enabled async-recall layer.
        if( node->asyncRecallOrder() && enabled->get() )
        {
            size_t neurons = node->numNeurons();
            for( size_t n = 0; n < neurons; ++n )
            {
                order.push_back( NodeNeuron( node, n ) );
            }
        }
        i++;
    }
}
string transform(const string& src, MapPtr& rule)
{
    auto des = rule->find(src);
    if (des != rule->end()) {
        return des->second;
    }
    return src;
}
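// A minimal, self-contained sketch of the same lookup-or-fallback pattern used
// by transform(), assuming MapPtr behaves like a pointer to a std::map-style
// container; the names below (demo_transform, rename_rules) are hypothetical
// and not part of the original code.
#include <map>
#include <string>

static std::string demo_transform(const std::string& src,
                                  const std::map<std::string, std::string>& rules)
{
    auto it = rules.find(src);
    return it != rules.end() ? it->second : src;  // fall back to the input string
}

// Usage: demo_transform("old_name", rename_rules) yields the mapped value when a
// rule exists for "old_name", otherwise the input string itself.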
void nnet::learn()
{
    if( ::exec_map->empty() )
    {
        // train synchronous layers first
        {
            MapPtr nodes = ::root_map->get( "Nodes" );
            Map::iterator i = nodes->begin();
            Map::iterator iend = nodes->end();
            while( i != iend )
            {
                NodeLayerPtr node = nodes->get( i );
                BooleanPtr enabled( node->get( "enabled" ) );
                if( !node->asyncRecallOrder() && enabled->get() )
                {
                    learn( node );
                }
                i++;
            }
        }

        // then train async layers in recall order
        {
            RecallOrder::iterator i = recall_order.order.begin();
            RecallOrder::iterator iend = recall_order.order.end();
            while( i != iend )
            {
                i->node->learn( i->neuron );
                i++;
            }
        }
    }
    else
    {
        // an ExecEngine is registered: delegate training to it
        try
        {
            ExecEnginePtr exec( ::exec_map->first() );
            exec->learn();
        }
        catch( std::exception& e )
        {
            LOG_EXCEPTION_E( e );
            error::std_exception( "nnet::learn() running ExecEngine", e.what() );
            return;
        }
        catch( ... )
        {
            LOG_EXCEPTION;
            error::alert( "Critical exception in ExecEngine!" );
        }
    }

    nnet::global::learn_signal();
}
int CustomerData::query(const std::string& customer, const std::string& stock) const
{
    MapPtr data = getData();
    Map::const_iterator entries = data->find(customer);

    std::cout << "CustomerData::query " << customer << ", " << stock << std::endl;

    // Return the matching entry for this customer's stock, or -1 when the
    // customer is unknown.
    if (entries != data->end()) {
        return findEntry(entries->second, stock);
    }
    return -1;
}
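// A hedged sketch of the two-level lookup that CustomerData::query() performs,
// using plain std::map in place of the project's MapPtr/findEntry types; the
// names here (Portfolio, CustomerTable, demo_query) are illustrative
// assumptions, not the original API.
#include <map>
#include <string>

using Portfolio = std::map<std::string, int>;            // stock -> entry value
using CustomerTable = std::map<std::string, Portfolio>;  // customer -> portfolio

static int demo_query(const CustomerTable& data,
                      const std::string& customer,
                      const std::string& stock)
{
    auto c = data.find(customer);
    if (c == data.end())
        return -1;  // unknown customer
    auto s = c->second.find(stock);
    return s != c->second.end() ? s->second : -1;  // this sketch treats unknown stock as -1 too
}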