// Compute the per-element bounds of a training set.
//
// Scans every exemplar in `teaching` and accumulates, element by element,
// the largest values into `max_range` and the smallest into `min_range`.
// Both outputs are seeded from the first exemplar, so after the call each
// holds the elementwise max/min over the whole set.  An empty training set
// leaves the outputs untouched.
void find_data_ranges(teaching_vector_type &teaching, SOM_element &max_range, SOM_element &min_range ) {
    if(teaching.size() == 0) return;  // nothing to scan; outputs unchanged
    // Seed both accumulators with exemplar 0, then fold in the rest.
    min_range = teaching[0];
    max_range = teaching[0];
    for(size_t idx = 1; idx < teaching.size(); ++idx) {
        max_range.elementwise_max(teaching[idx]);
        min_range.elementwise_min(teaching[idx]);
    }
}
// Serial (non-graph) training loop for the SOM.
//
// Runs nPasses epochs.  Each epoch draws one exemplar at random, finds its
// best-matching unit (BMU) over the whole map, then updates the neighborhood
// around that BMU with a radius and learning rate that both decay
// exponentially with the epoch index.
void SOMap::teach(teaching_vector_type &in) {
    for(int pass = 0; pass < nPasses; ++pass ) {
        // Random exemplar choice; as noted in the original, this makes the
        // run non-reproducible (contrast with graph_teach, which goes in order).
        int exemplar = (int)(randval(0, (double)in.size())); // this won't be reproducible.
        // randval's upper bound can be hit exactly; clamp to the last index.
        if(exemplar == (int)in.size()) --exemplar;

        // BMU search over the entire map (a single subsquare covering it all).
        int bmu_x = -1;
        int bmu_y = -1;
        subsquare_type whole_map(0, (int)my_map.size(), 1, 0, (int)my_map[0].size(), 1);
        (void) BMU_range(in[exemplar], bmu_x, bmu_y, whole_map); // just need bmu_x, bmu_y

        // Exponentially-decaying neighborhood radius and learning rate.
        double radius = max_radius * exp(-(double)pass * radius_decay_rate);
        double learning_rate = max_learning_rate * exp(-(double)pass * learning_decay_rate);
        epoch_update(in[exemplar], pass, bmu_x, bmu_y, radius, learning_rate);
    }
}
// Flow-graph-based training loop with speculative BMU search.
//
// Unlike SOMap::teach (random exemplars), this picks exemplars in order
// [0, in.size()) so the run is reproducible.  Once `epoch` reaches
// `speculation_start`, the BMU search for the NEXT exemplar is launched
// speculatively (into the subgraph slot (epoch+1) % SPECULATION_CNT) before
// the current epoch's map update has happened; submaps invalidated by that
// update are then canceled and restarted.
// NOTE(review): relies on file-scope state (send_to, global_i, last_update,
// SPECULATION_CNT, speculation_start, nPasses, decay constants) and graph
// helpers (build/destroy_BMU_graph, graph_BMU, cancel/restart_submaps,
// wait_for_all_graphs) defined elsewhere in this example.
void graph_teach(SOMap &map1, teaching_vector_type &in) {
    build_BMU_graph(map1);
    // normally the training would pick random exemplars to teach the SOM. We need
    // the process to be reproducible, so we will pick the exemplars in order, [0, in.size())
    int next_j = 0;
    for(int epoch = 0; epoch < nPasses; ++epoch) {
        global_i = epoch;  // publish current epoch to the graph bodies
        bool canceled_submaps = false;
        int j = next_j; // try to make reproducible
        next_j = (epoch+1) % in.size();  // exemplar for the speculative (next) epoch
        search_result_type min_sr;
        if(epoch < speculation_start) {
            // Pre-speculation: feed only the current exemplar to this epoch's subgraph.
            (send_to[epoch%SPECULATION_CNT])->try_put(in[j]);
        } else if(epoch == speculation_start) {
            // Transition epoch: feed the current exemplar AND kick off the
            // first speculative search for the next exemplar.
            (send_to[epoch%SPECULATION_CNT])->try_put(in[j]);
            if(epoch < nPasses-1) {
                (send_to[(epoch+1)%SPECULATION_CNT])->try_put(in[next_j]);
            }
        } else if(epoch < nPasses - 1) {
            // Steady state: the current epoch's search was already launched
            // speculatively last iteration; only launch the next one.
            (send_to[(epoch+1)%SPECULATION_CNT])->try_put(in[next_j]);
        }
        // Block until this epoch's BMU search completes and collect its result.
        min_sr = graph_BMU(epoch % SPECULATION_CNT); //calls wait_for_all()
        double min_distance = get<0>(min_sr);  // NOTE(review): unused below — kept for reference/debugging?
        // Exponentially-decaying neighborhood radius and learning rate.
        double radius = max_radius * exp(-(double)epoch*radius_decay_rate);
        double learning_rate = max_learning_rate * exp(-(double)epoch * learning_decay_rate);
        if(epoch >= speculation_start && epoch < (nPasses - 1)) {
            // have to cancel the affected submaps
            // (the speculative search for the next epoch ran against the
            // pre-update map; submaps inside the update radius are now stale)
            // NOTE(review): XV/YV here vs get<1>/get<2> below — presumably XV==1
            // and YV==2 (x,y of the BMU); confirm against the enum declaration.
            cancel_submaps(get<XV>(min_sr), get<YV>(min_sr), radius, (epoch+1)%SPECULATION_CNT);
            canceled_submaps = true;
        }
        // Apply this epoch's neighborhood update at the BMU coordinates.
        map1.epoch_update(in[j], epoch, get<1>(min_sr), get<2>(min_sr), radius, learning_rate);
        ++global_i;
        if(canceled_submaps) {
            // do I have to wait for all the non-canceled speculative graph to complete first?
            // yes, in case a canceled task was already executing.
            wait_for_all_graphs((epoch+1) % SPECULATION_CNT); // wait for the array of subgraphs
            // Re-run the canceled submap searches against the now-updated map.
            restart_submaps(get<1>(min_sr), get<2>(min_sr), radius, (epoch+1)%SPECULATION_CNT, in[next_j]);
        }
        // Record the last result (with the effective radius) for inspection.
        last_update = min_sr;
        get<RADIUS>(last_update) = radius; // not smallest value, but range of effect
    }
    destroy_BMU_graph();
}