Example #1
0
// Drive the complete RBM learning pipeline: load the training data,
// allocate and initialize the parameters, train, persist the model,
// and finally release every buffer that was allocated here.
void rbm::run(std::string model_weight_file, std::string model_visible_bias_file,
            std::string model_hidden_bias_file, std::string learned_features_file, std::string file){

    load_data(file.c_str());

    // Parameter storage: one weight row per hidden unit, plus one bias
    // vector for each of the two layers.
    float ** weights = new float*[num_hidden_units];
    for (int h = 0; h < num_hidden_units; ++h){
        weights[h] = new float[num_visible_units];
    }
    float * hidden_bias = new float[num_hidden_units];
    float * visible_bias = new float[num_visible_units];

    // Fill weights and biases with their starting values.
    // (Alternative init_paras overload can load them from files instead.)
    init_paras(weights, visible_bias, hidden_bias);

    // Run the training loop over the loaded data.
    train(weights, visible_bias, hidden_bias);

    // Persist the learned parameters to the given output files.
    save_model(weights, visible_bias, hidden_bias, model_weight_file.c_str(),
                model_visible_bias_file.c_str(), model_hidden_bias_file.c_str());

    // Tear everything down again.
    delete[] hidden_bias;
    delete[] visible_bias;
    for (int h = 0; h < num_hidden_units; ++h){
        delete[] weights[h];
    }
    delete[] weights;
}
Example #2
0
// Execute the DNN learning workflow for the calling thread: register it
// with the parameter server, grab the weight/bias tables, initialize,
// train, save the model, and deregister again.
void dnn::run(std::string model_weight_file, std::string model_bias_file)
{
  // Lazily hand out a unique id to this thread on first entry.
  if (!thread_id.get()) {
    thread_id.reset(new int(thread_counter++));
  }

  // Register with the PS and fetch one weight table and one bias table
  // per layer transition. Bias tables follow the weight tables.
  petuum::PSTableGroup::RegisterThread();
  const int num_tables = num_layers - 1;
  mat *weights = new mat[num_tables];
  mat *biases = new mat[num_tables];
  for (int layer = 0; layer < num_tables; ++layer) {
    weights[layer] = petuum::PSTableGroup::GetTableOrDie<float>(layer);
    biases[layer] = petuum::PSTableGroup::GetTableOrDie<float>(layer + num_tables);
  }

  // Advance the SSP clock so stale values finish propagating.
  for (int tick = 0; tick < staleness; ++tick) {
    petuum::PSTableGroup::Clock();
  }

  // True only for thread 0 of client 0 — the one designated to do
  // the one-time work (init, logging, saving).
  auto is_head = [&]() { return client_id == 0 && (*thread_id) == 0; };

  // Parameter initialization is done once, by the head thread only.
  if (is_head()) {
    std::cout << "init parameters" << std::endl;
    init_paras(weights, biases);
    std::cout << "init parameters done" << std::endl;
  }
  process_barrier->wait();

  // Every thread participates in training.
  if (is_head()) {
    std::cout << "training starts" << std::endl;
  }
  train(weights, biases);

  // Advance the clock again so pending updates propagate before saving.
  for (int tick = 0; tick < staleness; ++tick) {
    petuum::PSTableGroup::Clock();
  }

  // Only the head thread writes the model out.
  if (is_head()) {
    save_model(weights, biases, model_weight_file.c_str(), model_bias_file.c_str());
  }

  delete[] weights;
  delete[] biases;
  petuum::PSTableGroup::DeregisterThread();
}
Example #3
0
  // Parse this worker's share of the input, then dispatch to the
  // learning routine selected by `learning_method`. Unknown method
  // names are silently ignored.
  virtual void solve() {
    // Every worker parses its own partition of the input.
    auto local_lines = paracel_load(input);
    local_parser(item_vects, local_lines);
    std::cout << "parser done" << std::endl;

    if(learning_method == "limit_storage") {
      // Normalize up front to avoid repeating the work later.
      normalize(item_vects);
      init_paras();
      sync();
      mls_learning();
    } else if(learning_method == "default") {
      // Additionally pull in the records of all partitions.
      auto global_lines = paracel_loadall(input);
      local_parser(all_item_vects, global_lines);
      std::cout << "loadall done" << std::endl;
      normalize(item_vects);
      normalize(all_item_vects);
      std::cout << "normalize done" << std::endl;
      sync();
      learning();
    }
  }
Example #4
0
 // Run the whole learning process: set up parameters, wait for all
 // workers to reach the same point, then execute the learning loop.
 void solve() {
   init_paras();    // initialize model parameters
   paracel_sync();  // presumably a barrier across all workers — confirm against paracel API
   learning();      // main training routine
 }