Example #1
void Net::ReadLabels(const mxArray *mx_labels) {
  std::vector<size_t> labels_dim = mexGetDimensions(mx_labels);  
  mexAssert(labels_dim.size() == 2, "The label array must have 2 dimensions");
  size_t samples_num = labels_dim[0];
  size_t classes_num = labels_dim[1];
  mexAssert(classes_num == layers_.back()->length_,
    "Labels and last layer must have equal number of classes");  
  MatCPU labels_norm; // order_ == false
  mexGetMatrix(mx_labels, labels_norm);
  if (params_.balance_) {  
    MatCPU labels_mean(1, classes_num);
    Mean(labels_norm, labels_mean, 1);
    mexAssert(!labels_mean.hasZeros(),
      "Balancing impossible: one of the classes is not represented");
    MatCPU cpucoeffs(1, classes_num);
    cpucoeffs.assign(1);
    cpucoeffs /= labels_mean;
    classcoefs_.resize(1, classes_num);
    classcoefs_ = cpucoeffs;
    classcoefs_ /= (ftype) classes_num;
  }
  labels_.resize(samples_num, classes_num);
  labels_.reorder(true, false); // order_ == true;
  labels_ = labels_norm; 
}
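A minimal standalone sketch of the balancing step above, using a plain std::vector instead of the toolbox's MatCPU type (the helper name, signature, and row-major layout are assumptions for illustration): each class coefficient is the inverse of that class's mean label frequency, divided by the number of classes, which is what the cpucoeffs / labels_mean / classes_num sequence computes.

#include <cassert>
#include <cstddef>
#include <vector>

// Hypothetical helper: per-class balancing coefficients from a dense
// one-hot label matrix stored row-major as samples_num x classes_num.
std::vector<double> BalanceCoefs(const std::vector<double> &labels,
                                 std::size_t samples_num,
                                 std::size_t classes_num) {
  std::vector<double> coefs(classes_num, 0.0);
  for (std::size_t c = 0; c < classes_num; ++c) {
    double mean = 0.0;
    for (std::size_t s = 0; s < samples_num; ++s) {
      mean += labels[s * classes_num + c];
    }
    mean /= samples_num;
    assert(mean > 0 && "Balancing impossible: one of the classes is not represented");
    coefs[c] = 1.0 / mean / classes_num;  // inverse class frequency, normalized
  }
  return coefs;
}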
Example #2
void Net::ReadLabels(const mxArray *mx_labels) {
  std::vector<size_t> labels_dim = mexGetDimensions(mx_labels);  
  mexAssert(labels_dim.size() == 2, "The label array must have 2 dimensions");
  size_t classes_num = labels_dim[1];
  mexAssert(classes_num == layers_.back()->length_,
    "Labels and last layer must have equal number of classes");  
  labels_ = mexGetMatrix(mx_labels);
  classcoefs_.init(1, classes_num, 1);
  if (params_.balance_) {  
    Mat labels_mean = Mean(labels_, 1);
    for (size_t i = 0; i < classes_num; ++i) {
      mexAssert(labels_mean(i) > 0, "Balancing impossible: one of the classes is not represented");
      (classcoefs_(i) /= labels_mean(i)) /= classes_num;      
    }
  }
  if (layers_.back()->function_ == "SVM") {
    (labels_ *= 2) -= 1;    
  }
}
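The SVM branch rescales the {0,1} one-hot labels to {-1,+1}, the target convention a hinge-loss output layer expects. A minimal sketch of the same transform on a flat buffer (the function name and std::vector layout are assumptions for illustration):

#include <vector>

// Map {0,1} targets to {-1,+1}, mirroring (labels_ *= 2) -= 1 above.
void ToSvmTargets(std::vector<double> &labels) {
  for (double &v : labels) {
    v = 2.0 * v - 1.0;
  }
}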
Example #3
void Net::Train(const mxArray *mx_data, const mxArray *mx_labels) {  
  
  //mexPrintMsg("Start training...");
  LayerFull *lastlayer = static_cast<LayerFull*>(layers_.back());
  std::vector<size_t> labels_dim = mexGetDimensions(mx_labels);  
  mexAssert(labels_dim.size() == 2, "The label array must have 2 dimensions");    
  mexAssert(labels_dim[0] == lastlayer->length_,
    "Labels and last layer must have equal number of classes");  
  size_t train_num = labels_dim[1];  
  Mat labels(labels_dim);
  mexGetMatrix(mx_labels, labels);
  classcoefs_.assign(labels_dim[0], 1);
  if (params_.balance_) {  
    Mat labels_mean(labels_dim[0], 1);
    labels.Mean(2, labels_mean);
    for (size_t i = 0; i < labels_dim[0]; ++i) {
      mexAssert(labels_mean(i) > 0, "Balancing impossible: one of the classes is not represented");
      (classcoefs_[i] /= labels_mean(i)) /= labels_dim[0];      
    }
  }
  if (lastlayer->function_ == "SVM") {
    (labels *= 2) -= 1;    
  }
  
  size_t mapnum = 1;  
  if (mexIsCell(mx_data)) {
    mapnum = mexGetNumel(mx_data);    
  }
  mexAssert(mapnum == layers_.front()->outputmaps_,
    "Data must have the same number of cells as outputmaps on the first layer");
  std::vector< std::vector<Mat> > data(mapnum);  
  for (size_t map = 0; map < mapnum; ++map) {
    const mxArray *mx_cell;  
    if (mexIsCell(mx_data)) {
      mx_cell = mxGetCell(mx_data, map);
    } else {
      mx_cell = mx_data;
    }
    std::vector<size_t> data_dim = mexGetDimensions(mx_cell);  
    mexAssert(data_dim.size() == 3, "The data array must have 3 dimensions");  
    mexAssert(data_dim[0] == layers_.front()->mapsize_[0] && 
              data_dim[1] == layers_.front()->mapsize_[1],
             "Data and the first layer must have equal sizes");    
    mexAssert(data_dim[2] == train_num, "All data maps and labels must have equal number of objects");    
    mexGetMatrix3D(mx_cell, data[map]);
  }

  size_t numbatches = ceil((double) train_num/params_.batchsize_);
  trainerror_.assign(params_.numepochs_ * numbatches, 0);
  for (size_t epoch = 0; epoch < params_.numepochs_; ++epoch) {    
    std::vector<size_t> randind(train_num);
    for (size_t i = 0; i < train_num; ++i) {
      randind[i] = i;
    }
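    // Note: std::random_shuffle is deprecated in C++14 and removed in C++17;
    // std::shuffle with an explicit generator is the modern replacement.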
    if (params_.shuffle_) {
      std::random_shuffle(randind.begin(), randind.end());
    }
    std::vector<size_t>::const_iterator iter = randind.begin();
    for (size_t batch = 0; batch < numbatches; ++batch) {
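      // The last batch may contain fewer than batchsize_ samples.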
      size_t batchsize = std::min(params_.batchsize_, (size_t)(randind.end() - iter));
      std::vector<size_t> batch_ind = std::vector<size_t>(iter, iter + batchsize);
      iter = iter + batchsize;
      std::vector< std::vector<Mat> > data_batch(mapnum);
      for (size_t map = 0; map < mapnum; ++map) {
        data_batch[map].resize(batchsize);
        for (size_t i = 0; i < batchsize; ++i) {        
          data_batch[map][i] = data[map][batch_ind[i]];
        }
      }      
      Mat labels_batch(labels_dim[0], batchsize);
      Mat pred_batch(labels_dim[0], batchsize);
      labels.SubMat(batch_ind, 2, labels_batch);
      UpdateWeights(false);      
      Forward(data_batch, pred_batch, true);
      Backward(labels_batch, trainerror_[epoch * numbatches + batch]);      
      UpdateWeights(true);
      if (params_.verbose_ == 2) {
        std::string info = std::string("Epoch: ") + std::to_string(epoch+1) +
                           std::string(", batch: ") + std::to_string(batch+1);
        mexPrintMsg(info);
      }
    } // batch    
    if (params_.verbose_ == 1) {
      std::string info = std::string("Epoch: ") + std::to_string(epoch+1);                         
      mexPrintMsg(info);
    }
  } // epoch
  //mexPrintMsg("Training finished");
}
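For reference, a self-contained sketch of the per-epoch batching scheme used in the loop above: indices are shuffled once per epoch and cut into consecutive chunks of at most batchsize, so the last batch may be smaller (numbatches = ceil(train_num / batchsize)). The helper name and return type are assumptions for illustration, and std::shuffle stands in for the deprecated std::random_shuffle.

#include <algorithm>
#include <cstddef>
#include <numeric>
#include <random>
#include <vector>

// Hypothetical helper: shuffle sample indices and split them into
// consecutive chunks of at most batchsize, as the training loop does.
std::vector<std::vector<std::size_t>> MakeBatches(std::size_t train_num,
                                                  std::size_t batchsize,
                                                  std::mt19937 &gen) {
  std::vector<std::size_t> ind(train_num);
  std::iota(ind.begin(), ind.end(), 0);
  std::shuffle(ind.begin(), ind.end(), gen);
  std::vector<std::vector<std::size_t>> batches;
  for (std::size_t start = 0; start < train_num; start += batchsize) {
    std::size_t end = std::min(start + batchsize, train_num);
    batches.emplace_back(ind.begin() + start, ind.begin() + end);
  }
  return batches;
}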