Example #1
File: net.cpp  Project: chenqi1990/MyCaffe
// Clears all state built by a previous Init() call and rebuilds the net from
// `param` with the batch size of the first layer overridden.
template <typename Dtype>
void Net<Dtype>::ReInit( NetParameter& param, const int batch_size )
{

  layers_.clear();
  layer_names_.clear();
  layer_need_backward_.clear();
  // blobs stores the blobs that store intermediate results between the
  // layers.
  blobs_.clear();
  blob_names_.clear();
  blob_need_backward_.clear();
  // bottom_vecs stores the vectors containing the input for each layer.
  // They don't actually host the blobs (blobs_ does), so we simply store
  // pointers.
  bottom_vecs_.clear();
  bottom_id_vecs_.clear();
  // top_vecs stores the vectors containing the output for each layer
  top_vecs_.clear();
  top_id_vecs_.clear();
  // blob indices for the input and the output of the net
  net_input_blob_indices_.clear();
  net_input_blobs_.clear();
  net_output_blobs_.clear();
  // The parameters in the network.
  params_.clear();
  // the learning rate multipliers
  params_lr_.clear();
  // the weight decay multipliers
  params_weight_decay_.clear();

  // Override the batch size of the first layer (assumed to be the data layer
  // in this V0-format NetParameter) and rebuild the whole net.
  param.mutable_layers(0)->mutable_layer()->set_batchsize(batch_size);
  Init( param );
}
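
Usage sketch (not from the project): the snippet below shows one way ReInit could be called to switch an already built net to a different batch size. The header paths, the caffe namespace, and the way net and param were originally constructed are assumptions about this old Caffe fork; only the ReInit call itself comes from the code above.

#include "caffe/net.hpp"
#include "caffe/proto/caffe.pb.h"

using caffe::Net;
using caffe::NetParameter;

// Rebuild an existing net with batch size 1, e.g. for single-image inference.
// `param` is assumed to be the same NetParameter the net was first built from.
void SwitchToSingleImage(Net<float>& net, NetParameter& param) {
  // ReInit clears every per-layer container, patches the batchsize of
  // layer 0 in the NetParameter, and re-runs Init(param).
  net.ReInit(param, 1);
}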