Example #1
template <typename Dtype>
void ScaleLayer<Dtype>::LayerSetUp(const vector<Blob<Dtype>*>& bottom,
      const vector<Blob<Dtype>*>& top) {
  const ScaleParameter& param = this->layer_param_.scale_param();
  if (bottom.size() == 1 && this->blobs_.size() > 0) {
    LOG(INFO) << "Skipping parameter initialization";
  } else if (bottom.size() == 1) {
    // scale is a learned parameter; initialize it
    axis_ = bottom[0]->CanonicalAxisIndex(param.axis());
    const int num_axes = param.num_axes();
    CHECK_GE(num_axes, -1) << "num_axes must be non-negative, "
                           << "or -1 to extend to the end of bottom[0]";
    if (num_axes >= 0) {
      CHECK_GE(bottom[0]->num_axes(), axis_ + num_axes)
          << "scale blob's shape extends past bottom[0]'s shape when applied "
          << "starting with bottom[0] axis = " << axis_;
    }
    this->blobs_.resize(1);
    const vector<int>::const_iterator& shape_start =
        bottom[0]->shape().begin() + axis_;
    const vector<int>::const_iterator& shape_end =
        (num_axes == -1) ? bottom[0]->shape().end() : (shape_start + num_axes);
    vector<int> scale_shape(shape_start, shape_end);
    this->blobs_[0].reset(new Blob<Dtype>(scale_shape));
    FillerParameter filler_param(param.filler());
    if (!param.has_filler()) {
      // Default to unit (1) filler for identity operation.
      filler_param.set_type("constant");
      filler_param.set_value(1);
    }
    shared_ptr<Filler<Dtype> > filler(GetFiller<Dtype>(filler_param));
    filler->Fill(this->blobs_[0].get());
  }
  if (param.bias_term()) {
    LayerParameter layer_param(this->layer_param_);
    layer_param.set_type("Bias");
    BiasParameter* bias_param = layer_param.mutable_bias_param();
    bias_param->set_axis(param.axis());
    if (bottom.size() > 1) {
      bias_param->set_num_axes(bottom[1]->num_axes());
    } else {
      bias_param->set_num_axes(param.num_axes());
    }
    bias_param->mutable_filler()->CopyFrom(param.bias_filler());
    bias_layer_ = LayerRegistry<Dtype>::CreateLayer(layer_param);
    bias_bottom_vec_.resize(1);
    bias_bottom_vec_[0] = bottom[0];
    bias_layer_->SetUp(bias_bottom_vec_, top);
    bias_param_id_ = this->blobs_.size();
    this->blobs_.resize(bias_param_id_ + 1);
    this->blobs_[bias_param_id_] = bias_layer_->blobs()[0];
    bias_propagate_down_.resize(1, false);
  }
  this->param_propagate_down_.resize(this->blobs_.size(), true);
}
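
Aside: the scale blob's shape above is just the slice [axis, axis + num_axes) of bottom[0]'s shape, or everything from axis onward when num_axes is -1. Below is a minimal standalone sketch of that slicing, using a hypothetical 2x3x4x5 bottom shape and axis = 1, num_axes = 1 (the common per-channel case); it illustrates the iterator arithmetic only and is not part of the layer:

#include <cassert>
#include <iostream>
#include <vector>

int main() {
  // Hypothetical bottom[0] shape: N x C x H x W = 2 x 3 x 4 x 5.
  std::vector<int> bottom_shape = {2, 3, 4, 5};
  const int axis = 1;      // like scale_param.axis() after canonicalization
  const int num_axes = 1;  // like scale_param.num_axes(); -1 extends to the end

  // The same slice the layer takes: [axis, axis + num_axes).
  std::vector<int>::const_iterator shape_start = bottom_shape.begin() + axis;
  std::vector<int>::const_iterator shape_end =
      (num_axes == -1) ? bottom_shape.end() : (shape_start + num_axes);
  std::vector<int> scale_shape(shape_start, shape_end);

  // Per-channel scaling: one learned factor per channel, shape {3}.
  assert(scale_shape.size() == 1 && scale_shape[0] == 3);
  std::cout << "scale blob has " << scale_shape[0] << " elements\n";
  return 0;
}

With num_axes = -1 the same slice yields {3, 4, 5}, i.e. one scale value per element of each channel map.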
Example #2
template <typename Dtype>
void Net<Dtype>::appendTop(const NetParameter& param, const int layer_id, const int top_id,
	set<string>* available_blobs, map<string, int>* blob_name_to_idx){
	boost::shared_ptr<LayerParameter> layer_param(
		layer_id >= 0 ? new LayerParameter(param.layer(layer_id)) : NULL);
	//	use (layer_id, top_id) or (-1, top_id) to get a blob name
	const string& blob_name = layer_param ?
		(top_id<layer_param->top_size() ? layer_param->top(top_id) : "(automatic)") : param.input(top_id);
	//	in-place case, e.g.:
	//	I0721 10:38:16.722070  4692 net.cpp:84] relu1 <- conv1
	//	I0721 10:38:16.722082  4692 net.cpp:98] relu1 -> conv1 (in-place)
	//	check whether a blob sits at the same position in both bottom and top
	if (blob_name_to_idx && layer_param && top_id < layer_param->bottom_size()
		&& blob_name == layer_param->bottom(top_id)){
		LOG_IF(INFO, Dragon::get_root_solver())
			<< layer_param->name() << " [Layer-Produce] -> " << blob_name << " [Blob-Name] (in-place)";
		//	reuse the existing blob as this layer's top, looked up by blob_name
		top_vecs[layer_id].push_back(blobs[(*blob_name_to_idx)[blob_name]].get());
		//	record its id
		top_id_vecs[layer_id].push_back((*blob_name_to_idx)[blob_name]);
	}
	else if (blob_name_to_idx && (*blob_name_to_idx).count(blob_name) ){
		LOG(FATAL) << "Top blob: " << blob_name << " is produced by multiple sources.";
	}
	// normal top blob stuffing
	else{
		//	debug info
		if (Dragon::get_root_solver()){
			if (layer_param) LOG(INFO) << layer_param->name() << " [Layer-Produce] -> " << blob_name << " [Blob-Name]";
			//	special case, only used when viewing a Net's structure,
			//	because such blobs need not specify a data source and cannot be trained or tested
			//	virtual data input blobs do not belong to any layer
			//	see more in insert_splits.cpp / void InsertSplits()
			else LOG(INFO) << "Input " << top_id << " [Blob-Code] -> " << blob_name << " [Blob-Name]";
		}
		//	allocate an empty blob first
		boost::shared_ptr<Blob<Dtype>> ptr_blob(new Blob<Dtype>());
		//	store global blob info
		const int blob_id = blobs.size();
		blobs.push_back(ptr_blob);
		blobs_name.push_back(blob_name);
		blobs_need_backward.push_back(false);
		//	map the name to its index number,
		//	which also records that this top blob has been bound from a bottom;
		//	checking it first reveals whether a top blob has multiple sources (forbidden)
		if (blob_name_to_idx) (*blob_name_to_idx)[blob_name] = blob_id;
		//	reshape virtual input blobs here only,
		//	because they never pass through a DataLayer (which provides the reshape/transform service)
		if (layer_id == -1){
			ptr_blob->reshape(param.input_shape(top_id));
			//	track virtual input blobs separately
			net_input_blobs.push_back(ptr_blob.get());
			net_input_blob_indices.push_back(blob_id);
		}
		else{
			top_vecs[layer_id].push_back(ptr_blob.get());
			top_id_vecs[layer_id].push_back(blob_id);
		}
	}
	//	a set used for listing all existing top blobs
	if (available_blobs) available_blobs->insert(blob_name);
}
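
Aside: stripped of the Blob bookkeeping, appendTop is a three-way dispatch on the top blob's name: reuse the registered blob for the in-place case, abort on a duplicate that is not in-place, otherwise register a fresh blob under its name. Below is a minimal standalone sketch of that dispatch, with plain strings standing in for Blob/LayerParameter and hypothetical names (append_top, conv1); the real method additionally wires top_vecs/top_id_vecs and handles the layer_id == -1 virtual-input case:

#include <iostream>
#include <map>
#include <string>
#include <vector>

// Sketch of appendTop's three branches; all names here are hypothetical.
void append_top(const std::string& blob_name, bool in_place,
                std::map<std::string, int>* blob_name_to_idx,
                std::vector<std::string>* blobs) {
  if (in_place && blob_name_to_idx->count(blob_name)) {
    // in-place: reuse the already-registered blob, allocate nothing
    std::cout << blob_name << " (in-place, id "
              << (*blob_name_to_idx)[blob_name] << ")\n";
  } else if (blob_name_to_idx->count(blob_name)) {
    // a non-in-place duplicate top name is forbidden
    std::cout << "FATAL: top blob " << blob_name
              << " is produced by multiple sources\n";
  } else {
    // normal case: register a fresh blob and encode its index under its name
    const int blob_id = static_cast<int>(blobs->size());
    blobs->push_back(blob_name);
    (*blob_name_to_idx)[blob_name] = blob_id;
    std::cout << blob_name << " (new, id " << blob_id << ")\n";
  }
}

int main() {
  std::map<std::string, int> name_to_idx;
  std::vector<std::string> blobs;
  append_top("conv1", false, &name_to_idx, &blobs);  // new blob, id 0
  append_top("conv1", true,  &name_to_idx, &blobs);  // in-place reuse
  append_top("conv1", false, &name_to_idx, &blobs);  // duplicate: forbidden
  return 0;
}

Running it prints a fresh registration, an in-place reuse, and the forbidden-duplicate message, mirroring the three branches above.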