Code Example #1
File: solver.cpp Project: VikingMew/dec
template <typename Dtype>
void Solver<Dtype>::Restore(const char* state_file) {
  SolverState state;
  NetParameter net_param;
  ReadProtoFromBinaryFile(state_file, &state);
  if (state.has_learned_net()) {
    ReadProtoFromBinaryFile(state.learned_net().c_str(), &net_param);
    net_->CopyTrainedLayersFrom(net_param);
  }
  iter_ = state.iter();
  RestoreSolverState(state);
}
Code Example #2
template <typename Dtype>
void InfogainLossLayer<Dtype>::LayerSetUp(
    const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) {
  LossLayer<Dtype>::LayerSetUp(bottom, top);
  if (bottom.size() < 3) {
    CHECK(this->layer_param_.infogain_loss_param().has_source())
        << "Infogain matrix source must be specified.";
    BlobProto blob_proto;
    ReadProtoFromBinaryFile(
      this->layer_param_.infogain_loss_param().source(), &blob_proto);
    infogain_.FromProto(blob_proto);
  }
}
Code Example #3
template <typename Dtype>
void InfogainLossLayer<Dtype>::FurtherSetUp(
    const vector<Blob<Dtype>*>& bottom, vector<Blob<Dtype>*>* top) {
  CHECK_EQ(bottom[1]->channels(), 1);
  CHECK_EQ(bottom[1]->height(), 1);
  CHECK_EQ(bottom[1]->width(), 1);

  BlobProto blob_proto;
  ReadProtoFromBinaryFile(
    this->layer_param_.infogain_loss_param().source(), &blob_proto);
  infogain_.FromProto(blob_proto);
  CHECK_EQ(infogain_.num(), 1);
  CHECK_EQ(infogain_.channels(), 1);
  CHECK_EQ(infogain_.height(), infogain_.width());
}
Code Example #4
File: loss_layer.cpp Project: Vanova/caffe-compact
template <typename Dtype>
void InfogainLossLayer<Dtype>::SetUp(
    const vector<Blob<Dtype>*>& bottom, vector<Blob<Dtype>*>* top) {
  CHECK_EQ(bottom.size(), 2) << "Loss Layer takes two blobs as input.";
  CHECK_EQ(top->size(), 0) << "Loss Layer takes no output.";
  CHECK_EQ(bottom[0]->num(), bottom[1]->num())
      << "The data and label should have the same number.";
  CHECK_EQ(bottom[1]->channels(), 1);
  CHECK_EQ(bottom[1]->height(), 1);
  CHECK_EQ(bottom[1]->width(), 1);
  BlobProto blob_proto;
  ReadProtoFromBinaryFile(this->layer_param_.source(), &blob_proto);
  infogain_.FromProto(blob_proto);
  CHECK_EQ(infogain_.num(), 1);
  CHECK_EQ(infogain_.channels(), 1);
  CHECK_EQ(infogain_.height(), infogain_.width());
}
Code Example #5
File: sgd_solver.cpp Project: fossabot/caffe
template <typename Dtype>
void SGDSolver<Dtype>::RestoreSolverStateFromBinaryProto(
    const string& state_file) {
  SolverState state;
  ReadProtoFromBinaryFile(state_file, &state);
  this->iter_ = state.iter();
  if (state.has_learned_net()) {
    NetParameter net_param;
    ReadNetParamsFromBinaryFileOrDie(state.learned_net().c_str(), &net_param);
    this->net_->CopyTrainedLayersFrom(net_param);
  }
  this->current_step_ = state.current_step();
  CHECK_EQ(state.history_size(), history_.size())
      << "Incorrect length of history blobs.";
  LOG(INFO) << "SGDSolver: restoring history";
  for (int i = 0; i < history_.size(); ++i) {
    history_[i]->FromProto(state.history(i));
  }
}
Code Example #6
File: mmsb_model.cpp Project: ZhitingHu/network_mp
void MMSBModel::Restore(const char* snapshot_file) {
  ModelParameter param;
  ReadProtoFromBinaryFile(snapshot_file, &param);
  // check consistency
  CHECK_EQ(K_, param.num_comms());
  CHECK_EQ(vertices_.size(), param.vertices_size());
  // global param
  for (CIndex k = 0; k < K_; ++k) {
    lambda_0_[k] = param.lambda_0(k);
    lambda_1_[k] = param.lambda_1(k);
  }
  // vertex's params (neighbors, z)
  for (VIndex i = 0; i < vertices_.size(); ++i) {
    const VertexParameter& v_param = param.vertices(i);
    CHECK_EQ(i, v_param.index());
    vertices_[i]->FromProto(v_param);
  }
  // solver state
  iter_ = param.solver_param().solver_state().iter();
}
Code Example #7
File: infogain_loss_layer.cpp Project: naibaf7/caffe
template<typename Dtype, typename MItype, typename MOtype>
void InfogainLossLayer<Dtype, MItype, MOtype>::LayerSetUp(
    const vector<Blob<MItype>*>& bottom,
    const vector<Blob<MOtype>*>& top) {
  LossLayer<Dtype, MItype, MOtype>::LayerSetUp(bottom, top);
  // internal softmax layer
  LayerParameter softmax_layer_param(this->layer_param_);
  SoftmaxParameter* softmax_param = softmax_layer_param.mutable_softmax_param();
  softmax_param->set_axis(this->layer_param_.infogain_loss_param().axis());
  softmax_layer_param.set_type("Softmax");
  softmax_layer_param.clear_loss_weight();
  softmax_layer_param.add_loss_weight(1);
  softmax_layer_ =
      LayerRegistry<Dtype, MItype, MOtype>::CreateLayer(softmax_layer_param);
  softmax_bottom_vec_.clear();
  softmax_bottom_vec_.push_back(bottom[0]);
  softmax_top_vec_.clear();
  softmax_top_vec_.push_back(&prob_);
  softmax_layer_->SetUp(softmax_bottom_vec_, softmax_top_vec_);

  // ignore label
  has_ignore_label_ =
    this->layer_param_.loss_param().has_ignore_label();
  if (has_ignore_label_) {
    ignore_label_ = this->layer_param_.loss_param().ignore_label();
  }
  // normalization
  CHECK(!this->layer_param_.loss_param().has_normalize())
    << "normalize is deprecated. use \"normalization\"";
  normalization_ = this->layer_param_.loss_param().normalization();
  // matrix H
  if (bottom.size() < 3) {
    CHECK(this->layer_param_.infogain_loss_param().has_source())
        << "Infogain matrix source must be specified.";
    BlobProto blob_proto;
    ReadProtoFromBinaryFile(
      this->layer_param_.infogain_loss_param().source(), &blob_proto);
    infogain_.FromProto(blob_proto);
  }
  this->InitializeQuantizers(bottom, top);
}
Code Example #8
File: upgrade_proto.cpp Project: tgebru/transform
void ReadNetParamsFromBinaryFileOrDie(const string& param_file,
                                      NetParameter* param) {
    CHECK(ReadProtoFromBinaryFile(param_file, param))
            << "Failed to parse NetParameter file: " << param_file;
    if (NetNeedsUpgrade(*param)) {
        // NetParameter was specified using the old style (V0LayerParameter); try to
        // upgrade it.
        LOG(ERROR) << "Attempting to upgrade input file specified using deprecated "
                   << "V0LayerParameter: " << param_file;
        NetParameter original_param(*param);
        if (!UpgradeV0Net(original_param, param)) {
            LOG(ERROR) << "Warning: had one or more problems upgrading "
                       << "V0NetParameter to NetParameter (see above); continuing anyway.";
        } else {
            LOG(INFO) << "Successfully upgraded file specified using deprecated "
                      << "V0LayerParameter";
        }
        LOG(ERROR) << "Note that future Caffe releases will not support "
                   << "V0NetParameter; use ./build/tools/upgrade_net_proto_binary.bin to "
                   << "upgrade this and any other network proto files to the new format.";
    }
}
Code Example #9
File: upgrade_proto.cpp Project: XinLiuNvidia/caffe
void ReadNetParamsFromBinaryFileOrDie(const string& param_file,
                                      NetParameter* param) {
  CHECK(ReadProtoFromBinaryFile(param_file, param))
      << "Failed to parse NetParameter file: " << param_file;
  UpgradeNetAsNeeded(param_file, param);
}
Code Example #10
File: io.hpp Project: JesseLivezey/caffe
inline void ReadProtoFromBinaryFile(const string& filename,
                                    Message* proto) {
    ReadProtoFromBinaryFile(filename.c_str(), proto);
}
Code Example #11
File: io.hpp Project: ZhangSirM/caffe-SPPNet
inline void ReadProtoFromBinaryFileOrDie(const char* filename, Message* proto) {
  CHECK(ReadProtoFromBinaryFile(filename, proto));
}
Code Example #12
File: io.hpp Project: ZhangSirM/caffe-SPPNet
inline bool ReadProtoFromBinaryFile(const string& filename, Message* proto) {
  return ReadProtoFromBinaryFile(filename.c_str(), proto);
}
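Examples #10-#12 together show the usual overload pattern: a bool-returning core function taking a const char* path, a std::string convenience wrapper, and an OrDie variant that CHECKs the result. A minimal calling sketch, assuming Caffe's caffe/util/io.hpp and the generated caffe/proto/caffe.pb.h are available (the file name mean.binaryproto is hypothetical):

#include "caffe/proto/caffe.pb.h"  // generated protobuf classes (BlobProto, ...)
#include "caffe/util/io.hpp"       // ReadProtoFromBinaryFile and friends

void LoadMeanSketch() {
  caffe::BlobProto blob_proto;
  // Bool-returning form: the caller decides how to handle a parse failure.
  if (!caffe::ReadProtoFromBinaryFile("mean.binaryproto", &blob_proto)) {
    LOG(ERROR) << "Could not parse mean.binaryproto";
    return;
  }
  // OrDie form: aborts via CHECK on failure, so no explicit branch is needed.
  caffe::ReadProtoFromBinaryFileOrDie("mean.binaryproto", &blob_proto);
}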
Code Example #13
template <typename Dtype>
void MyImageDataLayer<Dtype>::SetUp(const vector<Blob<Dtype>*>& bottom,
									vector<Blob<Dtype>*>* top) {
	Layer<Dtype>::SetUp(bottom, top);
	const int new_height  = this->layer_param_.image_data_param().new_height();
	const int new_width  = this->layer_param_.image_data_param().new_width();
	CHECK((new_height == 0 && new_width == 0) ||
		  (new_height > 0 && new_width > 0)) << "Current implementation requires "
				  "new_height and new_width to be set at the same time.";

	/*
	 * Below, an arbitrary image is needed to initialize the blob,
	 * so an image must exist on disk. Two options:
	 * 1. read an image path from the prototxt,
	 * 2. or hard-code the path of the initialization image here.
	 */

	/* Option 1 */
	//jin
	const string& source = this->layer_param_.image_data_param().source();
	LOG(INFO) << "Opening file " << source;
	std::ifstream infile(source.c_str());
	string filename;
	int label;
	while (infile >> filename >> label) {
	  lines_.push_back(std::make_pair(filename, label));
	  break; //jin
	}
	

	/* Option 2 */
	//lines_.push_back(std::make_pair("/home/linger/init.jpg", 1));

	// Either option 1 or option 2 above may be used.

	lines_id_ = 0;
	// Read a data point (any image will do) and use it to initialize the top blob.
	Datum datum;
	CHECK(ReadImageToDatum(lines_[lines_id_].first, lines_[lines_id_].second,
						   new_height, new_width, &datum));
	// image
	const int crop_size = this->layer_param_.image_data_param().crop_size();
	const int batch_size = 1;//this->layer_param_.image_data_param().batch_size();
	const string& mean_file = this->layer_param_.image_data_param().mean_file();
	if (crop_size > 0) {
		(*top)[0]->Reshape(batch_size, datum.channels(), crop_size, crop_size);
		prefetch_data_.Reshape(batch_size, datum.channels(), crop_size, crop_size);
	} else {
		(*top)[0]->Reshape(batch_size, datum.channels(), datum.height(),
						   datum.width());
		prefetch_data_.Reshape(batch_size, datum.channels(), datum.height(),
							   datum.width());
	}
	LOG(INFO) << "output data size: " << (*top)[0]->num() << ","
			  << (*top)[0]->channels() << "," << (*top)[0]->height() << ","
			  << (*top)[0]->width();
	// label
	(*top)[1]->Reshape(batch_size, 1, 1, 1);
	prefetch_label_.Reshape(batch_size, 1, 1, 1);
	// datum size
	datum_channels_ = datum.channels();
	datum_height_ = datum.height();
	datum_width_ = datum.width();
	datum_size_ = datum.channels() * datum.height() * datum.width();
	CHECK_GT(datum_height_, crop_size);
	CHECK_GT(datum_width_, crop_size);
	// check if we want to have mean
	if (this->layer_param_.image_data_param().has_mean_file()) {
		BlobProto blob_proto;
		LOG(INFO) << "Loading mean file from " << mean_file;
		ReadProtoFromBinaryFile(mean_file.c_str(), &blob_proto);
		data_mean_.FromProto(blob_proto);
		CHECK_EQ(data_mean_.num(), 1);
		CHECK_EQ(data_mean_.channels(), datum_channels_);
		CHECK_EQ(data_mean_.height(), datum_height_);
		CHECK_EQ(data_mean_.width(), datum_width_);
	} else {
		// Simply initialize an all-empty mean.
		data_mean_.Reshape(1, datum_channels_, datum_height_, datum_width_);
	}
	// Now, start the prefetch thread. Before calling prefetch, we make two
	// cpu_data calls so that the prefetch thread does not accidentally make
	// simultaneous cudaMalloc calls when the main thread is running. On some
	// GPUs this seems to cause failures if we do not do so.
	prefetch_data_.mutable_cpu_data();
	prefetch_label_.mutable_cpu_data();
	data_mean_.cpu_data();
}
Code Example #14
File: net.cpp Project: AmirooR/caffe
template <typename Dtype>
void Net<Dtype>::CopyTrainedLayersFrom(const string trained_filename) {
  NetParameter param;
  ReadProtoFromBinaryFile(trained_filename, &param);
  CopyTrainedLayersFrom(param);
}
Code Example #15
File: data_layer.cpp Project: ChenglongChen/DSN
template <typename Dtype>
void DataLayer<Dtype>::SetUp(const vector<Blob<Dtype>*>& bottom,
      vector<Blob<Dtype>*>* top) {
  CHECK_EQ(bottom.size(), 0) << "Data Layer takes no input blobs.";
  CHECK_EQ(top->size(), 2) << "Data Layer takes two blobs as output.";
  // Initialize the leveldb
  leveldb::DB* db_temp;
  leveldb::Options options;
  options.create_if_missing = false;
  options.max_open_files = 100;
  LOG(INFO) << "Opening leveldb " << this->layer_param_.source();
  leveldb::Status status = leveldb::DB::Open(
      options, this->layer_param_.source(), &db_temp);
  CHECK(status.ok()) << "Failed to open leveldb "
      << this->layer_param_.source() << std::endl << status.ToString();
  db_.reset(db_temp);
  iter_.reset(db_->NewIterator(leveldb::ReadOptions()));
  iter_->SeekToFirst();
  // Check if we would need to randomly skip a few data points
  if (this->layer_param_.rand_skip()) {
    // NOLINT_NEXT_LINE(runtime/threadsafe_fn)
    unsigned int skip = rand() % this->layer_param_.rand_skip();
    LOG(INFO) << "Skipping first " << skip << " data points.";
    while (skip-- > 0) {
      iter_->Next();
      if (!iter_->Valid()) {
        iter_->SeekToFirst();
      }
    }
  }
  // Read a data point, and use it to initialize the top blob.
  Datum datum;
  datum.ParseFromString(iter_->value().ToString());
  // image
  int cropsize = this->layer_param_.cropsize();
  if (cropsize > 0) {
    (*top)[0]->Reshape(
        this->layer_param_.batchsize(), datum.channels(), cropsize, cropsize);
    prefetch_data_.reset(new Blob<Dtype>(
        this->layer_param_.batchsize(), datum.channels(), cropsize, cropsize));
  } else {
    (*top)[0]->Reshape(
        this->layer_param_.batchsize(), datum.channels(), datum.height(),
        datum.width());
    prefetch_data_.reset(new Blob<Dtype>(
        this->layer_param_.batchsize(), datum.channels(), datum.height(),
        datum.width()));
  }
  LOG(INFO) << "output data size: " << (*top)[0]->num() << ","
      << (*top)[0]->channels() << "," << (*top)[0]->height() << ","
      << (*top)[0]->width();
  // label
  (*top)[1]->Reshape(this->layer_param_.batchsize(), 1, 1, 1);
  prefetch_label_.reset(
      new Blob<Dtype>(this->layer_param_.batchsize(), 1, 1, 1));
  // datum size
  datum_channels_ = datum.channels();
  datum_height_ = datum.height();
  datum_width_ = datum.width();
  datum_size_ = datum.channels() * datum.height() * datum.width();
  CHECK_GT(datum_height_, cropsize);
  CHECK_GT(datum_width_, cropsize);
  // check if we want to have mean
  if (this->layer_param_.has_meanfile()) {
    BlobProto blob_proto;
    LOG(INFO) << "Loading mean file from " << this->layer_param_.meanfile();
    ReadProtoFromBinaryFile(this->layer_param_.meanfile().c_str(), &blob_proto);
    data_mean_.FromProto(blob_proto);
    CHECK_EQ(data_mean_.num(), 1);
    CHECK_EQ(data_mean_.channels(), datum_channels_);
    CHECK_EQ(data_mean_.height(), datum_height_);
    CHECK_EQ(data_mean_.width(), datum_width_);
  } else {
    // Simply initialize an all-empty mean.
    data_mean_.Reshape(1, datum_channels_, datum_height_, datum_width_);
  }
  // Now, start the prefetch thread. Before calling prefetch, we make two
  // cpu_data calls so that the prefetch thread does not accidentally make
  // simultaneous cudaMalloc calls when the main thread is running. On some
  // GPUs this seems to cause failures if we do not do so.
  prefetch_data_->mutable_cpu_data();
  prefetch_label_->mutable_cpu_data();
  data_mean_.cpu_data();
  DLOG(INFO) << "Initializing prefetch";
  CHECK(!pthread_create(&thread_, NULL, DataLayerPrefetch<Dtype>,
      reinterpret_cast<void*>(this))) << "Pthread execution failed.";
  DLOG(INFO) << "Prefetch initialized.";
}
Code Example #16
File: caffe.cpp Project: aaalgo/xnn
 CaffeModel (fs::path const& dir, int batch)
     : CaffeSetMode(mode),
     net((dir/"caffe.model").native(), TEST)
 {
     BOOST_VERIFY(batch >= 1);
     //CHECK_EQ(net.num_inputs(), 1) << "Network should have exactly one input: " << net.num_inputs();
     input_blob = net.input_blobs()[0];
     shape[0] = batch;
     shape[1] = input_blob->shape(1);
     CHECK(shape[1] == 3 || shape[1] == 1)
         << "Input layer should have 1 or 3 channels." << shape[1];
     net.CopyTrainedLayersFrom((dir/"caffe.params").native());
     // resize to required batch size
     shape[2] = input_blob->shape(2);
     shape[3] = input_blob->shape(3);
     input_blob->Reshape(shape[0], shape[1], shape[2], shape[3]);
     net.Reshape();
     // set mean file
     means[0] = means[1] = means[2] = 0;
     fs::path mean_file = dir / "caffe.mean";
     fs::ifstream test(mean_file);
     if (test) {
         BlobProto blob_proto;
         // check old format
         if (ReadProtoFromBinaryFile(mean_file.native(), &blob_proto)) {
             /* Convert from BlobProto to Blob<float> */
             Blob<float> meanblob;
             meanblob.FromProto(blob_proto);
             CHECK_EQ(meanblob.channels(), channels())
                 << "Number of channels of mean file doesn't match input layer.";
             /* The format of the mean file is planar 32-bit float BGR or grayscale. */
             vector<cv::Mat> mats;
             float* data = meanblob.mutable_cpu_data();
             for (int i = 0; i < channels(); ++i) {
                 /* Extract an individual channel. */
                 cv::Mat channel(meanblob.height(), meanblob.width(), CV_32FC1, data);
                 mats.push_back(channel);
                 data += meanblob.height() * meanblob.width();
             }
             /* Merge the separate channels into a single image. */
             cv::Mat merged;
             cv::merge(mats, merged);
             cv::Scalar channel_mean = cv::mean(merged);
             //mean = cv::Mat(input_height, input_width, merged.type(), channel_mean);
             means[0] = means[1] = means[2] = channel_mean[0];
             if (channels() > 1) {
                 means[1] = channel_mean[1];
                 means[2] = channel_mean[2];
             }
         }
         // if not proto format, then the mean file is just a bunch of textual numbers
         else {
             test >> means[0];
             means[1] = means[2] = means[0];
             test >> means[1];
             test >> means[2];
         }
     }
     {
         fs::ifstream is(dir/"blobs");
         string blob;
         CHECK(is) << "cannot open blobs file.";
         while (is >> blob) {
             output_blobs.push_back(net.blob_by_name(blob));
         }
     }
 }
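A pattern that recurs in examples #4, #13, #15, and #16 is materializing a serialized BlobProto (typically a mean file) as a typed Blob via FromProto, then sanity-checking its shape. A condensed sketch of that idiom, assuming Caffe's Blob and BlobProto types (the file name data_mean.binaryproto is hypothetical):

#include "caffe/blob.hpp"
#include "caffe/proto/caffe.pb.h"
#include "caffe/util/io.hpp"

void LoadBlobSketch() {
  // Parse the binary proto from disk, aborting with a message on failure.
  caffe::BlobProto blob_proto;
  CHECK(caffe::ReadProtoFromBinaryFile("data_mean.binaryproto", &blob_proto))
      << "Failed to parse data_mean.binaryproto";
  // Copy the proto's shape and data into a typed blob.
  caffe::Blob<float> mean;
  mean.FromProto(blob_proto);
  // Mean files are expected to hold exactly one image.
  CHECK_EQ(mean.num(), 1);
}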