void BaseInteractionDataLayer<Dtype>::LayerSetUp(const vector<Blob<Dtype>*>& bottom, vector<Blob<Dtype>*>* top) {
  // Sets up the layer: always outputs labels, lets the subclass configure the
  // datum dimensions, then loads (or zero-initializes) the mean blob and
  // seeds the data transformer's RNG.
  output_labels_ = true;
  DataLayerSetUp(bottom, top);
  // The subclasses should setup the datum channels, height and width.
  CHECK_GT(datum_channels_, 0);
  CHECK_GT(datum_height_, 0);
  CHECK_GT(datum_width_, 0);
  if (transform_param_.crop_size() > 0) {
    // Cropping can only shrink the datum, never grow it.
    CHECK_GE(datum_height_, transform_param_.crop_size());
    CHECK_GE(datum_width_, transform_param_.crop_size());
  }
  // Check if we want to have mean.
  if (transform_param_.has_mean_file()) {
    const string& mean_file = transform_param_.mean_file();
    // FIX: added the missing space after "from" so the path is not fused
    // onto the preceding word in the log output.
    LOG(INFO) << "Loading mean file from " << mean_file;
    BlobProto blob_proto;
    ReadProtoFromBinaryFileOrDie(mean_file.c_str(), &blob_proto);
    data_mean_.FromProto(blob_proto);
    // The mean must be at least as large as the datum in every dimension.
    CHECK_GE(data_mean_.num(), 1);
    CHECK_GE(data_mean_.channels(), datum_channels_);
    CHECK_GE(data_mean_.height(), datum_height_);
    CHECK_GE(data_mean_.width(), datum_width_);
  } else {
    // Simply initialize an all-empty mean.
    data_mean_.Reshape(1, datum_channels_, datum_height_, datum_width_);
  }
  mean_ = data_mean_.cpu_data();
  data_transformer_.InitRand();
}
void BaseDataLayer<Ftype, Btype>::LayerSetUp(const vector<Blob*>& bottom, const vector<Blob*>& top) {
  // A single top blob means data only; any additional top carries labels.
  if (top.size() == 1) {
    output_labels_ = false;
  } else {
    output_labels_ = true;
  }
  // Create one transformer per slot and seed its RNG.
  for (auto& transformer : data_transformers_) {
    transformer =
        make_shared<DataTransformer<Ftype>>(transform_param_, this->phase_);
    transformer->InitRand();
  }
  // Subclasses should setup the size of bottom and top.
  DataLayerSetUp(bottom, top);
}
void BaseDataLayer<Dtype>::LayerSetUp(const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) {
  // Labels are produced only when a second top blob is requested.
  output_labels_ = (top.size() != 1);
  // The subclasses should setup the size of bottom and top.
  DataLayerSetUp(bottom, top);
  // Seed the transformer's RNG in case the transform needs randomness.
  data_transformer_.InitRand();
}
void BaseDataLayer<Dtype>::LayerSetUp(const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) {
  // A lone top blob means data only; an extra top carries labels.
  output_labels_ = (top.size() != 1);
  // Fresh transformer for this layer's phase, with its RNG initialized.
  data_transformer_.reset(
      new DataTransformer<Dtype>(transform_param_, this->phase_));
  data_transformer_->InitRand();
  // The subclasses should setup the size of bottom and top.
  DataLayerSetUp(bottom, top);
}
void BaseDataLayer<Dtype>::LayerSetUp(const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) {
  // One top blob -> data only; more than one -> also output labels.
  if (top.size() == 1) {  // vector.size() returns the number of elements in the container.
    output_labels_ = false;
  } else {
    output_labels_ = true;
  }
  data_transformer_.reset(
      new DataTransformer<Dtype>(transform_param_, this->phase_));
  data_transformer_->InitRand();  // Initialize the random number generators if needed by the transformation.
  // The subclasses should setup the size of bottom and top.
  DataLayerSetUp(bottom, top);  // Must be defined by subclasses.
}
void BaseDataLayer<Dtype>::LayerSetUp(const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top) {
  // If there is a single top blob, output only data; with two top blobs,
  // output both data and label.
  if (top.size() == 1) {
    output_labels_ = false;
  } else {
    output_labels_ = true;
  }
  // Initialize the data-transformer object.
  data_transformer_.reset(
      new DataTransformer<Dtype>(transform_param_, this->phase_));
  data_transformer_->InitRand();  // Seed the random number generator.
  // The subclasses should setup the size of bottom and top;
  // this virtual function has no concrete implementation here.
  DataLayerSetUp(bottom, top);
}
void CompactDataLayer<Dtype>::LayerSetUp(const vector<Blob<Dtype>*>& bottom, vector<Blob<Dtype>*>* top) {
  // Sets up the layer, loads the mean file center-cropped to crop_size, and
  // starts the prefetch thread.
  if (top->size() == 1) {
    this->output_labels_ = false;
  } else {
    this->output_labels_ = true;
  }
  DataLayerSetUp(bottom, top);
  // The subclasses should setup the datum channels, height and width.
  CHECK_GT(this->datum_channels_, 0);
  CHECK_GT(this->datum_height_, 0);
  CHECK_GT(this->datum_width_, 0);
  // This layer requires cropping, and the datum must be at least crop-sized.
  CHECK(this->transform_param_.crop_size() > 0);
  CHECK_GE(this->datum_height_, this->transform_param_.crop_size());
  CHECK_GE(this->datum_width_, this->transform_param_.crop_size());
  int crop_size = this->transform_param_.crop_size();
  // Check if we want to have mean.
  if (transform_param_.has_mean_file()) {
    // The mean is stored already center-cropped so it lines up with the
    // cropped data.
    this->data_mean_.Reshape(1, this->datum_channels_, crop_size, crop_size);
    const string& mean_file = this->transform_param_.mean_file();
    LOG(INFO) << "Loading mean file from " << mean_file;  // FIX: added missing space.
    BlobProto blob_proto;
    ReadProtoFromBinaryFileOrDie(mean_file.c_str(), &blob_proto);
    // BUG FIX: the original also called this->data_mean_.FromProto(blob_proto)
    // here. FromProto reshapes the destination blob to the proto's full
    // (uncropped) dimensions, silently undoing the crop-sized Reshape above,
    // so the center-crop copy below wrote crop-indexed data into a blob of
    // the wrong shape. Load the full mean only into the temporary blob and
    // crop from it into data_mean_.
    Blob<Dtype> tmp;
    tmp.FromProto(blob_proto);
    const Dtype* src_data = tmp.cpu_data();
    Dtype* dst_data = this->data_mean_.mutable_cpu_data();
    CHECK_EQ(tmp.num(), 1);
    CHECK_EQ(tmp.channels(), this->datum_channels_);
    CHECK_GE(tmp.height(), crop_size);
    CHECK_GE(tmp.width(), crop_size);
    // Offsets for the center crop of the full-size mean.
    int w_off = (tmp.width() - crop_size) / 2;
    int h_off = (tmp.height() - crop_size) / 2;
    for (int c = 0; c < this->datum_channels_; c++) {
      for (int h = 0; h < crop_size; h++) {
        for (int w = 0; w < crop_size; w++) {
          int src_idx = (c * tmp.height() + h + h_off) * tmp.width() + w + w_off;
          int dst_idx = (c * crop_size + h) * crop_size + w;
          dst_data[dst_idx] = src_data[src_idx];
        }
      }
    }
  } else {
    // Simply initialize an all-empty mean.
    this->data_mean_.Reshape(1, this->datum_channels_, crop_size, crop_size);
  }
  this->mean_ = this->data_mean_.cpu_data();
  this->data_transformer_.InitRand();
  // Touch the prefetch blobs so their host memory is allocated before the
  // prefetch thread starts.
  this->prefetch_data_.mutable_cpu_data();
  if (this->output_labels_) {
    this->prefetch_label_.mutable_cpu_data();
  }
  DLOG(INFO) << "Initializing prefetch";
  this->CreatePrefetchThread();
  DLOG(INFO) << "Prefetch initialized.";
}
void BaseDataLayer<Dtype>::LayerSetUp(const vector<Blob<Dtype>*>& bottom, vector<Blob<Dtype>*>* top) {
  // MPI-aware setup: records this process's rank, lets the subclass configure
  // the datum dimensions, then loads (or zero-initializes) the mean blob.
  MPI_Comm_rank (MPI_COMM_WORLD, &rank);
  // One top blob -> data only; more than one -> also output labels.
  if (top->size() == 1) {
    output_labels_ = false;
  } else {
    output_labels_ = true;
  }
  DataLayerSetUp(bottom, top);
  // The subclasses should setup the datum channels, height and width.
  CHECK_GT(datum_channels_, 0);
  CHECK_GT(datum_height_, 0);
  CHECK_GT(datum_width_, 0);
  if (transform_param_.crop_size() > 0) {
    // Cropping can only shrink the datum, never grow it.
    CHECK_GE(datum_height_, transform_param_.crop_size());
    CHECK_GE(datum_width_, transform_param_.crop_size());
  }
  // check if we want to have mean
  if (transform_param_.has_mean_file()) {
    const string& mean_file = transform_param_.mean_file();
    LOG(INFO) << "Loading mean file from" << mean_file;
    BlobProto blob_proto;
// NOTE(review): the block below is a disabled alternative that reads the mean
// file on rank 0 only and broadcasts the raw bytes to all ranks via MPI,
// parsing the proto from the in-memory buffer. Currently every rank reads the
// file directly (the #else branch).
#if 0
    int fsize=0;
    FILE * fin=NULL;
    if(rank==0){
      fin=fopen(mean_file.c_str(),"rb");
      if(fin==NULL)LOG(FATAL)<<"NO this mean file "<< mean_file;//TODO client
      fseek(fin,0,SEEK_END);
      fsize=ftell(fin);
      rewind(fin);
    }
    MPI_Bcast(&fsize,1,MPI_INT,0,MPI_COMM_WORLD);
    uint8_t *mean_buffer=(uint8_t*)malloc(fsize);
    if(rank==0){
      fread(mean_buffer,fsize,1,fin);
      fclose(fin);
    }
    MPI_Bcast(mean_buffer,fsize,MPI_CHAR,0,MPI_COMM_WORLD);
    CodedInputStream* coded_input = new CodedInputStream(mean_buffer,fsize);
    // Raise protobuf's default message-size limit (1 GiB total, warn at 512 MiB)
    // so large mean blobs can be parsed.
    coded_input->SetTotalBytesLimit(1073741824, 536870912);
    blob_proto.ParseFromCodedStream(coded_input);
    delete coded_input;
    free(mean_buffer);
#else
    ReadProtoFromBinaryFileOrDie(mean_file.c_str(), &blob_proto);
#endif
    data_mean_.FromProto(blob_proto);
    // The mean must be at least as large as the datum in every dimension.
    CHECK_GE(data_mean_.num(), 1);
    CHECK_GE(data_mean_.channels(), datum_channels_);
    CHECK_GE(data_mean_.height(), datum_height_);
    CHECK_GE(data_mean_.width(), datum_width_);
  } else {
    // Simply initialize an all-empty mean.
    data_mean_.Reshape(1, datum_channels_, datum_height_, datum_width_);
  }
  mean_ = data_mean_.cpu_data();
  data_transformer_.InitRand();
}