template <typename Dtype>
void hdf5_load_nd_dataset_helper(
    hid_t file_id, const char* dataset_name_, int min_dim, int max_dim,
    Blob<Dtype>* blob) {
  // Verify that the dataset exists.
  // REVIEW ktran: this check doesn't work for nested dataset names.
  ////CHECK(H5LTfind_dataset(file_id, dataset_name_))
  ////    << "Failed to find HDF5 dataset " << dataset_name_;

  // Verify that the number of dimensions is in the accepted range.
  herr_t status;
  int ndims;
  status = H5LTget_dataset_ndims(file_id, dataset_name_, &ndims);
  CHECK_GE(status, 0) << "Failed to get dataset ndims for " << dataset_name_;
  CHECK_GE(ndims, min_dim);
  CHECK_LE(ndims, max_dim);

  // Verify that the data format is what we expect: float or double.
  std::vector<hsize_t> dims(ndims);
  H5T_class_t class_;
  status = H5LTget_dataset_info(
      file_id, dataset_name_, dims.data(), &class_, NULL);
  CHECK_GE(status, 0) << "Failed to get dataset info for " << dataset_name_;

  switch (class_) {
  case H5T_FLOAT: {
    LOG_FIRST_N(INFO, 1) << "Datatype class: H5T_FLOAT";
    break;
  }
  case H5T_INTEGER: {
    LOG_FIRST_N(INFO, 1) << "Datatype class: H5T_INTEGER";
    break;
  }
  case H5T_TIME:
    LOG(FATAL) << "Unsupported datatype class: H5T_TIME";
  case H5T_STRING:
    LOG(FATAL) << "Unsupported datatype class: H5T_STRING";
  case H5T_BITFIELD:
    LOG(FATAL) << "Unsupported datatype class: H5T_BITFIELD";
  case H5T_OPAQUE:
    LOG(FATAL) << "Unsupported datatype class: H5T_OPAQUE";
  case H5T_COMPOUND:
    LOG(FATAL) << "Unsupported datatype class: H5T_COMPOUND";
  case H5T_REFERENCE:
    LOG(FATAL) << "Unsupported datatype class: H5T_REFERENCE";
  case H5T_ENUM:
    LOG(FATAL) << "Unsupported datatype class: H5T_ENUM";
  case H5T_VLEN:
    LOG(FATAL) << "Unsupported datatype class: H5T_VLEN";
  case H5T_ARRAY:
    LOG(FATAL) << "Unsupported datatype class: H5T_ARRAY";
  default:
    LOG(FATAL) << "Datatype class unknown";
  }

  vector<int> blob_dims(dims.size());
  for (int i = 0; i < dims.size(); ++i) {
    blob_dims[i] = dims[i];
  }
  blob->Reshape(blob_dims);
}
template <typename Dtype>
void hdf5_load_nd_dataset_helper(
    hid_t file_id, const char* dataset_name_, int min_dim, int max_dim,
    Blob<Dtype>* blob) {
  // Verify that the dataset exists.
  CHECK(H5LTfind_dataset(file_id, dataset_name_))
      << "Failed to find HDF5 dataset " << dataset_name_;

  // Verify that the number of dimensions is in the accepted range.
  herr_t status;
  int ndims;
  status = H5LTget_dataset_ndims(file_id, dataset_name_, &ndims);
  CHECK_GE(status, 0) << "Failed to get dataset ndims for " << dataset_name_;
  CHECK_GE(ndims, min_dim);
  CHECK_LE(ndims, max_dim);

  // Verify that the data format is what we expect: float or double.
  std::vector<hsize_t> dims(ndims);
  H5T_class_t class_;
  status = H5LTget_dataset_info(
      file_id, dataset_name_, dims.data(), &class_, NULL);
  CHECK_GE(status, 0) << "Failed to get dataset info for " << dataset_name_;

  // Blocks around the "LOG" macros avoid "initialization of occurrences_##
  // is skipped by case label" errors on MSVC.
  switch (class_) {
  case H5T_FLOAT: {
    LOG_FIRST_N(INFO, 1) << "Datatype class: H5T_FLOAT";
    break;
  }
  case H5T_INTEGER: {
    LOG_FIRST_N(INFO, 1) << "Datatype class: H5T_INTEGER";
    break;
  }
  case H5T_TIME: {
    LOG(FATAL) << "Unsupported datatype class: H5T_TIME";
  }
  case H5T_STRING: {
    LOG(FATAL) << "Unsupported datatype class: H5T_STRING";
  }
  case H5T_BITFIELD: {
    LOG(FATAL) << "Unsupported datatype class: H5T_BITFIELD";
  }
  case H5T_OPAQUE: {
    LOG(FATAL) << "Unsupported datatype class: H5T_OPAQUE";
  }
  case H5T_COMPOUND: {
    LOG(FATAL) << "Unsupported datatype class: H5T_COMPOUND";
  }
  case H5T_REFERENCE: {
    LOG(FATAL) << "Unsupported datatype class: H5T_REFERENCE";
  }
  case H5T_ENUM: {
    LOG(FATAL) << "Unsupported datatype class: H5T_ENUM";
  }
  case H5T_VLEN: {
    LOG(FATAL) << "Unsupported datatype class: H5T_VLEN";
  }
  case H5T_ARRAY: {
    LOG(FATAL) << "Unsupported datatype class: H5T_ARRAY";
  }
  default: {
    LOG(FATAL) << "Datatype class unknown";
  }
  }

  vector<int> blob_dims(dims.size());
  for (int i = 0; i < dims.size(); ++i) {
    blob_dims[i] = dims[i];
  }
  blob->Reshape(blob_dims);
}
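Both helpers above rely on LOG_FIRST_N so the datatype-class message appears only for the first dataset loaded, not once per load. A minimal standalone sketch of that throttling behavior (assumes only that glog is available; the message text is borrowed from the helpers above):

#include <glog/logging.h>

int main(int argc, char* argv[]) {
  google::InitGoogleLogging(argv[0]);
  FLAGS_logtostderr = true;
  for (int i = 0; i < 5; ++i) {
    // Emitted exactly once across all five iterations; the remaining
    // four occurrences are silently dropped, just like the datatype
    // message when many HDF5 datasets are loaded.
    LOG_FIRST_N(INFO, 1) << "Datatype class: H5T_FLOAT";
  }
  return 0;
}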
void test_my_glog() {
  std::cout << "----test my glog begin--------------" << std::endl;
  // google::InitGoogleLogging(argv[0]);
  // google::ParseCommandLineFlags(&argc, &argv, true);
  // FLAGS_log_dir = "./log";
  // FLAGS_logtostderr = 1;
  // FLAGS_colorlogtostderr = true;

  LOG(INFO) << "info: hello world!";
  LOG(WARNING) << "warning: hello world!";
  LOG(ERROR) << "error: hello world!";
  // LOG(FATAL) << "fatal: hello world!";

  VLOG(0) << "vlog0: hello world!";
  VLOG(1) << "vlog1: hello world!";
  VLOG(2) << "vlog2: hello world!";
  VLOG(3) << "vlog3: hello world!";
  DLOG(INFO) << "DLOG: hello world!";

  for (int i = 1; i <= 100; i++) {
    LOG_IF(INFO, i == 100) << "LOG_IF(INFO,i==100) google::COUNTER="
        << google::COUNTER << " i=" << i;
    LOG_EVERY_N(INFO, 10) << "LOG_EVERY_N(INFO,10) google::COUNTER="
        << google::COUNTER << " i=" << i;
    LOG_IF_EVERY_N(WARNING, (i > 50), 10)
        << "LOG_IF_EVERY_N(WARNING,(i>50),10) google::COUNTER="
        << google::COUNTER << " i=" << i;
    LOG_FIRST_N(ERROR, 5) << "LOG_FIRST_N(ERROR,5) google::COUNTER="
        << google::COUNTER << " i=" << i;
  }

  // CHECK_NE(2, 2) << ": The world must be ending!";
  // google::ShutDownCommandLineFlags();
  // google::ShutdownGoogleLogging();
  std::cout << "---------------------test my glog end---------------------" << std::endl;
}
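The commented-out lines hint at the setup this demo expects. A hypothetical driver (the wiring is assumed, not part of the example): note that the VLOG(n) lines stay silent unless the verbosity threshold is raised, e.g. via FLAGS_v or the --v command-line flag.

#include <glog/logging.h>

void test_my_glog();  // the demo above

int main(int argc, char* argv[]) {
  google::InitGoogleLogging(argv[0]);
  FLAGS_logtostderr = true;  // log to stderr instead of files
  FLAGS_v = 2;               // enables VLOG(0)..VLOG(2); VLOG(3) stays silent
  test_my_glog();
  google::ShutdownGoogleLogging();
  return 0;
}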
virtual real calcLearningRate(int64_t numSamplesProcessed, int64_t pass) {
  if (numSamplesProcessed > a_) {
    LOG_FIRST_N(WARNING, 1)
        << "Using caffe_poly learning rate schedule, "
        << "learning rate hits ZERO when "
        << "numSamplesProcessed > config.learning_rate_decay_b(), "
        << "training is over and you can stop it. "
        << "See common/LearningRateScheduler.cpp for more info.";
    return 0;
  } else {
    return learningRate_ * pow(1.0 - numSamplesProcessed / a_, b_);
  }
}
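The schedule computed above is lr(t) = lr0 * (1 - t/a)^b while t <= a, and 0 once t exceeds a, at which point LOG_FIRST_N emits its warning a single time. A standalone sketch of the same formula (free function and parameter names are assumptions, not the PaddlePaddle API):

#include <cmath>
#include <cstdio>

// caffe_poly decay: decreases from lr0 at t=0 to zero at t=a.
double caffe_poly_lr(double lr0, double a, double b, long long t) {
  return t > a ? 0.0 : lr0 * std::pow(1.0 - t / a, b);
}

int main() {
  const double lr0 = 0.1, a = 1e6, b = 0.5;
  for (long long t : {0LL, 250000LL, 500000LL, 1000000LL, 1200000LL}) {
    std::printf("t=%lld lr=%g\n", t, caffe_poly_lr(lr0, a, b, t));
  }
  return 0;  // lr falls from 0.1 to 0; stays 0 past t=a
}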
// v and w are the two endpoints of the line segment; p is the test point.
//float minimum_distance(cv::Mat v, cv::Mat w, cv::Mat p) {
float minimum_distance(cv::Point2f v, cv::Point2f w, cv::Point2f p,
                       cv::Point2f& closest) {
  // Return minimum distance between line segment vw and point p.
  float l2 = cv::norm(w - v);
  l2 *= l2;  // i.e. |w-v|^2 - avoid a sqrt
  //const float l2 = cv::norm(cv::Mat(v - w), cv::NORM_L1);
  if (l2 == 0.0) {
    closest = v;
    return cv::norm(p - v);  // v == w case
  }
  // Consider the line extending the segment, parameterized as v + t (w - v).
  // We find the projection of point p onto the line.
  // It falls where t = [(p-v) . (w-v)] / |w-v|^2
  const float t = ((p - v).dot(w - v)) / l2;
  if (t < 0.0) {
    closest = v;
    return cv::norm(p - v);  // Beyond the 'v' end of the segment
  } else if (t > 1.0) {
    closest = w;
    return cv::norm(p - w);  // Beyond the 'w' end of the segment
  }
  closest = v + t * (w - v);  // Projection falls on the segment
  const float dist = cv::norm(p - closest);
  if (VLOG_IS_ON(3)) {
    LOG_FIRST_N(INFO, 10)
        << v.x << " " << v.y << ", " << w.x << " " << w.y << ", "
        << p.x << " " << p.y << ", " << closest.x << " " << closest.y << ", "
        << l2 << " " << t << " " << dist;
  }
  return dist;
}
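A hypothetical usage example (assumes minimum_distance above is in scope): the point (2, 3) projects onto the interior of the segment (0,0)-(4,0), so the closest point is (2, 0) and the distance is 3.

#include <opencv2/core.hpp>
#include <iostream>

int main() {
  cv::Point2f closest;
  float d = minimum_distance(cv::Point2f(0, 0), cv::Point2f(4, 0),
                             cv::Point2f(2, 3), closest);
  std::cout << "dist=" << d << " closest=(" << closest.x << ", "
            << closest.y << ")" << std::endl;  // dist=3 closest=(2, 0)
  return 0;
}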
RddResultCode ReduceByKeyTransformer::transform(const ActorMessagePtr& msg,
    const vector<BaseRddPartition*>& input, RddPartition* output) {
  if (input.size() != 1) {
    LOG(ERROR) << "reduce value transformer needs one input RDD";
    return RRC_INVALID_RDD_INPUT;
  }
  if (input[0]->empty()) {
    return RRC_SUCCESS;
  }
  if (!input[0]->getKeyTemplate().get()) {
    LOG(ERROR) << "KEY template is empty";
    return RRC_INVALID_KEY;
  }
  if (!input[0]->getValueTemplate().get()) {
    LOG(ERROR) << "VALUE template is empty";
    return RRC_INVALID_VALUE;
  }

  vector<ReduceOperation*> operations;
  RddResultCode code = parseOperations(msg, input[0], output, operations);
  if (RRC_SUCCESS != code) {
    LOG(ERROR) << "parse reduce operation error, code = " << code;
    return code;
  }

  auto filterExpr = output->getFilterExpression(0);
  ExpressionContext ctx;
  input[0]->foreachGroup([this, input, output, &operations, filterExpr, &code, &ctx]
      (const PbMessagePtr& key, const vector<PbMessagePtr>& values) {
    if (!key.get()) {
      // Key is empty; record the error and skip this group. (The original
      // returned RRC_INVALID_KEY from this void callback, which falls off
      // the end on every other path.)
      LOG(ERROR) << "reduce value transformer error, key is empty";
      code = RRC_INVALID_KEY;
      return;
    }
    for (auto value = values.begin(); value != values.end(); ++value) {  // loop values
      if (!(*value).get()) {
        LOG_FIRST_N(ERROR, 10) << "value is null, ignored";
        continue;
      }
      try {
        if (filterExpr) {
          ctx.setKeyValue(&key, &(*value));
          PbVariant var = filterExpr->evaluate(&ctx);
          if (!(bool) var) {
            continue;
          }
        }
      } catch (RddResultCode& err) {
        LOG(ERROR) << "evaluate error, caused by code " << err;
        code = err;
      }
      for (auto op = operations.begin(); op != operations.end(); ++op) {
        (*op)->reduce(key, *value);
      }
    }  // end loop values

    bool reuse_key = input[0]->getKeyTemplate()->GetDescriptor()->full_name()
        == output->getKeyTemplate()->GetDescriptor()->full_name();
    handleReduceResult(operations, key, output, reuse_key);
  });

  return code;
}
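Stripped of the RDD and protobuf machinery, the core of this transformer is a fold of all values sharing a key into one result per key, with LOG_FIRST_N capping the noise from malformed records. A framework-free sketch of that reduce-by-key pattern (std::map stands in for the grouped partition, += for the ReduceOperation):

#include <iostream>
#include <map>
#include <string>
#include <utility>
#include <vector>

int main() {
  // Input records: (key, value) pairs, unsorted.
  std::vector<std::pair<std::string, int>> records =
      {{"a", 1}, {"b", 2}, {"a", 3}, {"b", 4}};

  // Reduce step: values sharing a key are folded into one result per key.
  std::map<std::string, int> reduced;
  for (const auto& kv : records) {
    reduced[kv.first] += kv.second;
  }

  for (const auto& kv : reduced) {
    std::cout << kv.first << " -> " << kv.second << "\n";  // a -> 4, b -> 6
  }
  return 0;
}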