#include <algorithm>  // std::min, std::max
#include <cmath>      // std::pow

// Brute-force reference implementation of across-channel LRN,
// used to check the output of the layer's Forward pass.
template <typename Dtype>
void LRNLayerTest<Dtype>::ReferenceLRNForward(
    const Blob<Dtype>& blob_bottom, const LayerParameter& layer_param,
    Blob<Dtype>* blob_top) {
  blob_top->Reshape(blob_bottom.num(), blob_bottom.channels(),
      blob_bottom.height(), blob_bottom.width());
  Dtype* top_data = blob_top->mutable_cpu_data();
  Dtype alpha = layer_param.alpha();
  Dtype beta = layer_param.beta();
  int size = layer_param.local_size();
  for (int n = 0; n < blob_bottom.num(); ++n) {
    for (int c = 0; c < blob_bottom.channels(); ++c) {
      for (int h = 0; h < blob_bottom.height(); ++h) {
        for (int w = 0; w < blob_bottom.width(); ++w) {
          // Clamp the window of `size` channels centered on c to the
          // valid channel range [0, channels).
          int c_start = c - (size - 1) / 2;
          int c_end = std::min(c_start + size, blob_bottom.channels());
          c_start = std::max(c_start, 0);
          // scale = 1 + (alpha / size) * sum of squared activations
          // over the local channel window.
          Dtype scale = 1.;
          for (int i = c_start; i < c_end; ++i) {
            Dtype value = blob_bottom.data_at(n, i, h, w);
            scale += value * value * alpha / size;
          }
          // LRN output: x / scale^beta.
          *(top_data + blob_top->offset(n, c, h, w)) =
              blob_bottom.data_at(n, c, h, w) / std::pow(scale, beta);
        }
      }
    }
  }
}
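
// A minimal sketch of how this reference might be exercised in a
// gtest-style test body. The fixture members (blob_bottom_, blob_top_,
// the *_vec_ vectors, epsilon_), the LRNLayer type, and the old
// SetUp/Forward signatures are assumptions for illustration here,
// not part of the function above.
TYPED_TEST(LRNLayerTest, TestForwardAcrossChannels) {
  LayerParameter layer_param;
  LRNLayer<TypeParam> layer(layer_param);
  layer.SetUp(this->blob_bottom_vec_, &(this->blob_top_vec_));
  layer.Forward(this->blob_bottom_vec_, &(this->blob_top_vec_));
  // Recompute the expected output with the brute-force reference.
  Blob<TypeParam> top_reference;
  this->ReferenceLRNForward(*(this->blob_bottom_), layer_param,
      &top_reference);
  // Compare the layer's output against the reference element by element.
  for (int i = 0; i < this->blob_bottom_->count(); ++i) {
    EXPECT_NEAR(this->blob_top_->cpu_data()[i],
        top_reference.cpu_data()[i], this->epsilon_);
  }
}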