// Packs every level of the HOG pyramid into a small number of fixed-size FFT
// "planes" (bottom-left-fill bin packing), then transforms each plane to the
// Fourier domain in place so all levels can later be convolved at once.
Patchwork::Patchwork(const HOGPyramid & pyramid) : padx_(pyramid.padx()),
pady_(pyramid.pady()), interval_(pyramid.interval())
{
	// Remove the padding from the bottom/right sides since convolutions with Fourier wrap around
	const int nbLevels = pyramid.levels().size();

	// One rectangle per level: .first is the level's (cropped) size, .second
	// will be filled by BLF with the index of the plane the level landed in.
	rectangles_.resize(nbLevels);

	for (int i = 0; i < nbLevels; ++i) {
		rectangles_[i].first.setWidth(pyramid.levels()[i].cols() - padx_);
		rectangles_[i].first.setHeight(pyramid.levels()[i].rows() - pady_);
	}

	// Build the patchwork planes (BLF also positions each rectangle in a plane)
	const int nbPlanes = BLF(rectangles_);

	// Constructs an empty patchwork in case of error
	if (nbPlanes <= 0)
		return;

	planes_.resize(nbPlanes);

	for (int i = 0; i < nbPlanes; ++i) {
		// Complex-valued plane of HalfCols_ columns; viewed below as a real
		// plane of HalfCols_ * 2 columns (in-place r2c FFT storage layout).
		planes_[i] = Plane::Constant(MaxRows_, HalfCols_, Cell::Zero());

		Map<HOGPyramid::Level, Aligned> plane(reinterpret_cast<HOGPyramid::Cell *>(planes_[i].data()), MaxRows_, HalfCols_ * 2);

		// Set the last feature to 1
		// NOTE(review): only the first MaxCols_ of the HalfCols_ * 2 real
		// columns are touched — presumably the remainder is r2c padding.
		for (int y = 0; y < MaxRows_; ++y)
			for (int x = 0; x < MaxCols_; ++x)
				plane(y, x)(HOGPyramid::NbFeatures - 1) = 1.0f;
	}

	// Recopy the pyramid levels into the planes
	for (int i = 0; i < nbLevels; ++i) {
		// rectangles_[i].second is the plane chosen by BLF for level i;
		// rectangles_[i].first.x()/y() is the level's position inside it.
		Map<HOGPyramid::Level, Aligned> plane(reinterpret_cast<HOGPyramid::Cell *>(planes_[rectangles_[i].second].data()), MaxRows_, HalfCols_ * 2);

		plane.block(rectangles_[i].first.y(), rectangles_[i].first.x(), rectangles_[i].first.height(), rectangles_[i].first.width()) = pyramid.levels()[i].topLeftCorner(rectangles_[i].first.height(), rectangles_[i].first.width());
	}

	// Transform the planes (in place: input and output pointers alias)
	int i;
#pragma omp parallel for private(i)
	for (i = 0; i < nbPlanes; ++i)
#ifndef FFLD_HOGPYRAMID_DOUBLE
		fftwf_execute_dft_r2c(Forwards_, reinterpret_cast<float *>(planes_[i].data()->data()), reinterpret_cast<fftwf_complex *>(planes_[i].data()->data()));
#else
		fftw_execute_dft_r2c(Forwards_, reinterpret_cast<double *>(planes_[i].data()->data()), reinterpret_cast<fftw_complex *>(planes_[i].data()->data()));
#endif
}
void Mixture::convolve(const HOGPyramid & pyramid, vector<HOGPyramid::Matrix> & scores, vector<Indices> & argmaxes, vector<vector<vector<Model::Positions> > > * positions) const { if(empty() || pyramid.empty()) { scores.clear(); argmaxes.clear(); if(positions) positions->clear(); return; } const int nbModels = models_.size(); const int nbLevels = pyramid.levels().size(); // Convolve with all the models vector<vector<HOGPyramid::Matrix> > tmp(nbModels); convolve(pyramid, tmp, positions); // In case of error if(tmp.empty()) { scores.clear(); argmaxes.clear(); if(positions) positions->clear(); return; } // Resize the scores and argmaxes scores.resize(nbLevels); argmaxes.resize(nbLevels); int i; #pragma omp parallel for private(i) for(i = 0; i < nbLevels; ++i) { scores[i].resize(pyramid.levels()[i].rows() - maxSize().first + 1, pyramid.levels()[i].cols() - maxSize().second + 1); argmaxes[i].resize(scores[i].rows(), scores[i].cols()); for(int y = 0; y < scores[i].rows(); ++y) { for(int x = 0; x < scores[i].cols(); ++x) { int argmax = 0; for(int j = 1; j < nbModels; ++j) if(tmp[j][i](y, x) > tmp[argmax][i](y, x)) argmax = j; scores[i](y, x) = tmp[argmax][i](y, x); argmaxes[i](y, x) = argmax; } } } }
void printHogSizes(HOGPyramid pyramid){ int nlevels = pyramid.levels().size(); for(int level = 0; level < nlevels; level++){ //const float* raw_hog = pyramid.levels()[level].data()->data(); int width = pyramid.levels()[level].cols(); int height = pyramid.levels()[level].rows(); int depth = pyramid.NbFeatures; printf("level %d: width=%d, height=%d, depth=%d \n", level, width, height, depth); } }
// nRows = 32 // nCols = width*height void writePyraToCsv(HOGPyramid pyramid){ int nlevels = pyramid.levels().size(); for(int level = 0; level < nlevels; level++){ //printf("writing to CSV: level %d \n", level); const float* raw_hog = pyramid.levels()[level].data()->data(); int width = pyramid.levels()[level].cols(); int height = pyramid.levels()[level].rows(); int depth = pyramid.NbFeatures; ostringstream fname; fname << "../piggyHOG_results/level" << level << ".csv"; //TODO: get orig img name into the CSV name. int nCols = depth; //one descriptor per row int nRows = width*height; //TODO: also write (depth, width, height) -- in some order -- to the top of the CSV file. writeCsv_2dFloat(raw_hog, nRows, nCols, fname.str()); } }