// Applies Bilateral Total Variation (BTV) regularization to _src, writing the result
// into _dst. `T` is a template parameter whose header is outside this view —
// presumably the pixel type consumed by BtvRegularizationBody<T>; TODO confirm.
//
// btvKernelSize  - full side length of the BTV neighbourhood window
// btvWeights     - precomputed spatial weights for the window (flattened)
void calcBtvRegularizationImpl(InputArray _src, OutputArray _dst, int btvKernelSize, const std::vector<float>& btvWeights)
{
    Mat src = _src.getMat();

    // Zero-initialize the output: the ksize-wide top/bottom border rows are
    // never written by the parallel body below, so they must start at 0.
    _dst.create(src.size(), src.type());
    _dst.setTo(Scalar::all(0));
    Mat dst = _dst.getMat();

    const int ksize = (btvKernelSize - 1) / 2; // half-width of the neighbourhood

    BtvRegularizationBody<T> body;
    body.src = src;
    body.dst = dst;
    body.ksize = ksize;
    // NOTE(review): &v[0] on an empty vector is UB — presumably btvWeights is
    // always non-empty (sized from btvKernelSize by the caller); verify.
    body.btvWeights = &btvWeights[0];

    // Process interior rows in parallel; the border is left untouched (zero).
    parallel_for_(Range(ksize, src.rows - ksize), body);
}
// OpenCL path for the BTV regularization step.
// Builds the "calcBtvRegularization" kernel (specialized for the input's channel
// count), runs it over the whole image, and returns false when the kernel cannot
// be built or launched so the caller can fall back to the CPU implementation.
static bool ocl_calcBtvRegularization(InputArray _src, OutputArray _dst, int btvKernelSize, const UMat & ubtvWeights)
{
    const int cn = _src.channels();

    // Compile the kernel with the channel count baked in as a -D define.
    ocl::Kernel k("calcBtvRegularization", ocl::superres::superres_btvl1_oclsrc,
                  format("-D cn=%d", cn));
    if (k.empty())
        return false;

    UMat frame = _src.getUMat();

    // Allocate a zeroed destination with the same geometry and type as the input.
    _dst.create(frame.size(), frame.type());
    _dst.setTo(Scalar::all(0));
    UMat result = _dst.getUMat();

    const int ksize = (btvKernelSize - 1) / 2; // half-width of the BTV window

    k.args(ocl::KernelArg::ReadOnlyNoSize(frame),
           ocl::KernelArg::WriteOnly(result),
           ksize,
           ocl::KernelArg::PtrReadOnly(ubtvWeights));

    // One work-item per pixel; asynchronous launch (no blocking wait here).
    size_t globalsize[2] = { (size_t)frame.cols, (size_t)frame.rows };
    return k.run(2, globalsize, NULL, false);
}
// Computes BRIEF descriptors for the given keypoints.
// Smoothing is implicit: test_fn_ samples box-filtered intensities from the
// integral image `sum` rather than blurring the image up front.
void BriefDescriptorExtractor::computeImpl(InputArray image, std::vector<KeyPoint>& keypoints, OutputArray descriptors) const
{
    // Construct integral image for fast smoothing (box filter)
    Mat sum;

    Mat grayImage = image.getMat();
    // NOTE(review): anything that is not CV_8U (== CV_8UC1) is converted with
    // COLOR_BGR2GRAY — this presumably expects a 3-channel 8-bit BGR input in
    // that case; a CV_16U or 4-channel image would make cvtColor fail. Verify callers.
    if( image.type() != CV_8U ) cvtColor( image, grayImage, COLOR_BGR2GRAY );

    ///TODO allow the user to pass in a precomputed integral image
    //if(image.type() == CV_32S)
    //  sum = image;
    //else
    integral( grayImage, sum, CV_32S);

    // Remove keypoints very close to the border: the BRIEF test pattern
    // (PATCH_SIZE) plus the smoothing kernel (KERNEL_SIZE) must fit inside the image.
    KeyPointsFilter::runByImageBorder(keypoints, image.size(), PATCH_SIZE/2 + KERNEL_SIZE/2);

    // One zeroed row of bytes_ bytes per surviving keypoint.
    descriptors.create((int)keypoints.size(), bytes_, CV_8U);
    descriptors.setTo(Scalar::all(0));

    // test_fn_ fills each descriptor row from pairwise intensity tests on `sum`.
    test_fn_(sum, keypoints, descriptors);
}
// Computes FREAK binary descriptors for `keypoints` over `_image`.
// srcMatType / iiMatType are template parameters declared outside this view —
// presumably the image sample type and the integral-image accumulator type; TODO confirm.
//
// Side effects: keypoints that do not fit inside the image (given their pattern
// scale) are ERASED from `keypoints`, and each surviving keypoint's `angle` is
// overwritten with the estimated (or zeroed) orientation.
void FREAK::computeDescriptors( InputArray _image, std::vector<KeyPoint>& keypoints, OutputArray _descriptors ) const
{
    Mat image = _image.getMat();

    // Integral image lets meanIntensity() average a box of pixels in O(1).
    Mat imgIntegral;
    integral(image, imgIntegral, DataType<iiMatType>::type);

    std::vector<int> kpScaleIdx(keypoints.size()); // pattern scale index for each keypoint
    const std::vector<int>::iterator ScaleIdxBegin = kpScaleIdx.begin(); // kept for vector::erase below
    const std::vector<cv::KeyPoint>::iterator kpBegin = keypoints.begin(); // kept for vector::erase below
    const float sizeCst = static_cast<float>(FREAK_NB_SCALES/(FREAK_LOG2* nOctaves));
    srcMatType pointsValue[FREAK_NB_POINTS];
    int thetaIdx = 0;
    int direction0;
    int direction1;

    // Compute the scale index corresponding to each keypoint size and remove
    // keypoints too close to the border. Both loops iterate BACKWARDS (the
    // `k--` in the loop condition) so erase() never invalidates a pending index.
    if( scaleNormalized )
    {
        for( size_t k = keypoints.size(); k--; ) // condition decrements k; body sees size-1 .. 0
        {
            // Map keypoint size to a log-scale index, clamped to [0, FREAK_NB_SCALES-1].
            kpScaleIdx[k] = std::max( (int)(std::log(keypoints[k].size/FREAK_SMALLEST_KP_SIZE)*sizeCst+0.5) ,0);
            if( kpScaleIdx[k] >= FREAK_NB_SCALES )
                kpScaleIdx[k] = FREAK_NB_SCALES-1;

            // Check if the description at this specific position and scale fits inside the image.
            if( keypoints[k].pt.x <= patternSizes[kpScaleIdx[k]] ||
                keypoints[k].pt.y <= patternSizes[kpScaleIdx[k]] ||
                keypoints[k].pt.x >= image.cols-patternSizes[kpScaleIdx[k]] ||
                keypoints[k].pt.y >= image.rows-patternSizes[kpScaleIdx[k]]
               )
            {
                keypoints.erase(kpBegin+k);
                kpScaleIdx.erase(ScaleIdxBegin+k);
            }
        }
    }
    else
    {
        // Scale not normalized: every keypoint shares one scale index, equivalent
        // to the formula above with keypoints[k].size = 3*SMALLEST_KP_SIZE
        // (1.0986122886681 is log(3)).
        const int scIdx = std::max( (int)(1.0986122886681*sizeCst+0.5) ,0);
        for( size_t k = keypoints.size(); k--; )
        {
            kpScaleIdx[k] = scIdx;
            if( kpScaleIdx[k] >= FREAK_NB_SCALES )
            {
                kpScaleIdx[k] = FREAK_NB_SCALES-1;
            }
            if( keypoints[k].pt.x <= patternSizes[kpScaleIdx[k]] ||
                keypoints[k].pt.y <= patternSizes[kpScaleIdx[k]] ||
                keypoints[k].pt.x >= image.cols-patternSizes[kpScaleIdx[k]] ||
                keypoints[k].pt.y >= image.rows-patternSizes[kpScaleIdx[k]]
               )
            {
                keypoints.erase(kpBegin+k);
                kpScaleIdx.erase(ScaleIdxBegin+k);
            }
        }
    }

    // Allocate descriptor memory, estimate orientations, extract descriptors.
    if( !extAll )
    {
        // Extract the best comparisons only: FREAK_NB_PAIRS bits, 8 per byte.
        _descriptors.create((int)keypoints.size(), FREAK_NB_PAIRS/8, CV_8U);
        _descriptors.setTo(Scalar::all(0));
        Mat descriptors = _descriptors.getMat();

        // ptr starts at the LAST descriptor row; the loop walks keypoints (and
        // rows) backwards. NOTE(review): if every keypoint was erased above,
        // keypoints.size()-1 underflows — presumably ptr is then never
        // dereferenced since the loop body doesn't run; verify.
        void *ptr = descriptors.data+(keypoints.size()-1)*descriptors.step[0];
        for( size_t k = keypoints.size(); k--; )
        {
            // Estimate orientation (gradient).
            if( !orientationNormalized )
            {
                thetaIdx = 0; // assign 0 degrees to all keypoints
                keypoints[k].angle = 0.0;
            }
            else
            {
                // Get the points' intensity values in the un-rotated pattern.
                for( int i = FREAK_NB_POINTS; i--; )
                {
                    pointsValue[i] = meanIntensity<srcMatType, iiMatType>(image, imgIntegral, keypoints[k].pt.x, keypoints[k].pt.y, kpScaleIdx[k], 0, i);
                }
                direction0 = 0;
                direction1 = 0;
                for( int m = 45; m--; ) // iterate through the 45 orientation pairs
                {
                    // Weighted intensity difference accumulated into a gradient estimate.
                    const int delta = (pointsValue[ orientationPairs[m].i ]-pointsValue[ orientationPairs[m].j ]);
                    direction0 += delta*(orientationPairs[m].weight_dx)/2048;
                    direction1 += delta*(orientationPairs[m].weight_dy)/2048;
                }
                // Orientation in degrees, then quantized into FREAK_NB_ORIENTATION bins.
                keypoints[k].angle = static_cast<float>(atan2((float)direction1,(float)direction0)*(180.0/CV_PI));
                thetaIdx = int(FREAK_NB_ORIENTATION*keypoints[k].angle*(1/360.0)+0.5);
                if( thetaIdx < 0 )
                    thetaIdx += FREAK_NB_ORIENTATION;
                if( thetaIdx >= FREAK_NB_ORIENTATION )
                    thetaIdx -= FREAK_NB_ORIENTATION;
            }

            // Extract descriptor at the computed orientation.
            for( int i = FREAK_NB_POINTS; i--; )
            {
                pointsValue[i] = meanIntensity<srcMatType, iiMatType>(image, imgIntegral, keypoints[k].pt.x, keypoints[k].pt.y, kpScaleIdx[k], thetaIdx, i);
            }

            // extractDescriptor presumably writes one row and moves ptr to the
            // previous row (it takes &ptr) — TODO confirm against its definition.
            extractDescriptor<srcMatType>(pointsValue, &ptr);
        }
    }
    else // extract all possible comparisons for selection
    {
        // 128 bytes per row = 1024 bits: one bit per unordered point pair.
        _descriptors.create((int)keypoints.size(), 128, CV_8U);
        _descriptors.setTo(Scalar::all(0));
        Mat descriptors = _descriptors.getMat();

        // Start at the last row and walk backwards, mirroring the loop below.
        std::bitset<1024>* ptr = (std::bitset<1024>*) (descriptors.data+(keypoints.size()-1)*descriptors.step[0]);
        for( size_t k = keypoints.size(); k--; )
        {
            // Estimate orientation (gradient) — same scheme as the !extAll branch.
            if( !orientationNormalized )
            {
                thetaIdx = 0; // assign 0 degrees to all keypoints
                keypoints[k].angle = 0.0;
            }
            else
            {
                // Get the points' intensity values in the un-rotated pattern.
                for( int i = FREAK_NB_POINTS;i--; )
                    pointsValue[i] = meanIntensity<srcMatType, iiMatType>(image, imgIntegral, keypoints[k].pt.x,keypoints[k].pt.y, kpScaleIdx[k], 0, i);

                direction0 = 0;
                direction1 = 0;
                for( int m = 45; m--; ) // iterate through the orientation pairs
                {
                    const int delta = (pointsValue[ orientationPairs[m].i ]-pointsValue[ orientationPairs[m].j ]);
                    direction0 += delta*(orientationPairs[m].weight_dx)/2048;
                    direction1 += delta*(orientationPairs[m].weight_dy)/2048;
                }
                keypoints[k].angle = static_cast<float>(atan2((float)direction1,(float)direction0)*(180.0/CV_PI)); // estimate orientation
                thetaIdx = int(FREAK_NB_ORIENTATION*keypoints[k].angle*(1/360.0)+0.5);
                if( thetaIdx < 0 )
                    thetaIdx += FREAK_NB_ORIENTATION;
                if( thetaIdx >= FREAK_NB_ORIENTATION )
                    thetaIdx -= FREAK_NB_ORIENTATION;
            }

            // Get the points' intensity values in the rotated pattern.
            for( int i = FREAK_NB_POINTS; i--; )
            {
                pointsValue[i] = meanIntensity<srcMatType, iiMatType>(image, imgIntegral, keypoints[k].pt.x, keypoints[k].pt.y, kpScaleIdx[k], thetaIdx, i);
            }

            // Generate all point pairs; record each comparison as one bit.
            int cnt(0);
            for( int i = 1; i < FREAK_NB_POINTS; ++i )
            {
                for( int j = 0; j < i; ++j )
                {
                    ptr->set(cnt, pointsValue[i] >= pointsValue[j] );
                    ++cnt;
                }
            }
            --ptr; // move to the previous descriptor row
        }
    }
}