/// Downloads the GLSL background model from GPU memory and converts it into a
/// per-pixel average LBSP descriptor image (CV_16UC(nChannels)) in 'oBGDescImg'.
/// Requires a fully initialized GL model; asserts that LBSP descriptors are 16-bit.
void BackgroundSubtractorLOBSTER_<ParallelUtils::eGLSL>::getBackgroundDescriptorsImage(cv::OutputArray oBGDescImg) const {
    lvDbgExceptionWatch;
    CV_Assert(m_bInitialized);
    glAssert(m_bGLInitialized && !m_vnBGModelData.empty());
    CV_Assert(LBSP::DESC_SIZE==2); // output packing below assumes 2-byte descriptors
    oBGDescImg.create(m_oFrameSize,CV_16UC(int(m_nImgChannels)));
    cv::Mat oOutputImg = oBGDescImg.getMatRef();
    // pull the whole background model SSBO back into the (mutable) host-side staging vector
    glBindBuffer(GL_SHADER_STORAGE_BUFFER,getSSBOId(BackgroundSubtractorLOBSTER_::eLOBSTERStorageBuffer_BGModelBinding));
    glGetBufferSubData(GL_SHADER_STORAGE_BUFFER,0,m_nBGModelSize*sizeof(uint),(void*)m_vnBGModelData.data());
    glErrorCheck;
    for(size_t nRowIdx=0; nRowIdx<(size_t)m_oFrameSize.height; ++nRowIdx) {
        const size_t nModelRowOffset = nRowIdx*m_nRowStepSize;
        const size_t nImgRowOffset = nRowIdx*oOutputImg.step.p[0];
        for(size_t nColIdx=0; nColIdx<(size_t)m_oFrameSize.width; ++nColIdx) {
            const size_t nModelColOffset = nColIdx*m_nColStepSize+nModelRowOffset;
            const size_t nImgColOffset = nColIdx*oOutputImg.step.p[1]+nImgRowOffset;
            // accumulate per-channel descriptor values over all background samples
            // (float accumulator avoids overflow; 4 slots covers up to RGBA)
            std::array<float,4> afCurrPxSum = {0.0f,0.0f,0.0f,0.0f};
            for(size_t nSampleIdx=0; nSampleIdx<m_nBGSamples; ++nSampleIdx) {
                const size_t nModelPxOffset_color = nSampleIdx*m_nSampleStepSize+nModelColOffset;
                // descriptor samples are stored after all color samples in the model buffer
                const size_t nModelPxOffset_desc = nModelPxOffset_color+(m_nBGSamples*m_nSampleStepSize);
                for(size_t nChannelIdx=0; nChannelIdx<m_nImgChannels; ++nChannelIdx) {
                    const size_t nModelTotOffset = nChannelIdx+nModelPxOffset_desc;
                    afCurrPxSum[nChannelIdx] += m_vnBGModelData[nModelTotOffset];
                }
            }
            for(size_t nChannelIdx=0; nChannelIdx<m_nImgChannels; ++nChannelIdx) {
                // swap R/B for 3-channel output (model presumably stores RGB while
                // cv::Mat is BGR -- channel 3/alpha and grayscale pass through unchanged)
                const size_t nSampleChannelIdx = ((nChannelIdx==3||m_nImgChannels==1)?nChannelIdx:2-nChannelIdx);
                const size_t nImgTotOffset = nSampleChannelIdx*2+nImgColOffset; // *2 == DESC_SIZE bytes per channel
                // write the per-channel average descriptor value for this pixel
                *(ushort*)(oOutputImg.data+nImgTotOffset) = (ushort)(afCurrPxSum[nChannelIdx]/m_nBGSamples);
            }
        }
    }
}
/** * Convert gdal type to opencv type */ int KGDAL2CV::gdal2opencv(const GDALDataType& gdalType, const int& channels){ switch (gdalType){ /// UInt8 case GDT_Byte: if (channels == 1){ return CV_8UC1; } if (channels == 3){ return CV_8UC3; } if (channels == 4){ return CV_8UC4; } else { return CV_8UC(channels); } return -1; /// UInt16 case GDT_UInt16: if (channels == 1){ return CV_16UC1; } if (channels == 3){ return CV_16UC3; } if (channels == 4){ return CV_16UC4; } else { return CV_16UC(channels); } return -1; /// Int16 case GDT_Int16: if (channels == 1){ return CV_16SC1; } if (channels == 3){ return CV_16SC3; } if (channels == 4){ return CV_16SC4; } else { return CV_16SC(channels); } return -1; /// UInt32 case GDT_UInt32: case GDT_Int32: if (channels == 1){ return CV_32SC1; } if (channels == 3){ return CV_32SC3; } if (channels == 4){ return CV_32SC4; } else { return CV_32SC(channels); } return -1; case GDT_Float32: if (channels == 1){ return CV_32FC1; } if (channels == 3){ return CV_32FC3; } if (channels == 4){ return CV_32FC4; } else { return CV_32FC(channels); } return -1; case GDT_Float64: if (channels == 1){ return CV_64FC1; } if (channels == 3){ return CV_64FC3; } if (channels == 4){ return CV_64FC4; } else { return CV_64FC(channels); } return -1; default: std::cout << "Unknown GDAL Data Type" << std::endl; std::cout << "Type: " << GDALGetDataTypeName(gdalType) << std::endl; return -1; } return -1; }
// Performance test for 16-bit cvtColor conversions over several image sizes.
PERF_TEST_P(Size_CvtMode16U, DISABLED_cvtColor_16u,
            testing::Combine(
                testing::Values(::perf::szODD, ::perf::szVGA, ::perf::sz1080p),
                CvtMode16U::all()
                )
            )
{
    Size sz = get<0>(GetParam());
    int _mode = get<1>(GetParam()), mode = _mode;
    ChPair ch = getConversionInfo(mode);
    mode %= COLOR_COLORCVT_MAX;
    Mat src(sz, CV_16UC(ch.scn));
    // BUGFIX: the destination matrix must be preallocated with the *destination*
    // channel count (ch.dcn); the original used ch.scn, which forced cvtColor to
    // reallocate dst whenever scn != dcn, skewing the measurement.
    Mat dst(sz, CV_16UC(ch.dcn));
    declare.time(100);
    declare.in(src, WARMUP_RNG).out(dst);
    // small frames run many iterations per cycle for stable timings
    int runs = sz.width <= 320 ? 100 : 5;
    TEST_CYCLE_MULTIRUN(runs) cvtColor(src, dst, mode, ch.dcn);
    SANITY_CHECK(dst, 1);
}
/// Initializes the non-parallel LOBSTER model: runs the shared LBSP base
/// initialization, then allocates and zeroes one color + one descriptor sample
/// image per background sample before refreshing the model from the first frame.
void BackgroundSubtractorLOBSTER_<ParallelUtils::eNonParallel>::initialize(const cv::Mat& oInitImg, const cv::Mat& oROI) {
    lvDbgExceptionWatch;
    // == init
    BackgroundSubtractorLBSP::initialize(oInitImg,oROI);
    const int nChannels = (int)m_nImgChannels;
    m_voBGColorSamples.resize(m_nBGSamples);
    m_voBGDescSamples.resize(m_nBGSamples);
    for(size_t nSampleIdx=0; nSampleIdx<m_nBGSamples; ++nSampleIdx) {
        cv::Mat& oColorSample = m_voBGColorSamples[nSampleIdx];
        cv::Mat& oDescSample = m_voBGDescSamples[nSampleIdx];
        oColorSample.create(m_oImgSize,CV_8UC(nChannels));
        oColorSample = cv::Scalar_<uchar>::all(0);
        oDescSample.create(m_oImgSize,CV_16UC(nChannels));
        oDescSample = cv::Scalar_<ushort>::all(0);
    }
    // model must be flagged initialized before the refresh pass can run
    m_bInitialized = true;
    refreshModel(1.0f,true);
    m_bModelInitialized = true;
}
/// Full (re)initialization of the LOBSTER background model from a first frame:
/// validates input + ROI, resets all per-frame state, allocates sample buffers
/// and pixel lookup tables, precomputes the LBSP threshold LUT, and computes
/// the initial LBSP descriptors for every ROI pixel before refreshing the model.
void BackgroundSubtractorLOBSTER::initialize(const cv::Mat& oInitImg, const cv::Mat& oROI) {
    CV_Assert(!oInitImg.empty() && oInitImg.cols>0 && oInitImg.rows>0);
    CV_Assert(oInitImg.isContinuous()); // flat data[] indexing below relies on continuity
    CV_Assert(oInitImg.type()==CV_8UC1 || oInitImg.type()==CV_8UC3);
    if(oInitImg.type()==CV_8UC3) {
        // warn when a 3-channel frame is actually grayscale (all channels equal)
        std::vector<cv::Mat> voInitImgChannels;
        cv::split(oInitImg,voInitImgChannels);
        if(!cv::countNonZero((voInitImgChannels[0]!=voInitImgChannels[1])|(voInitImgChannels[2]!=voInitImgChannels[1])))
            std::cout << std::endl << "\tBackgroundSubtractorLOBSTER : Warning, grayscale images should always be passed in CV_8UC1 format for optimal performance." << std::endl;
    }
    // resolve the ROI to use: new full-frame ROI, previously-set ROI, or caller-provided ROI
    cv::Mat oNewBGROI;
    // NOTE(review): 'oROI.size()!=oInitImg.size()' compares the size of the
    // already-known-empty 'oROI'; presumably 'm_oROI.size()' was intended,
    // which would make the 'else if' branch reachable -- confirm upstream.
    if(oROI.empty() && (m_oROI.empty() || oROI.size()!=oInitImg.size())) {
        oNewBGROI.create(oInitImg.size(),CV_8UC1);
        oNewBGROI = cv::Scalar_<uchar>(UCHAR_MAX);
    }
    else if(oROI.empty())
        oNewBGROI = m_oROI;
    else {
        CV_Assert(oROI.size()==oInitImg.size() && oROI.type()==CV_8UC1);
        CV_Assert(cv::countNonZero((oROI<UCHAR_MAX)&(oROI>0))==0); // ROI must be strictly binary (0 or 255)
        oNewBGROI = oROI.clone();
    }
    LBSP::validateROI(oNewBGROI);
    const size_t nROIPxCount = (size_t)cv::countNonZero(oNewBGROI);
    CV_Assert(nROIPxCount>0);
    // commit frame geometry + reset all per-sequence counters
    m_oROI = oNewBGROI;
    m_oImgSize = oInitImg.size();
    m_nImgType = oInitImg.type();
    m_nImgChannels = oInitImg.channels();
    m_nTotPxCount = m_oImgSize.area();
    m_nTotRelevantPxCount = nROIPxCount;
    m_nFrameIndex = 0;
    m_nFramesSinceLastReset = 0;
    m_nModelResetCooldown = 0;
    m_oLastFGMask.create(m_oImgSize,CV_8UC1);
    m_oLastFGMask = cv::Scalar_<uchar>(0);
    m_oLastColorFrame.create(m_oImgSize,CV_8UC((int)m_nImgChannels));
    m_oLastColorFrame = cv::Scalar_<uchar>::all(0);
    m_oLastDescFrame.create(m_oImgSize,CV_16UC((int)m_nImgChannels));
    m_oLastDescFrame = cv::Scalar_<ushort>::all(0);
    // allocate + zero the per-sample color/descriptor background model images
    m_voBGColorSamples.resize(m_nBGSamples);
    m_voBGDescSamples.resize(m_nBGSamples);
    for(size_t s=0; s<m_nBGSamples; ++s) {
        m_voBGColorSamples[s].create(m_oImgSize,CV_8UC((int)m_nImgChannels));
        m_voBGColorSamples[s] = cv::Scalar_<uchar>::all(0);
        m_voBGDescSamples[s].create(m_oImgSize,CV_16UC((int)m_nImgChannels));
        m_voBGDescSamples[s] = cv::Scalar_<ushort>::all(0);
    }
    // rebuild the pixel lookup tables (raw owning arrays, freed here on re-init;
    // NOTE(review): idx LUT is sized by relevant-pixel count, info LUT by total
    // pixel count -- the indexing below is consistent with that split)
    if(m_aPxIdxLUT)
        delete[] m_aPxIdxLUT;
    if(m_aPxInfoLUT)
        delete[] m_aPxInfoLUT;
    m_aPxIdxLUT = new size_t[m_nTotRelevantPxCount];
    m_aPxInfoLUT = new PxInfoBase[m_nTotPxCount];
    if(m_nImgChannels==1) {
        // grayscale path: 1 byte/px color, 2 bytes/px descriptor
        CV_Assert(m_oLastColorFrame.step.p[0]==(size_t)m_oImgSize.width && m_oLastColorFrame.step.p[1]==1);
        CV_Assert(m_oLastDescFrame.step.p[0]==m_oLastColorFrame.step.p[0]*2 && m_oLastDescFrame.step.p[1]==m_oLastColorFrame.step.p[1]*2);
        // precompute per-intensity LBSP thresholds (halved for grayscale)
        for(size_t t=0; t<=UCHAR_MAX; ++t)
            m_anLBSPThreshold_8bitLUT[t] = cv::saturate_cast<uchar>((t*m_fRelLBSPThreshold+m_nLBSPThresholdOffset)/2);
        for(size_t nPxIter=0, nModelIter=0; nPxIter<m_nTotPxCount; ++nPxIter) {
            if(m_oROI.data[nPxIter]) {
                m_aPxIdxLUT[nModelIter] = nPxIter;
                m_aPxInfoLUT[nPxIter].nImgCoord_Y = (int)nPxIter/m_oImgSize.width;
                m_aPxInfoLUT[nPxIter].nImgCoord_X = (int)nPxIter%m_oImgSize.width;
                m_aPxInfoLUT[nPxIter].nModelIdx = nModelIter;
                m_oLastColorFrame.data[nPxIter] = oInitImg.data[nPxIter];
                const size_t nDescIter = nPxIter*2; // byte offset into the 16-bit descriptor frame
                LBSP::computeGrayscaleDescriptor(oInitImg,oInitImg.data[nPxIter],m_aPxInfoLUT[nPxIter].nImgCoord_X,m_aPxInfoLUT[nPxIter].nImgCoord_Y,m_anLBSPThreshold_8bitLUT[oInitImg.data[nPxIter]],*((ushort*)(m_oLastDescFrame.data+nDescIter)));
                ++nModelIter;
            }
        }
    }
    else { //m_nImgChannels==3
        // BGR path: 3 bytes/px color, 6 bytes/px descriptor
        CV_Assert(m_oLastColorFrame.step.p[0]==(size_t)m_oImgSize.width*3 && m_oLastColorFrame.step.p[1]==3);
        CV_Assert(m_oLastDescFrame.step.p[0]==m_oLastColorFrame.step.p[0]*2 && m_oLastDescFrame.step.p[1]==m_oLastColorFrame.step.p[1]*2);
        for(size_t t=0; t<=UCHAR_MAX; ++t)
            m_anLBSPThreshold_8bitLUT[t] = cv::saturate_cast<uchar>(t*m_fRelLBSPThreshold+m_nLBSPThresholdOffset);
        for(size_t nPxIter=0, nModelIter=0; nPxIter<m_nTotPxCount; ++nPxIter) {
            if(m_oROI.data[nPxIter]) {
                m_aPxIdxLUT[nModelIter] = nPxIter;
                m_aPxInfoLUT[nPxIter].nImgCoord_Y = (int)nPxIter/m_oImgSize.width;
                m_aPxInfoLUT[nPxIter].nImgCoord_X = (int)nPxIter%m_oImgSize.width;
                m_aPxInfoLUT[nPxIter].nModelIdx = nModelIter;
                const size_t nPxRGBIter = nPxIter*3;
                const size_t nDescRGBIter = nPxRGBIter*2;
                // per-channel copy + descriptor computation
                for(size_t c=0; c<3; ++c) {
                    m_oLastColorFrame.data[nPxRGBIter+c] = oInitImg.data[nPxRGBIter+c];
                    LBSP::computeSingleRGBDescriptor(oInitImg,oInitImg.data[nPxRGBIter+c],m_aPxInfoLUT[nPxIter].nImgCoord_X,m_aPxInfoLUT[nPxIter].nImgCoord_Y,c,m_anLBSPThreshold_8bitLUT[oInitImg.data[nPxRGBIter+c]],((ushort*)(m_oLastDescFrame.data+nDescRGBIter))[c]);
                }
                ++nModelIter;
            }
        }
    }
    m_bInitialized = true;
    refreshModel(1.0f); // populate all samples from the initial frame
}