void NewMapPtsNCC::output() { decidePointType(); for (size_t i = 0; i < newMapPts.size(); i++) { MapPoint* mpt = newMapPts[i]; for (int c = 0; c < numCams; c++) { int camId = m_camGroup.camIds[c]; if (mpt->pFeatures[camId]) { for (FeaturePoint* fp = mpt->pFeatures[camId]; fp->nextFrame; fp = fp->nextFrame) { fp->mpt = mpt; mpt->lastFrame = fp->f > mpt->lastFrame ? fp->f : mpt->lastFrame; mpt->nccBlks[camId].computeScaled( pCoSLAM->slam[camId].m_smallImg, pCoSLAM->slam[camId].m_smallScale, fp->x, fp->y); int x = max(0, min((int) fp->x, pCoSLAM->slam[camId].m_img.w)); int y = max(0, min((int) fp->y, pCoSLAM->slam[camId].m_img.h)); mpt->setColor(pCoSLAM->slam[camId].m_rgb(x,y)); } } } if (newMapPts[i]->lastFrame < pCoSLAM->curFrame) { pCoSLAM->actMapPts.add(mpt); } else pCoSLAM->curMapPts.add(mpt); } }
/**
 * Triangulate new 3D map points from feature tracks that are tracked but
 * not yet associated with a map point.
 *
 * For each candidate feature, its track is walked back to the earliest
 * static observation with a camera pose; that observation and the current
 * one are triangulated. The point is accepted only if it lies in front of
 * the camera, its position uncertainty is small relative to its distance
 * from the camera, and its reprojection error stays below maxEpiErr both
 * before and after refinement.
 *
 * @param mapPts    output: newly allocated MapPoint* for every accepted
 *                  reconstruction (ownership passes to the caller/map).
 * @param maxEpiErr maximum allowed reprojection error in pixels.
 * @param maxNcc    NOTE(review): unused in this body -- presumably an NCC
 *                  matching threshold consumed elsewhere; confirm.
 * @return number of map points reconstructed.
 */
int SingleSLAM::newMapPoints(std::vector<MapPoint*>& mapPts, double maxEpiErr,
		double maxNcc) {
	std::vector<FeaturePoint*> vecFeatPts;
	// Collect tracked-but-unmapped features with track length >= nMinFeatTrkLen.
	getUnMappedAndTrackedFeatPts(vecFeatPts, 0, Param::nMinFeatTrkLen);

	mapPts.clear();
	mapPts.reserve(4096);

	double M[3], m1[2], m2[2];
	//reconstruct 3D map points
	int numRecons = 0;
	for (size_t k = 0; k < vecFeatPts.size(); k++) {
		FeaturePoint* cur_fp = vecFeatPts[k];
		FeaturePoint* pre_fp = cur_fp;
		// Walk back along the track to the earliest observation that has a
		// camera pose, stopping early if the track is marked dynamic.
		while (pre_fp->preFrame && pre_fp->preFrame->cam) {
			if (pre_fp->type == TYPE_FEATPOINT_DYNAMIC) {
				break;
			}
			pre_fp = pre_fp->preFrame;
		}
		// Skip dynamic tracks and tracks whose start has no pose.
		if (pre_fp->type == TYPE_FEATPOINT_DYNAMIC || !pre_fp->cam)
			continue;

		// Normalize both image points with the inverse intrinsics (iK).
		normPoint(iK.data, pre_fp->m, m1);
		normPoint(iK.data, cur_fp->m, m2);

		//triangulate the two feature points to get the 3D point
		binTriangulate(pre_fp->cam->R, pre_fp->cam->t, cur_fp->cam->R,
				cur_fp->cam->t, m1, m2, M);

		// Reject points behind the current camera.
		if (isAtCameraBack(cur_fp->cam->R, cur_fp->cam->t, M))
			continue;

		double cov[9], org[3];
		getBinTriangulateCovMat(K.data, pre_fp->cam->R, pre_fp->cam->t, K.data,
				cur_fp->cam->R, cur_fp->cam->t, M, cov, Const::PIXEL_ERR_VAR);
		getCameraCenter(cur_fp->cam->R, cur_fp->cam->t, org);
		// Uncertainty gate: s is the covariance trace; a point closer to the
		// camera than sqrt(trace) is too ill-conditioned to keep.
		double s = fabs(cov[0] + cov[4] + cov[8]);
		if (dist3(org, M) < sqrt(s))
			continue;

		//check the reprojection error
		double err1 = reprojErrorSingle(K.data, pre_fp->cam->R, pre_fp->cam->t,
				M, pre_fp->m);
		double err2 = reprojErrorSingle(K.data, cur_fp->cam->R, cur_fp->cam->t,
				M, cur_fp->m);

		if (err1 < maxEpiErr && err2 < maxEpiErr) {
			//a new map point is generated
			refineTriangulation(cur_fp, M, cov);
			// Re-validate after refinement: errors and cheirality may change.
			err1 = reprojErrorSingle(K.data, pre_fp->cam->R, pre_fp->cam->t, M,
					pre_fp->m);
			err2 = reprojErrorSingle(K.data, cur_fp->cam->R, cur_fp->cam->t, M,
					cur_fp->m);
			if (isAtCameraBack(cur_fp->cam->R, cur_fp->cam->t, M)
					|| isAtCameraBack(pre_fp->cam->R, pre_fp->cam->t, M))
				continue;
			if (err1 < maxEpiErr && err2 < maxEpiErr) {
				// Accepted: allocate the map point at M, stamped with the
				// first frame of the track (ownership leaves this function).
				MapPoint* pM = new MapPoint(M[0], M[1], M[2], pre_fp->f);
				doubleArrCopy(pM->cov, 0, cov, 9);
				mapPts.push_back(pM);
				pM->lastFrame = cur_fp->f;
				// Back-link every observation on the track to the new point.
				for (FeaturePoint* p = cur_fp; p; p = p->preFrame)
					p->mpt = pM;

				//add the feature point
				pM->addFeature(camId, cur_fp);
				pM->setLocalStatic();

				//compute the NCC block
				pM->nccBlks[camId].computeScaled(m_lastKeyPos->imgSmall,
						m_lastKeyPos->imgScale, cur_fp->x, cur_fp->y);

				// Sample the point color, clamped to valid pixel coordinates.
				int x = max(0, min((int) cur_fp->x, m_rgb.w - 1));
				int y = max(0, min((int) cur_fp->y, m_rgb.h - 1));
				pM->setColor(m_rgb(x, y));
				numRecons++;
			}
		}
	}
	return numRecons;
}