void GLImagePane::drawClickedPoint(ImgRGB& img) {
	assert(s_clicked_camid >= 0);
	if (s_clicked_mappt) {
		if (!s_clicked_mappt->pFeatures[s_clicked_camid])
			return;

		double m0[2];
		project(m_pSLAM->slam[m_camId].K.data,
				m_pSLAM->slam[m_camId].m_camPos.current()->R,
				m_pSLAM->slam[m_camId].m_camPos.current()->t,
				s_clicked_mappt->M, m0);

		//drawCircleToData(W, H, imgData, m0[0], m0[1], 15, 255, 0, 255);
		double var[4];
		getProjectionCovMat(m_pSLAM->slam[m_camId].K.data,
				m_pSLAM->slam[m_camId].m_camPos.current()->R,
				m_pSLAM->slam[m_camId].m_camPos.current()->t,
				s_clicked_mappt->M, s_clicked_mappt->cov, var, 4);

		drawEllipseToData(img, m0[0], m0[1], var, 255, 0, 255);
	} else {
		if (s_clicked_featPt)
			drawPoint(img, s_clicked_featPt->xo, s_clicked_featPt->yo, 255, 0, 0);
	}
}
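getProjectionCovMat appears in every example below: it turns a map point's 3x3 position covariance into the 2x2 covariance of its image projection, with the last argument acting as a pixel-error variance (4 here, 1.0 or Const::PIXEL_ERR_VAR elsewhere). The routine itself is not part of this excerpt; the following is a minimal sketch of the assumed first-order propagation, taking the 2x3 projection Jacobian as an input and adding the pixel term on the diagonal (both details are assumptions).

// Sketch of first-order covariance propagation: var = J * cov * J^T + pixelVar * I,
// where J is the 2x3 Jacobian of the projection at the 3D point (row-major).
static void projectionCovSketch(const double J[6], const double cov[9],
		double pixelVar, double var[4]) {
	double JC[6]; // J * cov, a 2x3 intermediate
	for (int r = 0; r < 2; r++)
		for (int c = 0; c < 3; c++) {
			JC[r * 3 + c] = 0;
			for (int k = 0; k < 3; k++)
				JC[r * 3 + c] += J[r * 3 + k] * cov[k * 3 + c];
		}
	for (int r = 0; r < 2; r++)
		for (int c = 0; c < 2; c++) {
			double s = 0;
			for (int k = 0; k < 3; k++)
				s += JC[r * 3 + k] * J[c * 3 + k]; // (J * cov) * J^T
			var[r * 2 + c] = s + (r == c ? pixelVar : 0.0);
		}
}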
Example #2
static void intraCamCovWeightedLMStep(const double K[9],
		const double R[9],
		const double t[3],
		int npts,
		const double covs[],
		const double Ws[],
		const double Ms[],
		const double ms[],
		double param[6],
		const double lambda) {

	double sA[36], A[36], invSA[36], sB[6], B[6], rm[2], rerr[2];
	double Jw[6], Jt[6];
	memset(sA, 0, sizeof(double) * 36);
	memset(sB, 0, sizeof(double) * 6);

	const double* pMs = Ms;
	const double* pcovs = covs;
	const double* pms = ms;
	for (int i = 0; i < npts; i++, pMs += 3, pcovs += 9, pms += 2) {
		double var[4], ivar[4];
		project(K, R, t, pMs, rm);
		getProjectionCovMat(K, R, t, pMs, pcovs, var, 1.0);
		mat22Inv(var, ivar);
		_perspectiveSO3JacobiNum(K, R, t, pMs, pms, rm, Jw);
		_perspectiveTJacobiNum(K, R, t, pMs, pms, rm, Jt);

		//J \in R^{2x6}
		double J[12] = { Jw[0], Jw[1], Jw[2], Jt[0], Jt[1], Jt[2], Jw[3], Jw[4], Jw[5], Jt[3], Jt[4], Jt[5] };

		//\sum_i w[i]*(J_\omega^T *ivar[i] * J_\omega)
		double Jivar[12];
		matATB(2, 6, 2, 2, J, ivar, Jivar);
		matAB(6, 2, 2, 6, Jivar, J, A);
		mat66scale(A, Ws[i]);
		mat66sum(sA, A, sA);

		//-\sum_i w[i]*( J_\omega^T *ivar[i] *\epsilon_i)
		rerr[0] = (-rm[0] + pms[0]);
		rerr[1] = (-rm[1] + pms[1]);

		matAB(6, 2, 2, 1, Jivar, rerr, B);
		mat16scale(B, Ws[i]);
		mat16sum(sB, B, sB);
	}

	sA[0] += lambda;
	sA[7] += lambda;
	sA[14] += lambda;
	sA[21] += lambda;
	sA[28] += lambda;
	sA[35] += lambda;

	matInv(6, sA, invSA);
	matAB(6, 6, 6, 1, invSA, sB, param);
}
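intraCamCovWeightedLMStep computes a single damped Gauss-Newton step; the loop that drives it is not part of this excerpt. The sketch below shows one plausible driver, assuming param is [delta_omega; delta_t], that the rotation increment is applied as a left-multiplied SO(3) exponential (the real convention depends on _perspectiveSO3JacobiNum, which is not shown), that matAB from the listings is available, and that reprojError2CovWeighted from Example #5 below is visible in the same translation unit.

#include <cmath>
#include <cstring>

// Rodrigues' formula: exp([w]_x) = I + sin(th)/th * [w]_x + (1-cos(th))/th^2 * [w]_x^2
static void so3ExpSketch(const double w[3], double dR[9]) {
	double th = sqrt(w[0] * w[0] + w[1] * w[1] + w[2] * w[2]);
	double Kx[9] = { 0, -w[2], w[1], w[2], 0, -w[0], -w[1], w[0], 0 };
	double a = th < 1e-12 ? 1.0 : sin(th) / th;
	double b = th < 1e-12 ? 0.5 : (1 - cos(th)) / (th * th);
	for (int i = 0; i < 3; i++)
		for (int j = 0; j < 3; j++) {
			double Kx2 = w[i] * w[j] - (i == j ? th * th : 0.0); // [w]_x^2 = w*w^T - th^2*I
			dR[i * 3 + j] = (i == j ? 1.0 : 0.0) + a * Kx[i * 3 + j] + b * Kx2;
		}
}

// Sketch of a damped iteration around intraCamCovWeightedLMStep; R and t are
// updated in place. The left-multiplicative rotation update and the simple
// lambda schedule are assumptions, not the original driver.
static void intraCamCovWeightedLMSketch(const double K[9], double R[9],
		double t[3], int npts, const double covs[], const double Ws[],
		const double Ms[], const double ms[], int maxIter) {
	double lambda = 1e-3;
	double err = reprojError2CovWeighted(K, R, t, npts, covs, Ws, Ms, ms);
	for (int it = 0; it < maxIter; it++) {
		double param[6], dR[9], Rnew[9], tnew[3];
		intraCamCovWeightedLMStep(K, R, t, npts, covs, Ws, Ms, ms, param, lambda);

		so3ExpSketch(param, dR);                 // first three entries: rotation increment
		matAB(3, 3, 3, 3, dR, R, Rnew);          // assumed convention: R <- exp([dw]_x) * R
		tnew[0] = t[0] + param[3];               // last three entries: translation increment
		tnew[1] = t[1] + param[4];
		tnew[2] = t[2] + param[5];

		double newErr = reprojError2CovWeighted(K, Rnew, tnew, npts, covs, Ws, Ms, ms);
		if (newErr < err) {                      // accept the step, relax the damping
			memcpy(R, Rnew, sizeof(Rnew));
			memcpy(t, tnew, sizeof(tnew));
			err = newErr;
			lambda *= 0.1;
		} else {                                 // reject the step, increase the damping
			lambda *= 10.0;
		}
	}
}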
Example #3
double reprojErrCov(const double* K, const double* R, const double* t,
		const double* M, const double cov[9], const double m[2]) {
	double var[4];
	double ivar[4];
	double m0[2];
	project(K, R, t, M, m0);
	getProjectionCovMat(K, R, t, M, cov, var, 1.0);
	mat22Inv(var, ivar);
	double dx = m0[0] - m[0];
	double dy = m0[1] - m[1];

	double s = ivar[0] * dx * dx + 2 * ivar[1] * dx * dy + ivar[3] * dy * dy;
	return sqrt(s);
}
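mat22Inv is used here and in the examples below to invert the 2x2 projection covariance; note that the quadratic form above reads only ivar[0], ivar[1] and ivar[3], i.e. it relies on the inverse being symmetric. A minimal sketch of such a helper, assuming a row-major layout (the real implementation may differ):

// Sketch of a 2x2 inverse for a row-major matrix [a b; c d].
static void mat22InvSketch(const double A[4], double Ainv[4]) {
	double det = A[0] * A[3] - A[1] * A[2];
	double idet = 1.0 / det;           // caller must ensure det != 0
	Ainv[0] =  A[3] * idet;
	Ainv[1] = -A[1] * idet;
	Ainv[2] = -A[2] * idet;
	Ainv[3] =  A[0] * idet;
}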
Example #4
int SingleSLAM::regMapPoints(std::vector<MapPoint*>& mapPts,
		PtrVec<NCCBlock>& nccblks, double maxEpiErr, double maxNcc,
		double scale) {
	CamPoseItem* cam = m_camPos.current();
	assert(cam);

	Mat_d featPts;
	std::vector<FeaturePoint*> featPtsPtr;
	getUnMappedFeatPts(featPtsPtr, &featPts);

	int numMapPoints = mapPts.size();
	int nRegistered = 0;

	ImgG img;
	scaleDownAvg(m_img, img, scale);

	for (int i = 0; i < numMapPoints; i++) {
		double ms0[2], var[4];
		project(K.data, cam->R, cam->t, mapPts[i]->M, ms0);
		if (ms0[0] < 0 || ms0[0] >= videoReader->_w || ms0[1] < 0
				|| ms0[1] >= videoReader->_h)
			continue;

		getProjectionCovMat(K.data, cam->R, cam->t, mapPts[i]->M,
				mapPts[i]->cov, var, Const::PIXEL_ERR_VAR);
		int iNearest = searchNearestPoint(featPts, ms0[0], ms0[1], var,
				maxEpiErr, 0);
		if (iNearest < 0)
			continue;

		//compare the image blocks
		double fx = featPts.data[2 * iNearest];
		double fy = featPts.data[2 * iNearest + 1];

		NCCBlock nb;
		getNCCBlock(img, fx * scale, fy * scale, nb);

		double ncc = matchNCCBlock(&nb, nccblks[i]);
		if (ncc >= maxNcc) {
			//find a projection
			mapPts[i]->addFeature(camId, featPtsPtr[iNearest]);
			featPtsPtr[iNearest]->mpt = mapPts[i];
			nRegistered++;
		}
	}
	return nRegistered;
}
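Despite its name, maxNcc is used here as the minimum similarity a candidate must reach (the match is accepted when ncc >= maxNcc). matchNCCBlock and NCCBlock are not shown in this excerpt; the score is assumed to be a zero-mean normalized cross-correlation over two equal-sized patches, roughly as sketched below.

#include <cmath>

// Sketch of zero-mean normalized cross-correlation between two equal-sized
// grayscale patches of n pixels; the result lies in [-1, 1], higher is more similar.
static double nccScoreSketch(const unsigned char* a, const unsigned char* b, int n) {
	double ma = 0, mb = 0;
	for (int i = 0; i < n; i++) { ma += a[i]; mb += b[i]; }
	ma /= n; mb /= n;
	double sab = 0, saa = 0, sbb = 0;
	for (int i = 0; i < n; i++) {
		double da = a[i] - ma, db = b[i] - mb;
		sab += da * db; saa += da * da; sbb += db * db;
	}
	return sab / sqrt(saa * sbb + 1e-12); // small epsilon guards flat patches
}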
Example #5
static double reprojError2CovWeighted(const double* K,
		const double* R,
		const double* t,
		int npts,
		const double* covs,
		const double* Ws,
		const double* Ms,
		const double* ms) {
	double rm[2], var[4], ivar[4];
	double err = 0;
	for (int i = 0; i < npts; ++i) {
		project(K, R, t, Ms + 3 * i, rm);
		getProjectionCovMat(K, R, t, Ms + 3 * i, covs + 9 * i, var, 1.0);
		mat22Inv(var, ivar);
		err += mahaDist2(rm, ms + 2 * i, ivar);
	}
	return err;
}
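mahaDist2 is the covariance-weighted error used for inlier gating in the two pose-update examples that follow. Its quadratic form is assumed to be the one expanded in Example #3 (reprojErrCov returns the square root of that value); whether mahaDist2 returns the squared form or its root is not determined by these excerpts. A minimal sketch with a row-major 2x2 inverse covariance:

// Sketch of a squared Mahalanobis distance between two 2D points, given the
// inverse covariance ivar in row-major order; the real mahaDist2 may differ.
static double mahaDist2Sketch(const double a[2], const double b[2],
		const double ivar[4]) {
	double dx = a[0] - b[0];
	double dy = a[1] - b[1];
	return ivar[0] * dx * dx + (ivar[1] + ivar[2]) * dx * dy + ivar[3] * dy * dy;
}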
Example #6
//#define DEBUG_MODE
int SingleSLAM::poseUpdate3D(bool largeErr) {
	propagateFeatureStates();
//get the feature points corresponding to the map points
	std::vector<Track2DNode*> nodes;
	int num = getStaticMappedTrackNodes(nodes);
	if (num < 1) {
		double* cR = m_camPos.current()->R;
		double* cT = m_camPos.current()->t;
		CamPoseItem* camPos = m_camPos.add(currentFrame(), camId, cR, cT);
		updateCamParamForFeatPts(K, camPos);

		warn(
				"[camera id:%d]intra-camera pose update failed! no static map points (%d)",
				camId, num);
		//leaveBACriticalSection();
		//CoSLAM::ptr->pause();
		//enterBACriticalSection();
		return -1;
	}

//choose the feature points for pose estimation
	std::vector<FeaturePoint*> featPts;
	chooseStaticFeatPts(featPts);
	std::vector<FeaturePoint*> mappedFeatPts;

	mappedFeatPts.reserve(nRowBlk * nColBlk * 2);

	for (size_t i = 0; i < featPts.size(); i++) {
		if (featPts[i]->mpt)
			mappedFeatPts.push_back(featPts[i]);
	}

//get the 2D-3D corresponding points
	int n3D2Ds = mappedFeatPts.size();
	Mat_d ms(n3D2Ds, 2), Ms(n3D2Ds, 3), covs(n3D2Ds, 9);
	for (int i = 0; i < n3D2Ds; i++) {
		FeaturePoint* fp = mappedFeatPts[i];
		ms.data[2 * i] = fp->x;
		ms.data[2 * i + 1] = fp->y;

		Ms.data[3 * i] = fp->mpt->x;
		Ms.data[3 * i + 1] = fp->mpt->y;
		Ms.data[3 * i + 2] = fp->mpt->z;

		memcpy(covs.data + 9 * i, fp->mpt->cov, sizeof(double) * 9);
	}

	Mat_d R(3, 3), t(3, 1);
	double* cR = m_camPos.current()->R;
	double* cT = m_camPos.current()->t;

	IntraCamPoseOption opt;

	//test
	Mat_d old_errs(n3D2Ds, 1);
	//get the reprojection error before the pose update
	for (int i = 0; i < n3D2Ds; i++) {
		FeaturePoint* fp = mappedFeatPts[i];
		double m[2];
		project(K.data, cR, cT, fp->mpt->M, m);
		old_errs[i] = dist2(m, fp->m);
	}
	//end of test

	intraCamEstimate(K.data, cR, cT, Ms.rows, 0, Ms.data, ms.data,
			Param::maxErr, R.data, t.data, &opt);

	Mat_d new_errs(n3D2Ds, 1);
	for (int i = 0; i < n3D2Ds; i++) {
		FeaturePoint* fp = mappedFeatPts[i];
		double m[2];
		project(K.data, R, t, fp->mpt->M, m);
		new_errs[i] = dist2(m, fp->m);
	}

//find outliers
	int numOut = 0;
	double errThres = largeErr ? 6.0 : 2.0;
	double rm[2], var[4], ivar[4];
	for (int i = 0; i < num; i++) {
		double* pM = nodes[i]->pt->mpt->M;
		double* pCov = nodes[i]->pt->mpt->cov;
		project(K, R, t, pM, rm);
		getProjectionCovMat(K, R, t, pM, pCov, var, Const::PIXEL_ERR_VAR);
		mat22Inv(var, ivar);
		double err = mahaDist2(rm, nodes[i]->pt->m, ivar);
		if (err < errThres) { //inlier
			nodes[i]->pt->reprojErr = err;
			seqTriangulate(K, R, t, nodes[i]->pt->m, pM, pCov,
					Const::PIXEL_ERR_VAR);
			project(K, R, t, pM, rm);
			getProjectionCovMat(K, R, t, pM, pCov, var, Const::PIXEL_ERR_VAR);
			mat22Inv(var, ivar);
			err = mahaDist2(rm, nodes[i]->pt->m, ivar);
			//			if (err >= 1) {
			//				nodes[i]->pt->mpt->setUncertain();
			//				//test
			//				printf("poseUpdate:1\n");
			//			}
		} else {
			//outliers
			numOut++;
			double repErr = dist2(rm, nodes[i]->pt->m);
			nodes[i]->pt->reprojErr = repErr;
			nodes[i]->pt->mpt->setUncertain();
		}
	}
	CamPoseItem* camPos = m_camPos.add(currentFrame(), camId, R.data, t.data);
	updateCamParamForFeatPts(K, camPos);
//	if (currentFrame() > 9)
//		MyApp::bStop = true;
#ifdef DEBUG_MODE
//if the number of outliers is too large (may be due to some distant points)
	if (currentFrame() >= 76) {
		//test
		printf("f:%d,cam:%d : n3d2d:%d, num:%d, numOut:%d\n", currentFrame(),
				camId, n3D2Ds, num, numOut);
		char dirPath[1024];
		sprintf(dirPath, "/home/tsou/slam_posefailed/%s", MyApp::timeStr);
		mkdir(dirPath, S_IRWXU | S_IRWXG | S_IROTH | S_IXOTH);

		savePGM(m_img, "/home/tsou/slam_posefailed/%s/%d_img_%d.pgm",
				MyApp::timeStr, currentFrame(), camId);
		writeMat(Ms, "/home/tsou/slam_posefailed/%s/%d_pts3d_%d.txt",
				MyApp::timeStr, currentFrame(), camId);
		writeMat(ms, "/home/tsou/slam_posefailed/%s/%d_pts2d_%d.txt",
				MyApp::timeStr, currentFrame(), camId);
		writeMat(covs, "/home/tsou/slam_posefailed/%s/%d_cov3d_%d.txt",
				MyApp::timeStr, currentFrame(), camId);
		writeMat(3, 3, K, "/home/tsou/slam_posefailed/%s/%d_K_%d.txt",
				MyApp::timeStr, currentFrame(), camId);
		writeMat(old_errs, "/home/tsou/slam_posefailed/%s/%d_old_errs_%d.txt",
				MyApp::timeStr, currentFrame(), camId);
		writeMat(new_errs, "/home/tsou/slam_posefailed/%s/%d_new_errs_%d.txt",
				MyApp::timeStr, currentFrame(), camId);

		writeMat(3, 3, cR, "/home/tsou/slam_posefailed/%s/%d_oldR_%d.txt",
				MyApp::timeStr, currentFrame(), camId);
		writeMat(3, 1, cT, "/home/tsou/slam_posefailed/%s/%d_oldT_%d.txt",
				MyApp::timeStr, currentFrame(), camId);
		writeMat(R, "/home/tsou/slam_posefailed/%s/%d_R_%d.txt", MyApp::timeStr,
				currentFrame(), camId);
		writeMat(t, "/home/tsou/slam_posefailed/%s/%d_T_%d.txt", MyApp::timeStr,
				currentFrame(), camId);

		//test
		logInfo("program paused for debug at frame %d, camera %d!\n",
				currentFrame(), camId);
		leaveBACriticalSection();
		CoSLAM::ptr->pause();
		enterBACriticalSection();
	}
#endif
	return num;
}
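dist2 records the plain (unweighted) reprojection errors above and in the next example. It is assumed here to be the Euclidean distance between two 2D points; if the real helper returns the squared distance instead, the stored errors simply live on that scale.

#include <cmath>

// Sketch of the assumed meaning of dist2: Euclidean distance between two 2D points.
static double dist2Sketch(const double a[2], const double b[2]) {
	double dx = a[0] - b[0];
	double dy = a[1] - b[1];
	return sqrt(dx * dx + dy * dy);
}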
Example #7
int SingleSLAM::fastPoseUpdate3D() {
	propagateFeatureStates();
//get the feature points corresponding to the map points
	std::vector<Track2DNode*> nodes;
	int num = getStaticMappedTrackNodes(nodes);
	if (num < 5) {
		repErr(
				"[camera id:%d]intra-camera pose update failed! less than five static map points (%d)",
				camId, num);
		return -1;
	}

//choose the feature points for pose estimation
	std::vector<FeaturePoint*> featPts;
	int iChoose = chooseStaticFeatPts(featPts);
//test
	logInfo("number of chosen features :%d\n", iChoose);

	std::vector<FeaturePoint*> mappedFeatPts;
	std::vector<FeaturePoint*> unmappedFeatPts;

	mappedFeatPts.reserve(nRowBlk * nColBlk * 2);
	unmappedFeatPts.reserve(nRowBlk * nColBlk * 2);

	for (size_t i = 0; i < featPts.size(); i++) {
		if (featPts[i]->mpt)
			mappedFeatPts.push_back(featPts[i]);
		else if (featPts[i]->preFrame)
			unmappedFeatPts.push_back(featPts[i]);
	}

	//get the 2D-3D corresponding points
	int n3D2Ds = mappedFeatPts.size();
	Mat_d ms(n3D2Ds, 2), Ms(n3D2Ds, 3), repErrs(n3D2Ds, 1);
	for (int i = 0; i < n3D2Ds; i++) {
		FeaturePoint* fp = mappedFeatPts[i];
		ms.data[2 * i] = fp->x;
		ms.data[2 * i + 1] = fp->y;

		Ms.data[3 * i] = fp->mpt->x;
		Ms.data[3 * i + 1] = fp->mpt->y;
		Ms.data[3 * i + 2] = fp->mpt->z;

		repErrs.data[i] = fp->reprojErr;
	}

	//get the 2D-2D corresponding points
	int n2D2Ds = unmappedFeatPts.size();
	Mat_d ums(n2D2Ds, 2), umspre(n2D2Ds, 2), Rpre(n2D2Ds, 9), tpre(n2D2Ds, 3);
	for (int i = 0; i < n2D2Ds; i++) {
		FeaturePoint* fp = unmappedFeatPts[i];

		ums.data[2 * i] = fp->x;
		ums.data[2 * i + 1] = fp->y;

		//step back one frame (the walk back to the frame of the first appearance is disabled)
		//while (fp->preFrame) {
		fp = fp->preFrame;
		//}
		assert(fp);

		umspre.data[2 * i] = fp->x;
		umspre.data[2 * i + 1] = fp->y;

		doubleArrCopy(Rpre.data, i, fp->cam->R, 9);
		doubleArrCopy(tpre.data, i, fp->cam->t, 3);
	}

//estimate the camera pose using both 2D-2D and 3D-2D correspondences
	double R[9], t[3];
	double* cR = m_camPos.current()->R;
	double* cT = m_camPos.current()->t;

//	//test
//	logInfo("==============start of camId:%d=================\n", camId);
//	logInfo("n3D2D:%d, n2D2D:%d\n", n3D2Ds, n2D2Ds);
//
//	write(K, "/home/tsou/data/K.txt");
//	write(cR, 3, 3, "/home/tsou/data/%d_R0.txt", camId);
//	write(cT, 3, 1, "/home/tsou/data/%d_T0.txt", camId);
//	write(repErrs, "/home/tsou/data/%d_errs.txt", camId);
//	write(Ms, "/home/tsou/data/%d_Ms.txt", camId);
//	write(ms, "/home/tsou/data/%d_ms.txt", camId);
//	write(Rpre, "/home/tsou/data/%d_Rpre.txt", camId);
//	write(tpre, "/home/tsou/data/%d_tpre.txt", camId);
//	write(umspre, "/home/tsou/data/%d_umspre.txt", camId);
//	write(ums, "/home/tsou/data/%d_ums.txt", camId);
//
//	//test
//	printMat(3, 3, cR);
//	printMat(3, 1, cT);

	IntraCamPoseOption opt;
	double R_tmp[9], t_tmp[3];
	intraCamEstimate(K.data, cR, cT, n3D2Ds, repErrs.data, Ms.data, ms.data, 6,
			R_tmp, t_tmp, &opt);

	if (getCameraDistance(R_tmp, t_tmp, Rpre.data, tpre.data) > 1000) {
		opt.verboseLM = 1;
		intraCamEstimateEpi(K.data, R_tmp, t_tmp, n3D2Ds, repErrs.data, Ms.data,
				ms.data, n2D2Ds, 0, Rpre.data, tpre.data, umspre.data, ums.data,
				6, R, t, &opt);
	} else {
		doubleArrCopy(R, 0, R_tmp, 9);
		doubleArrCopy(t, 0, t_tmp, 3);
	}

//	printMat(3, 3, cR);
//	printMat(3, 1, cT);
//	printMat(3, 3, R);
//	printMat(3, 1, cT);
//	logInfo("==============end of camId:%d=================\n", camId);
//	intraCamEstimate(K.data,cR,cT,n3D2Ds, repErrs.data,Ms.data,ms.data,6.0,R,t,&opt);
//	find outliers

	int numOut = 0;
	double rm[2], var[4], ivar[4];
	for (int i = 0; i < num; i++) {
		double* pM = nodes[i]->pt->mpt->M;
		double* pCov = nodes[i]->pt->mpt->cov;
		project(K, R, t, pM, rm);
		getProjectionCovMat(K, R, t, pM, pCov, var, Const::PIXEL_ERR_VAR);
		mat22Inv(var, ivar);
		double err = mahaDist2(rm, nodes[i]->pt->m, ivar);
		if (err < 1) { //inlier
			nodes[i]->pt->reprojErr = err;
			seqTriangulate(K, R, t, nodes[i]->pt->m, pM, pCov,
					Const::PIXEL_ERR_VAR);
			project(K, R, t, pM, rm);
			getProjectionCovMat(K, R, t, pM, pCov, var, Const::PIXEL_ERR_VAR);
			mat22Inv(var, ivar);
			err = mahaDist2(rm, nodes[i]->pt->m, ivar);
			if (err >= 1) {
				nodes[i]->pt->mpt->setFalse();
			}
		} else {
			//outliers
			numOut++;
			double repErr = dist2(rm, nodes[i]->pt->m);
			nodes[i]->pt->reprojErr = repErr;
			nodes[i]->pt->mpt->setUncertain();
		}
	}
	CamPoseItem* camPos = m_camPos.add(currentFrame(), camId, R, t);
	updateCamParamForFeatPts(K, camPos);

	return num;
}
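getCameraDistance decides above whether the 3D-2D-only estimate has jumped too far from the previous pose before falling back to the joint 3D-2D/2D-2D estimation. It is assumed here to be the Euclidean distance between the two camera centres C = -R^T * t; the real helper, and which of the packed previous poses it compares against, may differ.

#include <cmath>

// Sketch of a camera-centre distance between two poses (R1, t1) and (R2, t2),
// with C = -R^T * t and row-major 3x3 rotations.
static double cameraDistanceSketch(const double R1[9], const double t1[3],
		const double R2[9], const double t2[3]) {
	double C1[3], C2[3];
	for (int i = 0; i < 3; i++) {
		// (R^T * t)_i = sum_j R[j*3+i] * t[j]
		C1[i] = -(R1[0 * 3 + i] * t1[0] + R1[1 * 3 + i] * t1[1] + R1[2 * 3 + i] * t1[2]);
		C2[i] = -(R2[0 * 3 + i] * t2[0] + R2[1 * 3 + i] * t2[1] + R2[2 * 3 + i] * t2[2]);
	}
	double dx = C1[0] - C2[0], dy = C1[1] - C2[1], dz = C1[2] - C2[2];
	return sqrt(dx * dx + dy * dy + dz * dz);
}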
void GLImagePane::mouseDown(wxMouseEvent& event) {
	int x = event.GetX();
	int y = event.GetY();

	double fm[2] = { imgWidth * x * 1.0 / getWidth(), imgHeight * y * 1.0
			/ getHeight() };
	FeaturePoint* pFeatPt = searchNearestFeatPt(
			m_pSLAM->slam[m_camId].m_featPts, m_pSLAM->curFrame, fm, 7.0);
	if (pFeatPt) {
		s_clicked_camid = m_camId;
		s_clicked_featPt = pFeatPt;
		s_clicked_mappt = pFeatPt->mpt;
		if (pFeatPt->mpt && pFeatPt->mpt->state == STATE_MAPPOINT_CURRENT) {
			replaceMapPointFlag(m_pSLAM->curMapPts, FLAG_MAPPOINT_TEST4,
					FLAG_MAPPOINT_NORMAL);
			pFeatPt->mpt->flag = FLAG_MAPPOINT_TEST4;
			//		//test
			pFeatPt->mpt->print();
			double M[3], cov[9];
			//		isDynamicPoint(2,pFeatPt->mpt,2.0,M,cov);

			//print reprojection errors
			MapPoint* p = pFeatPt->mpt;
			logInfo("newpt : %s\n", p->bNewPt ? "yes" : "no");
			for (int i = 0; i < SLAM_MAX_NUM; i++) {
				FeaturePoint* fp = p->pFeatures[i];
				if (fp) {
					CamPoseItem* cam = fp->cam;
					double err = reprojErrorSingle(m_pSLAM->slam[i].K.data,
							cam->R, cam->t, p->M, fp->m);
					logInfo("[view#%d]error:%lf  ", i, err);
					double rm[2], var[4], ivar[4];

					project(fp->K, fp->cam->R, fp->cam->t, p->M, rm);
					getProjectionCovMat(fp->K, fp->cam->R, fp->cam->t, p->M,
							p->cov, var, Const::PIXEL_ERR_VAR);
					mat22Inv(var, ivar);

					double err2 = mahaDist2(rm, fp->m, ivar);
					logInfo("maha error:%lf\n", err2);

					printMat(3, 3, cam->R);
					printMat(3, 1, cam->t);

					logInfo(
							"is at the cam#%d's back : '%s'\n----------------------------------------\n",
							i,
							isAtCameraBack(cam->R, cam->t, p->M) ?
									"yes" : "no");
					//get the number of previous frames
					//				int num = 0;
					//				while (fp != 0) {
					//					num++;
					//					fp = fp->preFrame;
					//				}
					//				logInfo("[%d]number :%d\n", i, num);
				}
			}
			if (isStaticPoint(m_pSLAM->numCams, p, 6.0, M, cov, 10)) {
				//test
				printf("static\n");
			}
			if (isDynamicPoint(m_pSLAM->numCams, p, 6.0, M, cov)) {
				//test
				printf("dynamic\n");
			}

			printf("static frame num:%d\n", p->staticFrameNum);
			//test map registration
			//			Mat_d camDist;
			//			m_pSLAM->getCurrentCamDist(camDist);
			//			MyApp::bDebug = true;
			//			int groupId = m_pSLAM->m_groupId[m_camId];
			//			bool bReg = m_pSLAM->currentStaticPointRegisterInGroup(m_pSLAM->m_groups[groupId], camDist, pFeatPt->mpt,
			//					10.0);
			//			logInfo("register:%d\n", bReg ? 1 : 0);
			MyApp::redrawViews();
		}
		MyApp::redrawViews();
	} else {
		s_clicked_camid = -1;
		s_clicked_featPt = 0;
		s_clicked_mappt = 0;
	}
}