Example 1
Quaternion::Quaternion(Acceleration* mAcceleration, Omega* mOmega, EncoderYaw* mEncoderYaw) :
	_mAcceleration(mAcceleration), _mOmega(mOmega),
	IsUseCompass(false), IsUseEncoderYaw(true), _mCompass(0),
	Interval(0), Valid(false), _mEncoderYaw(mEncoderYaw){
	PrevT.setZero();
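	// Kalman tuning (assuming a Kalman(initialState, Q, R) signature):
	// Q = process-noise covariance (small: trust the prediction),
	// R = measurement-noise covariance (larger: accelerometer angles are noisy).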
	Matrix3f Q;
	Q.setIdentity();
	Q *= 1e-6f;
	Matrix3f R;
	R.setIdentity();
	R *= 1e-1f;
	_QuaternionKalman = new Kalman(mAcceleration->getAngle(), Q, R);
	PrevTick = App::mApp->mTicks->getTicks();
}
Example 2
static inline Matrix3f createRotation(const Vector3f & _axis, float angle)
{
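	// Rodrigues' rotation formula: R = cos(a)*I + (1 - cos(a))*n*n^T + sin(a)*[n]_x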
	Vector3f axis = normalize(_axis);
	float si = sinf(angle);
	float co = cosf(angle);

	Matrix3f ret;
	ret.setIdentity();

	ret *= co;
	for (int r = 0; r < 3; ++r)
		for (int c = 0; c < 3; ++c)
			ret.at(c, r) += (1.0f - co) * axis[c] * axis[r];

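	// sin(a) * [n]_x : the skew-symmetric cross-product matrix of the axis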
	Matrix3f skewmat;
	skewmat.setZeros();
	skewmat.at(1,0) = -axis.z;
	skewmat.at(0,1) =  axis.z;
	skewmat.at(2,0) =  axis.y;
	skewmat.at(0,2) = -axis.y;
	skewmat.at(2,1) = -axis.x;
	skewmat.at(1,2) =  axis.x;
	skewmat *= si;
	ret += skewmat;

	return ret;
}
Example 3
bool MatrixTest::setIdentityTests(void)
{
	Matrix3f A;
	A.setIdentity();

	for (int i = 0; i < 3; i++) {
		for (int j = 0; j < 3; j++) {
			if (i == j) {
				ut_test(fabs(A(i, j) -  1) < 1e-7);

			} else {
				ut_test(fabs(A(i, j) -  0) < 1e-7);
			}
		}
	}

	return true;
}
Example 4
int main()
{
    Matrix3f A;
    A.setIdentity();

    for (int i = 0; i < 3; i++) {
        for (int j = 0; j < 3; j++) {
            if (i == j) {
                TEST( fabs(A(i, j) -  1) < 1e-7);

            } else {
                TEST( fabs(A(i, j) -  0) < 1e-7);
            }
        }
    }

    return 0;
}
Example 5
bool MatrixTest::matrixMultTests(void)
{
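	// A is lower unitriangular and A_I is its inverse, so A * A_I must equal I.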
	float data[9] = {1, 0, 0, 0, 1, 0, 1, 0, 1};
	Matrix3f A(data);
	float data_check[9] = {1, 0, 0, 0, 1, 0, -1, 0, 1};
	Matrix3f A_I(data_check);
	Matrix3f I;
	I.setIdentity();
	Matrix3f R = A * A_I;
	ut_test(isEqual(R, I));

	Matrix3f R2 = A;
	R2 *= A_I;
	ut_test(isEqual(R2, I));


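	// emult is the element-wise (Hadamard) product: (2I) emult (2I) = 4I.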
	Matrix3f A2 = eye<float, 3>() * 2;
	Matrix3f B = A2.emult(A2);
	Matrix3f B_check = eye<float, 3>() * 4;
	ut_test(isEqual(B, B_check));

	return true;
}
Example 6
bool Quaternion::Update(){
	if(_mOmega->getIsValided()){
		Vector3f omega = _mOmega->getOmega() * MathTools::RADIAN_PER_DEGREE;
		Vector4f q;
		q << 0, omega[0], omega[1], omega[2];
		Vector4f t;
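		// t = (0, omega) quaternion-multiplied by the current quaternion: the attitude
		// kinematics q_dot = 0.5 * (0, omega) (x) q (the 0.5*dt factor is applied below).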
		t[0] = -q[1]*_Quaternion[1]-q[2]*_Quaternion[2]-q[3]*_Quaternion[3];
		t[1] = q[1]*_Quaternion[0]+q[2]*_Quaternion[3]-q[3]*_Quaternion[2];
		t[2] = -q[1]*_Quaternion[3]+q[2]*_Quaternion[0]+q[3]*_Quaternion[1];
		t[3] = q[1]*_Quaternion[2]-q[2]*_Quaternion[1]+q[3]*_Quaternion[0];
		Interval = App::mApp->mTicks->getTicks() - PrevTick;
		PrevTick = App::mApp->mTicks->getTicks();
		Interval /= 1000.0f;
		if(Interval <= 0){
			Valid = false;
			return false;
		}
		t *= 0.5f * Interval;
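		// Trapezoidal update: average the current and previous derivative increments.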
		q = _Quaternion + 0.5f * (t + PrevT);
		PrevT = t;
		q.normalize();
		Vector3f e = QuaternionToEuler(q);
		bool valid = true;
		// Accept the accelerometer only when its magnitude is within 5% of 1 g
		// (i.e. no strong linear acceleration corrupting the gravity direction).
		bool AccValid = _mAcceleration->getIsValided()
			&& (_mAcceleration->getAcc().norm() > Acceleration::Gravity * 0.95f)
			&& (_mAcceleration->getAcc().norm() < Acceleration::Gravity * 1.05f);
		Vector3f angle;
		bool MagValid = false;

		if(IsUseCompass){
			MagValid = _mCompass->getIsValided() && _mCompass->getMag().norm() > 0.9f && _mCompass->getMag().norm() < 1.1f;
		}
		if(AccValid){
			angle = _mAcceleration->getAngle();
			if(IsUseCompass && MagValid){
				angle[2] = _mCompass->getAngle()[2];
			}
			else if(IsUseEncoderYaw){
				angle[2] = _mEncoderYaw->getYaw();
			}
			else{
				angle[2] = e[2];
			}
		}
		else{
			angle[0] = e[0];
			angle[1] = e[1];
			if(IsUseCompass && MagValid){
				angle[2] = _mCompass->getAngle()[2];
			}
			else if(IsUseEncoderYaw){
				angle[2] = _mEncoderYaw->getYaw();
			}
			else{
				angle[2] = e[2];
			}
		}

		Matrix3f A;
		A.setIdentity();
		Matrix3f H;
		H.setIdentity();
		if(valid && AccValid && _QuaternionKalman->Filtering(A, e, H, angle)){
			angle = _QuaternionKalman->getCorrectedData();
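			// x != x is true only for NaN: reject the update if the filter diverged.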
			for(int i = 0; i < 3; i++){
				if(angle[i] != angle[i]){
					Valid = false;
					return false;
				}
			}
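			// Near the +/-pi yaw wrap-around, bypass the filter and take the encoder yaw directly.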
			if(IsUseEncoderYaw && fabs(fabs(_mEncoderYaw->getYaw()) - MathTools::PI) < 0.01f){
				angle[2] = _mEncoderYaw->getYaw();
			}
			_Euler = angle;
		}
		else{
			_Euler = e;
		}
		_Quaternion = EulerToQuaternion(_Euler);
		return true;
	}
	else{
		Valid = false;
		return false;
	}
}
Example 7
HRESULT PrimeSenseSensor::createFirstConnected()
{
	HRESULT hr = S_OK;

	openni::Status rc = openni::STATUS_OK;
	const char* deviceURI = openni::ANY_DEVICE;

	rc = openni::OpenNI::initialize();

	std::cout << "After initialization: " << openni::OpenNI::getExtendedError() << std::endl;

	// Create Device
	rc = m_device.open(deviceURI);
	if (rc != openni::STATUS_OK)
	{
		std::cout << "Device open failed: " << openni::OpenNI::getExtendedError() << std::endl;
		openni::OpenNI::shutdown();
		return S_FALSE;
	}

	openni::PlaybackControl* pc = m_device.getPlaybackControl();	// NULL for live devices; only meaningful when replaying a recorded file

	// Create Depth Stream
	rc = m_depthStream.create(m_device, openni::SENSOR_DEPTH);
	if (rc == openni::STATUS_OK)
	{
		rc = m_depthStream.start();
		if (rc != openni::STATUS_OK)
		{
			std::cout << "Couldn't start depth stream: " << openni::OpenNI::getExtendedError() << std::endl;
			m_depthStream.destroy();
		}
	}
	else
	{
		std::cout << "Couldn't find depth stream: " << openni::OpenNI::getExtendedError() << std::endl;
	}

	// Create Color Stream
	rc = m_colorStream.create(m_device, openni::SENSOR_COLOR);
	if (rc == openni::STATUS_OK)
	{
		rc = m_colorStream.start();
		if (rc != openni::STATUS_OK)
		{
			std::cout << "Couldn't start color stream: " << openni::OpenNI::getExtendedError() << " Return code: " << rc << std::endl;
			m_colorStream.destroy();
		}
	}
	else
	{
		std::cout << "Couldn't find color stream: " << openni::OpenNI::getExtendedError() << std::endl;
	}
	if (GlobalAppState::get().s_enableColorCropping) 
	{
		if (m_colorStream.isCroppingSupported()) {
			m_colorStream.setCropping(GlobalAppState::get().s_colorCropX, GlobalAppState::get().s_colorCropY, GlobalAppState::get().s_colorCropWidth, GlobalAppState::get().s_colorCropHeight);
		}
		else {
			std::cout << "cropping is not supported for this color stream" << std::endl;
		}
	}

	// Check Streams
	if (!m_depthStream.isValid() || !m_colorStream.isValid())
	{
		std::cout << "No valid streams. Exiting" << std::endl;
		openni::OpenNI::shutdown();
		return S_FALSE;
	}

	openni::CameraSettings* cs = m_colorStream.getCameraSettings();

	//cs->setAutoWhiteBalanceEnabled(false);
	//cs->setAutoExposureEnabled(false);
	//cs->setGain(1);
	//cs->setExposure(10);

	std::cout << "getGain: " << cs->getGain() << std::endl;
	std::cout << "getExposure: " << cs->getExposure() << std::endl;
	std::cout << "getAutoExposureEnabled: " << cs->getAutoExposureEnabled() << std::endl;
	std::cout << "getAutoWhiteBalanceEnabled: " << cs->getAutoWhiteBalanceEnabled() << std::endl;

	// Get Dimensions
	m_depthVideoMode = m_depthStream.getVideoMode();
	m_colorVideoMode = m_colorStream.getVideoMode();

	int depthWidth = m_depthVideoMode.getResolutionX();
	int depthHeight = m_depthVideoMode.getResolutionY();
	int colorWidth = m_colorVideoMode.getResolutionX();
	int colorHeight = m_colorVideoMode.getResolutionY();

	RGBDSensor::init(depthWidth, depthHeight, colorWidth, colorHeight, 1);

	m_streams = new openni::VideoStream*[2];
	m_streams[0] = &m_depthStream;
	m_streams[1] = &m_colorStream;

	if (rc != openni::STATUS_OK)
	{
		openni::OpenNI::shutdown();
		return S_FALSE;
	}

	m_device.setImageRegistrationMode(openni::IMAGE_REGISTRATION_DEPTH_TO_COLOR);

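	// Pinhole model: focal length in pixels, f = (imageSize / 2) / tan(FOV / 2).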
	float focalLengthX = (depthWidth/2.0f)/tan(m_depthStream.getHorizontalFieldOfView()/2.0f);
	float focalLengthY = (depthHeight/2.0f)/tan(m_depthStream.getVerticalFieldOfView()/2.0f);
	initializeDepthIntrinsics(focalLengthX, focalLengthY, depthWidth/2.0f, depthHeight/2.0f);

	focalLengthX =  (colorWidth/2.0f)/tan(m_colorStream.getHorizontalFieldOfView()/2.0f);
	focalLengthY =  (colorHeight/2.0f)/tan(m_colorStream.getVerticalFieldOfView()/2.0f);
	initializeColorIntrinsics(focalLengthX, focalLengthY, colorWidth/2.0f, colorHeight/2.0f);

	Matrix3f R; R.setIdentity(); Vector3f t; t.setZero();
	initializeColorExtrinsics(R, t);

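	// Hard-coded depth-to-color rotation from an offline calibration; currently unused,
	// since the matching initializeDepthExtrinsics call below is commented out.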
	R(0, 0) =  9.9991741106823473e-001; R(0, 1) =  3.0752530258331304e-003; R(0, 2) = -1.2478536028949385e-002;
	R(1, 0) = -3.0607678272497924e-003; R(1, 1) =  9.9999461994140826e-001; R(1, 2) =  1.1797408808971066e-003;
	R(2, 0) =  1.2482096895408091e-002; R(2, 1) = -1.1414495457493831e-003; R(2, 2) =  9.9992144408949846e-001;

	//t[0] = -2.5331974929667012e+001;  t[1] = 6.1798287248283634e-001; t[2] = 3.8510108109251804e+000;
	//t[0] /= 1000.0f; t[1] /= 1000.0f; t[2] /= 1000.0f;
	//
	//initializeDepthExtrinsics(R, t);

	return hr;
}
Example 8
void CalibrationNode::performEstimation(){
	if(rotationRB_vec.size() < 5){
		std::cout << "Insufficient data: at least 5 pose pairs are required" << std::endl;
		return;
	}

	//perform least-squares estimation (hand-eye calibration: solve AX = XB
	//for the camera pose relative to the robot tool-tip)
	Matrix3f M;
	Matrix4f rbi, rbj, cbi, cbj;
	Matrix4f A, B;

	Matrix3f I;
	I.setIdentity();

	MatrixXf C(0,3), bA(0,1), bB(0,1);

	Vector3f ai, bi;

	VectorXf V_tmp;
	MatrixXf C_tmp;

	M.setZero();

	for(int i=0; i < (int)rotationRB_vec.size(); i++){
		for(int j=0; j < (int)rotationRB_vec.size(); j++){
			if(i!=j){
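				// A: relative robot tool motion between poses i and j;
				// B: the corresponding relative camera motion (one AX = XB pair).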
				rbi << rotationRB_vec[i] , translationRB_vec[i] ,  0, 0, 0, 1;
				rbj << rotationRB_vec[j] , translationRB_vec[j] ,  0, 0, 0, 1;
				A = rbj.inverse()*rbi;

				cbi << rotationCB_vec[i] , translationCB_vec[i] ,  0, 0, 0, 1;
				cbj << rotationCB_vec[j] , translationCB_vec[j] ,  0, 0, 0, 1;
				B = cbj*cbi.inverse();

				ai = getLogTheta(A.block(0,0,3,3));
				bi = getLogTheta(B.block(0,0,3,3));

				M += bi*ai.transpose();

				C_tmp = C;
				C.resize(C.rows()+3, NoChange);
				C << C_tmp,  Matrix3f::Identity() - A.block(0,0,3,3);

				V_tmp = bA;
				bA.resize(bA.rows()+3, NoChange);
				bA << V_tmp, A.block(0,3,3,1);

				V_tmp = bB;
				bB.resize(bB.rows()+3, NoChange);
				bB << V_tmp, B.block(0,3,3,1);

			}//end of if i!=j
		}
	}//end of for(.. i < rotationRB_vec.size(); ..)

#if ESTIMATION_DEBUG
	std::cout << "M = [ " << M << " ]; " << endl;
#endif

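	// Closed-form rotation (cf. Park & Martin): Theta_X = (M^T M)^(-1/2) * M^T,
	// with the inverse square root computed via the eigendecomposition of M^T M.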
	EigenSolver<Matrix3f> es(M.transpose()*M);
	Matrix3cf D = es.eigenvalues().asDiagonal();
	Matrix3cf V = es.eigenvectors();

	Matrix3cf Lambda = D.inverse().array().sqrt();
	Matrix3cf Theta_X = V * Lambda * V.inverse() * M.transpose();
	std::cout << "Orientation of Camera Frame with respect to Robot tool-tip frame." << std::endl;
	std::cout << "Theta_X = [ " << Theta_X.real()  << " ]; " << endl;

	//Estimating translational offset: solve (I - R_A) * t_X = t_A - Theta_X * t_B in the least-squares sense
	for(int i=0; i < bB.rows()/3; i++){
		bB.block(i*3,0,3,1) = Theta_X.real()*bB.block(i*3,0,3,1);
	}
	bA = bA - bB; // this is d. saving memory

	std::cout << "Translation of Camera Frame with respect to Robot tool-tip frame." << std::endl;
	cout << "bX = [ " << (C.transpose()*C).inverse() * C.transpose() * bA << " ]; " << endl;

}