// quaternion_to_matrix must agree element-wise with the reference
// conversion make_matrix_from_quaternion for an arbitrary quaternion.
TEST_F(QuaternionTest, creating_matrix_from_quaternion_gives_correct_matrix)
{
    const auto quaternion = create_random_quaternion();
    const auto expected = make_matrix_from_quaternion(quaternion);
    const auto actual = quaternion_to_matrix(quaternion);

    for (auto idx = 0; idx != 16; ++idx)
        EXPECT_EQ(expected[idx], actual[idx]);
}
// A default-constructed (identity) quaternion must convert to the
// identity rotation matrix, element for element.
TEST_F(QuaternionTest, creating_matrix_from_identity_quaternion_gives_identity_matrix)
{
    const Math::Quaternion<double> identity_quaternion;
    const Math::Matrix4d expected_identity;

    const auto converted = quaternion_to_matrix(identity_quaternion);

    for (auto idx = 0; idx != 16; ++idx)
        EXPECT_EQ(expected_identity[idx], converted[idx]);
}
// Round-trip check: building a quaternion from a rotation matrix and
// converting it back must reproduce the original matrix to numerical
// precision.
TEST_F(QuaternionTest, creating_quaternion_from_simple_rotation_matrix_and_getting_the_matrix_from_the_quaternion_gives_the_same_matrix)
{
    const auto matrix = create_random_rotation_matrix();
    const Math::Quaternion<double> quat(matrix);
    const auto result = quaternion_to_matrix(quat);

    // The values compared here are doubles; the original EXPECT_FLOAT_EQ
    // silently compared them at 4-ULP *float* precision. Use an explicit
    // absolute tolerance instead: rotation-matrix entries lie in [-1, 1],
    // so 1e-12 is far tighter than float precision while still allowing
    // the accumulated rounding of the matrix->quaternion->matrix trip.
    for (auto i = 0; i < 16; ++i) {
        EXPECT_NEAR(matrix[i], result[i], 1e-12);
    }
}
// The identity matrix, taken through a quaternion and back, must come
// out exactly as the identity matrix again.
TEST_F(QuaternionTest, creating_quaternion_from_identity_matrix_and_creating_matrix_from_quaternion_returns_identity)
{
    const Math::Matrix4d expected_identity;
    const Math::Quaternion<double> from_identity(expected_identity);
    const auto round_trip = quaternion_to_matrix(from_identity);

    for (auto idx = 0; idx != 16; ++idx)
        EXPECT_EQ(expected_identity[idx], round_trip[idx]);
}
/*
 * Compute an orientation drift-correction vector for the AHRS filter from
 * the accelerometer and magnetometer readings in *data.
 *
 * Returns a zero vector unless the measured acceleration magnitude is
 * within 0.15 g of 1 g — i.e. the accelerometer is reading mostly gravity
 * and can be trusted as a "down" reference (readings during dynamic
 * acceleration are unreliable).
 */
vector3d ahrs_drift_correction(dataexchange_t *data)
{
    vector3d correction;
    correction.x = 0.0;
    correction.y = 0.0;
    correction.z = 0.0;

    /* Apply correction only when acceleration is close to 1 g. */
    vector3d gravity = adxl345_raw_to_g(data, SCALE_2G_10B);
    double magnitude = vector_magnitude(gravity);
    if (fabs(magnitude - 1.0) > 0.15)
        return correction;

    float strength = 0.15;

    /* Raw sensor vectors used to build the measured reference frame. */
    vector3d accel, mag;
    accel.x = data->acc_x;
    accel.y = data->acc_y;
    accel.z = data->acc_z;
    mag.x = data->mag_x;
    mag.y = data->mag_y;
    mag.z = data->mag_z;
    hmc5883l_applyCalibration(&mag, calib_ptr);

    /* Down/east/north frame from gravity and the magnetic field. */
    vector3d down = vector_inv(accel);
    vector3d east = vector_cross(down, mag);
    vector3d north = vector_cross(east, down);
    vector_norm(&down);
    vector_norm(&east);
    vector_norm(&north);

    /* Rotation matrix of the current attitude quaternion. */
    matrix3x3d attitude = quaternion_to_matrix(data->qr);

    /* Error vector: cross each measured axis with the matching row of
       the attitude matrix and accumulate, then scale by the gain. */
    vector3d error = vector_sum(vector_cross(north, matrix_row_to_vector(&attitude, 1)),
                                vector_cross(east, matrix_row_to_vector(&attitude, 2)));
    error = vector_sum(error, vector_cross(down, matrix_row_to_vector(&attitude, 3)));

    correction = vector_scale(error, strength);
    return correction;
}
/*
 * Entry point.  Parses display and tracker command-line options, starts the
 * selected tracking source (optical tracker, InterSense over UDP, or a plain
 * UDP tracker), creates the OpenGL window, and runs the render loop until the
 * window is closed.
 *
 * NOTE(review): width/height/border/fullscreen/stereo, the viewpoint/object
 * state, frame_timer/frame_counter, trkr and closure_record appear to be
 * file-scope globals defined elsewhere in this file — confirm against the
 * full source.
 */
int main( int argc, char *argv[] ) {

	int arg;

	int use_tracker = FALSE;
	int use_isense = FALSE;
	int use_udp = FALSE;
	int orientation_only = FALSE;

	IsenseDataPacket iSenseData;
	UDPDataPacket udpData;

	int data_available = NO;

	/*
	 * Parse the command line.  Display flags set window geometry and stereo
	 * mode; any argument not recognized as a flag is taken to be the name of
	 * the closure record.
	 */
	for ( arg = 1; arg < argc; arg++ ) {

		if ( !strcmp( argv[arg], "-full" ) ) {
			width = 640;
			height = 480;
			border = false;
			fullscreen = true;
			stereo = false;
		}
		else if ( !strcmp( argv[arg], "-hmd" ) ) {
			width = 1280;
			height = 480;
			border = false;
			fullscreen = true;
			stereo = true;
//			HMDScreen( HMD_STEREO );
		}
		else if ( !strcmp( argv[arg], "-svga" ) ) {
			width = 2048;
			height = 768;
			border = false;
			fullscreen = true;
			stereo = true;
			HMDScreen( HMD_STEREO );
		}
		else if ( !strcmp( argv[arg], "-nVisL" ) ) {
			fprintf( stderr, "LowRes nVis\n" );
			width = 2048;
			height = 768;
			border = false;
			fullscreen = true;
			stereo = true;
//			HMDScreen( HMD_STEREO );
		}
		else if ( !strcmp( argv[arg], "-nVis" ) ) {
			width = 2560;
			height = 1024;
			border = false;
			fullscreen = true;
			stereo = true;
//			HMDScreen( HMD_STEREO );
		}
		else if ( !strcmp( argv[arg], "-noborder" ) ) border = FALSE;
		else if ( !strcmp( argv[arg], "-tracker" ) ) use_tracker = TRUE;
		else if ( !strcmp( argv[arg], "-isense" ) ) use_isense = TRUE;
		else if ( !strcmp( argv[arg], "-udp" ) ) use_udp = TRUE;
		else if ( !strcmp( argv[arg], "-ori" ) ) orientation_only = TRUE;
		else if ( !strcmp( argv[arg], "-sensor" ) ) {
			/* -sensor takes a numeric argument: which sensor drives the viewpoint. */
			arg++;
			if ( arg < argc ) sscanf( argv[arg], "%d", &viewpoint_sensor );
			fprintf( stderr, "Using sensor %d.\n", viewpoint_sensor );
		}
		else closure_record = argv[arg];

	}

	fprintf( stderr, "Closure record: %s\n", closure_record );

	/* Start up optical tracker. */
	if ( use_tracker ) {

		SetupTracker();
		SensorSetHysteresis( 1, 2.0, 0.5 ); // mm and degrees

		/*
		 * The simulated tracker goes through 7 phases.
		 * The first is stationary.
		 * The next three are translation movements along X, Y and Z, respectively.
		 * The last three are rotations around Z, Y and X, respectively.
		 * The following sets the amplitude of each phase.
		 * This call will have no effect on real tracker movements.
		 */
		SimulateSetMovement( viewpoint_sensor, sim_translation, sim_rotation );

		/* Shift the nominal viewpoint up, then tilt the view back down to look at the target. */
		viewpoint_position[Y] = nominal_head_height;
		SimulateSetLocation( viewpoint_sensor, viewpoint_position );
		viewpoint_orientation[Y][Y] = viewpoint_orientation[Z][Z] = cos( radians( nominal_head_tilt ) );
		viewpoint_orientation[Y][Z] = sin( radians( nominal_head_tilt ) ) ;
		viewpoint_orientation[Z][Y] = - viewpoint_orientation[Y][Z];
		SimulateSetOrientation( viewpoint_sensor, viewpoint_orientation );

		SimulateSetLocation( object_sensor, object_position );
		SimulateSetMovement( object_sensor, sim_translation, sim_rotation );

	}

	/* Both the InterSense and plain-UDP sources share one UDP client socket. */
	if ( use_isense || use_udp ) {
		if ( UDPTrackerInitClient( &trkr, NULL, DEFAULT_PORT ) ) {
			MessageBox( NULL, "Error opening socket.", "Isense UDP", MB_OK );
			use_isense = NO;
		}
	}

	/*
	 * Define a viewing projection with:
	 *   45° vertical field-of-view — horizontal fov will be determined by window aspect ratio.
	 *   60 mm inter-pupilary distance — the units don't matter to OpenGL, but all the dimensions
	 *     given for the model room here are in mm.
	 *   100.0 to 10000.0 depth clipping planes — making this smaller would improve the depth resolution.
	 */
	viewpoint = new Viewpoint( 6.0, 45.0, 10.0, 10000.0);

	int x = 100, y = 100;

	/*
	 * Create window.
	 */
	window = new OpenGLWindow();
	window->Border = border; // Remove borders for an HMD display.
	window->FullScreen = fullscreen;
	if ( window->Create( NULL, argv[0], 0, 0, width, height ) ) {
		/*
		 * Create sets the new window to be the active window.
		 * Setup the lights and materials for that window.
		 * NOTE(review): a failed Create falls through and the program continues
		 * to use the window anyway — confirm whether that is intentional.
		 */
		glDefaultLighting();
		glDefaultMaterial();
//		wall_texture->Define();
	}
	window->Activate();
	window->SetKeyboardCallback( keyboard_callback );

	// Shade model
	glEnable(GL_TEXTURE_2D);							// Enable Texture Mapping ( NEW )
	glEnable(GL_LIGHTING);
	glShadeModel(GL_SMOOTH);							// Enable Smooth Shading
	glClearDepth(1.0f);									// Depth Buffer Setup
	glEnable(GL_DEPTH_TEST);							// Enables Depth Testing
	glDepthFunc(GL_LEQUAL);								// The Type Of Depth Testing To Do
	glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_NICEST);	// Really Nice Perspective Calculations

	create_objects();

	/* Report the frame rate every 10 seconds. */
	TimerSet( &frame_timer, 10.0 );
	frame_counter = 0;

	/* Render loop; `angle` spins the object when no tracker drives it. */
	for ( double angle = 0.0; true; angle += 5.0 ) {

		/* Periodic frame-rate report (and an InterSense data dump when enabled). */
		if ( TimerTimeout( &frame_timer )) {
			fprintf( stderr, "Frame rate: %f\n", (double) frame_counter / TimerElapsedTime( &frame_timer ) );
			if ( use_isense ) {
				UDPTrackerGetIsenseData( &trkr, &iSenseData );
				printf("Isense Quarternion %7.3f %7.3f %7.3f %7.3f %.3f\n",
					iSenseData.Station[0].orientation[0],
					iSenseData.Station[0].orientation[1],
					iSenseData.Station[0].orientation[2],
					iSenseData.Station[0].orientation[3],
					iSenseData.Station[0].timestamp );
			}
			TimerStart( &frame_timer );
			frame_counter = 0;
		}

		/* Update the viewpoint from whichever tracking source is active. */
		if ( use_tracker ) {
			data_available = !GetSensorPositionOrientation( viewpoint_sensor, YES, viewpoint_position, viewpoint_orientation );
			if ( data_available ) {
				if ( !orientation_only ) viewpoint->SetPosition( viewpoint_position );
				viewpoint->SetOrientation( viewpoint_orientation );
			}
		}
		else if ( use_isense ) {
			data_available = !( UDPTrackerGetIsenseData( &trkr, &iSenseData ));
			if ( data_available ) {
				isense_to_matrix( iSenseData.Station[0].orientation, viewpoint_orientation );
				isense_to_vector( iSenseData.Station[0].position, viewpoint_position );
				/* Tracker positions arrive in cm(?) — scaled to the model's mm units. */
				scale_vector( 10.0, viewpoint_position, viewpoint_position );
				if ( !orientation_only ) viewpoint->SetPosition( viewpoint_position );
				viewpoint->SetOrientation( viewpoint_orientation );
			}
		}
		else if ( use_udp ) {
			data_available = !( UDPTrackerGetData( &trkr, &udpData ));
			if ( data_available ) {
				quaternion_to_matrix( udpData.Station[0].orientation, viewpoint_orientation );
				copy_vector( udpData.Station[0].position, viewpoint_position );
				scale_vector( 10.0, viewpoint_position, viewpoint_position );
				if ( !orientation_only ) viewpoint->SetPosition( viewpoint_position );
				viewpoint->SetOrientation( viewpoint_orientation );
			}
		}
		else {
			/* NOTE(review): self-assignment is a no-op — presumably a placeholder
			   to keep the last data_available value when no source is active. */
			data_available = data_available;
		}

		/* Update the tracked object (station/sensor 1), or spin it when untracked. */
		if ( use_tracker ) {
			data_available = !GetSensorPositionOrientation( object_sensor, YES, object_position, object_orientation );
			object->SetPosition( object_position );
			object->SetOrientation( object_orientation );
		}
		else if ( use_isense && data_available ) {
			isense_to_matrix( iSenseData.Station[1].orientation, object_orientation );
			isense_to_vector( iSenseData.Station[1].position, object_position );
			scale_vector( 10.0, object_position, object_position );
			object->SetOrientation( object_orientation );
			if ( !orientation_only ) object->SetPosition( object_position );
		}
		else {
			object->SetOrientation( angle, j_vector );
			object->SetPosition( object_position );
		}

		/* Draw one frame: once per eye in stereo, once for the cyclopean eye otherwise. */
		window->Clear();
		if ( stereo ) {
			viewpoint->Apply( window, LEFT_EYE );
			render();
			viewpoint->Apply( window, RIGHT_EYE );
			render();
		}
		else {
			viewpoint->Apply( window, CYCLOPS );
			render();
		}
		window->Swap();

		if ( ! window->RunOnce() ) break;
		frame_counter++;

	}

	window->Destroy();
	if ( use_tracker ) KillTracker();
	RevertScreen();

	return( 0 );

}
/*
 * Find the best-fitting orthorhombic orientation for the elastic stiffness
 * matrix cc (6x6, stored as a flat array of 36 FLT_DBLs).
 *
 * On return, rmat (3x3, flat array of 9) holds the rotation that brings the
 * medium into canonical orientation, with the principal axes relabeled so
 * that Z is the best TI symmetry axis and Y the next best.  The return value
 * is the distance of the best rotated medium from exact orthorhombic
 * symmetry (dist_best).
 */
FLT_DBL
find_ortho (FLT_DBL * cc, FLT_DBL * rmat)
{
    int       kk;
    FLT_DBL   ccrot[6 * 6];
    FLT_DBL   ccortho[6 * 6];
    FLT_DBL   ccrot2[6 * 6];
    FLT_DBL   ccti[6 * 6];
    FLT_DBL   rmat_transp[9];
    FLT_DBL   rmat_temp[9];
    FLT_DBL   rmat_temp2[9];
    FLT_DBL   vec[3];
    FLT_DBL   vec2[3];
    FLT_DBL   dist;
    FLT_DBL   dista[3];
    FLT_DBL   qq[4], qq_best[4];
    double    center[4];
    double    range[4];
    int       count[4];
    int       qindex[4];
    double    inc[4];
    FLT_DBL   phi, theta;
    FLT_DBL   temp;
    FLT_DBL   dist_best;

    /*
     * Keep the compiler from complaining that this may be uninitialized.
     */
    dist_best = NO_NORM;

    /*
     * Search over all possible orientations.
     *
     * Any orientation in 3-space can be specified by a unit vector,
     * giving an axis to rotate around, and an angle to rotate about
     * the given axis.  The orientation is given with respect to some
     * fixed reference orientation.
     *
     * Since rotating by theta degrees about (A,B,C) produces the same
     * result as rotating -theta degrees about (-A,-B,-C), we only
     * need to consider 180 degrees worth of angles, not 360.
     *
     * In this application, we are finding the orientation of an orthorhombic
     * medium.  Orthorhombic symmetry has three orthogonal symmetry planes,
     * so any one octant defines the whole.  We thus only need to search
     * over rotation axes within one octant.
     *
     * Following the article in EDN, March 2, 1995, on page 95, author
     * "Do-While Jones" (a pen name of R. David Pogge),
     * "Quaternions quickly transform coordinates without error buildup",
     * we use quaternions to express the rotation.  The article can be read
     * online here:
     * http://www.reed-electronics.com/ednmag/archives/1995/030295/05df3.htm
     *
     * If (A,B,C) is a unit vector to rotate theta degrees about, then:
     *
     *   q0 = Cos (theta/2)
     *   q1 = A * Sin(theta/2)
     *   q2 = B * Sin(theta/2)
     *   q3 = C * Sin(theta/2)
     *
     * so that q0^2 + q1^2 + q2^2 + q3^2 = 1.  (A unit magnitude quaternion
     * represents a pure rotation, with no change in scale.)
     *
     * For our case, taking advantage of the orthorhombic symmetry to
     * restrict the search space, we have:
     *   0 <= A <= 1
     *   0 <= B <= 1
     *   0 <= C <= 1
     *   0 <= theta <= 180 degrees.
     * The rotation axis direction is limited to within one octant,
     * and the rotation about that axis is limited to half of the full circle.
     *
     * In terms of quaternions, this bounds all four elements between 0 and 1,
     * inclusive.
     */

    /*
     * How much to subdivide each quaternion axis in the original scan.  These
     * were somewhat arbitrarily chosen.  These choices appear to be overkill,
     * but that ensures we won't accidentally miss the correct result by
     * insufficient sampling of the search space.
     *
     * We sample the rotation angle (q0) more finely than the rotation axis.
     */
    count[0] = SUB_ROT;
    count[1] = SUB_POS;
    count[2] = SUB_POS;
    count[3] = SUB_POS;

    /*
     * Between 0. and 1. for all 4 Q's  (That is .5 +- .5.)
     */
    for (kk = 0; kk < 4; kk++)
    {
	range[kk] = .5;
	center[kk] = .5;
	/*
	 * A number meaning "not set yet", to get us through the loop the
	 * first time.  Needs to be much bigger than END_RES.
	 */
	inc[kk] = NOT_SET_YET;
    }

    /*
     * Coarse-to-fine grid search: repeat until the grid spacing on every
     * quaternion axis has shrunk to the required resolution END_RES.
     */
    while (inc[0] > END_RES && inc[1] > END_RES && inc[2] > END_RES && inc[3] > END_RES)
    {
	/*
	 * Update inc to reflect the increment for the current search.
	 */
	for (kk = 0; kk < 4; kk++)
	{
	    inc[kk] = (2. * range[kk]) / (FLT_DBL) (count[kk] - 1);
	}

	/*
	 * Start the 4-dimensional search.  Keep track of the best result
	 * found so far.  The distance must be non-negative; we use -1 to mean
	 * "not set yet".
	 */
	dist_best = NO_NORM;

	for (qindex[3] = 0; qindex[3] < count[3]; qindex[3]++)
	    for (qindex[2] = 0; qindex[2] < count[2]; qindex[2]++)
		for (qindex[1] = 0; qindex[1] < count[1]; qindex[1]++)
		    for (qindex[0] = 0; qindex[0] < count[0]; qindex[0]++)
		    {
			/*
			 * Calculate the quaternion for this search point.
			 * The term in parenthesis ranges from -1 to +1,
			 * inclusive, so qq ranges from (-range+center)
			 * to (+range + center).
			 */
			for (kk = 0; kk < 4; kk++)
			{
			    qq[kk] = range[kk] * (((FLT_DBL) (2 * qindex[kk] - (count[kk] - 1))) / ((FLT_DBL) (count[kk] - 1))) + center[kk];
			}

			/*
			 * Convert from a quaternion to a rotation matrix.
			 * The subroutine also takes care of normalizing the
			 * quaternion.
			 */
			quaternion_to_matrix (qq, rmat);

			/*
			 * Apply the rotation matrix to the elastic stiffness
			 * matrix.
			 */
			rotate_tensor (ccrot, cc, rmat);

			/*
			 * Find the distance of the rotated medium from
			 * orthorhombic aligned with the coordinate axes.
			 */
			dist = ortho_distance (ccortho, ccrot);

			/*
			 * If it's the best found so far, or the first time
			 * through, remember it.
			 */
			if (dist < dist_best || dist_best < 0.)
			{
			    dist_best = dist;
			    for (kk = 0; kk < 4; kk++)
				qq_best[kk] = qq[kk];
			}
		    }

	/*
	 * Refine for the next, finer, search.  To avoid any possible problem
	 * caused by the optimal solution landing at an edge, we search over
	 * twice the distance between the two search points from the previous
	 * iteration.
	 */
	for (kk = 0; kk < 4; kk++)
	{
	    center[kk] = qq_best[kk];
	    count[kk] = SUBDIVIDE;
	    range[kk] = inc[kk];
	}

	/*
	 * We keep refining and searching the ever finer grid until we
	 * achieve the required accuracy, at which point we fall out the
	 * bottom of the loop here.
	 */
    }

    /*
     * We've got the answer to sufficient resolution... clean it up a bit,
     * then output it.
     *
     * Convert the best answer from a quaternion back to a rotation matrix.
     */
    quaternion_to_matrix (qq_best, rmat);

    /*
     * To make the order of the axes unique, we sort the principal axes
     * according to how well they work as a TI symmetry axis.
     *
     * Specifically, since after rotation the medium is canonically oriented,
     * with the X, Y, and Z axes the principal axes, the INVERSE rotation
     * must take the X, Y, and Z axes to the original arbitrarily oriented
     * principal axes.  So we first inverse-rotate a coordinate axis back to a
     * principal axis.  We then use vector_to_angles to give us the Euler
     * angles theta and phi for the principal axis.  make_rotation_matrix then
     * constructs a rotation matrix that rotates that principal axis to +Z.
     * We then use that matrix to rotate the tensor.  We then measure its
     * distance from VTI, and remember that distance.
     */

    /*
     * First we need to find the inverse (the same as the transpose, because
     * it's _unitary_) of the rotation matrix rmat.
     */
    transpose_matrix (rmat_transp, rmat);

    /* Test the X axis */
    vec[0] = 1.;
    vec[1] = 0.;
    vec[2] = 0.;
    matrix_times_vector (vec2, rmat_transp, vec);
    vector_to_angles (vec2, &phi, &theta);
    make_rotation_matrix (theta, phi, 0., rmat_temp);
    rotate_tensor (ccrot2, cc, rmat_temp);
    dista[0] = ti_distance (ccti, ccrot2);

    /* Test the Y axis */
    vec[0] = 0.;
    vec[1] = 1.;
    vec[2] = 0.;
    matrix_times_vector (vec2, rmat_transp, vec);
    vector_to_angles (vec2, &phi, &theta);
    make_rotation_matrix (theta, phi, 0., rmat_temp);
    rotate_tensor (ccrot2, cc, rmat_temp);
    dista[1] = ti_distance (ccti, ccrot2);

    /* Test the Z axis */
    vec[0] = 0.;
    vec[1] = 0.;
    vec[2] = 1.;
    matrix_times_vector (vec2, rmat_transp, vec);
    vector_to_angles (vec2, &phi, &theta);
    make_rotation_matrix (theta, phi, 0., rmat_temp);
    rotate_tensor (ccrot2, cc, rmat_temp);
    dista[2] = ti_distance (ccti, ccrot2);

    /*
     * See which axis best functions as a TI symmetry axis, and make that one
     * the Z axis.  (dista[] is kept in sync with the relabeling via swaps.)
     */
    if (dista[2] <= dista[1] && dista[2] <= dista[0])
    {
	/* The Z axis is already the best.  No rotation needed. */
	make_rotation_matrix (0., 0., 0., rmat_temp);
    }
    else if (dista[1] <= dista[2] && dista[1] <= dista[0])
    {
	/* Rotate Y to Z */
	make_rotation_matrix (0., 90., 0., rmat_temp);
	temp = dista[2];
	dista[2] = dista[1];
	dista[1] = temp;
    }
    else
    {
	/* Rotate X to Z */
	make_rotation_matrix (90., 90., -90., rmat_temp);
	temp = dista[2];
	dista[2] = dista[0];
	dista[0] = temp;
    }

    /*
     * Accumulate this axis-relabeling rotation (rmat_temp) onto the original
     * rotation (rmat).
     */
    matrix_times_matrix (rmat_temp2, rmat_temp, rmat);

    /*
     * Now find the next-best TI symmetry axis and make that one the Y axis.
     */
    if (dista[1] <= dista[0])
    {
	/* Already there; do nothing. */
	make_rotation_matrix (0., 0., 0., rmat_temp);
    }
    else
    {
	/* Rotate X to Y */
	make_rotation_matrix (90., 0., 0., rmat_temp);
	temp = dista[1];
	dista[1] = dista[0];
	dista[0] = temp;
    }

    /*
     * Accumulate the new axis relabeling rotation (rmat_temp) onto the
     * combined previous rotation matrix (rmat_temp2) to produce the final
     * desired result, rmat.  The axes should now be in sorted order.
     */
    matrix_times_matrix (rmat, rmat_temp, rmat_temp2);

    return dist_best;
}