void FrameData::setRotation( const eq::Vector3f& rotation )
{
    _rotation = eq::Matrix4f::IDENTITY;
    _rotation.rotate_x( rotation.x() );
    _rotation.rotate_y( rotation.y() );
    _rotation.rotate_z( rotation.z() );
    setDirty( DIRTY_CAMERA );
}
void FrameData::setModelRotation( const eq::Vector3f& rotation )
{
    _modelRotation = eq::Matrix4f();
    _modelRotation.rotate_x( rotation.x( ));
    _modelRotation.rotate_y( rotation.y( ));
    _modelRotation.rotate_z( rotation.z( ));
    setDirty( DIRTY_CAMERA );
}
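// A minimal usage sketch (not part of the original sources): both setters
// rebuild their matrix from identity before applying the X, Y and Z rotations,
// so the angles are absolute Euler angles in radians, not increments. The
// helper name resetView() is hypothetical.
void resetView( FrameData& frameData )
{
    frameData.setRotation( eq::Vector3f::ZERO );      // camera looks down -Z
    frameData.setModelRotation( eq::Vector3f::ZERO ); // model in rest orientation
}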
void Channel::_updateNearFar( const mesh::BoundingSphere& boundingSphere )
{
    // compute dynamic near/far plane of whole model
    const FrameData& frameData = _getFrameData();
    const eq::Matrix4f& rotation = frameData.getCameraRotation();
    const eq::Matrix4f headTransform = getHeadTransform() * rotation;

    eq::Matrix4f modelInv;
    compute_inverse( headTransform, modelInv );

    const eq::Vector3f zero = modelInv * eq::Vector3f::ZERO;
    eq::Vector3f front = modelInv * eq::Vector3f( 0.0f, 0.0f, -1.0f );

    front -= zero;
    front.normalize();
    front *= boundingSphere.w();

    const eq::Vector3f center =
        frameData.getCameraPosition().get_sub_vector< 3 >() -
        boundingSphere.get_sub_vector< 3 >();
    const eq::Vector3f nearPoint = headTransform * ( center - front );
    const eq::Vector3f farPoint  = headTransform * ( center + front );

    if( useOrtho( ))
    {
        LBASSERTINFO( fabs( farPoint.z() - nearPoint.z() ) >
                      std::numeric_limits< float >::epsilon(),
                      nearPoint << " == " << farPoint );
        setNearFar( -nearPoint.z(), -farPoint.z() );
    }
    else
    {
        // estimate minimal value of near plane based on frustum size
        const eq::Frustumf& frustum = getFrustum();
        const float width  = fabs( frustum.right() - frustum.left() );
        const float height = fabs( frustum.top() - frustum.bottom() );
        const float size   = LB_MIN( width, height );
        const float minNear = frustum.near_plane() / size * .001f;

        const float zNear = LB_MAX( minNear, -nearPoint.z() );
        const float zFar  = LB_MAX( zNear * 2.f, -farPoint.z() );

        setNearFar( zNear, zFar );
    }
}
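// A simplified sketch (illustrative only, not taken from the sources above) of
// the clamping used in the perspective branch: the near plane is kept at a
// small fraction of the frustum size so zNear never degenerates to 0, and the
// far plane is kept at least twice the near plane. The helper name
// clampNearFar() is hypothetical.
#include <algorithm>

static void clampNearFar( const float nearPointZ, const float farPointZ,
                          const float frustumNear, const float frustumSize,
                          float& zNear, float& zFar )
{
    const float minNear = frustumNear / frustumSize * .001f;
    zNear = std::max( minNear, -nearPointZ );  // eye space looks down -Z
    zFar  = std::max( zNear * 2.f, -farPointZ );
}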
bool Tracker::_update()
{
#ifdef WIN32
    return false;
#else
    const ssize_t wrote = write( _fd, COMMAND_POINT, 1 ); // send data
    if( wrote == -1 )
    {
        LBERROR << "Write error: " << lunchbox::sysError << std::endl;
        return false;
    }

    unsigned char buffer[12];
    if( !_read( buffer, 12, 500000 ))
    {
        LBERROR << "Read error" << std::endl;
        return false;
    }

    const short xpos = ( buffer[1] << 8 | buffer[0] );
    const short ypos = ( buffer[3] << 8 | buffer[2] );
    const short zpos = ( buffer[5] << 8 | buffer[4] );

    const short head  = ( buffer[7] << 8 | buffer[6] );
    const short pitch = ( buffer[9] << 8 | buffer[8] );
    const short roll  = ( buffer[11] << 8 | buffer[10] );

    // 32640 is 360 degrees (2pi) -> scale is 1/5194.81734
    const eq::Vector3f hpr( head  / -5194.81734f + M_PI,
                            pitch / -5194.81734f + 2.0f * M_PI,
                            roll  / -5194.81734f + 2.0f * M_PI );

    eq::Vector3f pos;

    // highest value for y and z position of the tracker sensor is 32639,
    // after that it switches back to zero (and vice versa if descending
    // values).
    pos.x() = ypos;
    if( pos.x() > 16320 )  // 32640 / 2 = 16320
        pos.x() -= 32640;

    pos.y() = zpos;
    if( pos.y() > 16320 )
        pos.y() -= 32640;

    pos.z() = xpos;

    pos /= 18000.f; // scale to meter

    // position and rotation are stored in transformation matrix
    // and matrix is scaled to the application's units
    _matrix = eq::Matrix4f();
    _matrix.rotate_x( hpr.x() );
    _matrix.rotate_y( hpr.y() );
    _matrix.rotate_z( hpr.z() );
    _matrix.setTranslation( pos );

    LBINFO << "Tracker pos " << pos << " hpr " << hpr << " = " << _matrix;

    // M = M_world_emitter * M_emitter_sensor * M_sensor_object
    _matrix = _worldToEmitter * _matrix * _sensorToObject;

    LBINFO << "Tracker matrix " << _matrix;

    return true;
#endif
}
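// A small decoding sketch (illustrative, not part of the tracker sources): two
// bytes form a little-endian signed 16-bit value, position values above half of
// the 32640 raw range wrap around to negative, and the result is scaled to
// meters as in Tracker::_update() above. decode16() and rawToMeter() are
// hypothetical helper names.
#include <cstdio>

static short decode16( const unsigned char lo, const unsigned char hi )
{
    return short( hi << 8 | lo );
}

static float rawToMeter( float raw )
{
    if( raw > 16320.f )   // 32640 / 2 = 16320
        raw -= 32640.f;
    return raw / 18000.f; // scale to meter
}

int main()
{
    const unsigned char sample[2] = { 0x40, 0x7f }; // raw value 0x7f40 = 32576
    std::printf( "%.4f m\n", rawToMeter( decode16( sample[0], sample[1] )));
    return 0;
}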