/**
	 * updateCollisions
	 */
	void PhysicsManagerImpl::updateCollisions()
	{
		// Should eventually be done through an abstraction of the
		//	"engine"
		std::list<rigidbody*>::iterator it = collisions.begin();
		while(it != collisions.end())
		{
#ifdef USE_IRRLICHT_ENGINE
			rigidbody * obj = *it;
			if(obj == 0)
			{
				// erase() invalidates the erased iterator; continue from the one it returns
				it = collisions.erase(it);
				continue;
			}

			scenenode * node = static_cast<scenenode*>(obj->getUserPointer());
			if(node)
			{
				// Bullet specific code
				// Position: copy the rigid body's centre of mass into the scene node
				btVector3 p = obj->getCenterOfMassPosition();
				node->setPosition(irr::core::vector3df((f32)p[0], (f32)p[1], (f32)p[2]));

				// Rotation: convert the Bullet orientation quaternion to Euler angles
				btVector3 e(quatToEuler(obj->getOrientation()));
				node->setRotation(irr::core::vector3df((f32)e[0], (f32)e[1], (f32)e[2]));
			} else {
				// no scene node attached; queue this body for removal
				colRemovals.push(obj);
			}
#endif
			++it;
		}

		// Removal queue
		queueProcess();
	}
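All of the examples on this page call some project-specific quatToEuler helper, and the overloads clearly differ between projects (a Bullet btQuaternion above, array and reference overloads below). As a neutral point of reference, here is a minimal sketch of the standard unit-quaternion to roll/pitch/yaw conversion; the names Quat, Euler and quatToEulerSketch are made up for this illustration, and each project's real helper may use a different axis order or sign convention.

#include <cmath>

// Hypothetical minimal types for the sketch; the real projects use their own.
struct Quat  { double w, x, y, z; };
struct Euler { double roll, pitch, yaw; };

// Convert a unit quaternion to roll/pitch/yaw (ZYX / aerospace convention).
static Euler quatToEulerSketch(const Quat &q)
{
	Euler e;
	// roll: rotation about the X axis
	e.roll  = std::atan2(2.0 * (q.w * q.x + q.y * q.z),
	                     1.0 - 2.0 * (q.x * q.x + q.y * q.y));
	// pitch: rotation about the Y axis, clamped so rounding never feeds asin a value > 1
	double s = 2.0 * (q.w * q.y - q.z * q.x);
	if (s >  1.0) s =  1.0;
	if (s < -1.0) s = -1.0;
	e.pitch = std::asin(s);
	// yaw: rotation about the Z axis
	e.yaw   = std::atan2(2.0 * (q.w * q.z + q.x * q.y),
	                     1.0 - 2.0 * (q.y * q.y + q.z * q.z));
	return e;
}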
Example #2
myQuat_t getPosition(mpu9250_t dev)
{
    imuData_t imuData;
    if (getIMUData(dev, &imuData))
    {
        if (! validIMUData(imuData))
        {
            puts("invalid IMU data");
            if (failureCount++ > 20)
            {
                puts("Error in IMU, restarting!");
                identifyYourself("IMU failure");
                if (! initialiseIMU(&dev))
                {
                    puts("Could not initialise IMU! dying...");
                    exit(1);
                }
            }
            return currentQ;
        }
        failureCount = 0;

        /************************************************************************
         * Get gyro data that records the current angular velocity and create an
         * estimated quaternion representation of orientation using this
         * velocity, the time interval since the last calculation, and the
         * previous orientation:
         * change in orientation = angular velocity * time,
         * new orientation = old orientation + change in orientation
         ************************************************************************/
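        /* Sketch of the maths the makeQuatFromAngularVelocityTime helper below
         * is assumed to perform (its implementation is not shown on this page):
         *   theta = |omega| * dt                      total rotation over the step
         *   axis  = omega / |omega|                   unit rotation axis
         *   dq    = (cos(theta/2), axis*sin(theta/2))
         * with omega converted from degrees/sec to radians/sec if needed.
         * The new estimate is then currentQ * dq: the previous orientation
         * composed with the small rotation measured over this time step.
         */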

        // get orientation as deduced by adding gyro measured rotational
        // velocity (times time interval) to previous orientation.
        double gx1 = (double)(imuData.gyro.x_axis);
        double gy1 = (double)(imuData.gyro.y_axis);
        double gz1 = (double)(imuData.gyro.z_axis);
        double omega[3] = { gx1, gy1, gz1 }; // this will be in degrees/sec
        //double omegaMagnitude = fabs(vecLength(omega));

        uint32_t dt = imuData.ts - lastIMUData.ts; // dt in microseconds
        lastIMUData = imuData;
        myQuat_t gRot = makeQuatFromAngularVelocityTime(omega, dt/1000000.0);
        myQuat_t gyroDeducedQ =  quatMultiply(currentQ, gRot);
        quatNormalise(&gyroDeducedQ);
        if (! useGyroscopes)
        {
            makeIdentityQuat(&gyroDeducedQ);
        }

        if (! (useAccelerometers || useMagnetometers))
        {
            // return now with just the gyro data
            currentQ = gyroDeducedQ;
            lastIMUData = imuData;
            return currentQ;
        }

        // get orientation from the gyro as Roll Pitch Yaw (ie Euler angles)
        double gyroDeducedYPR[3];
        quatToEuler(gyroDeducedQ, gyroDeducedYPR);
        double currentYPR[3];
        quatToEuler(currentQ, currentYPR);


        // Calculate a roll/pitch/yaw from the accelerometers and magnetometers.

        /************************************************************************
         * Get Accelerometer data that records gravity direction and create a
         * 'measured' Quaternion representation of orientation (will only be
         * pitch and roll, not yaw)
         ************************************************************************/

        // get gravity direction from accelerometer
        double ax1 = imuData.accel.x_axis / 1024.0;
        double ay1 = imuData.accel.y_axis / 1024.0;
        double az1 = imuData.accel.z_axis / 1024.0;
        double downSensor[3] = { ax1, ay1, az1 };
        if (fabs(vecLength(downSensor) - 0.98) > 0.5)
        {
            // excessive accelerometer reading, bail out
            //printf("too much accel: %f ", fabs(vecLength(downSensor)));
            //dumpVec(downSensor);
            currentQ = gyroDeducedQ;
            lastIMUData = imuData;
            return currentQ;
        }
        vecNormalise(downSensor);

        // The accelerometers can only measure pitch and roll (in the world
        // reference frame); calculate pitch and roll from the accelerometer
        // readings and set yaw = 0.

        double ypr[3];
        double downX = downSensor[X_AXIS];
        double downY = downSensor[Y_AXIS];
        double downZ = downSensor[Z_AXIS];

        // add a little X into Z when calculating roll to compensate for
        // situations where pitch is near vertical, as Y and Z will be near 0
        // and the result would be unstable
        double fiddledDownZ = sqrt(downZ*downZ + 0.001*downX*downX);
        if (downZ <= 0) // compensate for loss of sign when squaring Z
            fiddledDownZ *= -1.0;
        ypr[ROLL] = atan2(downY, fiddledDownZ);
        ypr[PITCH] = -atan2(downX, sqrt(downY*downY + downZ*downZ));
        ypr[YAW] = 0;  // yaw == 0, accelerometer cannot measure yaw
        if (! useAccelerometers)
        {
            // without accelerometers, fall back to the roll, pitch and yaw of
            // the gyroscope-derived orientation
            ypr[PITCH] = gyroDeducedYPR[PITCH];
            ypr[ROLL] = gyroDeducedYPR[ROLL];
            ypr[YAW] = gyroDeducedYPR[YAW];
        }
        else if (vecLength(downSensor) == 0)
        {
            puts("weightless?");
            // something wrong! - weightless??
            ypr[PITCH] = gyroDeducedYPR[PITCH];
            ypr[ROLL] = gyroDeducedYPR[ROLL];
            ypr[YAW] = gyroDeducedYPR[YAW];
        }

        if (! useMagnetometers)
        {
            ypr[YAW] = gyroDeducedYPR[YAW];
        }
        myQuat_t accelQ = eulerToQuat(ypr);
        myQuat_t invAccelQ = quatConjugate(accelQ);


        /************************************************************************
         * Get Magnetometer data that records north direction and create a
         * 'measured' Quaternion representation of orientation (will only be
         * yaw)
         ************************************************************************/

        // get magnetometer indication of North
        double mx1 = (double)(imuData.mag.x_axis - magHardCorrection[X_AXIS]);
        double my1 = (double)(imuData.mag.y_axis - magHardCorrection[Y_AXIS]);
        double mz1 = (double)(imuData.mag.z_axis - magHardCorrection[Z_AXIS]);
        mx1 *= magSoftCorrection[X_AXIS];
        my1 *= magSoftCorrection[Y_AXIS];
        mz1 *= magSoftCorrection[Z_AXIS];
        // NB MPU9250 has different XYZ axis for magnetometers than for gyros
        // and accelerometers so juggle x,y,z here...
        double magV[3] = { my1, mx1, -mz1 };
        // make a pure-vector quaternion representing the mag reading; check the
        // length before normalising so a zero field cannot cause a divide by zero
        myQuat_t magQ;
        if (vecLength(magV) == 0)
        {
            // something wrong! - no magnetic field??
            //puts("not on earth?");
            magQ = quatFromValues(1, 0, 0, 0);
        }
        else
        {
            vecNormalise(magV);
            magQ = quatFromValues(0, magV[X_AXIS], magV[Y_AXIS], magV[Z_AXIS]);
        }

        // the magnetometers can only measure yaw (in the world reference
        // frame), adjust the yaw of the roll/pitch/yaw values calculated
        // earlier to account for the magnetometer readings.
        if (useMagnetometers) 
        {
            // pitch and roll the mag direction using accelerometer info to
            // create a quaternion that represents the IMU as if it was
            // horizontal (so we can get pure yaw)
            myQuat_t horizQ = quatMultiply(accelQ, magQ);
            horizQ = quatMultiply(horizQ, invAccelQ);

            // calculate pure yaw
            ypr[YAW] = -atan2(horizQ.y, horizQ.x);
        }

        // measuredQ is the orientation as measured using the
        // accelerometer/magnetometer readings
        myQuat_t measuredQ = eulerToQuat(ypr);
        // check measured and deduced orientations are not wildly different (eg
        // due to a 360d alias flip) and adjust if problem...
        measuredQ = adjustForCongruence(measuredQ, gyroDeducedQ);

        /*************************************************************************
         * The gyro estimated orientation will be smooth and precise over short
         * time intervals but small errors will accumulate causing it to
         * drift over time.  The 'measured' orientation as obtained from the
         * magnetometer and accelerometer will be jumpy but will always average
         * around the correct orientation.
         *
         * Take the mag and accel 'measured' orientation and the gyro
         * 'estimated' orientation and create a new 'current' orientation that
         * is the estimated orientation corrected slightly towards the measured
         * orientation.  That way we get the smooth precision of the gyros but
         * eliminate the accumulation of drift errors.
         ************************************************************************/

        if (useGyroscopes)
        {
            // the new orientation is the gyro-deduced orientation corrected
            // towards the accel/mag measured orientation using spherical
            // interpolation (slerp) between quaternions.
            currentQ = slerp(gyroDeducedQ, measuredQ, 0.02);
        }
        else
        {
            currentQ = measuredQ;
        }

        if (! isQuatValid(currentQ))
        {
            puts("error in quaternion, reseting orientation...");
            displayData(imuData);
            dumpQuat(currentQ);
            makeIdentityQuat(&currentQ);
        }
    }
    return currentQ;
}
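The comment blocks in getPosition describe a complementary filter: gyro integration gives a smooth, precise short-term estimate, while the accelerometer/magnetometer "measured" orientation is noisy but drift-free, so the code nudges the gyro estimate towards it each step (the slerp(..., 0.02) call). The sketch below condenses that one fusion step into self-contained code; the type Q and the helpers qMul, qNlerp and fuseStep are invented for the illustration (the real code uses myQuat_t, quatMultiply and slerp), and nlerp is used here as a cheaper stand-in for slerp, which is adequate for small correction factors.

#include <cmath>

// Hypothetical minimal quaternion type for the sketch.
struct Q { double w, x, y, z; };

// Hamilton product of two quaternions.
static Q qMul(const Q &a, const Q &b)
{
    return { a.w*b.w - a.x*b.x - a.y*b.y - a.z*b.z,
             a.w*b.x + a.x*b.w + a.y*b.z - a.z*b.y,
             a.w*b.y - a.x*b.z + a.y*b.w + a.z*b.x,
             a.w*b.z + a.x*b.y - a.y*b.x + a.z*b.w };
}

static Q qNormalise(Q q)
{
    double n = std::sqrt(q.w*q.w + q.x*q.x + q.y*q.y + q.z*q.z);
    return { q.w/n, q.x/n, q.y/n, q.z/n };
}

// Normalised linear interpolation between unit quaternions along the shorter arc.
static Q qNlerp(Q a, Q b, double t)
{
    double dot = a.w*b.w + a.x*b.x + a.y*b.y + a.z*b.z;
    if (dot < 0) { b.w = -b.w; b.x = -b.x; b.y = -b.y; b.z = -b.z; }
    Q r = { a.w + t*(b.w - a.w), a.x + t*(b.x - a.x),
            a.y + t*(b.y - a.y), a.z + t*(b.z - a.z) };
    return qNormalise(r);
}

// One complementary-filter step: integrate the angular rate (rad/s) over dtSec
// into the previous orientation, then correct slightly towards the measured one.
static Q fuseStep(const Q &previous, const double omegaRad[3], double dtSec,
                  const Q &measured, double correction = 0.02)
{
    double wx = omegaRad[0], wy = omegaRad[1], wz = omegaRad[2];
    double mag = std::sqrt(wx*wx + wy*wy + wz*wz);   // angular speed
    double theta = mag * dtSec;                      // rotation angle this step
    Q dq = { 1, 0, 0, 0 };
    if (mag > 0)
    {
        double s = std::sin(theta / 2.0) / mag;      // scales the axis components
        dq = { std::cos(theta / 2.0), wx * s, wy * s, wz * s };
    }
    Q predicted = qNormalise(qMul(previous, dq));
    return qNlerp(predicted, measured, correction);
}

Example #3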
geometry_msgs::PoseStamped PSMoveTemplateController::updatePose(geometry_msgs::PoseStamped object_pose, MoveServerPacket *move_server_packet, float normalized_scale)
{
    geometry_msgs::PoseStamped transformed_pose;

    /////////////////////////
    //Calculate scale
    QVector3D cam(camera_position_);
    QVector3D obj(object_pose.pose.position.x,object_pose.pose.position.y,object_pose.pose.position.z);
    double distance = (cam - obj).length();
    //ROS_ERROR("distance: %f   vfov: %f    center distance: %f", distance, camera_update_.vfov, cos(camera_update_.vfov) * distance);
    double scale = (cos(camera_update_.vfov) * distance) * 1.0 / 5.0; // distance=5m is scale 1.0

    float rotation_scale = normalized_scale;// * scale;
    float position_scale = 0.01 * normalized_scale * scale; // move position reported in mm, so scale it down (right now it's 10x real-world scale)

    /////////////////////////
    //Translation - move relative to the camera for direction but still translate in world space
    // create a vector representing the offset to be applied to position.
    QVector3D position_offset((move_server_packet->state[0].handle_pos[0] - old_move_position_.x()) * position_scale,
                              (move_server_packet->state[0].handle_pos[1] - old_move_position_.y()) * position_scale,
                              (move_server_packet->state[0].handle_pos[2] - old_move_position_.z()) * position_scale);
    //rotate the offset to be relative to camera orientation, can now be applied to position to move relative to camera
    QVector3D rotated_position_offset = camera_orientation_.rotatedVector(position_offset);

    //Update x, y, and z values
    // NEED TO RESTORE THESE FOR POSITION CONTROL
    if(first_time_)
    {
        transformed_pose.pose.position.x = object_pose.pose.position.x;
        transformed_pose.pose.position.y = object_pose.pose.position.y;
        transformed_pose.pose.position.z = object_pose.pose.position.z;
    }
    else
    {
        transformed_pose.pose.position.x = object_pose.pose.position.x;// + rotated_position_offset.x();
        transformed_pose.pose.position.y = object_pose.pose.position.y;// + rotated_position_offset.y();
        transformed_pose.pose.position.z = object_pose.pose.position.z;// + rotated_position_offset.z();
    }

    /////////////////////////
    //Rotation
    // calculate the relative rotation from the old move orientation, so we only have the difference
    QQuaternion move_orientation(move_server_packet->state[0].quat[3],move_server_packet->state[0].quat[0],move_server_packet->state[0].quat[1],move_server_packet->state[0].quat[2]);
    QQuaternion move_orientation_offset = old_move_orientation_.conjugate() * move_orientation;

    // object orientation
    QQuaternion object_orientation(object_pose.pose.orientation.w,object_pose.pose.orientation.x,object_pose.pose.orientation.y,object_pose.pose.orientation.z);

    camera_orientation_.normalize();
    move_orientation_offset.normalize();
    object_orientation.normalize();

    printf("DEBUG\n");
    //printf(" cam:   %.3f, %.3f, %.3f, %.3f\n",camera_orientation_.scalar(),camera_orientation_.x(),camera_orientation_.y(),camera_orientation_.z());
    //printf(" move1: %.3f, %.3f, %.3f, %.3f\n",move_orientation_offset.scalar(),move_orientation_offset.x(),move_orientation_offset.y(),move_orientation_offset.z());
    //printf(" obj:   %.3f, %.3f, %.3f, %.3f\n",object_orientation.scalar(),object_orientation.x(),object_orientation.y(),object_orientation.z());

    float x,y,z;
    quatToEuler(camera_orientation_,y,z,x);
    printf(" cam:   %.3f, %.3f, %.3f\n",x,y,z);
    quatToEuler(move_orientation_offset,y,z,x);
    printf(" move1: %.3f, %.3f, %.3f\n",x,y,z);
    quatToEuler(object_orientation,y,z,x);
    printf(" obj:   %.3f, %.3f, %.3f\n",x,y,z);

    if(first_time_)
    {
        // now we need to apply the orientation offset relative to the camera view
//        QQuaternion camera_T_move = camera_orientation_ * move_orientation;
//        QQuaternion object_T_camera = last_object_T_move_.conjugate() * camera_orientation_;
//        QQuaternion object_T_move = object_T_camera * camera_T_move;

//        camera_T_object = camera_orientation_.conjugate() * object_orientation;
        degrees_ = 0;
    }
//    QQuaternion rotated_orientation = move_orientation * last_object_T_move_.conjugate();

    ///////
    // old stuff - not quite right, doesn't take starting pose of move into consideration
    //based on http://www.arcsynthesis.org/gltut/Positioning/Tut08%20Camera%20Relative%20Orientation.html
    //new_rotation_offset = camera_orientation^-1 * (rotation_offset * camera_orientation)
    //QQuaternion rotated_orientation_offset = camera_orientation_ * move_orientation_offset * camera_orientation_.conjugate() * object_orientation;
    QQuaternion rotated_orientation_offset = camera_orientation_ * QQuaternion::fromAxisAndAngle(QVector3D(1,0,0),1) * camera_orientation_.conjugate() * object_orientation; // making transform constant

    // NOTE: the experimental value above is discarded; this is the offset actually used
    rotated_orientation_offset = camera_orientation_ * move_orientation * camera_orientation_.conjugate();
    quatToEuler(rotated_orientation_offset,y,z,x);
    printf(" off:   %.3f, %.3f, %.3f\n",x,y,z);

    ///////
    // based on matrices
    // calculate camera x axis
//    QVector3D camera_x_axis = camera_orientation_.rotatedVector(QVector3D(1,0,0));
//    move_orientation_offset = QQuaternion::fromAxisAndAngle(camera_x_axis,degrees_++);//camera_orientation_ * move_orientation_offset;

//    QQuaternion camera_T_new_object = move_orientation_offset * camera_T_object;
//    QQuaternion rotated_orientation_offset = camera_orientation_ * camera_T_new_object;

    ///////
    // based on vectors
    //Get the camera's basis vectors in world space
    QVector3D camera_x = camera_orientation_.rotatedVector(QVector3D(1,0,0));
    QVector3D camera_y = camera_orientation_.rotatedVector(QVector3D(0,1,0));
    QVector3D camera_z = camera_orientation_.rotatedVector(QVector3D(0,0,1));
    printf(" camera X:   %.3f, %.3f, %.3f\n",camera_x.x(),camera_x.y(),camera_x.z());
    printf(" camera Y:   %.3f, %.3f, %.3f\n",camera_y.x(),camera_y.y(),camera_y.z());
    printf(" camera Z:   %.3f, %.3f, %.3f\n",camera_z.x(),camera_z.y(),camera_z.z());

    QVector3D move_x = move_orientation.rotatedVector(QVector3D(1,0,0));
    QVector3D move_y = move_orientation.rotatedVector(QVector3D(0,1,0));
    QVector3D move_z = move_orientation.rotatedVector(QVector3D(0,0,1));
    printf(" move X:   %.3f, %.3f, %.3f\n",move_x.x(),move_x.y(),move_x.z());
    printf(" move Y:   %.3f, %.3f, %.3f\n",move_y.x(),move_y.y(),move_y.z());
    printf(" move Z:   %.3f, %.3f, %.3f\n",move_z.x(),move_z.y(),move_z.z());


//    //Turns relative vectors from world to objects local space
//    QVector3D object_relative_y = object_orientation.conjugate().rotatedVector(camera_y);
//    QVector3D object_relative_x = object_orientation.conjugate().rotatedVector(camera_x);
//    QVector3D object_relative_z = object_orientation.conjugate().rotatedVector(camera_z);

//    QQuaternion rotated_orientation_offset = QQuaternion::fromAxisAndAngle(object_relative_z,1)
//                                           * QQuaternion::fromAxisAndAngle(object_relative_y,0)
//                                           * QQuaternion::fromAxisAndAngle(object_relative_x,0);

    rotated_orientation_offset.normalize();

    // DEBUG
//    ROS_ERROR("DEBUG");
//    float x,y,z;
//    quatToEuler(object_orientation,y,x,z);
//    ROS_ERROR(" obj: %.3f, %.3f, %.3f",x,y,z);
//    quatToEuler(move_orientation_offset,y,x,z);
//    ROS_ERROR(" off: %.3f, %.3f, %.3f",x,y,z);
//    quatToEuler(rotated_orientation_offset,y,x,z);
//    ROS_ERROR(" rot: %.3f, %.3f, %.3f",x,y,z);
    // DEBUG

    QQuaternion transformed_orientation;
    //if(first_time_)
    //    transformed_orientation = object_orientation;
    //else
        transformed_orientation = rotated_orientation_offset;

    transformed_pose.pose.orientation.w = transformed_orientation.scalar();
    transformed_pose.pose.orientation.x = transformed_orientation.x();
    transformed_pose.pose.orientation.y = transformed_orientation.y();
    transformed_pose.pose.orientation.z = transformed_orientation.z();

    object_pose_.pose.orientation.w = transformed_orientation.scalar();
    object_pose_.pose.orientation.x = transformed_orientation.x();
    object_pose_.pose.orientation.y = transformed_orientation.y();
    object_pose_.pose.orientation.z = transformed_orientation.z();

    /////////////////////////
    //Header
    transformed_pose.header = object_pose.header;
    transformed_pose.header.stamp = ros::Time::now();

    first_time_ = false;

    return transformed_pose;
}
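The commented formula above ("new_rotation_offset = camera_orientation^-1 * (rotation_offset * camera_orientation)") and the q * offset * q.conjugate() lines in the function body are both forms of the same idea: conjugating a rotation to re-express it in another reference frame. A minimal sketch of that pattern with Qt's QQuaternion follows; the function name and parameters are invented for illustration and are not part of this controller's API.

#include <QQuaternion>

// Re-express a rotation offset measured in the camera frame as a world-frame
// rotation, then apply it to an object's current orientation.
QQuaternion applyCameraRelativeRotation(const QQuaternion &cameraOrientation,
                                        const QQuaternion &offsetInCameraFrame,
                                        const QQuaternion &objectOrientation)
{
    // for a unit quaternion the conjugate is its inverse
    QQuaternion worldOffset = cameraOrientation * offsetInCameraFrame * cameraOrientation.conjugate();
    QQuaternion result = worldOffset * objectOrientation;
    result.normalize();
    return result;
}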
Example #4
static void export_alembic_camera(AlembicArchive &archive, const RenderedBuffer & renderedBuffer, bool isUseEuler)
{
	static const int cameraKey = 0xFFFFFF;
	Alembic::AbcGeom::OObject topObj(*archive.archive, Alembic::AbcGeom::kTop);

	Alembic::AbcGeom::OXform xform;
	if (archive.xform_map.find(cameraKey) != archive.xform_map.end())
	{
		xform = archive.xform_map[cameraKey];
	}
	else
	{
		xform = Alembic::AbcGeom::OXform(topObj, "camera_xform", archive.timesampling);
		archive.xform_map[cameraKey] = xform;

		Alembic::AbcGeom::OXformSchema &xformSchema = xform.getSchema();
		archive.xform_schema_map[cameraKey] = xformSchema;
	}
		
	// set camera transform
	{
		Alembic::AbcGeom::OXformSchema &xformSchema = archive.xform_schema_map[cameraKey];
		xformSchema.setTimeSampling(archive.timesampling);
		
		Alembic::AbcGeom::XformSample xformSample;

		D3DXMATRIX convertMat(
			1, 0, 0, 0,
			0, 1, 0, 0,
			0, 0, -1, 0,
			0, 0, 0, 1);

		D3DXMATRIX convertedWorldInv;
		::D3DXMatrixMultiply(&convertedWorldInv, &renderedBuffer.world_inv, &convertMat);
			
		D3DXVECTOR3 eye;
		{
			D3DXVECTOR3 v;
			UMGetCameraEye(&v);
			d3d_vector3_transform(eye, v, convertedWorldInv);
		}
			
		D3DXVECTOR3 at;
		{
			D3DXVECTOR3 v;
			UMGetCameraAt(&v);
			d3d_vector3_transform(at, v, convertedWorldInv);
		}

		D3DXVECTOR3 up;
		{
			D3DXVECTOR3 v;
			UMGetCameraUp(&v);
			d3d_vector3_dir_transform(up, v, convertedWorldInv);
			::D3DXVec3Normalize(&up, &up);
		}

		Imath::V3d trans((double)eye.x, (double)eye.y, (double)(eye.z));
		xformSample.setTranslation(trans);

		D3DXMATRIX view;
		::D3DXMatrixLookAtLH(&view, &eye, &at, &up);

		Imath::M44d rot(
			-view.m[0][0], view.m[0][1], view.m[0][2], 0,
			-view.m[1][0], view.m[1][1], view.m[1][2], 0,
			view.m[2][0], -view.m[2][1], -view.m[2][2], 0,
			0, 0, 0, 1);

		Imath::Quatd quat = Imath::extractQuat(rot);
		quat.normalize();

		if (isUseEuler)
		{
			Imath::V3d imeuler;
			quatToEuler(imeuler, quat);

			//UMMat44d umrot(
			//	-view.m[0][0], view.m[0][1], view.m[0][2], 0,
			//	-view.m[1][0], view.m[1][1], view.m[1][2], 0,
			//	view.m[2][0], -view.m[2][1], -view.m[2][2], 0,
			//	0, 0, 0, 1);
			//UMVec3d umeuler = umbase::um_matrix_to_euler_xyz(umrot.transposed());
			xformSample.setXRotation(umbase::um_to_degree(imeuler.y));
			xformSample.setYRotation(umbase::um_to_degree(imeuler.x));
			xformSample.setZRotation(-umbase::um_to_degree(imeuler.z));
		}
		else
		{
			xformSample.setRotation(quat.axis(), umbase::um_to_degree(quat.angle()));
		}

		xformSchema.set(xformSample);
	}
		
	Alembic::AbcGeom::OCamera camera;
	if (archive.camera_map.find(cameraKey) != archive.camera_map.end())
	{
		camera = archive.camera_map[cameraKey];
	}
	else
	{
		camera = Alembic::AbcGeom::OCamera(xform, "camera", archive.timesampling);
		archive.camera_map[cameraKey] = camera;
			
		Alembic::AbcGeom::OCameraSchema &cameraSchema = camera.getSchema();
		archive.camera_schema_map[cameraKey] = cameraSchema;
	}

	Alembic::AbcGeom::OCameraSchema &cameraSchema = archive.camera_schema_map[cameraKey];
	cameraSchema.setTimeSampling(archive.timesampling);
	Alembic::AbcGeom::CameraSample sample;

	D3DXVECTOR4 v;
	UMGetCameraFovLH(&v);

	sample.setNearClippingPlane(v.z);
	sample.setFarClippingPlane(v.w);

	double fovy = v.x;
	double aspect = v.y;
	double fovx = 2.0 * atan(tan(fovy / 2.0)*(aspect));
	double w = BridgeParameter::instance().frame_width / 10.0;
	double h = BridgeParameter::instance().frame_height / 10.0;
	double focalLength = w / (2.0 * tan(fovx / 2.0));

	sample.setHorizontalAperture(w / 10.0);
	sample.setVerticalAperture(h / 10.0);
	sample.setFocalLength(focalLength);

	cameraSchema.set(sample);
}
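The aperture and focal-length values at the end follow the standard pinhole-camera relations: the horizontal field of view comes from the vertical one via fovx = 2*atan(tan(fovy/2) * aspect), and the focal length then satisfies tan(fovx/2) = (apertureWidth/2) / focalLength. A small standalone sketch of that calculation follows; the function name and parameters are illustrative, and the extra /10.0 scaling used above is this exporter's own convention rather than part of the formula.

#include <cmath>

// Derive a focal length from a vertical field of view (radians), an aspect
// ratio and a horizontal aperture width (same unit as the returned length).
static double focalLengthFromFov(double fovyRad, double aspect, double apertureWidth)
{
	// horizontal FOV from vertical FOV and aspect ratio
	double fovx = 2.0 * std::atan(std::tan(fovyRad / 2.0) * aspect);
	// pinhole model: tan(fovx/2) = (apertureWidth/2) / focalLength
	return apertureWidth / (2.0 * std::tan(fovx / 2.0));
}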