Example #1
	//gamma is useless but I keep it for symmetry
	//with Euler_direction
	void Euler_direction2angles(Matrix1D<DOUBLE> &v0,
		DOUBLE &alpha, DOUBLE &beta)
	{
		DOUBLE abs_ca, sb, cb;
		DOUBLE aux_alpha;
		DOUBLE aux_beta;
		DOUBLE error, newerror;
		Matrix1D<DOUBLE> v_aux;
		Matrix1D<DOUBLE> v;

		//normalize the vector if it is not normalized already
		v.resize(3);
		v = v0;
		v.selfNormalize();

		v_aux.resize(3);
		cb = v(2);

		if (fabs((cb)) > 0.999847695)/*one degree */
		{
			std::cerr << "\nWARNING: Routine Euler_direction2angles is not reliable\n"
				"for small tilt angles. Up to 0.001 deg it should be OK\n"
				"for most applications but you never know";
		}

		if (fabs((cb - 1.)) < FLT_EPSILON)
		{
			alpha = 0.;
			beta = 0.;
		}
		else
		{/*1*/

			aux_beta = acos(cb); /* beta between 0 and PI */


			sb = sin(aux_beta);

			abs_ca = fabs(v(0)) / sb;
			if (fabs((abs_ca - 1.)) < FLT_EPSILON)
				aux_alpha = 0.;
			else
				aux_alpha = acos(abs_ca);

			v_aux(0) = sin(aux_beta) * cos(aux_alpha);
			v_aux(1) = sin(aux_beta) * sin(aux_alpha);
			v_aux(2) = cos(aux_beta);

			error = fabs(dotProduct(v, v_aux) - 1.);
			alpha = aux_alpha;
			beta = aux_beta;

			v_aux(0) = sin(aux_beta) * cos(-1. * aux_alpha);
			v_aux(1) = sin(aux_beta) * sin(-1. * aux_alpha);
			v_aux(2) = cos(aux_beta);
			newerror = fabs(dotProduct(v, v_aux) - 1.);
			if (error > newerror)
			{
				alpha = -1. * aux_alpha;
				beta = aux_beta;
				error = newerror;
			}

			v_aux(0) = sin(-aux_beta) * cos(-1. * aux_alpha);
			v_aux(1) = sin(-aux_beta) * sin(-1. * aux_alpha);
			v_aux(2) = cos(-aux_beta);
			newerror = fabs(dotProduct(v, v_aux) - 1.);
			if (error > newerror)
			{
				alpha = -1. * aux_alpha;
				beta = -1. * aux_beta;
				error = newerror;
			}

			v_aux(0) = sin(-aux_beta) * cos(aux_alpha);
			v_aux(1) = sin(-aux_beta) * sin(aux_alpha);
			v_aux(2) = cos(-aux_beta);
			newerror = fabs(dotProduct(v, v_aux) - 1.);

			if (error > newerror)
			{
				alpha = aux_alpha;
				beta = -1. * aux_beta;
				error = newerror;
			}
		}/*else 1 end*/
		beta = RAD2DEG(beta);
		alpha = RAD2DEG(alpha);
	}/*Eulerdirection2angles end*/
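All of the snippets on this page revolve around radian/degree conversion helpers such as RAD2DEG and DEG2RAD. As a point of reference, here is a minimal, self-contained sketch of such helpers together with the direction-to-angles relationship used in the routine above; the definitions below are illustrative assumptions, not taken from the project this example comes from.

#include <cmath>
#include <cstdio>

// Illustrative conversion helpers; the projects on this page define their own macros.
static inline double RAD2DEG(double r) { return r * 180.0 / 3.14159265358979323846; }
static inline double DEG2RAD(double d) { return d * 3.14159265358979323846 / 180.0; }

int main() {
    // Build a unit direction from (alpha, beta) the way the routine above expects it...
    double alpha = DEG2RAD(30.0), beta = DEG2RAD(60.0);
    double v[3] = { std::sin(beta) * std::cos(alpha),
                    std::sin(beta) * std::sin(alpha),
                    std::cos(beta) };
    // ...and recover beta from v[2] and alpha from v[0] / sin(beta), as the routine does.
    double beta2  = std::acos(v[2]);
    double alpha2 = std::acos(std::fabs(v[0]) / std::sin(beta2));
    std::printf("alpha = %.1f deg, beta = %.1f deg\n", RAD2DEG(alpha2), RAD2DEG(beta2));
    return 0;
}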
Example #2
File: plans.c  Project: B-robur/OpenPilot
/**
 * @brief execute autocruise
 */
void plan_run_AutoCruise()
{
    PositionStateData positionState;

    PositionStateGet(&positionState);
    PathDesiredData pathDesired;
    PathDesiredGet(&pathDesired);
    FlightModeSettingsPositionHoldOffsetData offset;
    FlightModeSettingsPositionHoldOffsetGet(&offset);

    float controlVector[4];
    ManualControlCommandRollGet(&controlVector[0]);
    ManualControlCommandPitchGet(&controlVector[1]);
    ManualControlCommandYawGet(&controlVector[2]);
    controlVector[3] = 0.5f; // dummy, thrust is normalized separately
    normalizeDeadband(controlVector); // return value ignored
    ManualControlCommandThrustGet(&controlVector[3]); // no deadband as we are using thrust for velocity
    controlVector[3] = boundf(controlVector[3], 1e-6f, 1.0f); // bound to above zero, to prevent loss of vector direction

    // normalize old desired movement vector
    float vector[3] = { pathDesired.End.North - hold_position[0],
                        pathDesired.End.East - hold_position[1],
                        pathDesired.End.Down - hold_position[2] };
    float length    = sqrtf(vector[0] * vector[0] + vector[1] * vector[1] + vector[2] * vector[2]);
    if (length < 1e-9f) {
        length = 1.0f; // should not happen since initialized properly in setup()
    }
    vector[0] /= length;
    vector[1] /= length;
    vector[2] /= length;

    // start position is advanced according to actual movement - in the direction of desired vector only
    // projection using scalar product
    float kp = (positionState.North - hold_position[0]) * vector[0]
               + (positionState.East - hold_position[1]) * vector[1]
               + (positionState.Down - hold_position[2]) * vector[2];
    if (kp > 0.0f) {
        hold_position[0] += kp * vector[0];
        hold_position[1] += kp * vector[1];
        hold_position[2] += kp * vector[2];
    }

    // new angle is equal to old angle plus offset depending on yaw input and time
    // (controlVector is normalized with a deadband, change is zero within deadband)
    float angle = RAD2DEG(atan2f(vector[1], vector[0]));
    float dT    = PIOS_DELTATIME_GetAverageSeconds(&actimeval);
    angle    += 10.0f * controlVector[2] * dT; // TODO: magic value; could eventually end up in a yet-to-be-created settings object

    // resulting movement vector is scaled by velocity demand in controlvector[3] [0.0-1.0]
    vector[0] = cosf(DEG2RAD(angle)) * offset.Horizontal * controlVector[3];
    vector[1] = sinf(DEG2RAD(angle)) * offset.Horizontal * controlVector[3];
    vector[2] = -controlVector[1] * offset.Vertical * controlVector[3];

    pathDesired.End.North   = hold_position[0] + vector[0];
    pathDesired.End.East    = hold_position[1] + vector[1];
    pathDesired.End.Down    = hold_position[2] + vector[2];
    // start position has the same offset as in position hold
    pathDesired.Start.North = pathDesired.End.North + offset.Horizontal; // in FlyEndPoint the direction of this vector does not matter
    pathDesired.Start.East  = pathDesired.End.East;
    pathDesired.Start.Down  = pathDesired.End.Down;
    PathDesiredSet(&pathDesired);
}
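The interesting step above is advancing hold_position only along the desired direction, via a scalar-product projection. A minimal sketch of that step in isolation, with illustrative values rather than the OpenPilot data structures:

#include <cmath>
#include <cstdio>

int main() {
    float hold[3]   = { 0.0f, 0.0f, -10.0f };   // current hold position (NED, placeholder values)
    float vector[3] = { 1.0f, 0.0f, 0.0f };     // unit desired movement direction
    float state[3]  = { 2.0f, 0.5f, -10.0f };   // measured position

    // Scalar projection of the actual displacement onto the desired direction.
    float kp = (state[0] - hold[0]) * vector[0]
             + (state[1] - hold[1]) * vector[1]
             + (state[2] - hold[2]) * vector[2];
    if (kp > 0.0f) {                            // only advance, never retreat
        for (int i = 0; i < 3; ++i) hold[i] += kp * vector[i];
    }
    std::printf("hold = (%.2f, %.2f, %.2f)\n", hold[0], hold[1], hold[2]);
    return 0;
}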
Example #3
STransitionSelectionParams::STransitionSelectionParams(
	const CMovementTransitions& transitions,
	const CPlayer& player,
	const CMovementRequest& request,
	const Vec3& playerPos,
	const SMovementTransitionsSample& oldSample,
	const SMovementTransitionsSample& newSample,
	const bool bHasLockedBodyTarget,
	const Vec3& targetBodyDirection,
	const Lineseg& safeLine,
	const CTimeValue runningDuration,
	const uint8 _allowedTransitionFlags,
	const float entitySpeed2D,
	const float entitySpeed2DAvg,
	const SExactPositioningTarget*const pExactPositioningTarget,
	const EStance stance,

	SActorFrameMovementParams*const pMoveParams) : m_transitionType(eTT_None), m_transitionDistance(0.0f), m_pseudoSpeed(0.0f), m_travelAngle(0.0f), m_jukeAngle(0.0f), m_stance(stance)
{
	// TODO: check for flatness?

	m_travelAngle = Ang3::CreateRadZ( newSample.bodyDirection, oldSample.moveDirection ); // probably should be oldSample?
	m_context = request.HasContext() ? request.GetContext() : 0;

	// --------------------------------------------------------------------------
	// Calculate vToMoveTarget, vAfterMoveTarget, distToMoveTarget, distAfterMoveTarget & allowedTransitionFlags

	Vec3 vToMoveTarget, vAfterMoveTarget;
	float distToMoveTarget, distAfterMoveTarget;
	uint8 allowedTransitionFlags = _allowedTransitionFlags;

	{
		if (request.HasMoveTarget())
		{
			const Vec3& vMoveTarget = request.GetMoveTarget();
			vToMoveTarget = vMoveTarget - playerPos;
			distToMoveTarget = vToMoveTarget.GetLength2D();

			m_future.hasMoveTarget = true;
			m_future.vMoveTarget = vMoveTarget;

			// Disallow certain transitions when preparing an exact positioning target
			// and fudge the distance to make sure we don't start when too close to it
			if (pExactPositioningTarget && pExactPositioningTarget->preparing && !pExactPositioningTarget->activated)
			{
				allowedTransitionFlags &= ~BIT(eTT_Stop);
				allowedTransitionFlags &= ~BIT(eTT_DirectionChange);

				const Vec3& exactPosLocation = pExactPositioningTarget->location.t;
				const float distFromMoveTargetToExactPosSquared = vMoveTarget.GetSquaredDistance(exactPosLocation);

				const float minimumDangerDistance = 0.1f;
				const float maxDistanceTraveledPerFrame = gEnv->pTimer->GetFrameTime() * 12.5f;
				const float dangerDistance = max(minimumDangerDistance, maxDistanceTraveledPerFrame);

				const bool moveTargetIsWithinDangerDistance = (distFromMoveTargetToExactPosSquared <= sqr(dangerDistance));
				if (moveTargetIsWithinDangerDistance)
				{
					// Fudge distToMoveTarget so we start at least distanceTraveledInTwoFrames
					// This only works for eTT_Start transitions (but we disabled the others above)
					distToMoveTarget = max(0.0f, distToMoveTarget - dangerDistance);
				}
			}

			if (request.HasInflectionPoint())
			{
				const Vec3& vInflectionPoint = request.GetInflectionPoint();
				vAfterMoveTarget = vInflectionPoint - vMoveTarget;
				distAfterMoveTarget = vAfterMoveTarget.GetLength2D();
			}
			else
			{
				vAfterMoveTarget.zero();
				distAfterMoveTarget = 0.0f;
			}
		}
		else
		{
			m_future.hasMoveTarget = false;

			vToMoveTarget.zero();
			vAfterMoveTarget.zero();
			distToMoveTarget = distAfterMoveTarget = 0.0f;
		}
	}

	// --------------------------------------------------------------------------

	const float maximumSpeedForStart = 0.5f;
	const float minimumSpeedForWalkStop = 1.0f;
	const float minimumSpeedForRunStop = 3.5f;
	const float minimumRunningDurationForRunStop = 1.0f; // (seconds)

	const float minimumSpeedForJuke = 4.4f*0.6f; // 4.4 is slowest runspeed in Crysis2; 0.6 is the strafing slowdown
	const float minimumRunningDurationForJuke = 0.1f; // (seconds)

	if (newSample.pseudoSpeed > 0.0f)
	{
		// Either:
		// - we are in a Stop and want to start again <-- note oldPseudoSpeed cannot be used to detect this, it could be already 0 from prev. frame
		// - we are in a Start and want to continue starting [but possibly stop or change direction at the movetarget]
		// - we are stopped and want to Start [but possibly stop or change direction at the movetarget]
		// - we are moving and want to continue moving [but possibly stop or change direction at the movetarget]

		m_pseudoSpeed = newSample.pseudoSpeed;

		if ( (allowedTransitionFlags & (1<<eTT_Start)) && (entitySpeed2DAvg <= maximumSpeedForStart) )
		{
			//New sample's movement direction is accurate for start transitions.
			m_travelAngle = Ang3::CreateRadZ( newSample.bodyDirection, newSample.moveDirection );
			m_transitionType = eTT_Start;
			m_bPredicted = true;
			m_transitionDistance = distToMoveTarget;
			m_future.vMoveDirection = newSample.moveDirection;
			const Vec3 realTargetBodyDirection = bHasLockedBodyTarget ? targetBodyDirection : m_future.vMoveDirection;
			m_targetTravelAngle = Ang3::CreateRadZ( realTargetBodyDirection, m_future.vMoveDirection );
			m_future.qOrientation = Quat::CreateRotationVDir( realTargetBodyDirection );
			MovementTransitionsLog("[%x] Juke failed because we are trying to start", gEnv->pRenderer->GetFrameID());
		}
		else // at the moment start & stop are mutually exclusive
		{
			if (!(allowedTransitionFlags & (1<<eTT_Start)))
				MovementTransitionsLog("[%x] Start failed because current animation state (%s) doesn't support starting", gEnv->pRenderer->GetFrameID(), const_cast<CPlayer&>(player).GetAnimationGraphState() ? const_cast<CPlayer&>(player).GetAnimationGraphState()->GetCurrentStateName() : "");
			else
				MovementTransitionsLog("[%x] Start failed because speed is %f while maximum %f is allowed", gEnv->pRenderer->GetFrameID(), player.GetAnimatedCharacter()->GetEntitySpeedHorizontal(), maximumSpeedForStart);

			m_transitionType = eTT_None;

			// try immediate directionchange first
			if (allowedTransitionFlags & (1<<eTT_DirectionChange))
			{
				if (
						((oldSample.pseudoSpeed == AISPEED_RUN) || (oldSample.pseudoSpeed == AISPEED_SPRINT)) &&
						(runningDuration > minimumRunningDurationForJuke) &&
						(entitySpeed2D >= minimumSpeedForJuke)
					)
				{
					if (gEnv->pAISystem && !gEnv->pAISystem->GetSmartObjectManager()->CheckSmartObjectStates(player.GetEntity(), "Busy"))
					{
						// === IMMEDIATE DIRECTIONCHANGE ===

						// ---------------------------------
						// Assume a directionchange after moving forward for one meter (assumedDistanceToJuke=1)
						// Look up a transition math for that proposed directionchange
						// ---------------------------------
						m_pseudoSpeed = oldSample.pseudoSpeed;
						m_transitionDistance = -1;

						float assumedDistanceToJuke = 1.0f;
						CRY_ASSERT(assumedDistanceToJuke > FLT_EPSILON);

						Vec3 vToProposedMoveTarget = newSample.moveDirection * distToMoveTarget; // vector from current position to current movetarget
						Vec3 vToProposedJukePoint = oldSample.moveDirection * assumedDistanceToJuke;
						Vec3 vAfterProposedJukePoint = (vToProposedMoveTarget - vToProposedJukePoint).GetNormalizedSafe(newSample.moveDirection);

						m_jukeAngle = Ang3::CreateRadZ( vToProposedJukePoint, vAfterProposedJukePoint );
						m_transitionType = eTT_DirectionChange;
						m_bPredicted = false;
						m_future.vMoveDirection = vAfterProposedJukePoint;
						Vec3 realTargetBodyDirection = bHasLockedBodyTarget ? targetBodyDirection : vAfterProposedJukePoint;
						m_targetTravelAngle = Ang3::CreateRadZ( realTargetBodyDirection, vAfterProposedJukePoint );

						MovementTransitionsLog("[%x] Considering angle %+3.2f", gEnv->pRenderer->GetFrameID(), RAD2DEG(m_jukeAngle));

						const STransition* pTransition = NULL;
						int index = -1;
						STransitionMatch bestMatch;
						transitions.FindBestMatch(*this, &pTransition, &index, &bestMatch);

						if (pTransition)
						{
							// -------------------------------------------------------
							// We found a transition matching our guess. Adjust juke point to match the distance of the transition we found
							// -------------------------------------------------------
							float proposedTransitionDistance = (pTransition->minDistance + pTransition->maxDistance)*0.5f;
							CRY_ASSERT(proposedTransitionDistance > FLT_EPSILON);
							vToProposedJukePoint = oldSample.moveDirection * proposedTransitionDistance;
							vAfterProposedJukePoint = vToProposedMoveTarget - vToProposedJukePoint;
							float proposedDistAfterMoveTarget = vAfterProposedJukePoint.GetLength2D();
							vAfterProposedJukePoint.NormalizeSafe(newSample.moveDirection);

							if (proposedDistAfterMoveTarget >= transitions.GetMinDistanceAfterDirectionChange())
							{
								m_jukeAngle = Ang3::CreateRadZ( vToProposedJukePoint, vAfterProposedJukePoint );
								m_future.vMoveDirection = vAfterProposedJukePoint;
								realTargetBodyDirection = bHasLockedBodyTarget ? targetBodyDirection : vAfterProposedJukePoint;
								m_targetTravelAngle = Ang3::CreateRadZ( realTargetBodyDirection, vAfterProposedJukePoint );

								MovementTransitionsLog("[%x] Proposing angle %+3.2f", gEnv->pRenderer->GetFrameID(), RAD2DEG(m_jukeAngle));

								m_transitionDistance = proposedTransitionDistance;
								m_future.qOrientation = Quat::CreateRotationVDir( realTargetBodyDirection );
							}
							else
							{
								MovementTransitionsLog("[%x] Immediate Juke failed because not enough distance after the juke (distance needed = %f, max distance = %f)", gEnv->pRenderer->GetFrameID(), transitions.GetMinDistanceAfterDirectionChange(), proposedDistAfterMoveTarget);
								m_transitionType = eTT_None;
							}
						}
						else
						{
							MovementTransitionsLog("[%x] Immediate Juke failed because no animation found for this angle/stance/speed", gEnv->pRenderer->GetFrameID());
							m_transitionType = eTT_None;
						}
					}
					else
					{
						MovementTransitionsLog("[%x] Immediate Juke failed because smart object is playing", gEnv->pRenderer->GetFrameID());
					}
				}
				else
				{
					if (!((oldSample.pseudoSpeed == AISPEED_RUN) || (oldSample.pseudoSpeed == AISPEED_SPRINT)))
					{
						MovementTransitionsLog("[%x] Immediate Juke failed because current pseudospeed (%f) is not supported", gEnv->pRenderer->GetFrameID(), oldSample.pseudoSpeed);
					}
					else if (runningDuration <= minimumRunningDurationForJuke)
					{
						MovementTransitionsLog("[%x] Immediate Juke failed because running only %f seconds while more than %f is needed", gEnv->pRenderer->GetFrameID(), runningDuration.GetSeconds(), minimumRunningDurationForJuke);
					}
					else //if (entitySpeed2D < minimumSpeedForJuke)
					{
						MovementTransitionsLog("[%x] Immediate Juke failed because speed is only %f while %f is needed", gEnv->pRenderer->GetFrameID(), entitySpeed2D, minimumSpeedForJuke);
					}
				}
			}
			else
			{
				MovementTransitionsLog("[%x] Immediate Juke failed because current animation state (%s) doesn't support juking", gEnv->pRenderer->GetFrameID(), const_cast<CPlayer&>(player).GetAnimationGraphState() ? const_cast<CPlayer&>(player).GetAnimationGraphState()->GetCurrentStateName() : NULL);
			}

			if (m_transitionType == eTT_None) // directionchange wasn't found
			{
				if ((allowedTransitionFlags & (1<<eTT_Stop)) && (distAfterMoveTarget < FLT_EPSILON))
				{
					// === PREDICTED STOP ===
					// We want to stop in the future
					m_transitionType = eTT_Stop;
					m_bPredicted = true;
					m_transitionDistance = distToMoveTarget;
					m_future.vMoveDirection = vToMoveTarget.GetNormalizedSafe(newSample.moveDirection);
					m_arrivalAngle = request.HasDesiredBodyDirectionAtTarget() ? Ang3::CreateRadZ( request.GetDesiredBodyDirectionAtTarget(), m_future.vMoveDirection ) : 0.0f;
					m_future.qOrientation = request.HasDesiredBodyDirectionAtTarget() ? Quat::CreateRotationVDir(request.GetDesiredBodyDirectionAtTarget()) : Quat::CreateRotationVDir(m_future.vMoveDirection);
					MovementTransitionsLog("[%x] Predicted Juke failed because we are trying to stop", gEnv->pRenderer->GetFrameID());
				}
				else if ((allowedTransitionFlags & (1<<eTT_DirectionChange)) && (distAfterMoveTarget >= transitions.GetMinDistanceAfterDirectionChange()))
				{
					// === PREDICTED DIRECTIONCHANGE ===
					// We want to change direction in the future
					// NOTE: This logic will fail if we trigger the juke really late, because then the distToMoveTarget will be very small and the angle calculation not precise
					m_transitionType = eTT_DirectionChange;
					m_bPredicted = true;
					m_transitionDistance = distToMoveTarget;
					m_jukeAngle = Ang3::CreateRadZ( vToMoveTarget, vAfterMoveTarget );
					m_future.vMoveDirection = vAfterMoveTarget.GetNormalized();
					const Vec3 realTargetBodyDirection = bHasLockedBodyTarget ? targetBodyDirection : m_future.vMoveDirection;
					m_future.qOrientation = Quat::CreateRotationVDir( realTargetBodyDirection );
					m_targetTravelAngle = Ang3::CreateRadZ( realTargetBodyDirection, m_future.vMoveDirection );
				}
			}
		}
	}
	else // if (newSample.pseudoSpeed <= 0.0f)
	{
		// Either:
		// - we are in a Stop and want to continue stopping
		// - we are moving and suddenly want to stop
		// - we are in a Start and want to stop <-- oldPseudoSpeed logic is wrong, oldPseudoSpeed will be 0 for a while so we should use real velocity
		// - we are stopped already and just want to stay stopped
		MovementTransitionsLog("[%x] Juke failed because we are not running or trying to stop", gEnv->pRenderer->GetFrameID());
		MovementTransitionsLog("[%x] Start failed because we are not requesting movement", gEnv->pRenderer->GetFrameID());

		if (
			(( (oldSample.pseudoSpeed == AISPEED_RUN) || (oldSample.pseudoSpeed == AISPEED_SPRINT)) && (runningDuration > minimumRunningDurationForRunStop) && (entitySpeed2D >= minimumSpeedForRunStop))
			||
			((oldSample.pseudoSpeed == AISPEED_WALK) && (entitySpeed2D >= minimumSpeedForWalkStop))
		)
		{
			if (allowedTransitionFlags & (1<<eTT_Stop))
			{
				// === IMMEDIATE STOP ===
				if( gEnv->pAISystem )
				{
					ISmartObjectManager* pSmartObjectManager = gEnv->pAISystem->GetSmartObjectManager();
					if (!pSmartObjectManager || !pSmartObjectManager->CheckSmartObjectStates(player.GetEntity(), "Busy"))
					{
						// Trigger immediate stop when currently running and suddenly wanting to stop.
						//
						// NOTE: If this happens right before a forbidden area and the safeLine is not correct
						//    or the Stop transition distance isn't configured properly the AI will enter it..
						//
						m_pseudoSpeed = oldSample.pseudoSpeed;
						m_transitionDistance = -1;
						m_arrivalAngle = 0.0f;
						m_transitionType = eTT_Stop;
						m_bPredicted = false;

						const STransition* pTransition = NULL;
						int index = -1;
						STransitionMatch bestMatch;
						transitions.FindBestMatch(*this, &pTransition, &index, &bestMatch);

						float minDistanceForStop = pTransition ? pTransition->minDistance : 0.0f;

						bool bIsOnSafeLine = IsOnSafeLine(safeLine, playerPos, newSample.moveDirection, minDistanceForStop);

						if (bIsOnSafeLine)
						{
							m_transitionDistance = minDistanceForStop;
							m_future.vMoveDirection = newSample.moveDirection;
							m_future.qOrientation = Quat::CreateRotationVDir(newSample.moveDirection);

							pMoveParams->desiredVelocity = player.GetEntity()->GetWorldRotation().GetInverted() * player.GetLastRequestedVelocity();

							float maxSpeed = player.GetStanceMaxSpeed(m_stance);
							if (maxSpeed > 0.01f)
								pMoveParams->desiredVelocity /= maxSpeed;
						}
						else
						{
							m_transitionType = eTT_None;
						}
					}
				}
			}
		}
	}

	if (request.HasDesiredSpeed())
	{
		m_future.speed = request.GetDesiredTargetSpeed();
	}
}
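The travel, juke and target angles above are all produced by Ang3::CreateRadZ on pairs of direction vectors. That call is not shown in this listing; assuming it returns the signed angle about the Z axis between the two directions, an equivalent standalone computation looks like this (the exact sign convention is an assumption):

#include <cmath>
#include <cstdio>

// Signed angle about the Z axis from direction a to direction b, in radians.
// Only the XY components matter for the travel/juke angles above.
static float SignedAngleZ(const float a[3], const float b[3]) {
    float cross = a[0] * b[1] - a[1] * b[0];   // z component of a x b
    float dot   = a[0] * b[0] + a[1] * b[1];
    return std::atan2(cross, dot);
}

int main() {
    const float fwd[3]   = { 0.0f, 1.0f, 0.0f };
    const float right[3] = { 1.0f, 0.0f, 0.0f };
    std::printf("angle = %.1f deg\n", SignedAngleZ(fwd, right) * 180.0f / 3.14159265f);
    return 0;
}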
    void PathPlanningWidget::insertRow(const tf::Transform& point_pos,const int count)
    {
      /*! Whenever a new Way-Point is inserted, either from RViz or from the RQT widget, the TreeView needs to update its information and insert a new row corresponding to the newly inserted point.
          This function takes care of parsing the data received from RViz or the RQT widget and creating a new row with the appropriate data format and children: one child for the position, giving the current position of the Way-Point on each axis,
          and one child for the orientation, giving the Euler angles about each axis.
      */

      ROS_INFO("inserting new row in the TreeView");
      QAbstractItemModel *model = ui_.treeView->model();

      //convert the quaternion to roll, pitch, yaw angles
      tf::Vector3 p = point_pos.getOrigin();
      tfScalar rx,ry,rz;
      point_pos.getBasis().getRPY(rx,ry,rz,1);

      if(count == 0)
      {
        model->insertRow(count,model->index(count, 0));

        model->setData(model->index(0,0,QModelIndex()),QVariant("add_point_button"),Qt::EditRole);
        pointRange();
      }
      else
      {
      //ROS_INFO_STREAM("Quaternion at add_row: "<<orientation.x()<<"; "<<orientation.y()<<"; "<<orientation.z()<<"; "<<orientation.w()<<";");

       if(!model->insertRow(count,model->index(count, 0)))  //&& count==0
       {
         return;
       }
      //set the strings of each axis of the position
      QString pos_x = QString::number(p.x());
      QString pos_y = QString::number(p.y());
      QString pos_z = QString::number(p.z());

      //repeat that with the orientation
      QString orient_x = QString::number(RAD2DEG(rx));
      QString orient_y = QString::number(RAD2DEG(ry));
      QString orient_z = QString::number(RAD2DEG(rz));

      model->setData(model->index(count,0),QVariant(count),Qt::EditRole);

      //add children to the last inserted item. First add children in the treeview that
      //just tell the user that expanding them shows details about the position and orientation of each point
      QModelIndex ind = model->index(count, 0);
      model->insertRows(0, 2, ind);
      QModelIndex chldind_pos = model->index(0, 0, ind);
      QModelIndex chldind_orient = model->index(1, 0, ind);
      model->setData(chldind_pos, QVariant("Position"), Qt::EditRole);
      model->setData(chldind_orient, QVariant("Orientation"), Qt::EditRole);
//*****************************Set the children for the position**********************************************************
      //now add information about each child separately. For the position we have coordinates for X,Y,Z axis.
      //therefore we add 3 rows of information
      model->insertRows(0, 3, chldind_pos);

      //next we set up the data for each of these columns. First the names
      model->setData(model->index(0, 0, chldind_pos), QVariant("X:"), Qt::EditRole);
      model->setData(model->index(1, 0, chldind_pos), QVariant("Y:"), Qt::EditRole);
      model->setData(model->index(2, 0, chldind_pos), QVariant("Z:"), Qt::EditRole);

      //second we add the current position information, for each position axis separately
      model->setData(model->index(0, 1, chldind_pos), QVariant(pos_x), Qt::EditRole);
      model->setData(model->index(1, 1, chldind_pos), QVariant(pos_y), Qt::EditRole);
      model->setData(model->index(2, 1, chldind_pos), QVariant(pos_z), Qt::EditRole);
//***************************************************************************************************************************

//*****************************Set the children for the orientation**********************************************************
      //now we repeat everything again,similar as the position for adding the children for the orientation
      model->insertRows(0, 3, chldind_orient);
      //next we set up the data for each of these columns. First the names
      model->setData(model->index(0, 0, chldind_orient), QVariant("Rx:"), Qt::EditRole);
      model->setData(model->index(1, 0, chldind_orient), QVariant("Ry:"), Qt::EditRole);
      model->setData(model->index(2, 0, chldind_orient), QVariant("Rz:"), Qt::EditRole);

      //second we add the current orientation information, for each rotation axis separately
      model->setData(model->index(0, 2, chldind_orient), QVariant(orient_x), Qt::EditRole);
      model->setData(model->index(1, 2, chldind_orient), QVariant(orient_y), Qt::EditRole);
      model->setData(model->index(2, 2, chldind_orient), QVariant(orient_z), Qt::EditRole);
//****************************************************************************************************************************
      pointRange();
    }

    }
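The row contents above come from tf's getRPY() followed by RAD2DEG for display. A standalone quaternion-to-roll/pitch/yaw conversion (ZYX convention) gives roughly the same numbers; tf's own convention handling (the solution_number argument) is not reproduced in this sketch:

#include <cmath>
#include <cstdio>

int main() {
    const double kPi = 3.14159265358979323846;
    double w = 0.9659, x = 0.2588, y = 0.0, z = 0.0;   // quaternion for ~30 deg roll about X

    // Standard quaternion -> Euler (roll about X, pitch about Y, yaw about Z).
    double roll  = std::atan2(2.0 * (w * x + y * z), 1.0 - 2.0 * (x * x + y * y));
    double pitch = std::asin (2.0 * (w * y - z * x));
    double yaw   = std::atan2(2.0 * (w * z + x * y), 1.0 - 2.0 * (y * y + z * z));

    std::printf("rpy = %.1f %.1f %.1f deg\n",
                roll * 180.0 / kPi, pitch * 180.0 / kPi, yaw * 180.0 / kPi);
    return 0;
}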
Example #5
void ThresholdTwists(int num_images, ModelMap &models, 
                     std::vector<ImageData> &image_data, bool panos_only) 
{
    int *num_large_twists = new int[num_images];
    int *degree = new int[num_images];

    for (int i = 0; i < num_images; i++) {
        num_large_twists[i] = 0;
        degree[i] = 0;
    }

    for (int i = 0; i < num_images; i++) {
        ModelTable::iterator iter;
        for (iter = models.Begin(i); iter != models.End(i); iter++) {
            unsigned int j = iter->first; // iter->m_index;
            
            if (i >= j)
                continue;
            
            MatchIndex idx = GetMatchIndex(i, j);
            if (models.Contains(idx)) {
                TwoFrameModel &m = models.GetModel(idx);
                
                /* Compute the twist */
                double Rp_i[9], Rp_j[9];
                matrix_transpose(3, 3, m.m_camera0.R, Rp_i);
                matrix_transpose(3, 3, m.m_camera1.R, Rp_j);
                
                double Rp_ij[9];
                matrix_transpose_product(3, 3, 3, 3, Rp_i, Rp_j, Rp_ij);
                
                double twist_angle = GetTwist(Rp_ij);
                
                if (fabs(RAD2DEG(twist_angle)) >= 12.0) { 
                    num_large_twists[i]++;
                    num_large_twists[j]++;
                }
                
                degree[i]++;
                degree[j]++;
            }
        }
    }
    
    for (int i = 0; i < num_images; i++) {
        if (degree[i] == 0)
            continue;

        double perc_large_twists = (double) num_large_twists[i] / degree[i];
        
        int w = image_data[i].GetWidth();
        int h = image_data[i].GetHeight();
        
        double ratio = (double) w / h;

        if ((panos_only || perc_large_twists < 0.4) && 
             ratio > 0.4 && ratio < 2.5) {
            continue;
        }

        printf("[ThresholdTwists] Removing image %d with score %0.3f, %0.3f\n",
               i, perc_large_twists, ratio);

        std::list<unsigned int> nbrs = models.GetNeighbors(i);
        std::list<unsigned int>::iterator iter;
        for (iter = nbrs.begin(); iter != nbrs.end(); iter++) {
            unsigned int j = *iter; // iter->m_index;
            
            if (i < j) {            
                MatchIndex idx = GetMatchIndex(i, j);
                if (models.Contains(idx)) {
                    models.RemoveModel(idx);
                }
            } else {
                MatchIndex idx = GetMatchIndex(j, i);
                if (models.Contains(idx)) {
                    models.RemoveModel(idx);
                }
            }
        }
    }
}
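GetTwist() is not included in this listing. A generic way to measure the size of a relative rotation such as Rp_ij is the trace formula theta = acos((trace(R) - 1) / 2); the sketch below shows that measure, which may differ from the specific twist component GetTwist extracts:

#include <cmath>
#include <cstdio>

// Rotation angle of a 3x3 rotation matrix R (row-major) via its trace.
static double RotationAngle(const double R[9]) {
    double c = (R[0] + R[4] + R[8] - 1.0) / 2.0;
    if (c > 1.0) c = 1.0;          // clamp against round-off
    if (c < -1.0) c = -1.0;
    return std::acos(c);
}

int main() {
    const double kPi = 3.14159265358979323846;
    double a = 12.0 * kPi / 180.0;                 // a 12-degree rotation about Z
    double R[9] = { std::cos(a), -std::sin(a), 0.0,
                    std::sin(a),  std::cos(a), 0.0,
                    0.0,          0.0,         1.0 };
    std::printf("angle = %.2f deg\n", RotationAngle(R) * 180.0 / kPi);
    return 0;
}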
ObjectPosition CNR_7DOFAnalyticInverseKinematicsComp::GetCurrentPosition()
{
	ObjectPosition objectPosition;

	if(m_joint.size() != 7)
	{
		return objectPosition;
	}

	struct link
	{
		double theta, a, d, alpha;
	};

	link lnk[7];
	//1
	lnk[0].d = 0;
	lnk[0].a = 0;
	lnk[0].alpha = -M_PI_2;
	//2
	lnk[1].d = 0;
	lnk[1].a = 0;
	lnk[1].alpha = -M_PI_2;
	//3
	lnk[2].d = atof(parameter.GetValue("UpperArmLength").c_str());	//upper arm length
	lnk[2].a = 0;
	lnk[2].alpha = -M_PI_2;
	//4
	lnk[3].d = 0;
	lnk[3].a = 0;
	lnk[3].alpha = M_PI_2;
	//5
	lnk[4].d = atof(parameter.GetValue("LowerArmLength").c_str());	//lower arm length
	lnk[4].a = 0;
	lnk[4].alpha = -M_PI_2;
	//6
	lnk[5].d = 0;
	lnk[5].a = 0;
	lnk[5].alpha = M_PI_2;
	//7
	lnk[6].d = atof(parameter.GetValue("ToolLength").c_str());		//gripper (hand) length
	lnk[6].a = 0;
	lnk[6].alpha = 0;


	MSLMatrix NowRot(3,3);
	MSLVector NowPos(3);

	MSLMatrix Rot(3,3);
	MSLVector Pos(3);

	double ct,st,ca,sa;
	int i;


	lnk[0].theta = DEG2RAD(m_joint[0]); // + lnk[0].joint_offset;
	ct = cos(lnk[0].theta);
	st = sin(lnk[0].theta);
	ca = cos(lnk[0].alpha);
	sa = sin(lnk[0].alpha);

	Rot(0,0) =  ct;
	Rot(1,0) =  st;
	Rot(2,0) =  0.0;
	Rot(0,1) = -ca*st;
	Rot(1,1) =  ca*ct;
	Rot(2,1) =  sa;
	Rot(0,2) =  sa*st;
	Rot(1,2) = -sa*ct;
	Rot(2,2) =  ca;
	Pos[0]   =  lnk[0].a * ct;
	Pos[1]   =  lnk[0].a * st;
	Pos[2]   =  lnk[0].d;


	for(i = 1 ; i < 7 ; i++)
	{	

		lnk[i].theta = DEG2RAD(m_joint[i]);//+ lnk[i].joint_offset;
		ct = cos(lnk[i].theta);
		st = sin(lnk[i].theta);
		ca = cos(lnk[i].alpha);
		sa = sin(lnk[i].alpha);

		NowRot(0,0) =  ct;
		NowRot(1,0) =  st;
		NowRot(2,0) =  0.0;
		NowRot(0,1) = -ca*st;
		NowRot(1,1) =  ca*ct;
		NowRot(2,1) =  sa;
		NowRot(0,2) =  sa*st;
		NowRot(1,2) = -sa*ct;
		NowRot(2,2) =  ca;
		NowPos[0]    =  lnk[i].a * ct;
		NowPos[1]    =  lnk[i].a * st;
		NowPos[2]    =  lnk[i].d;

		Pos = Pos + Rot*NowPos;
		Rot = Rot*NowRot;
	}


	objectPosition.x = Pos[0];
	objectPosition.y = Pos[1];
	objectPosition.z = Pos[2];


	//extract the Euler ZYX (Z rotation -> Y rotation -> X rotation) angles from the orientation
	if (Rot(2,0)==1) {
		objectPosition.roll = RAD2DEG(atan2(-Rot(0,1),-Rot(0,2)));	//alpha (roll angle about X)
		objectPosition.pitch = -90.;	//-PI/2;						//beta (pitch angle about Y)
		objectPosition.yaw = 0.0;						//gamma (yaw angle about Z)
	} else if (Rot(2,0)==-1) {
		objectPosition.roll  = RAD2DEG(atan2(Rot(0,1),Rot(0,2)));
		objectPosition.pitch = 90.;	//PI/2;
		objectPosition.yaw = 0.0;
	} else {
		objectPosition.roll = RAD2DEG(atan2(Rot(2,1), Rot(2,2)));
		objectPosition.pitch = RAD2DEG(atan2(-Rot(2,0), sqrt(Rot(0,0)*Rot(0,0) + Rot(1,0)*Rot(1,0))));
		objectPosition.yaw = RAD2DEG(atan2(Rot(1,0), Rot(0,0)));
	}

	return objectPosition;
}
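For reference, the Euler ZYX extraction at the end of the function, written as a standalone helper on a plain 3x3 array; the two special branches mirror the gimbal-lock cases above where R(2,0) is exactly ±1:

#include <cmath>
#include <cstdio>

// Extract ZYX Euler angles (roll about X, pitch about Y, yaw about Z) from a
// 3x3 rotation matrix, in degrees, using the same formulas as the code above.
static void RotToRPY(const double R[3][3], double& roll, double& pitch, double& yaw) {
    const double kRad2Deg = 180.0 / 3.14159265358979323846;
    if (R[2][0] == 1.0) {                        // gimbal lock, pitch = -90 deg
        roll  = std::atan2(-R[0][1], -R[0][2]) * kRad2Deg;
        pitch = -90.0;
        yaw   = 0.0;
    } else if (R[2][0] == -1.0) {                // gimbal lock, pitch = +90 deg
        roll  = std::atan2(R[0][1], R[0][2]) * kRad2Deg;
        pitch = 90.0;
        yaw   = 0.0;
    } else {
        roll  = std::atan2(R[2][1], R[2][2]) * kRad2Deg;
        pitch = std::atan2(-R[2][0], std::sqrt(R[0][0] * R[0][0] + R[1][0] * R[1][0])) * kRad2Deg;
        yaw   = std::atan2(R[1][0], R[0][0]) * kRad2Deg;
    }
}

int main() {
    const double ident[3][3] = { {1, 0, 0}, {0, 1, 0}, {0, 0, 1} };
    double r, p, y;
    RotToRPY(ident, r, p, y);
    std::printf("rpy = %.1f %.1f %.1f deg\n", r, p, y);
    return 0;
}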
Example #7
// Collision response for walls.
// TODO: Separate common code with car-car collision response.
static void SimCarWallCollideResponse(void *clientdata, DtObjectRef obj1, DtObjectRef obj2, const DtCollData *collData)
{
	tCar* car;		// The car colliding with the wall.
	float nsign;	// Normal direction correction for collision plane.
	sgVec2 p;		// Cars collision point delivered by solid.

	// TODO: If other movable objects are added which could collide with the wall, it will be
	// necessary to validate if the object is actually a car.

	if (obj1 == clientdata) {
		car = (tCar*) obj2;
		nsign = -1.0f;
		p[0] = (float) collData->point2[0];
		p[1] = (float) collData->point2[1];
	} else {
		car = (tCar*) obj1;
		nsign = 1.0f;
		p[0] = (float) collData->point1[0];
		p[1] = (float) collData->point1[1];
	}

	sgVec2 n;		// Collision normal delivered by solid, corrected such that it points away from the wall.
	n[0] = nsign * (float) collData->normal[0];
	n[1] = nsign * (float) collData->normal[1];
	float pdist = sgLengthVec2(n);	// Distance of collision points.
	sgNormaliseVec2(n);

	sgVec2 r;
	sgSubVec2(r, p, (const float*)&(car->statGC));

	tCarElt *carElt = car->carElt;

	sgVec2 vp;		// Speed of car collision point in global frame of reference.
	sgVec2 rg;		// radius oriented in global coordinates, still relative to CG (rotated around CG).

	float sina = sin(carElt->_yaw);
	float cosa = cos(carElt->_yaw);
	rg[0] = r[0]*cosa - r[1]*sina;
	rg[1] = r[0]*sina + r[1]*cosa;

	vp[0] = car->DynGCg.vel.x - car->DynGCg.vel.az * rg[1];
	vp[1] = car->DynGCg.vel.y + car->DynGCg.vel.az * rg[0];

	sgVec2 tmpv;
	static const float CAR_MIN_MOVEMENT = 0.02f;
	static const float CAR_MAX_MOVEMENT = 0.05f;
	sgScaleVec2(tmpv, n, MIN(MAX(pdist, CAR_MIN_MOVEMENT), CAR_MAX_MOVEMENT));
	if (car->blocked == 0) {
		sgAddVec2((float*)&(car->DynGCg.pos), tmpv);
		car->blocked = 1;
	}

	// Apply no damage and no correction if the objects are moving away from each other.
	if (sgScalarProductVec2(vp, n) > 0) {
		return;
	}

	float rp = sgScalarProductVec2(rg, n);

	// Pseudo cross product to find out if we are left or right.
	// TODO: SIGN, scrap value?
	float rpsign = n[0]*rg[1] - n[1]*rg[0];

	const float e = 1.0f;	// energy restitution
	float j = -(1.0f + e) * sgScalarProductVec2(vp, n) / (car->Minv + rp * rp * car->Iinv.z);
	const float ROT_K = 0.5f;

	// Damage.
	tdble damFactor, atmp;
	atmp = atan2(r[1], r[0]);
	if (fabs(atmp) < (PI / 3.0)) {
		// Front collision gives more damage.
		damFactor = 1.5f;
	} else {
		// Rear collision gives less damage.
		damFactor = 1.0f;
	}

	static const float DMGFACTOR = 0.00002f;
	if ((car->carElt->_state & RM_CAR_STATE_FINISH) == 0) {
		car->dammage += (int)(CAR_DAMMAGE * (DMGFACTOR*j*j) * damFactor * simDammageFactor[car->carElt->_skillLevel]);
	}

	sgScaleVec2(tmpv, n, j * car->Minv);
	sgVec2 v2a;

	if (car->collision & SEM_COLLISION_CAR) {
		sgAddVec2(v2a, (const float*)&(car->VelColl.x), tmpv);
		car->VelColl.az = car->VelColl.az + j * rp * rpsign * car->Iinv.z * ROT_K;
	} else {
		sgAddVec2(v2a, (const float*)&(car->DynGCg.vel), tmpv);
		car->VelColl.az = car->DynGCg.vel.az + j * rp * rpsign * car->Iinv.z * ROT_K;
	}

	static float VELMAX = 3.0f;
	if (fabs(car->VelColl.az) > VELMAX) {
		car->VelColl.az = SIGN(car->VelColl.az) * VELMAX;
	}

	sgCopyVec2((float*)&(car->VelColl.x), v2a);

	// Move the car for the collision lib.
	sgMakeCoordMat4(carElt->pub.posMat, car->DynGCg.pos.x, car->DynGCg.pos.y,
					car->DynGCg.pos.z - carElt->_statGC_z, RAD2DEG(carElt->_yaw),
					RAD2DEG(carElt->_roll), RAD2DEG(carElt->_pitch));
	dtSelectObject(car);
	dtLoadIdentity();
	dtTranslate(-carElt->_statGC_x, -carElt->_statGC_y, 0.0f);
	dtMultMatrixf((const float *)(carElt->_posMat));

	car->collision |= SEM_COLLISION_CAR;
}
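The core of the response above is the scalar impulse j = -(1 + e)(vp·n) / (1/m + rp²/Iz). A small sketch of that formula with made-up numbers, independent of the simulation's data structures:

#include <cstdio>

int main() {
    const float e    = 1.0f;                 // restitution (same value as in the code above)
    const float Minv = 1.0f / 1000.0f;       // inverse mass (1/kg), placeholder
    const float Iinv = 1.0f / 1500.0f;       // inverse yaw inertia, placeholder
    const float vpn  = -4.0f;                // contact-point speed along n (m/s, into the wall)
    const float rp   = 0.8f;                 // lever arm projected onto n (m)

    // Scalar impulse along the collision normal.
    float j = -(1.0f + e) * vpn / (Minv + rp * rp * Iinv);
    std::printf("impulse j = %.1f N*s\n", j);
    // The linear velocity then changes by j * Minv along n, and the yaw rate by
    // j * rp * Iinv (scaled by ROT_K in the code above).
    return 0;
}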
void CCameraOffsetMgr::Update()
{
	// Testing only...
	ProcessTestingVars();
	// End Testing only...


	// Reset offsets...

	m_vPitchYawRollDelta.Init();
	m_vPosDelta.Init();

    float fTimeDelta = g_pGameClientShell->GetFrameTime();

    int i;
    for (i=0; i < MAX_CAMERA_DELTAS; i++)
	{
		if (m_CameraDeltas[i].GetTotalDelta() > 0.0f)
		{
			m_CameraDeltas[i].Pitch.UpdateVar(fTimeDelta);
			m_vPitchYawRollDelta.x += m_CameraDeltas[i].Pitch.GetValue();

			m_CameraDeltas[i].Yaw.UpdateVar(fTimeDelta);
			m_vPitchYawRollDelta.y += m_CameraDeltas[i].Yaw.GetValue();

			m_CameraDeltas[i].Roll.UpdateVar(fTimeDelta);
			m_vPitchYawRollDelta.z += m_CameraDeltas[i].Roll.GetValue();

			m_CameraDeltas[i].PosX.UpdateVar(fTimeDelta);
			m_vPosDelta.x += m_CameraDeltas[i].PosX.GetValue();

			m_CameraDeltas[i].PosY.UpdateVar(fTimeDelta);
			m_vPosDelta.y += m_CameraDeltas[i].PosY.GetValue();

			m_CameraDeltas[i].PosZ.UpdateVar(fTimeDelta);
			m_vPosDelta.z += m_CameraDeltas[i].PosZ.GetValue();
		}
	}

	for (i=0; i < MAX_STATIC_CAMERA_DELTAS; i++)
	{
		if (m_StaticCameraDeltas[i].GetTotalDelta() > 0.0f)
		{
			m_StaticCameraDeltas[i].Pitch.UpdateVar(fTimeDelta);
			m_vPitchYawRollDelta.x += m_StaticCameraDeltas[i].Pitch.GetValue();

			m_StaticCameraDeltas[i].Yaw.UpdateVar(fTimeDelta);
			m_vPitchYawRollDelta.y += m_StaticCameraDeltas[i].Yaw.GetValue();

			m_StaticCameraDeltas[i].Roll.UpdateVar(fTimeDelta);
			m_vPitchYawRollDelta.z += m_StaticCameraDeltas[i].Roll.GetValue();

			m_StaticCameraDeltas[i].PosX.UpdateVar(fTimeDelta);
			m_vPosDelta.x += m_StaticCameraDeltas[i].PosX.GetValue();

			m_StaticCameraDeltas[i].PosY.UpdateVar(fTimeDelta);
			m_vPosDelta.y += m_StaticCameraDeltas[i].PosY.GetValue();

			m_StaticCameraDeltas[i].PosZ.UpdateVar(fTimeDelta);
			m_vPosDelta.z += m_StaticCameraDeltas[i].PosZ.GetValue();
		}
	}

	ValidateDeltas();


	// Print out our current values...

	if (g_vtCamInfo.GetFloat())
	{
		if (m_vPitchYawRollDelta.x != 0.0f)
			g_pLTClient->CPrint("COM Pitch = %.4f (in Deg = %.2f)", m_vPitchYawRollDelta.x, RAD2DEG(m_vPitchYawRollDelta.x));
		if (m_vPitchYawRollDelta.y != 0.0f)
			g_pLTClient->CPrint("COM Yaw   = %.4f (in Deg = %.2f)", m_vPitchYawRollDelta.y, RAD2DEG(m_vPitchYawRollDelta.y));
		if (m_vPitchYawRollDelta.z != 0.0f)
			g_pLTClient->CPrint("COM Roll  = %.4f (in Deg = %.2f)", m_vPitchYawRollDelta.z, RAD2DEG(m_vPitchYawRollDelta.z));

		if (m_vPosDelta.x != 0.0f)
			g_pLTClient->CPrint("COM Offset X = %.2f", m_vPosDelta.x);
		if (m_vPosDelta.y != 0.0f)
			g_pLTClient->CPrint("COM Offset Y = %.2f", m_vPosDelta.y);
		if (m_vPosDelta.z != 0.0f)
			g_pLTClient->CPrint("COM Offset Z = %.2f", m_vPosDelta.z);
	}
}
Example #9
void OVR_CalculateState(vr_param_t *state)
{
	vr_param_t ovrState;
	float ovrScale = vr_ovr_supersample->value;
	int eye = 0;

	for (eye = 0; eye < 2; eye++) {
		ovrDistortionMesh meshData;
		ovr_vert_t *mesh = NULL;
		ovr_vert_t *v = NULL;
		ovrDistortionVertex *ov = NULL;
		unsigned int i = 0;
		float vignette_factor;
		if (vr_ovr_maxfov->value)
		{
			renderInfo[eye].eyeFov = hmd->MaxEyeFov[eye];
		} else
		{
			renderInfo[eye].eyeFov = hmd->DefaultEyeFov[eye];
		}

		ovrState.eyeFBO[eye] = &renderInfo[eye].eyeFBO;

		ovrState.renderParams[eye].projection.x.scale = 2.0f / ( renderInfo[eye].eyeFov.LeftTan + renderInfo[eye].eyeFov.RightTan );
		ovrState.renderParams[eye].projection.x.offset = ( renderInfo[eye].eyeFov.LeftTan - renderInfo[eye].eyeFov.RightTan ) * ovrState.renderParams[eye].projection.x.scale * 0.5f;
		ovrState.renderParams[eye].projection.y.scale = 2.0f / ( renderInfo[eye].eyeFov.UpTan + renderInfo[eye].eyeFov.DownTan );
		ovrState.renderParams[eye].projection.y.offset = ( renderInfo[eye].eyeFov.UpTan - renderInfo[eye].eyeFov.DownTan ) * ovrState.renderParams[eye].projection.y.scale * 0.5f;

		// set up rendering info
		eyeDesc[eye] = ovrHmd_GetRenderDesc(hmd,(ovrEyeType) eye,renderInfo[eye].eyeFov);

		VectorSet(ovrState.renderParams[eye].viewOffset,
			-eyeDesc[eye].HmdToEyeViewOffset.x,
			eyeDesc[eye].HmdToEyeViewOffset.y,
			eyeDesc[eye].HmdToEyeViewOffset.z);

		ovrHmd_CreateDistortionMesh(hmd, eyeDesc[eye].Eye, eyeDesc[eye].Fov, ovrDistortionCap_Chromatic | ovrDistortionCap_SRGB | ovrDistortionCap_TimeWarp | ovrDistortionCap_Vignette, &meshData);

		mesh = (ovr_vert_t *) Z_TagMalloc(sizeof(ovr_vert_t) * meshData.VertexCount, TAG_RENDERER);
		v = mesh;
		ov = meshData.pVertexData; 
		for (i = 0; i < meshData.VertexCount; i++)
		{

			// DK2 display not rotated - rotate the coordinates manually
			if (vid.width < vid.height) {
				v->pos.x = -ov->ScreenPosNDC.y;
				v->pos.y = ov->ScreenPosNDC.x;
			} else {
				v->pos.x = ov->ScreenPosNDC.x;
				v->pos.y = ov->ScreenPosNDC.y;
			}

			v->texR = (*(ovrVector2f*)&ov->TanEyeAnglesR); 
			v->texG = (*(ovrVector2f*)&ov->TanEyeAnglesG);
			v->texB = (*(ovrVector2f*)&ov->TanEyeAnglesB); 
			vignette_factor = ov->VignetteFactor;
			if (vignette_factor < 0) vignette_factor = 0;
			v->color[0] = v->color[1] = v->color[2] = (GLubyte)(vignette_factor  * 255.99f);
			v->color[3] = (GLubyte)( ov->TimeWarpFactor * 255.99f );
			v++; ov++;
		}

		R_BindIVBO(&renderInfo[eye].eye,NULL,0);
		R_VertexData(&renderInfo[eye].eye,sizeof(ovr_vert_t) * meshData.VertexCount, mesh);
		R_IndexData(&renderInfo[eye].eye,GL_TRIANGLES,GL_UNSIGNED_SHORT,meshData.IndexCount,sizeof(uint16_t) * meshData.IndexCount,meshData.pIndexData);
		R_ReleaseIVBO();
		Z_Free(mesh);
		ovrHmd_DestroyDistortionMesh( &meshData );
	}
	{
		// calculate this to give the engine a rough idea of the fov
		float combinedTanHalfFovHorizontal = max ( max ( renderInfo[0].eyeFov.LeftTan, renderInfo[0].eyeFov.RightTan ), max ( renderInfo[1].eyeFov.LeftTan, renderInfo[1].eyeFov.RightTan ) );
		float combinedTanHalfFovVertical = max ( max ( renderInfo[0].eyeFov.UpTan, renderInfo[0].eyeFov.DownTan ), max ( renderInfo[1].eyeFov.UpTan, renderInfo[1].eyeFov.DownTan ) );
		float horizontalFullFovInRadians = 2.0f * atanf ( combinedTanHalfFovHorizontal ); 
		float fovX = RAD2DEG(horizontalFullFovInRadians);
		float fovY = RAD2DEG(2.0 * atanf(combinedTanHalfFovVertical));
		ovrState.aspect = combinedTanHalfFovHorizontal / combinedTanHalfFovVertical;
		ovrState.viewFovY = fovY;
		ovrState.viewFovX = fovX;
		ovrState.pixelScale = ovrScale * vid.width / (float) hmd->Resolution.w;
	}

	*state = ovrState;
}
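The block at the end recovers the display field of view from half-FOV tangents: fov = 2 * atan(tanHalfFov), converted to degrees. The same arithmetic in isolation, with illustrative tangent values:

#include <cmath>
#include <cstdio>

int main() {
    const double kPi = 3.14159265358979323846;
    double tanHalfH = 1.33;                 // combined horizontal half-FOV tangent (placeholder)
    double tanHalfV = 1.25;                 // combined vertical half-FOV tangent (placeholder)

    double fovX = 2.0 * std::atan(tanHalfH) * 180.0 / kPi;
    double fovY = 2.0 * std::atan(tanHalfV) * 180.0 / kPi;
    double aspect = tanHalfH / tanHalfV;    // tangent ratio, as used for ovrState.aspect above
    std::printf("fovX = %.1f deg, fovY = %.1f deg, aspect = %.2f\n", fovX, fovY, aspect);
    return 0;
}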
Example #10
//-----------------------------------------------------------------------------
bool CAI_PlaneSolver::GenerateCircleObstacleSuggestions( const AILocalMoveGoal_t &moveGoal, float probeDist )
{
	bool result = false;
	Vector npcLoc = m_pNpc->WorldSpaceCenter();
	Vector mins, maxs;

	m_pNpc->CollisionProp()->WorldSpaceSurroundingBounds( &mins, &maxs );
	float radiusNpc = (mins.AsVector2D() - maxs.AsVector2D()).Length() * 0.5;
	
	for ( int i = 0; i < m_Obstacles.Count(); i++ )
	{
		CBaseEntity *pObstacleEntity = NULL;

		float zDistTooFar;
		if ( m_Obstacles[i].hEntity && m_Obstacles[i].hEntity->CollisionProp() )
		{
			pObstacleEntity = m_Obstacles[i].hEntity.Get();

			if( pObstacleEntity == moveGoal.pMoveTarget && (pObstacleEntity->IsNPC() || pObstacleEntity->IsPlayer()) )
			{
				// HEY! I'm trying to avoid the very thing I'm trying to get to. This will make me wobble like a drunk as I approach. Don't do it.
				continue;
			}

			pObstacleEntity->CollisionProp()->WorldSpaceSurroundingBounds( &mins, &maxs );
			zDistTooFar = ( maxs.z - mins.z ) * 0.5 + GetNpc()->GetHullHeight() * 0.5;
		}
		else
			zDistTooFar = GetNpc()->GetHullHeight();
			
		if ( fabs( m_Obstacles[i].center.z - npcLoc.z ) > zDistTooFar )
			continue;

		Vector vecToNpc 		= npcLoc - m_Obstacles[i].center;
		vecToNpc.z = 0;
		float distToObstacleSq 	= sq(vecToNpc.x) + sq(vecToNpc.y);
		float radius = m_Obstacles[i].radius + radiusNpc;

		if ( distToObstacleSq > 0.001 && distToObstacleSq < sq( radius + probeDist ) )
		{
			Vector vecToObstacle = vecToNpc * -1;
			float distToObstacle = VectorNormalize( vecToObstacle );
			float weight;
			float arc;
			float radiusSq = sq(radius);

			float flDot = DotProduct( vecToObstacle, moveGoal.dir );

			// Don't steer around to avoid obstacles we've already passed, unless we're right up against them.
			// That is, do this computation without the probeDist added in.
			if( flDot < 0.0f && distToObstacleSq > radiusSq )
			{
				continue;
			}

			if ( radiusSq < distToObstacleSq )
			{
				Vector vecTangent;
				float distToTangent = FastSqrt( distToObstacleSq - radiusSq );

				float oneOverDistToObstacleSq = 1 / distToObstacleSq;

				vecTangent.x = ( -distToTangent * vecToNpc.x + radius * vecToNpc.y ) * oneOverDistToObstacleSq;
				vecTangent.y = ( -distToTangent * vecToNpc.y - radius * vecToNpc.x ) * oneOverDistToObstacleSq;
				vecTangent.z = 0;

				float cosHalfArc = vecToObstacle.Dot( vecTangent );
				arc = RAD2DEG(acosf( cosHalfArc )) * 2.0;
				weight = 1.0 - (distToObstacle - radius) / probeDist;
				if ( weight > 0.75 )
					arc += (arc * 0.5) * (weight - 0.75) / 0.25;
				
				Assert( weight >= 0.0 && weight <= 1.0 );

#if DEBUG_OBSTACLES
				// -------------------------
				Msg( "Adding arc %f, w %f\n", arc, weight );

				Vector pointTangent = npcLoc + ( vecTangent * distToTangent );
					
				NDebugOverlay::Line( npcLoc - Vector( 0, 0, 64 ), npcLoc + Vector(0,0,64), 0,255,0, false, 0.1 );
				NDebugOverlay::Line( center - Vector( 0, 0, 64 ), center + Vector(0,0,64), 0,255,0, false, 0.1 );
				NDebugOverlay::Line( pointTangent - Vector( 0, 0, 64 ), pointTangent + Vector(0,0,64), 0,255,0, false, 0.1 );
				
				NDebugOverlay::Line( npcLoc + Vector(0,0,64), center + Vector(0,0,64), 0,0,255, false, 0.1 );
				NDebugOverlay::Line( center + Vector(0,0,64), pointTangent + Vector(0,0,64), 0,0,255, false, 0.1 );
				NDebugOverlay::Line( pointTangent + Vector(0,0,64), npcLoc + Vector(0,0,64), 0,0,255, false, 0.1 );
#endif
			}
			else
			{
				arc = 210;
				weight = 1.0;
			}

			if ( m_Obstacles[i].hEntity != NULL )
			{
				weight = AdjustRegulationWeight( m_Obstacles[i].hEntity, weight );
			}
			
			AI_MoveSuggestion_t suggestion( m_Obstacles[i].type, weight, UTIL_VecToYaw(vecToObstacle), arc );
			m_Solver.AddRegulation( suggestion );
			result = true;
		}
	}
	
	m_Obstacles.RemoveAll();
	return result;

}
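The arc computed above is the angular span of the obstacle circle as seen from the NPC; the tangent construction in the code reduces to arc = 2 * asin(radius / distance). A small sketch of that geometry with illustrative numbers:

#include <cmath>
#include <cstdio>

int main() {
    const double kPi = 3.14159265358979323846;
    double r = 30.0;                          // obstacle radius + NPC radius (placeholder units)
    double d = 80.0;                          // distance from NPC to obstacle center

    // Tangent lines from the NPC to the circle subtend a half angle with sin(half) = r / d.
    double arc = 2.0 * std::asin(r / d) * 180.0 / kPi;
    std::printf("blocked arc = %.1f deg\n", arc);
    // The code above widens this arc by up to 50% when the NPC gets very close,
    // and falls back to a fixed 210-degree arc once inside the combined radius.
    return 0;
}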
//TODO: Override it from the base class
bool calculateDirection(const sensor_msgs::LaserScan::ConstPtr& scan, float& finalDirection) {
	float nearestDistance = MAX_DIST;

	//remember the position in the array that the best measurement refers to
	int nearestPosition;
	
	float direction;
	
	//number of measurements
	int count = scan->scan_time / scan->time_increment;

	
	//calculate nearest direction
	for(int i = 0; i < count; i++) {
		float distance = scan->ranges[i];
		if ((distance < nearestDistance) && (distance > MIN_DIST)) {
			nearestDistance = distance;
			direction = scan->angle_min + scan->angle_increment * i;
			nearestPosition = i;
		}
	}
	
	int pi_8 = count / 8;  // 1/8 of the full angle - 45 degrees
	
	//calculating direction for avoiding an obstacle
	if (nearestDistance < MAX_DIST) {
		float distanceLeft;
		float distanceRight;
		
		//if no value exceeds SAFE_MOVE, choose the best position
		float bestDirection;
		
		// if any direction is better it will move in that direction
		float bestDistance = nearestDistance;
		
		//all directions except +/- 180 degrees 
		for (int i = 1; i < 4; i++) { 
			int positionLeft = nearestPosition - i * pi_8;
			if(positionLeft < 0) 
				positionLeft = nearestPosition + (8-i)*pi_8;
			
			distanceLeft = scan->ranges[positionLeft];
			
			int positionRight = nearestPosition + i * pi_8;
			if(positionRight >= count)
				positionRight = nearestPosition - (8-i)*pi_8;
				
			distanceRight = scan->ranges[positionRight];
			
			//this is the best choice and return from loop
			// if distance in this direction is greater than safe move
			if (distanceLeft >= nearestDistance + SAFE_MOVE || distanceRight >= nearestDistance + SAFE_MOVE) {
				if (distanceLeft > distanceRight) {
					finalDirection = scan->angle_min + scan->angle_increment * positionLeft;
				}
				else {
					finalDirection = scan->angle_min + scan->angle_increment * positionRight;
				}
				ROS_INFO("move in direction %f, nearest point %f", RAD2DEG(finalDirection), nearestDistance);
				return true;
			}
			else {
				if (distanceLeft > bestDistance) {
					bestDistance = distanceLeft;
					bestDirection =  scan->angle_min + scan->angle_increment * positionLeft;
				}
				if (distanceRight > bestDistance) {
					bestDistance = distanceRight;
					bestDirection = scan->angle_min + scan->angle_increment * positionRight;
				}
			} 
		}
		
		float distance;
		int position;
		position = nearestPosition - 4 * pi_8;
		if(position < 0)
			position = nearestPosition + 4 * pi_8;
			
		distance = scan->ranges[position];
		if (distance > bestDistance) {
			bestDistance = distance;
			bestDirection = scan->angle_min + scan->angle_increment * position;
		}
		
		finalDirection = bestDirection;
		
		ROS_INFO("move in direction %f, nearest point %f", RAD2DEG(finalDirection), nearestDistance);
		return true;
	}
	else {
		ROS_INFO("stable");
		return false;
	}
}
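The scan bookkeeping above maps beam indices to angles with angle_min + i * angle_increment and treats count / 8 beams as a 45-degree step. A minimal sketch of that mapping, with placeholder values instead of the sensor_msgs::LaserScan fields:

#include <cmath>
#include <cstdio>

int main() {
    const double kPi = 3.14159265358979323846;
    double angle_min = -kPi;                   // angle of the first beam (rad)
    double angle_increment = kPi / 180.0;      // 1 degree per beam (placeholder)
    int count = 360;                           // number of beams in the scan

    int pi_8 = count / 8;                      // 45 degrees worth of beams
    int i = 200;                               // some beam index
    double direction = angle_min + angle_increment * i;
    std::printf("beam %d -> %.1f deg, 45-degree step = %d beams\n",
                i, direction * 180.0 / kPi, pi_8);
    return 0;
}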
Example #12
static void prvComputeTask(void* pvParameters)
{
  xCANMsg msg;
  accelero_values_t accelero_values;
  double gyroscope_y = 0;
  double accelero_x = 0;
  double accelero_z = 0;
  portTickType time = xTaskGetTickCount();
  int i = 0;
  static char buf[80];
  static double previous_angle = 0;
  double d = 0;
  int iBtMsgCountdown = 10;

  uart_puts("\r\nSensorboard started...\r\n");
  uart_puts("Processor UID: ");
  uart_puts(itoa(UID, buf));
  uart_puts("\r\n");
  uart_puts("Version: ");
  uart_puts(version);
  uart_puts("\r\n");

  for (;;)
    {
      if (i++ == 100)
        {
          vLEDToggle(LED_YELLOW);
          i = 0;
        }

      if (stopped)
        vTaskDelay(portMAX_DELAY);

      vTaskDelayUntil(&time, 5 / portTICK_RATE_MS);

      accelero_get_values(&accelero_values);

      gyroscope_y = (double)gyroscope_get_value(Y) * PI / 180 / GYROSCOPE_SENSITIVITY;
      accelero_x  = (double)accelero_values.x  / ACCELERO_SENSITIVITY_TYP_6G;
      accelero_z  = (double)accelero_values.z  / ACCELERO_SENSITIVITY_TYP_6G;

      DBGLOG(bDebugKalman, itoa(atan2(accelero_x, -accelero_z) * 1000, buf));
      DBGLOG(bDebugKalman, " ");
      DBGLOG(bDebugKalman, itoa(gyroscope_y * 1000, buf));
      DBGLOG(bDebugKalman, " ");

      xSemaphoreTake(xVMutex, portMAX_DELAY);
      kalman_state_update(gyroscope_y, 0.005);
      xSemaphoreGive(xVMutex);

      kalman_cov_update(accelero_x, accelero_z);

      DBGLOG(bDebugKalman, itoa(kalman_get_angle() * 1000, buf));
      DBGLOG(bDebugKalman, " ");
      DBGLOG(bDebugKalman, itoa(kalman_get_rate() * 1000, buf));
      DBGLOG(bDebugKalman, " ");
      d = dAlpha * d + (1 - dAlpha) * kalman_get_rate();
      DBGLOG(bDebugKalman, itoa(d * 1000, buf));
      DBGLOG(bDebugKalman, "\r\n");
      previous_angle = kalman_get_angle();

      msg.eID = MSG_ANGLE_ANGVEL;
      msg.xData.values.first = kalman_get_angle();
      msg.xData.values.second = d;

      vCANSendMsg(&msg);

      if (bBtInited && !(iBtMsgCountdown--)) {
        int xParameters[1] = {-RAD2DEG(kalman_get_angle())};

        prvBtSendMsg('a', xParameters, 1);

        iBtMsgCountdown = 10;
      }
    }
}
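The two raw inputs fused above are an accelerometer tilt estimate, atan2(ax, -az), and a gyroscope rate scaled to rad/s. A small sketch of those two conversions with placeholder sensitivities; the board's real constants are not shown in this listing:

#include <cmath>
#include <cstdio>

int main() {
    const double kPi = 3.14159265358979323846;
    double ax = 0.10, az = -0.99;                       // acceleration components in g (placeholder)
    double gyro_raw = 25.0;                             // raw gyro reading (placeholder)
    double gyro_sensitivity = 1.0;                      // raw units per deg/s (placeholder)

    double tilt = std::atan2(ax, -az);                  // rad, what the Kalman filter corrects toward
    double rate = gyro_raw * kPi / 180.0 / gyro_sensitivity;   // rad/s, what it integrates
    std::printf("tilt = %.2f deg, rate = %.3f rad/s\n", tilt * 180.0 / kPi, rate);
    return 0;
}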
Example #13
/* Add new points to the bundle adjustment */
int BundlerApp::BundleAdjustAddNewPoints(int camera_idx, 
                                         int num_points, int num_cameras,
                                         int *added_order,
                                         camera_params_t *cameras,
                                         v3_t *points, v3_t *colors,
                                         double reference_baseline,
                                         std::vector<ImageKeyVector> &pt_views)
{
    int pt_count = num_points;

    int image_idx = added_order[camera_idx];

    /* Recompute the locations of the new points given the initial
     * pose estimate */
    for (int i = 0; i < num_cameras; i++) {
	int other = added_order[i];

	if (other == image_idx)
	    continue;

	int first = MIN(image_idx, other);
	int second = MAX(image_idx, other);

	MatchIndex idx = GetMatchIndex(first, second);

        SetMatchesFromTracks(first, second);

	printf("  Matches[%d,%d] = %d\n", image_idx, other,
               (int) m_matches.GetNumMatches(idx));
	       // (int) m_match_lists[idx].size());

	double disti = GetCameraDistance(cameras + i, cameras + camera_idx);

	printf("  dist0, disti = %0.3e, %0.3e\n", reference_baseline, disti);

	if (disti < m_min_camera_distance_ratio * reference_baseline) {
	    printf("  Distance too low (possible panorama?)\n");
            // m_match_lists[idx].clear();
            m_matches.ClearMatch(idx);
	    continue;
	}

        std::vector<KeypointMatch> &list = m_matches.GetMatchList(idx);
	for (int j = 0; j < (int) list.size(); j++) {
	    int idx1 = list[j].m_idx1;
	    int idx2 = list[j].m_idx2;

	    int this_idx, other_idx;
	    if (image_idx == first) {
		this_idx = idx1;
		other_idx = idx2;
	    } else {
		other_idx = idx1;
		this_idx = idx2;
	    }
		
	    if (GetKey(other,other_idx).m_extra == -2) {
		/* The other key was already marked as an outlier */
		continue;
	    } else if (GetKey(image_idx,this_idx).m_extra == -2) {
		/* This key was already marked as an outlier */
		continue;
	    }

	    if (GetKey(other,other_idx).m_extra == -1 &&
		GetKey(image_idx,this_idx).m_extra >= 0) {  

		/**** Connecting an existing point *** */


		/* Connect up the other point to this one */
		int pt_idx = GetKey(image_idx,this_idx).m_extra;

		/* Check reprojection error */	    
		v2_t pr = sfm_project_final(cameras + i, points[pt_idx], 
					    true, m_estimate_distortion);

		double dx = GetKey(other,other_idx).m_x - Vx(pr);
		double dy = GetKey(other,other_idx).m_y - Vy(pr);
		    
		double proj_error = sqrt(dx * dx + dy * dy);

		if (proj_error >= 32.0) {
		    printf("  Would have connected existing match "
			   "%d ==> %d [%d] (cam: %d), \n"
			   "    but reprojection error (%0.3f) "
			   "is too high.\n", 
			   this_idx, other_idx, pt_idx, other, proj_error);
		} else {
		    printf("  Connecting existing match "
			   "%d ==> %d [%d] (cam: %d) [%0.3f]\n",
			   this_idx, other_idx, pt_idx, other, proj_error);
		    
		    GetKey(other,other_idx).m_extra = pt_idx;
		    pt_views[pt_idx].push_back(ImageKey(i, other_idx));
		}
	    } else if (GetKey(other,other_idx).m_extra == -1) {

		if (GetKey(image_idx,this_idx).m_extra != -1) {
		    printf("Error!  Key (%d,%d) shouldn't be seen yet!\n",
			   image_idx, this_idx);
		    printf("Point index is %d\n", 
			   GetKey(image_idx,this_idx).m_extra);
		}

		/* This is a new point */
		GetKey(other,other_idx).m_extra = pt_count;
		GetKey(image_idx,this_idx).m_extra = pt_count;

		/* Set up the 3D point */
		v2_t p = v2_new(GetKey(other,other_idx).m_x,
				GetKey(other,other_idx).m_y);
		    
		v2_t q = v2_new(GetKey(image_idx,this_idx).m_x,
				GetKey(image_idx,this_idx).m_y);

                if (m_optimize_for_fisheye) {
                    double p_x = Vx(p), p_y = Vy(p);
                    double q_x = Vx(q), q_y = Vy(q);
                    
                    m_image_data[other].
                        UndistortPoint(p_x, p_y, Vx(p), Vy(p));
                    m_image_data[image_idx].
                        UndistortPoint(q_x, q_y, Vx(q), Vy(q));
                }

		double proj_error = 0.0;
		bool in_front = false;
		double angle = 0.0;

		points[pt_count] = 
		    Triangulate(p, q, cameras[i], cameras[camera_idx], 
				proj_error, in_front, angle, true);


		/* Check that the angle between the rays is large
		 * enough */
		if (RAD2DEG(angle) < m_ray_angle_threshold) {
		    printf(" Ray angle %d => %d is too small (%0.3f)\n", 
			   this_idx, other_idx, RAD2DEG(angle));

		    /* Remove point */
		    GetKey(other,other_idx).m_extra = -1;
		    GetKey(image_idx,this_idx).m_extra = -1;

		    continue;
		}

		/* Check the reprojection error */
		if (proj_error >= ADD_REPROJECTION_ERROR) {
		    printf("  Projection error for %d => %d is %0.3e, "
			   "removing\n",
			   this_idx, other_idx, proj_error);

		    /* Remove point */
		    GetKey(other,other_idx).m_extra = -2;
		    GetKey(image_idx,this_idx).m_extra = -2;

		    continue;
		}

		/* Check cheirality */
		if (!in_front) {
		    printf("  Cheirality violated!\n");

		    /* Remove point */
		    GetKey(other,other_idx).m_extra = -2;
		    GetKey(image_idx,this_idx).m_extra = -2;

		    continue;
		}

		printf("  Adding match %d ==> %d [%d] (cam: %d ==> %d) "
		       "[%0.3f, %0.3f]\n", 
		       other_idx, this_idx, pt_count, image_idx, other, 
		       RAD2DEG(angle), proj_error);

		/* Finally, add the point */
		unsigned char r = GetKey(other,other_idx).m_r;
		unsigned char g = GetKey(other,other_idx).m_g;
		unsigned char b = GetKey(other,other_idx).m_b;

		colors[pt_count] = v3_new((double) r, 
					  (double) g,
					  (double) b);
    
		ImageKeyVector views;
		views.push_back(ImageKey(i, other_idx));
		views.push_back(ImageKey(camera_idx, this_idx));
		pt_views.push_back(views);

		pt_count++;

	    } else if (GetKey(other,other_idx).m_extra >= 0 && 
		       GetKey(image_idx,this_idx).m_extra == -1) {

		/* We didn't connect this point originally --
		 * check if it's now a good idea to add it in */

		/* Connect up the other point to this one */
		int pt_idx = GetKey(other,other_idx).m_extra;

		/* Check reprojection error */
		v2_t pr = sfm_project_final(cameras + camera_idx, 
					    points[pt_idx],
					    true, m_estimate_distortion);

		double dx = GetKey(image_idx,this_idx).m_x - Vx(pr);
		double dy = GetKey(image_idx,this_idx).m_y - Vy(pr);
		    
		double proj_error = sqrt(dx * dx + dy * dy);

		if (proj_error <= INIT_REPROJECTION_ERROR) {
		    printf("  Reconnecting point [%d] (%d) (error: %0.3f)\n", 
			   pt_idx, this_idx, proj_error);
		    GetKey(image_idx,this_idx).m_extra = pt_idx;
		    pt_views[pt_idx].push_back(ImageKey(camera_idx,this_idx));
		} else {
		    /* Throw out this point as an outlier */
		    GetKey(image_idx,this_idx).m_extra = -2;
		}
	    }
	}

        // m_match_lists[idx].clear();
        m_matches.ClearMatch(idx);
    }

    return pt_count;
}
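A note on the gating in the block above: a candidate 3D point survives only if the triangulation ray angle, the reprojection error, and the cheirality test all pass. The following is a minimal standalone sketch of that acceptance test (hypothetical struct and parameter names, not the project's actual API):

#include <cmath>

// Sketch only: the real code stores these per key/point rather than in a struct.
struct CandidatePoint {
    double ray_angle_rad;   // angle between the two viewing rays
    double proj_error_px;   // reprojection error in pixels
    bool   in_front;        // cheirality: point in front of both cameras
};

static bool AcceptCandidate(const CandidatePoint &c,
                            double ray_angle_threshold_deg,
                            double max_proj_error_px)
{
    const double angle_deg = c.ray_angle_rad * 180.0 / M_PI;   // RAD2DEG
    if (angle_deg < ray_angle_threshold_deg) return false;     // ill-conditioned
    if (c.proj_error_px >= max_proj_error_px) return false;    // bad reprojection
    return c.in_front;                                         // cheirality
}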
Example #14
/* Add new points to the bundle adjustment */
int 
BundlerApp::BundleAdjustAddAllNewPoints(int num_points, int num_cameras,
                                        int *added_order,
                                        camera_params_t *cameras,
                                        v3_t *points, v3_t *colors,
                                        double reference_baseline,
                                        std::vector<ImageKeyVector> &pt_views,
                                        double max_reprojection_error,
                                        int min_views)
{
    std::vector<int> track_idxs;
    std::vector<ImageKeyVector> new_tracks;

    int num_tracks_total = (int) m_track_data.size();
    int *tracks_seen = new int[num_tracks_total];
    for (int i = 0; i < num_tracks_total; i++) {
	tracks_seen[i] = -1;
    }

    /* Gather up the projections of all the new tracks */
    for (int i = 0; i < num_cameras; i++) {
	int image_idx1 = added_order[i];

	int num_keys = GetNumKeys(image_idx1);
	
	for (int j = 0; j < num_keys; j++) {
	    Keypoint &key = GetKey(image_idx1, j);

	    if (key.m_track == -1)
		continue;  /* Key belongs to no track */

	    if (key.m_extra != -1)
		continue;  /* Key is outlier or has already been added */

	    int track_idx = key.m_track;
	    
	    /* Check if this track is already associated with a point */
	    if (m_track_data[track_idx].m_extra != -1)
		continue;

	    /* Check if we've seen this track */
	    int seen = tracks_seen[track_idx];

	    if (seen == -1) {
		/* We haven't yet seen this track, create a new track */
		tracks_seen[track_idx] = (int) new_tracks.size();

		ImageKeyVector track;
		track.push_back(ImageKey(i, j));
		new_tracks.push_back(track);
		track_idxs.push_back(track_idx);
	    } else {
		new_tracks[seen].push_back(ImageKey(i, j));
	    }
	}
    }
    
    delete [] tracks_seen;

    /* Now for each (sub) track, triangulate to see if the track is
     * consistent */
    int pt_count = num_points;

    int num_ill_conditioned = 0;
    int num_high_reprojection = 0;
    int num_cheirality_failed = 0;
    int num_added = 0;

    int num_tracks = (int) new_tracks.size();
    for (int i = 0; i < num_tracks; i++) {
	int num_views = (int) new_tracks[i].size();
	
	if (num_views < min_views) continue;  /* Not enough views */

#if 0
	printf("Triangulating track ");
	PrintTrack(new_tracks[i]);
	printf("\n");
#endif

	/* Check if at least two cameras fix the position of the point */
	bool conditioned = false;
	bool good_distance = false;
	double max_angle = 0.0;
	for (int j = 0; j < num_views; j++) {
	    for (int k = j+1; k < num_views; k++) {
		int camera_idx1 = new_tracks[i][j].first;
		int image_idx1 = added_order[camera_idx1];
		int key_idx1 = new_tracks[i][j].second;

		int camera_idx2 = new_tracks[i][k].first;
		int image_idx2 = added_order[camera_idx2];
		int key_idx2 = new_tracks[i][k].second;

		Keypoint &key1 = GetKey(image_idx1, key_idx1);
		Keypoint &key2 = GetKey(image_idx2, key_idx2);

		v2_t p = v2_new(key1.m_x, key1.m_y);
		v2_t q = v2_new(key2.m_x, key2.m_y);

                if (m_optimize_for_fisheye) {
                    double p_x = Vx(p), p_y = Vy(p);
                    double q_x = Vx(q), q_y = Vy(q);
                    
                    m_image_data[image_idx1].
                        UndistortPoint(p_x, p_y, Vx(p), Vy(p));
                    m_image_data[image_idx2].
                        UndistortPoint(q_x, q_y, Vx(q), Vy(q));
                }

		double angle = ComputeRayAngle(p, q, 
					       cameras[camera_idx1], 
					       cameras[camera_idx2]);

		if (angle > max_angle)
		    max_angle = angle;

		/* Check that the angle between the rays is large
		 * enough */
		if (RAD2DEG(angle) >= m_ray_angle_threshold) {
		    conditioned = true;
		}

#if 0
		double dist_jk = 
		    GetCameraDistance(cameras + j, cameras + k, 
				      m_explicit_camera_centers);

		if (dist_jk > m_min_camera_distance_ratio * reference_baseline)
		    good_distance = true;
#else
                good_distance = true;
#endif
	    }
	}
	
	if (!conditioned || !good_distance) {
	    num_ill_conditioned++;

#if 0
	    printf(">> Track is ill-conditioned [max_angle = %0.3f]\n", 
		   RAD2DEG(max_angle));
	    fflush(stdout);
#endif
	    continue;
	}
	
	double error;
	v3_t pt;

        if (!m_panorama_mode) {
            pt = TriangulateNViews(new_tracks[i], added_order, cameras, 
                                   error, true);
        } else {
            pt = GeneratePointAtInfinity(new_tracks[i], added_order, cameras, 
                                         error, true);
        }
       
		// Changed by Wan, Yi
	if (::isnan(error) || error > max_reprojection_error) {
	    num_high_reprojection++;
#if 0
	    printf(">> Reprojection error [%0.3f] is too large\n", error);
	    fflush(stdout);
#endif
	    continue;	    
	}

	bool all_in_front = true;
	for (int j = 0; j < num_views; j++) {
	    int camera_idx = new_tracks[i][j].first;
	    bool in_front = CheckCheirality(pt, cameras[camera_idx]);
	 
	    if (!in_front) {
		all_in_front = false;
		break;
	    }
	}

	if (!all_in_front) {
	    num_cheirality_failed++;

#if 0
	    printf(">> Cheirality check failed\n");
	    fflush(stdout);
#endif
	    continue;
	}
	
	/* All tests succeeded, so let's add the point */
#if 0
	printf("Triangulating track ");
	PrintTrack(new_tracks[i]);
	printf("\n");
	printf(">> All tests succeeded [%0.3f, %0.3f] for point [%d]\n", 
	       RAD2DEG(max_angle), error, pt_count);
#endif

	fflush(stdout);

	points[pt_count] = pt;

	int camera_idx = new_tracks[i][0].first;
	int image_idx = added_order[camera_idx];
	int key_idx = new_tracks[i][0].second;

	unsigned char r = GetKey(image_idx, key_idx).m_r;
	unsigned char g = GetKey(image_idx, key_idx).m_g;
	unsigned char b = GetKey(image_idx, key_idx).m_b;
	colors[pt_count] = v3_new((double) r, (double) g, (double) b);
    
	pt_views.push_back(new_tracks[i]);

	/* Set the point index on the keys */
	for (int j = 0; j < num_views; j++) {
	    int camera_idx = new_tracks[i][j].first;
	    int image_idx = added_order[camera_idx];
	    int key_idx = new_tracks[i][j].second;
	    GetKey(image_idx, key_idx).m_extra = pt_count;
	}

	int track_idx = track_idxs[i];
	m_track_data[track_idx].m_extra = pt_count;
	
	pt_count++;
        num_added++;
    }

    printf("[AddAllNewPoints] Added %d new points\n", num_added);
    printf("[AddAllNewPoints] Ill-conditioned tracks: %d\n", 
           num_ill_conditioned);
    printf("[AddAllNewPoints] Bad reprojections: %d\n", num_high_reprojection);
    printf("[AddAllNewPoints] Failed cheirality checks: %d\n", 
           num_cheirality_failed);

    return pt_count;
}
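A track in the loop above counts as well-conditioned as soon as one pair of its views sees the point under a large enough angle. Assuming ComputeRayAngle returns the angle (in radians) between the two unit viewing rays, the test is

\[
\theta = \arccos(\hat{r}_1 \cdot \hat{r}_2), \qquad
\text{conditioned} \iff \max_{j<k}\,\theta_{jk} \ge \theta_{\min},
\]

with the threshold m_ray_angle_threshold given in degrees, hence the RAD2DEG conversion before the comparison.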
Example #15
void CPlayerRotation::ClampAngles()
{
	{
		//cap up/down looking
		float minAngle,maxAngle;
		GetStanceAngleLimits(minAngle,maxAngle);

		float currentViewPitch=GetLocalPitch();
		float newPitch = currentViewPitch + m_deltaAngles.x;

		if(newPitch < minAngle)
			newPitch = minAngle;
		else if(newPitch > maxAngle)
			newPitch = maxAngle;

		m_deltaAngles.x = newPitch - currentViewPitch;

	}

	{
		//further limit the view if necessary
		float limitV = m_params.vLimitRangeV;
		float limitH = m_params.vLimitRangeH;
		Vec3  limitDir = m_params.vLimitDir;
		float limitVUp = m_params.vLimitRangeVUp;
		float limitVDown = m_params.vLimitRangeVDown;

		if(m_player.m_stats.isFrozen.Value())
		{
			float clampMin = g_pGameCVars->cl_frozenAngleMin;
			float clampMax = g_pGameCVars->cl_frozenAngleMax;
			float frozenLimit = DEG2RAD(clampMin + (clampMax-clampMin)*(1.f-m_player.GetFrozenAmount(true)));

			if(limitV == 0 || limitV>frozenLimit)
				limitV = frozenLimit;

			if(limitH == 0 || limitH>frozenLimit)
				limitH = frozenLimit;

			if(g_pGameCVars->cl_debugFreezeShake)
			{
				static float color[] = {1,1,1,1};
				gEnv->pRenderer->Draw2dLabel(100,200,1.5,color,false,"limit: %f", RAD2DEG(frozenLimit));
			}
		}

		if(m_player.m_stats.isOnLadder)
		{
			limitDir = -m_player.m_stats.ladderOrientation;
			limitH = DEG2RAD(40.0f);
		}

		if((limitH+limitV+limitVUp+limitVDown) && limitDir.len2()>0.1f)
		{
			//A matrix is built around the view limit, and then the player view angles are checked with it.
			//Later, if necessary the upVector could be made customizable.
			Vec3 forward(limitDir);
			Vec3 up(m_baseQuat.GetColumn2());
			Vec3 right(-(up % forward));
			right.Normalize();

			Matrix33 limitMtx;
			limitMtx.SetFromVectors(right,forward,right%forward);
			//gEnv->pRenderer->GetIRenderAuxGeom()->DrawLine(m_player.GetEntity()->GetWorldPos(), ColorB(0,0,255,255), m_player.GetEntity()->GetWorldPos() + limitMtx.GetColumn(0), ColorB(0,0,255,255));
			//gEnv->pRenderer->GetIRenderAuxGeom()->DrawLine(m_player.GetEntity()->GetWorldPos(), ColorB(0,255,0,255), m_player.GetEntity()->GetWorldPos() + limitMtx.GetColumn(1), ColorB(0,255,0,255));
			//gEnv->pRenderer->GetIRenderAuxGeom()->DrawLine(m_player.GetEntity()->GetWorldPos(), ColorB(255,0,0,255), m_player.GetEntity()->GetWorldPos() + limitMtx.GetColumn(2), ColorB(255,0,0,255));
			limitMtx.Invert();

			Vec3 localDir(limitMtx * m_viewQuat.GetColumn1());
//			Vec3 localDir(limitMtx * m_player.GetEntity()->GetWorldRotation().GetColumn1());

			Ang3 limit;

			if(limitV)
			{
				limit.x = asinf(localDir.z) + m_deltaAngles.x;

				float deltaX(limitV - fabs(limit.x));

				if(deltaX < 0.0f)
					m_deltaAngles.x += deltaX*(limit.x>0.0f?1.0f:-1.0f);
			}

			if(limitVUp || limitVDown)
			{
				limit.x = asinf(localDir.z) + m_deltaAngles.x;

				if(limit.x>=limitVUp && limitVUp!=0)
				{
					float deltaXUp(limitVUp - limit.x);
					m_deltaAngles.x += deltaXUp;
				}

				if(limit.x<=limitVDown && limitVDown!=0)
				{
					float deltaXDown(limitVDown - limit.x);
					m_deltaAngles.x += deltaXDown;
				}
			}

			if(limitH)
			{
				limit.z = cry_atan2f(-localDir.x,localDir.y) + m_deltaAngles.z;

				float deltaZ(limitH - fabs(limit.z));

				if(deltaZ < 0.0f)
					m_deltaAngles.z += deltaZ*(limit.z>0.0f?1.0f:-1.0f);
			}
		}
	}
}
/*
 * Update the status bar information.
 */
void GERBVIEW_FRAME::UpdateStatusBar()
{
    EDA_DRAW_FRAME::UpdateStatusBar();

    GBR_SCREEN* screen = (GBR_SCREEN*) GetScreen();

    if( !screen )
        return;

    int dx;
    int dy;
    double dXpos;
    double dYpos;
    wxString line;
    wxString locformatter;

    if( m_DisplayOptions.m_DisplayPolarCood )  // display relative polar coordinates
    {
        double       theta, ro;

        dx = GetCrossHairPosition().x - screen->m_O_Curseur.x;
        dy = GetCrossHairPosition().y - screen->m_O_Curseur.y;

        // atan2 in the 0,0 case returns 0
        theta = RAD2DEG( atan2( -dy, dx ) );

        ro = hypot( dx, dy );
        wxString formatter;
        switch( g_UserUnit )
        {
        case INCHES:
            formatter = wxT( "Ro %.6f Th %.1f" );
            break;

        case MILLIMETRES:
            formatter = wxT( "Ro %.5f Th %.1f" );
            break;

        case UNSCALED_UNITS:
            formatter = wxT( "Ro %f Th %f" );
            break;

        case DEGREES:
            wxASSERT( false );
            break;
        }

        line.Printf( formatter, To_User_Unit( g_UserUnit, ro ), theta );

        SetStatusText( line, 3 );
    }

    // Display absolute coordinates:
    dXpos = To_User_Unit( g_UserUnit, GetCrossHairPosition().x );
    dYpos = To_User_Unit( g_UserUnit, GetCrossHairPosition().y );

    wxString absformatter;

    switch( g_UserUnit )
    {
    case INCHES:
        absformatter = wxT( "X %.6f  Y %.6f" );
        locformatter = wxT( "dx %.6f  dy %.6f  dist %.4f" );
        break;

    case MILLIMETRES:
        absformatter = wxT( "X %.5f  Y %.5f" );
        locformatter = wxT( "dx %.5f  dy %.5f  dist %.3f" );
        break;

    case UNSCALED_UNITS:
        absformatter = wxT( "X %f  Y %f" );
        locformatter = wxT( "dx %f  dy %f  dist %f" );
        break;

    case DEGREES:
        wxASSERT( false );
        break;
    }

    line.Printf( absformatter, dXpos, dYpos );
    SetStatusText( line, 2 );

    if( !m_DisplayOptions.m_DisplayPolarCood )  // display relative cartesian coordinates
    {
        // Display relative coordinates:
        dx = GetCrossHairPosition().x - screen->m_O_Curseur.x;
        dy = GetCrossHairPosition().y - screen->m_O_Curseur.y;
        dXpos = To_User_Unit( g_UserUnit, dx );
        dYpos = To_User_Unit( g_UserUnit, dy );

        // We already decided the formatter above
        line.Printf( locformatter, dXpos, dYpos, hypot( dXpos, dYpos ) );
        SetStatusText( line, 3 );
    }
}
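The relative polar readout above is just hypot/atan2 on the cursor offset, converted with RAD2DEG. A small self-contained check, assuming an offset of dx = 3, dy = -4 in internal units (dy is negated, presumably because the screen y axis points down):

#include <cmath>
#include <cstdio>

int main()
{
    const double dx = 3.0, dy = -4.0;                          // example cursor offset
    const double ro = std::hypot(dx, dy);                      // 5.0
    const double theta = std::atan2(-dy, dx) * 180.0 / M_PI;   // ~53.1 degrees
    std::printf("Ro %.6f Th %.1f\n", ro, theta);
    return 0;
}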
vector<double> CNR_7DOFAnalyticInverseKinematicsComp::GetJointPosition()
{
	vector<double> jointPosition(7);


		// user code here
		MSLVector EE(m_Position.x, m_Position.y, m_Position.z); // end-effector position

		double cr, cp , cy;
		double sr, sp , sy;

		cr = cos(DEG2RAD(m_Position.roll));		sr = sin(DEG2RAD(m_Position.roll));	// apply roll
		cp = cos(DEG2RAD(m_Position.pitch));		sp = sin(DEG2RAD(m_Position.pitch));	// apply pitch
		if(m_Position.yaw == 0){// if yaw is exactly 0 no solution is found, so nudge it
			cy = cos(1e-011);		sy = sin(1e-011);	// apply yaw
		}	else	{
			cy = cos(DEG2RAD(m_Position.yaw));		sy = sin(DEG2RAD(m_Position.yaw));	// apply yaw
		}


		double uplen = atof(parameter.GetValue("UpperArmLength").c_str());		// upper-arm length, taken from the parameters
		double lowlen = atof(parameter.GetValue("LowerArmLength").c_str());		// lower-arm length, taken from the parameters
		double handlen = atof(parameter.GetValue("ToolLength").c_str());		// wrist/tool length, taken from the parameters

		double ax_o,ay_o,az_o,ox_o,oy_o,oz_o,nx_o,ny_o,nz_o;	// orientation components of the original frame

		nx_o = cy*cp;		ox_o = cy*sp*sr-sy*cr;	ax_o = cy*sp*cr+sy*sr;	
		ny_o = sy*cp;		oy_o = sy*sp*sr+cy*cr;	ay_o = sy*sp*cr-cy*sr;	
		nz_o = -sp;			oz_o = cp*sr;			az_o = cp*cr;	

		double px_o, py_o,pz_o;	// original wrist position

		px_o = EE[0] - handlen * ax_o; // subtract the hand length from the target position to get the wrist position
		py_o = EE[1] - handlen * ay_o;
		pz_o = EE[2] - handlen * az_o;


		MSLVector W(3);

		W[0] =  py_o;	// convert the wrist position into the algorithm's coordinate convention
		W[1] = -pz_o;
		W[2] = -px_o;

		if(W[0] == 0 && W[1] == 0)
			W[0] += 1e-011;// if x and y are both 0 no solution is found, so nudge it



		double  nx,ny,nz,ox,oy,oz,ax,ay,az;

		// convert the rotation axes n, o, a to the algorithm's orientation convention
		nx = ny_o;
		ox = oy_o;
		ax = ay_o;

		ny = -nz_o;
		oy = -oz_o;
		ay = -az_o;

		nz = -nx_o; 
		oz = -ox_o; 
		az = -ax_o;

		double wlen = W.length();	// distance from the shoulder to the wrist

		MSLVector P(0.0, 0.0, 100.0);  // first joint vector
		double len1 = uplen;				// upper-arm length
		double len2 = lowlen;				// lower-arm length
		MSLVector SCP, C;
		SCP = (P*W)/(W*W)*W;		// SCP is the projection of P onto W: the vector from the shoulder to the foot of the perpendicular dropped from P onto W (SCP + that perpendicular = P)
		C = P - SCP;

		double v,omega,dtemp;
		double rx, ry, rz, so,co;
		omega = DEG2RAD(atof(parameter.GetValue("RedundantValue").c_str()));		// apply the redundancy (extra degree of freedom) parameter
		v = 1-cos(omega);
		dtemp = sqrt(W[0]*W[0] + W[1]*W[1] +W[2]*W[2]);
		rx = W[0]/dtemp; 
		ry = W[1]/dtemp; 
		rz = W[2]/dtemp;

		so = sin(omega);
		co = cos(omega);

		MSLMatrix R(3,3);
		R[0][0] = rx * rx * v + co;
		R[0][1] = rx * ry * v - rz * so; 
		R[0][2] = rx *rz*v + ry * so;

		R[1][0] = rx * ry * v + rz * so;   
		R[1][1] = ry * ry * v + co;
		R[1][2] = ry * rz * v - rx * so;

		R[2][0] = rx * rz * v - ry * so;
		R[2][1] = ry * rz * v + rx * so;
		R[2][2] = rz * rz * v + co;

		C = R * C;		// rotate C about the shoulder-to-wrist axis by the angle omega
		C = C.norm() ;	// normalize C

		double he;
		he = 0.5 * (wlen + uplen + lowlen);
		he = sqrt(he*(he - wlen)*(he - lowlen)*(he - uplen));
		double clen = 2.0 * he / wlen;
		C = clen * C;		// completes the vector from the point along W up to the elbow
		MSLVector SE;	// vector from shoulder to elbow
		SE = (W/wlen)*sqrt(uplen*uplen - clen*clen) + C;
		MSLVector EW; // vector from elbow to wrist
		EW = W - SE;

		double th1, th2, th3, th4, th5, th6, th7;
		double c1,s1,c2,s2,c3,s3,c4,s4,c5,s5,c6,s6;
		th2 = acos(SE[1]/uplen);
		c2 = cos(th2);
		s2 = sin(th2);

		double ss,cc;
		ss = -SE[2]/(uplen*sin(th2));
		cc = -SE[0]/(uplen*sin(th2));

		th1 = atan2(ss,cc);
		c1 = cos(th1);
		s1 = sin(th1);

		th4 = acos( (SE*EW)/((SE.length())*(EW.length())) );

		c4 = cos(th4);
		s4 = sin(th4);

		double AA,BB,CC;
		AA = -EW[0]/lowlen - c1*s2*c4;
		BB = -EW[2]/lowlen - s1*s2*c4;
		ss = (c1*BB - s1*AA)/s4;
		cc = (-EW[1]/lowlen + c2*c4)/(-s2*s4);
		th3 = atan2(ss,cc);
		c3 = cos(th3);
		s3 = sin(th3);

		MSLVector RHZ(ax, ay, az);
		th6 = acos( (EW*RHZ)/lowlen);
		c6 = cos(th6);
		s6 = sin(th6);

		AA = -ax -(-(c1*c2*c3+s1*s3)*s4+c1*s2*c4)*c6;
		BB = -ay -(-s2*c3*s4-c2*c4)*c6;
		CC =  (-c1*c2*s3+s1*c3)*(s2*c3*c4-c2*s4) - (-s2*s3)*((c1*c2*c3+s1*s3)*c4+c1*s2*s4);

		cc = ( (-s2*s3) *(AA) - (-c1*c2*s3+s1*c3) *(BB) )/((CC)*s6 );
		ss = -( (s2*c3*c4-c2*s4) *(AA)-  ((c1*c2*c3+s1*s3)*c4+c1*s2*s4) *(BB)) /((CC)*s6);


		th5 = atan2(ss,cc);
		c5 = cos(th5);
		s5 = sin(th5);

		AA = -((c1*c2*c3+s1*s3)*c4+c1*s2*s4)*s5+(-c1*c2*s3+s1*c3)*c5;
		BB = -((s1*c2*c3-c1*s3)*c4+s1*s2*s4)*s5+(-s1*c2*s3-c1*c3)*c5;

		ss = (oz*AA - ox*BB)/(oz*nx - ox*nz );
		cc = (nx*BB - nz*AA)/(oz*nx - ox*nz );

		th7 = atan2(ss,cc);

		jointPosition[0] = ASV_DEG(RAD2DEG(fmod(th1+M_PI_2, 2.*M_PI)));
 		jointPosition[1] = ASV_DEG(RAD2DEG(fmod(th2, 2.*M_PI)));
		jointPosition[2] = ASV_DEG(RAD2DEG(fmod(th3, 2.*M_PI)));
		jointPosition[3] = ASV_DEG(RAD2DEG(fmod(th4, 2.*M_PI)));
		jointPosition[4] = ASV_DEG(RAD2DEG(fmod(th5, 2.*M_PI)));
		jointPosition[5] = ASV_DEG(RAD2DEG(fmod(th6, 2.*M_PI)));
		jointPosition[6] = ASV_DEG(RAD2DEG(fmod(th7, 2.*M_PI)));
	
	return jointPosition;
}
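The matrix R assembled from (rx, ry, rz) and omega above is the Rodrigues axis-angle rotation used to swing the elbow circle by the redundancy angle. With r the unit shoulder-to-wrist axis and v = 1 - cos(omega), the entries match

\[
R = \cos\omega\, I + (1-\cos\omega)\,\mathbf{r}\mathbf{r}^{\mathsf T} + \sin\omega\,[\mathbf{r}]_\times,
\qquad
[\mathbf{r}]_\times = \begin{pmatrix} 0 & -r_z & r_y \\ r_z & 0 & -r_x \\ -r_y & r_x & 0 \end{pmatrix}.
\]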
Example #18
static int CG_CalcFov( void )
{
  float     y;
  float     phase;
  float     v;
  int       contents;
  float     fov_x, fov_y;
  float     zoomFov;
  float     f;
  int       inwater;
  int       attribFov;
  usercmd_t cmd;
  usercmd_t oldcmd;
  int       cmdNum;

  cmdNum = trap_GetCurrentCmdNumber( );
  trap_GetUserCmd( cmdNum, &cmd );
  trap_GetUserCmd( cmdNum - 1, &oldcmd );

  // switch follow modes if necessary: cycle between free -> follow -> third-person follow
  if( cmd.buttons & BUTTON_USE_HOLDABLE && !( oldcmd.buttons & BUTTON_USE_HOLDABLE ) )
  {
    if ( cg.snap->ps.pm_flags & PMF_FOLLOW ) 
    {
      if( !cg.chaseFollow )
        cg.chaseFollow = qtrue;
      else
      {
        cg.chaseFollow = qfalse;
        trap_SendClientCommand( "follow\n" );
      }
    }
    else if ( cg.snap->ps.persistant[ PERS_SPECSTATE ] != SPECTATOR_NOT )
      trap_SendClientCommand( "follow\n" );
  }

  if( cg.predictedPlayerState.pm_type == PM_INTERMISSION ||
      ( cg.snap->ps.persistant[ PERS_SPECSTATE ] != SPECTATOR_NOT ) || 
      ( cg.renderingThirdPerson ) )
  {
    // if in intermission or third person, use a fixed value
    fov_y = BASE_FOV_Y;
  }
  else
  {
    // don't lock the fov globally - we need to be able to change it
    attribFov = BG_Class( cg.predictedPlayerState.stats[ STAT_CLASS ] )->fov * 0.75f;
    fov_y = attribFov;

    if ( fov_y < 1.0f )
      fov_y = 1.0f;
    else if ( fov_y > MAX_FOV_Y )
      fov_y = MAX_FOV_Y;

    if( cg.spawnTime > ( cg.time - FOVWARPTIME ) &&
        BG_ClassHasAbility( cg.predictedPlayerState.stats[ STAT_CLASS ], SCA_FOVWARPS ) )
    {
      float fraction = (float)( cg.time - cg.spawnTime ) / FOVWARPTIME;

      fov_y = MAX_FOV_WARP_Y - ( ( MAX_FOV_WARP_Y - fov_y ) * fraction );
    }

    // account for zooms
    zoomFov = BG_Weapon( cg.predictedPlayerState.weapon )->zoomFov * 0.75f;
    if ( zoomFov < 1.0f )
      zoomFov = 1.0f;
    else if ( zoomFov > attribFov )
      zoomFov = attribFov;

    // only do all the zoom stuff if the client CAN zoom
    // FIXME: zoom control is currently hard coded to BUTTON_ATTACK2
    if( BG_Weapon( cg.predictedPlayerState.weapon )->canZoom )
    {
      if ( cg.zoomed )
      {
        f = ( cg.time - cg.zoomTime ) / (float)ZOOM_TIME;

        if ( f > 1.0f )
          fov_y = zoomFov;
        else
          fov_y = fov_y + f * ( zoomFov - fov_y );

        // BUTTON_ATTACK2 isn't held so unzoom next time
        if( !( cmd.buttons & BUTTON_ATTACK2 ) )
        {
          cg.zoomed   = qfalse;
          cg.zoomTime = MIN( cg.time, 
              cg.time + cg.time - cg.zoomTime - ZOOM_TIME );
        }
      }
      else
      {
        f = ( cg.time - cg.zoomTime ) / (float)ZOOM_TIME;

        if ( f > 1.0f )
          fov_y = fov_y;
        else
          fov_y = zoomFov + f * ( fov_y - zoomFov );

        // BUTTON_ATTACK2 is held so zoom next time
        if( cmd.buttons & BUTTON_ATTACK2 )
        {
          cg.zoomed   = qtrue;
          cg.zoomTime = MIN( cg.time, 
              cg.time + cg.time - cg.zoomTime - ZOOM_TIME );
        }
      }
    }
  }

  y = cg.refdef.height / tan( 0.5f * DEG2RAD( fov_y ) );
  fov_x = atan2( cg.refdef.width, y );
  fov_x = 2.0f * RAD2DEG( fov_x );

  // warp if underwater
  contents = CG_PointContents( cg.refdef.vieworg, -1 );

  if( contents & ( CONTENTS_WATER | CONTENTS_SLIME | CONTENTS_LAVA ) )
  {
    phase = cg.time / 1000.0f * WAVE_FREQUENCY * M_PI * 2.0f;
    v = WAVE_AMPLITUDE * sin( phase );
    fov_x += v;
    fov_y -= v;
    inwater = qtrue;
  }
  else
    inwater = qfalse;

  if( ( cg.predictedPlayerEntity.currentState.eFlags & EF_POISONCLOUDED ) &&
      ( cg.time - cg.poisonedTime < PCLOUD_DISORIENT_DURATION) &&
      cg.predictedPlayerState.stats[ STAT_HEALTH ] > 0 &&
      !( cg.snap->ps.pm_flags & PMF_FOLLOW ) )
  {
    float scale = 1.0f - (float)( cg.time - cg.poisonedTime ) /
                  BG_PlayerPoisonCloudTime( &cg.predictedPlayerState );
      
    phase = ( cg.time - cg.poisonedTime ) / 1000.0f * PCLOUD_ZOOM_FREQUENCY * M_PI * 2.0f;
    v = PCLOUD_ZOOM_AMPLITUDE * sin( phase ) * scale;
    fov_x += v;
    fov_y += v;
  }


  // set it
  cg.refdef.fov_x = fov_x;
  cg.refdef.fov_y = fov_y;

  if( !cg.zoomed )
    cg.zoomSensitivity = 1.0f;
  else
    cg.zoomSensitivity = cg.refdef.fov_y / 75.0f;

  return inwater;
}
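The two lines with tan/atan2 above derive the horizontal FOV from the vertical FOV and the viewport aspect ratio:

\[
\mathrm{fov}_x = 2\arctan\!\Big(\frac{w}{h}\tan\frac{\mathrm{fov}_y}{2}\Big),
\]

converted back to degrees with RAD2DEG. For example, with a 4:3 viewport and fov_y of about 73.74 degrees, this gives fov_x of about 90 degrees.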
Example #19
static void SimCarCollideResponse(void * /*dummy*/, DtObjectRef obj1, DtObjectRef obj2, const DtCollData *collData)
{
	sgVec2 n;		// Collision normal delivered by solid: Global(point1) - Global(point2)
	tCar *car[2];	// The cars.
	sgVec2 p[2];	// Collision points delivered by solid, in body local coordinates.
	sgVec2 r[2];	// Collision point relative to center of gravity.
	sgVec2 vp[2];	// Speed of collision point in world coordinate system.
	sgVec3 pt[2];	// Collision points in global coordinates.

	int i;

	car[0] = (tCar*)obj1;
	car[1] = (tCar*)obj2;

	// Handle cars collisions during pit stops as well.
	static const int NO_SIMU_WITHOUT_PIT = RM_CAR_STATE_NO_SIMU & ~RM_CAR_STATE_PIT;

	if ((car[0]->carElt->_state & NO_SIMU_WITHOUT_PIT) ||
		(car[1]->carElt->_state & NO_SIMU_WITHOUT_PIT))
	{
		return;
	}

    if (car[0]->carElt->index < car[1]->carElt->index) {
		// vector conversion from double to float.
		p[0][0] = (float)collData->point1[0];
		p[0][1] = (float)collData->point1[1];
		p[1][0] = (float)collData->point2[0];
		p[1][1] = (float)collData->point2[1];
		n[0]  = (float)collData->normal[0];
		n[1]  = (float)collData->normal[1];
    } else {
		// swap the cars (not the same for the simu).
		car[0] = (tCar*)obj2;
		car[1] = (tCar*)obj1;
		p[0][0] = (float)collData->point2[0];
		p[0][1] = (float)collData->point2[1];
		p[1][0] = (float)collData->point1[0];
		p[1][1] = (float)collData->point1[1];
		n[0]  = -(float)collData->normal[0];
		n[1]  = -(float)collData->normal[1];
	}

	sgNormaliseVec2(n);

	sgVec2 rg[2];	// radius oriented in global coordinates, still relative to CG (rotated around CG).
	tCarElt *carElt;

	for (i = 0; i < 2; i++) {
		// vector GP (Center of gravity to collision point). p1 and p2 are delivered from solid as
		// points in the car coordinate system.
		sgSubVec2(r[i], p[i], (const float*)&(car[i]->statGC));

		// Speed of collision points, linear motion of center of gravity (CG) plus rotational
		// motion around the CG.
		carElt = car[i]->carElt;
		float sina = sin(carElt->_yaw);
		float cosa = cos(carElt->_yaw);
		rg[i][0] = r[i][0]*cosa - r[i][1]*sina;
		rg[i][1] = r[i][0]*sina + r[i][1]*cosa;

		vp[i][0] = car[i]->DynGCg.vel.x - car[i]->DynGCg.vel.az * rg[i][1];
		vp[i][1] = car[i]->DynGCg.vel.y + car[i]->DynGCg.vel.az * rg[i][0];
	}

	// Relative speed of collision points.
	sgVec2 v1ab;
	sgSubVec2(v1ab, vp[0], vp[1]);

	// try to separate the cars. The computation is necessary because dtProceed is not called till
	// the collision is resolved. 
	for (i = 0; i < 2; i++) {
		sgCopyVec2(pt[i], r[i]);
		pt[i][2] = 0.0f;
		// Transform points relative to cars local coordinate system into global coordinates.
		sgFullXformPnt3(pt[i], car[i]->carElt->_posMat);
	}

	// Compute distance of collision points.
	sgVec3 pab;
	sgSubVec2(pab, pt[1], pt[0]);
	float distpab = sgLengthVec2(pab);

	sgVec2 tmpv;
	
	sgScaleVec2(tmpv, n, MIN(distpab, 0.05));
	// No "for" loop here because of the subtle AddVec/SubVec sign difference.
	if (car[0]->blocked == 0 && !(car[0]->carElt->_state & RM_CAR_STATE_NO_SIMU)) {
		sgAddVec2((float*)&(car[0]->DynGCg.pos), tmpv);
		car[0]->blocked = 1;
    }
	if (car[1]->blocked == 0 && !(car[1]->carElt->_state & RM_CAR_STATE_NO_SIMU)) {
		sgSubVec2((float*)&(car[1]->DynGCg.pos), tmpv);
		car[1]->blocked = 1;
    }

	// Apply no damage or correction if the cars are already moving apart.
	if (sgScalarProductVec2(v1ab, n) > 0) {
		return;
	}

	// impulse.
	float rpn[2];
	rpn[0] = sgScalarProductVec2(rg[0], n);
	rpn[1] = sgScalarProductVec2(rg[1], n);

	// Pseudo cross product to find out if we are left or right.
	// TODO: SIGN, scrap value?
	float rpsign[2];
	rpsign[0] =  n[0]*rg[0][1] - n[1]*rg[0][0];
	rpsign[1] = -n[0]*rg[1][1] + n[1]*rg[1][0];

	const float e = 1.0f;	// energy restitution

	float j = -(1.0f + e) * sgScalarProductVec2(v1ab, n) /
		((car[0]->Minv + car[1]->Minv) +
		rpn[0] * rpn[0] * car[0]->Iinv.z + rpn[1] * rpn[1] * car[1]->Iinv.z);

	for (i = 0; i < 2; i++) {
		if (car[i]->carElt->_state & RM_CAR_STATE_NO_SIMU) {
			continue;
		}

		// Damage.
		tdble damFactor, atmp;
		atmp = atan2(r[i][1], r[i][0]);
		if (fabs(atmp) < (PI / 3.0)) {
			// Front collision gives more damage.
			damFactor = 1.5f;
		} else {
			// Rear collision gives less damage.
			damFactor = 1.0f;
		}

		if ((car[i]->carElt->_state & RM_CAR_STATE_FINISH) == 0) {
			car[i]->dammage += (int)(CAR_DAMMAGE * fabs(j) * damFactor * simDammageFactor[car[i]->carElt->_skillLevel]);
		}

		// Compute collision velocity.
		const float ROT_K = 1.0f;

		float js = (i == 0) ? j : -j;
		sgScaleVec2(tmpv, n, js * car[i]->Minv);
		sgVec2 v2a;

		if (car[i]->collision & SEM_COLLISION_CAR) {
			sgAddVec2(v2a, (const float*)&(car[i]->VelColl.x), tmpv);
			car[i]->VelColl.az = car[i]->VelColl.az + js * rpsign[i] * rpn[i] * car[i]->Iinv.z * ROT_K;
		} else {
			sgAddVec2(v2a, (const float*)&(car[i]->DynGCg.vel), tmpv);
			car[i]->VelColl.az = car[i]->DynGCg.vel.az + js * rpsign[i] * rpn[i] * car[i]->Iinv.z * ROT_K;
		}

		static float VELMAX = 3.0f;
		if (fabs(car[i]->VelColl.az) > VELMAX) {
			car[i]->VelColl.az = SIGN(car[i]->VelColl.az) * VELMAX;
		}

		sgCopyVec2((float*)&(car[i]->VelColl.x), v2a);

		// Move the car for the collision lib.
		tCarElt *carElt = car[i]->carElt;
		sgMakeCoordMat4(carElt->pub.posMat, car[i]->DynGCg.pos.x, car[i]->DynGCg.pos.y,
						car[i]->DynGCg.pos.z - carElt->_statGC_z, RAD2DEG(carElt->_yaw),
						RAD2DEG(carElt->_roll), RAD2DEG(carElt->_pitch));
		dtSelectObject(car[i]);
		dtLoadIdentity();
		dtTranslate(-carElt->_statGC_x, -carElt->_statGC_y, 0.0f);
		dtMultMatrixf((const float *)(carElt->_posMat));

		car[i]->collision |= SEM_COLLISION_CAR;
	}
}
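The scalar j above is the frictionless collision impulse along the contact normal, written here exactly as the code computes it (rpn[i] is the dot product of the rotated contact offset with the normal; the textbook derivation usually uses the perpendicular lever arm r x n instead):

\[
j = \frac{-(1+e)\,\mathbf{v}_{ab}\cdot\mathbf{n}}
         {m_0^{-1} + m_1^{-1} + (\mathbf{r}_0\cdot\mathbf{n})^2 I_{0,z}^{-1} + (\mathbf{r}_1\cdot\mathbf{n})^2 I_{1,z}^{-1}},
\]

where e = 1 (fully elastic), the inverse masses are Minv and the inverse yaw inertias are Iinv.z.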
Example #20
/** Dump the track segments on screen
    @param  track track to dump
    @param  verbose if set to 1 all the segments are described (long)
    @ingroup  racemantools
 */
static void
reTrackDump(const tTrack *track, int verbose)
{
	char buf[128];
	
	snprintf(buf, sizeof(buf), "  by %s (%.0f m long, %.0f m wide) ...", 
			 track->authors, track->length, track->width);
	ReUI().addLoadingMessage(buf);

	GfLogInfo("++++++++++++ Track ++++++++++++\n");
	GfLogInfo("Name     = %s\n", track->name);
	GfLogInfo("Authors  = %s\n", track->authors);
	GfLogInfo("Filename = %s\n", track->filename);
	GfLogInfo("NSeg     = %d\n", track->nseg);
	GfLogInfo("Version  = %d\n", track->version);
	GfLogInfo("Length   = %f m\n", track->length);
	GfLogInfo("Width    = %f m\n", track->width);
	GfLogInfo("XSize    = %f m\n", track->max.x);
	GfLogInfo("YSize    = %f m\n", track->max.y);
	GfLogInfo("ZSize    = %f m\n", track->max.z);
  
	switch (track->pits.type) {
		case TR_PIT_NONE:
			GfLogInfo("Pits     = none\n");
			break;
      
		case TR_PIT_ON_TRACK_SIDE:
			GfLogInfo("Pits     = present on track side\n");
			break;
      
		case TR_PIT_ON_SEPARATE_PATH:
			GfLogInfo("Pits     = present on separate path\n");
			break;

		case TR_PIT_NO_BUILDING:
			GfLogInfo("Pits     = present, no building style\n");
			break;
    }//switch pits.type

	const int seconds = (int)track->local.timeofday;
	GfLogInfo("TimeOfDay= %02d:%02d:%02d\n", seconds / 3600, (seconds % 3600) / 60, seconds % 60);
	GfLogInfo("Sun asc. = %.1f d\n", RAD2DEG(track->local.sunascension));
	GfLogInfo("Clouds   = %d (0=none, 1=few, 2=scarce, 3=many, 4=full)\n", track->local.clouds);
	GfLogInfo("Rain     = %d (0=none, 1=little, 2=medium, 3=heavy)\n", track->local.rain);
	GfLogInfo("Water    = %d (0=none, 1=some, 2=more, 3=swampy)\n", track->local.water);

	if (verbose) {
		int i;
		tTrackSeg *seg;
#ifdef SD_DEBUG
		const char  *stype[4] = { "", "RGT", "LFT", "STR" };
#endif

		for (i = 0, seg = track->seg->next; i < track->nseg; i++, seg = seg->next) {
			GfLogTrace("  segment %d -------------- \n", seg->id);
#ifdef SD_DEBUG
			GfLogTrace("        type    %s\n", stype[seg->type]);
#endif
			GfLogTrace("        length  %f m\n", seg->length);
			GfLogTrace("  radius  %f m\n", seg->radius);
			GfLogTrace("  arc %f d Zs %f d Ze %f d Zcs %f d\n", RAD2DEG(seg->arc),
					   RAD2DEG(seg->angle[TR_ZS]),
					   RAD2DEG(seg->angle[TR_ZE]),
					   RAD2DEG(seg->angle[TR_CS]));
			GfLogTrace(" Za  %f d\n", RAD2DEG(seg->angle[TR_ZS]));
			GfLogTrace("  vertices: %-8.8f %-8.8f %-8.8f ++++ ",
					   seg->vertex[TR_SR].x,
					   seg->vertex[TR_SR].y,
					   seg->vertex[TR_SR].z);
			GfLogTrace("%-8.8f %-8.8f %-8.8f\n",
					   seg->vertex[TR_SL].x,
					   seg->vertex[TR_SL].y,
					   seg->vertex[TR_SL].z);
			GfLogTrace("  vertices: %-8.8f %-8.8f %-8.8f ++++ ",
					   seg->vertex[TR_ER].x,
					   seg->vertex[TR_ER].y,
					   seg->vertex[TR_ER].z);
			GfLogTrace("%-8.8f %-8.8f %-8.8f\n",
					   seg->vertex[TR_EL].x,
					   seg->vertex[TR_EL].y,
					   seg->vertex[TR_EL].z);
			GfLogTrace("  prev    %d\n", seg->prev->id);
			GfLogTrace("  next    %d\n", seg->next->id);
		}//for i
		GfLogTrace("From Last To First\n");
		GfLogTrace("Dx = %-8.8f  Dy = %-8.8f Dz = %-8.8f\n",
				   track->seg->next->vertex[TR_SR].x - track->seg->vertex[TR_ER].x,
				   track->seg->next->vertex[TR_SR].y - track->seg->vertex[TR_ER].y,
				   track->seg->next->vertex[TR_SR].z - track->seg->vertex[TR_ER].z);
    }//if verbose
}//reTrackDump
Example #21
void CBasePlayerAnimState::ComputePoseParam_BodyYaw()
{
	VPROF( "CBasePlayerAnimState::ComputePoseParam_BodyYaw" );

	// Find out which way he's running (m_flEyeYaw is the way he's looking).
	Vector vel;
	GetOuterAbsVelocity( vel );
	bool bIsMoving = vel.Length2D() > MOVING_MINIMUM_SPEED;

	// If we just initialized this guy (maybe he just came into the PVS), then immediately
	// set his feet in the right direction, otherwise they'll spin around from 0 to the 
	// right direction every time someone switches spectator targets.
	if ( !m_bCurrentFeetYawInitialized )
	{
		m_bCurrentFeetYawInitialized = true;
		m_flGoalFeetYaw = m_flCurrentFeetYaw = m_flEyeYaw;
		m_flLastTurnTime = 0.0f;
	}
	else if ( bIsMoving )
	{
		// player is moving, feet yaw = aiming yaw
		if ( m_AnimConfig.m_LegAnimType == LEGANIM_9WAY || m_AnimConfig.m_LegAnimType == LEGANIM_8WAY )
		{
			// His feet point in the direction his eyes are, but they can run in any direction.
			m_flGoalFeetYaw = m_flEyeYaw;
		}
		else
		{
			m_flGoalFeetYaw = RAD2DEG( atan2( vel.y, vel.x ) );

			// If he's running backwards, flip his feet backwards.
			Vector vEyeYaw( cos( DEG2RAD( m_flEyeYaw ) ), sin( DEG2RAD( m_flEyeYaw ) ), 0 );
			Vector vFeetYaw( cos( DEG2RAD( m_flGoalFeetYaw ) ), sin( DEG2RAD( m_flGoalFeetYaw ) ), 0 );
			if ( vEyeYaw.Dot( vFeetYaw ) < -0.01 )
			{
				m_flGoalFeetYaw += 180;
			}
		}

	}
	else if ( (gpGlobals->curtime - m_flLastTurnTime) > mp_facefronttime.GetFloat() )
	{
		// player didn't move & turn for quite some time
		m_flGoalFeetYaw = m_flEyeYaw;
	}
	else
	{
		// If he's rotated his view further than the model can turn, make him face forward.
		float flDiff = AngleNormalize(  m_flGoalFeetYaw - m_flEyeYaw );

		if ( fabs(flDiff) > m_AnimConfig.m_flMaxBodyYawDegrees )
		{
			if ( flDiff  > 0 )
				m_flGoalFeetYaw -= m_AnimConfig.m_flMaxBodyYawDegrees;
			else
				m_flGoalFeetYaw += m_AnimConfig.m_flMaxBodyYawDegrees;
		}
	}

	m_flGoalFeetYaw = AngleNormalize( m_flGoalFeetYaw );

	if ( m_flCurrentFeetYaw != m_flGoalFeetYaw )
	{
		ConvergeAngles( m_flGoalFeetYaw, GetFeetYawRate(), m_AnimConfig.m_flMaxBodyYawDegrees,
			 gpGlobals->frametime, m_flCurrentFeetYaw );

		m_flLastTurnTime = gpGlobals->curtime;
	}

	float flCurrentTorsoYaw = AngleNormalize( m_flEyeYaw - m_flCurrentFeetYaw );

	// Rotate entire body into position
	m_angRender[YAW] = m_flCurrentFeetYaw;
	m_angRender[PITCH] = m_angRender[ROLL] = 0;
		
	SetOuterBodyYaw( flCurrentTorsoYaw );
	g_flLastBodyYaw = flCurrentTorsoYaw;
}
Example #22
//=========================================================================================================
void* tf_main(void* thread_arg) {

    float angle;
    float v_ref, w_ref;
	int i;
    long int counter=0;

	

#ifdef EKF_LOC	
    int data[NUM_SENS];	// provided by ekf.h	
#endif
	
#ifdef HOKUYO
	int maximum_laser_data=get_data_max();	
#endif
	
    setup_termination();

    angle = 0.0;
    gyro_integral = 0.0;
    gyro_sup_integral = 0.0;
    float a,b;

#ifdef CARTESIAN_REGULATOR
	float v_return, w_return;
	int get_goal=0;
	fuzzy_horizon_result fh;
#endif

    a=4;
    b=0;
//    set_robot_speed(&a,&b);

#define RAD2DEG(x) ((float)(x) * 180.0f / (float)M_PI)

	struct timeval tvb, tva;


#ifdef LOG_FROM_MAIN_THREAD
	int cycle;
	char debug_buf[24];
	log_fd=open("timing_main.txt",O_CREAT | O_WRONLY | O_TRUNC , S_IWUSR | S_IRUSR | S_IRGRP | S_IROTH);
#endif

#ifdef HOKUYO_SENSOR

#endif


	// The timing of the first cycle is not aligned because the
	// two threads are not set up at exactly the same time.
	pthread_cond_wait(&cond, &mutex);
	pthread_mutex_unlock(&mutex);		


#ifdef CARTESIAN_REGULATOR
	goal_state = (float *) malloc(sizeof(float)*3);
	curr_state = (float *) malloc(sizeof(float)*3);
	goal_state[0]=viapoints[curr_via][0];
	goal_state[1]=viapoints[curr_via][1];
	goal_state[2]=0;
#endif

	while(1){
		counter++;
		pthread_cond_wait(&cond, &mutex);
		printf("Alive! [%ld]\n", counter);
		pthread_mutex_unlock(&mutex);
		gettimeofday(&tvb,NULL);
	
		//
		get_robot_state(&robot_state);

#ifdef CARTESIAN_REGULATOR
		// Cartesian Regulator
//		get_goal=cartesian_controller(robot_state, goal_state, .1, .1, &v_return, &w_return);		
		if (!via_done) {
			get_goal=cartesian_controller(curr_state, goal_state, 0.3, 0.25, &v_return, &w_return);		
			printf("v:%f  \tw:%f\n",v_return,w_return);		
			if (get_goal==1){
				curr_via++;
				goal_state[0]=viapoints[curr_via][0];
				goal_state[1]=viapoints[curr_via][1];
				get_goal=0;
			}
			if (curr_via==N_VIA) {
				via_done=1;
				v_return=0;
				w_return=0;	
			}	
		}
#ifdef OB_AV		
		apply_fuzzy_horizon_algorithm(&v_return,&w_return,&fh );
		set_robot_speed(&fh.v_ref_hor_fuz, &fh.w_ref_hor_fuz);
#else
		set_robot_speed(&v_return, &w_return);
#endif

#endif
	
#ifdef EKF_LOC
#ifdef HOKUYO_SENSOR		
		// To select the subset of laser beams of interests
		// These are defined in ekf.c by ANGLE_H		
		for (i=0; i<NUM_SENS; i++) {
			obsv[i]=data_laser[ANGLE_IDX[i]]/10;
		}
#endif
		
#ifdef IR_SENSOR		
		for (i=0; i<NUM_SENS; i++){
			obsv[i]= *(ir->range+i);
//			printf("%f\n ", obsv[i]);
		}		
		
#endif		
		
		// Get the latest control input
		u[0]=last_v_ref;
		u[1]=last_w_ref;
		// Prediction
		EKF_Prediction(xpost, &xpred, u);
		// Ekf Correction
		EKF_Correction(xpred, &xpost, obsv);		


		printf("%f\t%f\t%f\n",xpred.x, xpred.y, RAD2DEG(xpred.th));
		printf("%f\t%f\t%f\n",xpost.x, xpost.y, RAD2DEG(xpost.th));	
		
#ifdef CARTESIAN_REGULATOR
		curr_state[0]=xpost.x;
		curr_state[1]=xpost.y;
		curr_state[2]=xpost.th;	
#endif

#endif

		printf("%f\t%f\t%f\n",state[0],state[1],RAD2DEG(state[2]));
		printf("lu: %f   \tlw: %f\n",last_v_ref,last_w_ref);


		gettimeofday(&tva,NULL);



                if (tva.tv_sec==tvb.tv_sec){
			printf("%ld\n", tva.tv_usec-tvb.tv_usec);
#ifdef LOG_FROM_MAIN_THREAD 
	           	cycle=tva.tv_usec-tvb.tv_usec;
#endif
       		}
                else {
                        int delta;
                        delta = 1000000-tvb.tv_usec;
                        printf("%ld\n",tva.tv_usec+delta);
#ifdef LOG_FROM_MAIN_THREAD
                	cycle=tva.tv_usec+delta;
#endif
		}
#ifdef LOG_FROM_MAIN_THREAD
                sprintf(debug_buf, "%d\n", cycle);
                write(log_fd, debug_buf,strlen(debug_buf));
#endif	
	}



    //========================================================================
    while (0) {


	
//		packet_type = analizza_pacchetto();
                       
        if (packet_type == LOAD_PACKET_ANALYZED) {
            //============================================            
            pthread_mutex_lock(&mutex_fp);
            fflush(fp_log);
            pthread_mutex_unlock(&mutex_fp);
            flag = 0;
        }

		
#ifdef EKF_LOC		
#ifdef HOKUYO_SENSOR		
		// To select the subset of laser beams of interests
		// These are defined in ekf.c by ANGLE_H		
		for (i=0; i<NUM_SENS; i++) {
			obsv[i]=data_laser[ANGLE_IDX[i]];
		}
#endif
		
#ifdef IR_SENSOR		
		for (i=0; i<NUM_SENS; i++){
			obsv[i]= *(ir->range+i);
//			printf("%f\n ", obsv[i]);
		}		
		
#endif		
		
		// Get the latest control input
		u[0]=last_v_ref;
		u[1]=last_w_ref;
		
		// EKF Prediction
//		EKF_Prediction(xpost, &xpred, u);
		// Ekf Correction
//		EKF_Correction(xpred, &xpost, obsv);		


//	printf("%f\t%f\t%f\n",xpred.x, xpred.y, RAD2DEG(xpred.th));
//	printf("%f\t%f\t%f\n",xpost.x, xpost.y, RAD2DEG(xpost.th));	
	printf("%f\t%f\t%f\n",state[0],state[1],RAD2DEG(state[2]));
	printf("u: %f   \tw: %f\n",u[0],u[1]);
	printf("lu: %f   \tlw: %f\n",last_v_ref,last_w_ref);


		
#endif
		
		
    }
    //========================================================================

}
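One detail worth calling out in this example: the local RAD2DEG macro must parenthesise its parameter, otherwise compound arguments are converted incorrectly because the cast and the scaling bind to the wrong subexpression. A minimal standalone illustration with hypothetical values:

#include <math.h>
#include <stdio.h>

#define RAD2DEG_BAD(x)  ((float) x * 180 / M_PI)          /* parameter not parenthesised */
#define RAD2DEG_GOOD(x) ((float)(x) * 180.0f / (float)M_PI)

int main(void)
{
    float a = 0.5f, b = 0.25f;
    /* BAD expands to ((float) a + b * 180 / M_PI): only b gets scaled. */
    printf("bad:  %f\n", RAD2DEG_BAD(a + b));   /* ~14.82 */
    printf("good: %f\n", RAD2DEG_GOOD(a + b));  /* ~42.97 */
    return 0;
}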
Example #23
/**
 * SDRender
 * Initialises a scene (ie a new view).
 *
 * @return 0 if OK, -1 if something failed
 */
void SDRender::Init(tTrack *track)
{
    SDTrack = track;

    std::string datapath = GetDataDir();
    //datapath +="/";
    thesky = new SDSky;
    GfOut("SDSky class\n");

    // Sky dome / background.
    SDSkyDomeDistance = 20000;
    if (SDSkyDomeDistance > 0 && SDSkyDomeDistance < SDSkyDomeDistThresh)
        SDSkyDomeDistance = SDSkyDomeDistThresh; // If user enabled it (>0), must be at least the threshold.

    SDDynamicSkyDome = strcmp(GfParmGetStr(grHandle, GR_SCT_GRAPHIC, GR_ATT_DYNAMICSKYDOME, GR_ATT_DYNAMICSKYDOME_DISABLED), GR_ATT_DYNAMICSKYDOME_ENABLED) == 0;

    GfLogInfo("Graphic options : Sky dome : distance = %u m, dynamic = %s\n",
              SDSkyDomeDistance, SDDynamicSkyDome ? "true" : "false");

    // Dynamic weather.
    //grDynamicWeather = GfParmGetNum(grHandle, GR_SCT_GRAPHIC, GR_ATT_grDynamicWeather, (char*)NULL, grDynamicWeather);

    // Cloud layers.
    SDNbCloudLayers =
            (unsigned)(GfParmGetNum(grHandle, GR_SCT_GRAPHIC, GR_ATT_CLOUDLAYER, 0, 0) + 0.5);

    GfLogInfo("Graphic options : Number of cloud layers : %u\n", SDNbCloudLayers);

    SDMax_Visibility =
            (unsigned)(GfParmGetNum(grHandle, GR_SCT_GRAPHIC, GR_ATT_VISIBILITY, 0, 0));

    ShadowIndex = 0; // Default value index, in case file value not found in list.
    const char* pszShadow =
            GfParmGetStr(grHandle, GR_SCT_GRAPHIC, GR_ATT_SHADOW_TYPE, GR_ATT_SHADOW_NONE);

    for (int i = 0; i < NbShadowValues; i++)
    {
        if (!strcmp(pszShadow, ShadowValues[i]))
        {
            ShadowIndex = i;
            break;
        }
    }

    TexSizeIndex = 0; // Default value index, in case file value not found in list.
    const char* pszTexSize =
            GfParmGetStr(grHandle, GR_SCT_GRAPHIC, GR_ATT_SHADOW_SIZE, GR_ATT_SHADOW_1024);

    for (int i = 0; i < NbTexSizeValues; i++)
    {
        if (!strcmp(pszTexSize, TexSizeValues[i]))
        {
            TexSizeIndex = i;
            break;
        }
    }

    switch (TexSizeIndex)
    {
    case 0:
        ShadowTexSize = 512;
        break;

    case 1:
        ShadowTexSize = 1024;
        break;

    case 2:
        ShadowTexSize = 2048;
        break;

    case 3:
        ShadowTexSize = 4096;
        break;

    case 4:
        ShadowTexSize = 8192;
        break;

    default:
        ShadowTexSize = 1024;
        break;
    }

    QualityIndex = 0; // Default value index, in case file value not found in list.
    const char* pszQuality =
            GfParmGetStr(grHandle, GR_SCT_GRAPHIC, GR_ATT_AGR_QUALITY, GR_ATT_AGR_LITTLE);

    for (int i = 0; i < NbQualityValues; i++)
    {
        if (!strcmp(pszQuality, QualityValues[i]))
        {
            QualityIndex = i;
            break;
        }
    }

    carsShader = 0; // Default value index, in case file value not found in list.
    const char* pszShaders =
            GfParmGetStr(grHandle, GR_SCT_GRAPHIC, GR_ATT_SHADERS, GR_ATT_AGR_NULL);

    for (int i = 0; i < NbShadersValues; i++)
    {
        if (!strcmp(pszShaders, ShadersValues[i]))
        {
            carsShader = i;
            break;
        }
    }

    GfLogInfo("Graphic options : Shadow Type : %u\n", ShadowIndex);
    GfLogInfo("Graphic options : Shadow Texture Size : %u\n", ShadowTexSize);
    GfLogInfo("Graphic options : Shadow Quality : %u\n", QualityIndex);

    NStars = NMaxStars;
    if (AStarsData)
        delete [] AStarsData;

    AStarsData = new osg::Vec3d[NStars];

    for(int i= 0; i < NStars; i++)
    {
        AStarsData[i][0] = SDRandom() * PI;
        AStarsData[i][1] = SDRandom() * PI;
        AStarsData[i][2] = SDRandom() * 7.0;
    }

    GfLogInfo("  Stars (random) : %d\n", NStars);

    NPlanets = 0;
    APlanetsData = NULL;

    GfLogInfo("  Planets : %d\n", NPlanets);

    const int timeOfDay = (int)SDTrack->local.timeofday;
    const double domeSizeRatio = SDSkyDomeDistance / 80000.0;

    GfLogInfo("  domeSizeRatio : %.3f\n", domeSizeRatio);

    thesky->build(datapath, SDSkyDomeDistance, SDSkyDomeDistance, 800,
                  40000, 800, 30000, NPlanets,
                  APlanetsData, NStars, AStarsData );
    GfOut("Build SKY\n");
    GLfloat sunAscension = SDTrack->local.sunascension;
    SDSunDeclination = (float)(15 * (double)timeOfDay / 3600 - 90.0);

    thesky->setSD( DEG2RAD(SDSunDeclination));
    thesky->setSRA( sunAscension );

    GfLogInfo("  Sun : time of day = %02d:%02d:%02d (declination = %.1f deg), "
              "ascension = %.1f deg\n", timeOfDay / 3600, (timeOfDay % 3600) / 60, timeOfDay % 60,
              SDSunDeclination, RAD2DEG(sunAscension));

    if ( SDSunDeclination > 180 )
        SDMoonDeclination = 3.0 + (rand() % 40);
    else
        SDMoonDeclination = (rand() % 270);

    //SDMoonDeclination = grUpdateMoonPos(timeOfDay);
    //SDMoonDeclination = 22.0; /*(rand() % 270);*/

    const float moonAscension = SDTrack->local.sunascension;

    thesky->setMD( DEG2RAD(SDMoonDeclination) );
    thesky->setMRA( DEG2RAD(moonAscension) );

    GfLogInfo("  Moon : declination = %.1f deg, ascension = %.1f deg\n",
              SDMoonDeclination, moonAscension);

    /*

    SDCloudLayer *layer = new SDCloudLayer(datapath);
    layer->setCoverage(layer->SD_CLOUD_CIRRUS);
    layer->setSpeed(30);
    layer->setDirection(20);
    layer->setElevation_m(3000);
    layer->setThickness_m(400  / domeSizeRatio);
    layer->setTransition_m(400  / domeSizeRatio);
    layer->setSpan_m(SDSkyDomeDistance / 2);
    thesky->add_cloud_layer(layer);

    SDCloudLayer *layer2 = new SDCloudLayer(datapath);
    layer2->setCoverage(layer2->SD_CLOUD_CIRRUS2);
    layer2->setSpeed(60);
    layer2->setDirection(20);
    layer2->setElevation_m(1500);
    layer2->setThickness_m(400  / domeSizeRatio);
    layer2->setTransition_m(400  / domeSizeRatio);
    layer2->setSpan_m(SDSkyDomeDistance / 2);
    thesky->add_cloud_layer(layer2);*/

    // Initialize the whole sky dome.
    SDScenery * scenery = (SDScenery *)getScenery();
    double r_WrldX = scenery->getWorldX();
    double r_WrldY = scenery->getWorldY();
    //double r_WrldZ = SDScenery::getWorldZ();
    osg::Vec3 viewPos(r_WrldX / 2, r_WrldY/ 2, 0.0 );

    weather();
    thesky->set_visibility( SDVisibility ); // Visibility in meters

    thesky->reposition( viewPos, 0, 0);
    sol_angle = (float)thesky->getSA();
    moon_angle = (float)thesky->getMA();
    thesky->repaint(SkyColor, FogColor, CloudsColor, sol_angle, moon_angle, NPlanets,
                    APlanetsData, NStars, AStarsData);
    UpdateLight();

    osg::ref_ptr<osg::Group> sceneGroup = new osg::Group;
    osg::ref_ptr<osg::Group> mRoot = new osg::Group;
    osg::ref_ptr<osgShadow::ShadowMap> vdsm = new osgShadow::ShadowMap;
    m_scene = new osg::Group;
    m_CarRoot = new osg::Group;
    m_RealRoot = new osg::Group;
    shadowRoot = new osgShadow::ShadowedScene;

    osg::ref_ptr<osgParticle::PrecipitationEffect> precipitationEffect = new osgParticle::PrecipitationEffect;

    if (SDVisibility < 2000)
    {
        sceneGroup->addChild(precipitationEffect.get());
    }

    osg::ref_ptr<osg::Group> scene = new osg::Group;
    osg::ref_ptr<osg::Group> background = new osg::Group;
    osg::ref_ptr<osg::Group> cargroup = new osg::Group;

    scene->addChild(scenery->getScene());
    cargroup->addChild(m_CarRoot.get());
    background->addChild(scenery->getBackground());

	if(ShadowIndex > 0)
	{
		switch (QualityIndex+1)
		{
		case 0:
			break;
		case 1:
			scene->setNodeMask( rcvShadowMask );
			background->setNodeMask(~(rcvShadowMask | castShadowMask));
			cargroup->setNodeMask(castShadowMask);
			break;
		case 2:
			scene->setNodeMask( rcvShadowMask );
			background->setNodeMask(~(rcvShadowMask | castShadowMask));
			cargroup->setNodeMask(rcvShadowMask | castShadowMask);
			break;
		case 3:
			scene->setNodeMask( rcvShadowMask | castShadowMask);
			background->setNodeMask(~(rcvShadowMask | castShadowMask));
			cargroup->setNodeMask(rcvShadowMask | castShadowMask);
			break;
		default:
			break;
		}
	}

    m_scene->addChild(scene.get());
    m_scene->addChild(cargroup.get());
    m_scene->addChild(background.get());

    sceneGroup->addChild(m_scene.get());

    stateSet = new osg::StateSet;
    stateSet = m_scene->getOrCreateStateSet();
    stateSet->setMode(GL_DEPTH_TEST, osg::StateAttribute::ON);
    if (SDVisibility < 2000)
        stateSet->setAttributeAndModes(precipitationEffect->getFog());

    float emis = 0.5f * sky_brightness;
    float ambian = 0.8f * sky_brightness;
    osg::ref_ptr<osg::Material> material = new osg::Material;
    material->setColorMode(osg::Material::OFF); // switch glColor usage off
    Scene_ambiant = osg::Vec4f( ambian, ambian, ambian, 1.0f);
    material->setEmission(osg::Material::FRONT_AND_BACK, osg::Vec4(emis, emis, emis, 1.0f));
    material->setAmbient(osg::Material::FRONT_AND_BACK, Scene_ambiant);
    stateSet->setAttributeAndModes(material, osg::StateAttribute::OVERRIDE|osg::StateAttribute::ON);
    stateSet->setMode(GL_LIGHTING, osg::StateAttribute::OVERRIDE|osg::StateAttribute::ON);

    lightSource = new osg::LightSource;
    lightSource->getLight()->setDataVariance(osg::Object::DYNAMIC);
    lightSource->getLight()->setLightNum(0);
    // relative because of CameraView being just a clever transform node
    lightSource->setReferenceFrame(osg::LightSource::RELATIVE_RF);
    lightSource->setLocalStateSetModes(osg::StateAttribute::ON);
    lightSource->getLight()->setAmbient(osg::Vec4(0.0f, 0.0f, 0.0f, 0.0f));
    lightSource->getLight()->setDiffuse(osg::Vec4( 0.2f, 0.2f, 0.2f, 1.0f));
    lightSource->getLight()->setSpecular(osg::Vec4(0.0f, 0.0f, 0.0f, 0.0f));
    sceneGroup->addChild(lightSource);

    // we need a white diffuse light for the phase of the moon
    sunLight = new osg::LightSource;
    sunLight->getLight()->setDataVariance(osg::Object::DYNAMIC);
    sunLight->getLight()->setLightNum(1);
    sunLight->setReferenceFrame(osg::LightSource::RELATIVE_RF);
    sunLight->setLocalStateSetModes(osg::StateAttribute::ON);
    sunLight->getLight()->setAmbient(SceneAmbiant);
    sunLight->getLight()->setDiffuse(SceneDiffuse);
    sunLight->getLight()->setSpecular(SceneSpecular);
    sunLight->setStateSetModes(*stateSet,osg::StateAttribute::ON);

    osg::Vec3f sun_position = thesky->sunposition();
    osg::Vec3f sun_direction = -sun_position;
    osg::Vec4f position(sun_position, 1.0f);
    sunLight->getLight()->setPosition(position);
    sunLight->getLight()->setDirection(sun_direction);

    skyGroup = new osg::Group;
    skyGroup->setName("skyCloudsGroup");
    skyGroup->setNodeMask(thesky->BACKGROUND_BIT);
    skyGroup->addChild(thesky->getPreRoot());
    skyGroup->addChild((thesky->getCloudRoot()));

    skySS = new osg::StateSet;
    skySS = skyGroup->getOrCreateStateSet();
    skySS->setMode(GL_LIGHT0, osg::StateAttribute::OFF);
    skySS->setAttributeAndModes( new osg::ColorMask( true, true, true, false ), osg::StateAttribute::ON );

    skyGroup->setNodeMask(~(rcvShadowMask | castShadowMask));
    sunLight->addChild(skyGroup.get());

    mRoot->addChild(sceneGroup.get());
    mRoot->setStateSet(setFogState().get());
    mRoot->addChild(sunLight.get());

    // Clouds are added to the scene graph later
    osg::ref_ptr<osg::StateSet> stateSet2 = new osg::StateSet;
    stateSet2 = mRoot->getOrCreateStateSet();
    stateSet2->setMode(GL_ALPHA_TEST, osg::StateAttribute::ON);
    stateSet2->setMode(GL_LIGHTING, osg::StateAttribute::ON);
    stateSet2->setMode(GL_DEPTH_TEST, osg::StateAttribute::ON);

    m_RealRoot->addChild(mRoot.get());

    GfOut("Root group pointer: %p\n", (void *)mRoot.get());
}//SDRender::Init
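The sun declination used above is a linear function of the time of day:

\[
\delta_\odot = \frac{15\, t_{\mathrm{day}}}{3600} - 90^\circ,
\]

i.e. 15 degrees per hour with -90 degrees at midnight. As a quick check, 08:00 (timeOfDay = 28800 s) gives 15 * 8 - 90 = 30 degrees, and noon gives 90 degrees.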
Example #24
/*
===============
CG_smoothWWTransitions
===============
*/
static void CG_smoothWWTransitions( playerState_t *ps, const vec3_t in, vec3_t out )
{
	vec3_t   surfNormal, rotAxis, temp;
	int      i;
	float    stLocal, sFraction, rotAngle;
	float    smoothTime, timeMod;
	qboolean performed = qfalse;
	vec3_t   inAxis[ 3 ], lastAxis[ 3 ], outAxis[ 3 ];

	if ( cg.snap->ps.pm_flags & PMF_FOLLOW )
	{
		VectorCopy( in, out );
		return;
	}

	//set surfNormal
	BG_GetClientNormal( ps, surfNormal );

	AnglesToAxis( in, inAxis );

	//if we are moving from one surface to another smooth the transition
	if( !VectorCompareEpsilon( surfNormal, cg.lastNormal, 0.01f ) )
	{
		AnglesToAxis( cg.lastVangles, lastAxis );

		rotAngle = DotProduct( inAxis[ 0 ], lastAxis[ 0 ] ) +
		           DotProduct( inAxis[ 1 ], lastAxis[ 1 ] ) +
		           DotProduct( inAxis[ 2 ], lastAxis[ 2 ] );

		// if inAxis and lastAxis collinear, prevent NaN on acos( -1 )
		if ( rotAngle < -0.9999f )
		{
			rotAngle = 180.0f;
		}
		else
		{
			rotAngle = RAD2DEG( acos( ( rotAngle - 1.0f ) / 2.0f ) );
		}

		CrossProduct( lastAxis[ 0 ], inAxis[ 0 ], temp );
		VectorCopy( temp, rotAxis );
		CrossProduct( lastAxis[ 1 ], inAxis[ 1 ], temp );
		VectorAdd( rotAxis, temp, rotAxis );
		CrossProduct( lastAxis[ 2 ], inAxis[ 2 ], temp );
		VectorAdd( rotAxis, temp, rotAxis );

		VectorNormalize( rotAxis );

		timeMod = 1.0f;

		//add the op
		CG_addSmoothOp( rotAxis, rotAngle, timeMod );
	}

	//iterate through ops
	for ( i = MAXSMOOTHS - 1; i >= 0; i-- )
	{
		smoothTime = ( int )( cg_wwSmoothTime.integer * cg.sList[ i ].timeMod );

		//if this op has time remaining, perform it
		if ( cg.time < cg.sList[ i ].time + smoothTime )
		{
			stLocal = 1.0f - ( ( ( cg.sList[ i ].time + smoothTime ) - cg.time ) / smoothTime );
			sFraction = - ( cos( stLocal * M_PI ) + 1.0f ) / 2.0f;

			RotatePointAroundVector( outAxis[ 0 ], cg.sList[ i ].rotAxis,
			                         inAxis[ 0 ], sFraction * cg.sList[ i ].rotAngle );
			RotatePointAroundVector( outAxis[ 1 ], cg.sList[ i ].rotAxis,
			                         inAxis[ 1 ], sFraction * cg.sList[ i ].rotAngle );
			RotatePointAroundVector( outAxis[ 2 ], cg.sList[ i ].rotAxis,
			                         inAxis[ 2 ], sFraction * cg.sList[ i ].rotAngle );

			AxisCopy( outAxis, inAxis );
			performed = qtrue;
		}
	}

	//if we performed any ops then return the smoothed angles
	//otherwise simply return the in angles
	if ( performed )
	{
		AxisToAngles( outAxis, out );
	}
	else
	{
		VectorCopy( in, out );
	}

	//copy the current normal to the lastNormal
	VectorCopy( in, cg.lastVangles );
	VectorCopy( surfNormal, cg.lastNormal );
}
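The rotAngle recovery above uses the trace of the relative rotation between lastAxis and inAxis: the three DotProduct terms sum to tr(R), and for a rotation matrix

\[
\operatorname{tr}(R) = 1 + 2\cos\theta
\quad\Longrightarrow\quad
\theta = \arccos\!\Big(\frac{\operatorname{tr}(R) - 1}{2}\Big),
\]

which is why the code guards the near-collinear case (trace close to -1, i.e. theta near 180 degrees), where rounding could push the acos argument below -1 and produce a NaN.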
Example #25
File: plans.c  Project: B-robur/OpenPilot
static void getVector(float controlVector[4], vario_type type)
{
    FlightModeSettingsPositionHoldOffsetData offset;

    FlightModeSettingsPositionHoldOffsetGet(&offset);

    // scale controlVector[3] (thrust) by vertical/horizontal to have vertical plane less sensitive
    controlVector[3] *= offset.Vertical / offset.Horizontal;

    float length = sqrtf(controlVector[0] * controlVector[0] + controlVector[1] * controlVector[1] + controlVector[3] * controlVector[3]);

    if (length <= 1e-9f) {
        length = 1.0f; // should never happen as getVector is not called if control within deadband
    }
    {
        float direction[3] = {
            controlVector[1] / length, // pitch is north
            controlVector[0] / length, // roll is east
            controlVector[3] / length // thrust is down
        };
        controlVector[0] = direction[0];
        controlVector[1] = direction[1];
        controlVector[2] = direction[2];
    }
    controlVector[3] = length * offset.Horizontal;

    // rotate north and east - rotation angle based on type
    float angle;
    switch (type) {
    case COURSE:
        angle = vario_course;
        break;
    case NSEW:
        angle = 0.0f;
        // NSEW no rotation takes place
        break;
    case FPV:
        // local rotation, using current yaw
        AttitudeStateYawGet(&angle);
        break;
    case LOS:
        // determine location based on vector from takeoff to current location
    {
        PositionStateData positionState;
        PositionStateGet(&positionState);
        TakeOffLocationData takeoffLocation;
        TakeOffLocationGet(&takeoffLocation);
        angle = RAD2DEG(atan2f(positionState.East - takeoffLocation.East, positionState.North - takeoffLocation.North));
    }
    break;
    }
    // rotate horizontally by angle
    {
        float rotated[2] = {
            controlVector[0] * cos_lookup_deg(angle) - controlVector[1] * sin_lookup_deg(angle),
            controlVector[0] * sin_lookup_deg(angle) + controlVector[1] * cos_lookup_deg(angle)
        };
        controlVector[0] = rotated[0];
        controlVector[1] = rotated[1];
    }
}
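The final block applies a plain 2-D rotation of the (north, east) components by angle, using the lookup-table sin/cos helpers:

\[
\begin{pmatrix} N' \\ E' \end{pmatrix}
=
\begin{pmatrix} \cos\alpha & -\sin\alpha \\ \sin\alpha & \cos\alpha \end{pmatrix}
\begin{pmatrix} N \\ E \end{pmatrix}.
\]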
Example #26
static int CG_CalcFov( void )
{
	float     y;
	float     phase;
	float     v;
	int       contents;
	float     fov_x, fov_y;
	float     zoomFov;
	float     f;
	int       inwater;
	int       attribFov;
	usercmd_t cmd;
	usercmd_t oldcmd;
	int       cmdNum;

	cmdNum = trap_GetCurrentCmdNumber();
	trap_GetUserCmd( cmdNum, &cmd );
	trap_GetUserCmd( cmdNum - 1, &oldcmd );

	// switch follow modes if necessary: cycle between free -> follow -> third-person follow
	if ( usercmdButtonPressed( cmd.buttons, BUTTON_USE_HOLDABLE ) && !usercmdButtonPressed( oldcmd.buttons, BUTTON_USE_HOLDABLE ) )
	{
		if ( cg.snap->ps.pm_flags & PMF_FOLLOW )
		{
			if ( !cg.chaseFollow )
			{
				cg.chaseFollow = qtrue;
			}
			else
			{
				cg.chaseFollow = qfalse;
				trap_SendClientCommand( "follow\n" );
			}
		}
		else if ( cg.snap->ps.persistant[ PERS_SPECSTATE ] != SPECTATOR_NOT )
		{
			trap_SendClientCommand( "follow\n" );
		}
	}

	if ( cg.predictedPlayerState.pm_type == PM_INTERMISSION ||
	     ( cg.snap->ps.persistant[ PERS_SPECSTATE ] != SPECTATOR_NOT ) ||
	     ( cg.renderingThirdPerson ) )
	{
		// if in intermission or third person, use a fixed value
		fov_y = BASE_FOV_Y;
	}
	else
	{
		// don't lock the fov globally - we need to be able to change it
		if ( ( attribFov = trap_Cvar_VariableIntegerValue( BG_Class( cg.predictedPlayerState.stats[ STAT_CLASS ] )->fovCvar ) ) )
		{
			if ( attribFov < 80 )
			{
				attribFov = 80;
			}
			else if ( attribFov >= 140 )
			{
				attribFov = 140;
			}
		}
		else
		{
			attribFov = BG_Class( cg.predictedPlayerState.stats[ STAT_CLASS ] )->fov;
		}
		attribFov *= 0.75;
		fov_y = attribFov;

		if ( fov_y < 1.0f )
		{
			fov_y = 1.0f;
		}
		else if ( fov_y > MAX_FOV_Y )
		{
			fov_y = MAX_FOV_Y;
		}

		if ( cg.spawnTime > ( cg.time - FOVWARPTIME ) &&
		     BG_ClassHasAbility( cg.predictedPlayerState.stats[ STAT_CLASS ], SCA_FOVWARPS ) )
		{
			float fraction = ( float )( cg.time - cg.spawnTime ) / FOVWARPTIME;

			fov_y = MAX_FOV_WARP_Y - ( ( MAX_FOV_WARP_Y - fov_y ) * fraction );
		}

		// account for zooms
		zoomFov = BG_Weapon( cg.predictedPlayerState.weapon )->zoomFov * 0.75f;

		if ( zoomFov < 1.0f )
		{
			zoomFov = 1.0f;
		}
		else if ( zoomFov > attribFov )
		{
			zoomFov = attribFov;
		}

		// only do all the zoom stuff if the client CAN zoom
		// FIXME: zoom control is currently hard coded to WBUTTON_ATTACK2
		if ( BG_Weapon( cg.predictedPlayerState.weapon )->canZoom )
		{
			if ( cg.zoomed )
			{
				f = ( cg.time - cg.zoomTime ) / ( float ) ZOOM_TIME;

				if ( f > 1.0f )
				{
					fov_y = zoomFov;
				}
				else
				{
					fov_y = fov_y + f * ( zoomFov - fov_y );
				}

				// WBUTTON_ATTACK2 isn't held so unzoom next time
				if ( !usercmdButtonPressed( cmd.buttons, BUTTON_ATTACK2 ) || cg.snap->ps.weaponstate == WEAPON_RELOADING )
				{
					cg.zoomed = qfalse;
					cg.zoomTime = MIN( cg.time,
					                   cg.time + cg.time - cg.zoomTime - ZOOM_TIME );
				}
			}
			else
			{
				f = ( cg.time - cg.zoomTime ) / ( float ) ZOOM_TIME;

				if ( f < 1.0f )
				{
					fov_y = zoomFov + f * ( fov_y - zoomFov );
				}

				// WBUTTON_ATTACK2 is held so zoom next time
				if ( usercmdButtonPressed( cmd.buttons, BUTTON_ATTACK2 ) && cg.snap->ps.weaponstate != WEAPON_RELOADING )
				{
					cg.zoomed = qtrue;
					cg.zoomTime = MIN( cg.time,
					                   cg.time + cg.time - cg.zoomTime - ZOOM_TIME );
				}
			}
		}
		else if ( cg.zoomed )
		{
			cg.zoomed = qfalse;
		}
	}

	y = cg.refdef.height / tan( 0.5f * DEG2RAD( fov_y ) );
	fov_x = atan2( cg.refdef.width, y );
	fov_x = 2.0f * RAD2DEG( fov_x );

	// warp if underwater
	contents = CG_PointContents( cg.refdef.vieworg, -1 );

	if ( contents & ( CONTENTS_WATER | CONTENTS_SLIME | CONTENTS_LAVA ) )
	{
		phase = cg.time / 1000.0f * WAVE_FREQUENCY * M_PI * 2.0f;
		v = WAVE_AMPLITUDE * sin( phase );
		fov_x += v;
		fov_y -= v;
		inwater = qtrue;
	}
	else
	{
		inwater = qfalse;
	}

	// set it
	cg.refdef.fov_x = fov_x;
	cg.refdef.fov_y = fov_y;

	if ( !cg.zoomed )
	{
		cg.zoomSensitivity = 1.0f;
	}
	else
	{
		cg.zoomSensitivity = cg.refdef.fov_y / 75.0f;
	}

	return inwater;
}
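
For reference, the fov_x derivation at the bottom of CG_CalcFov reduces to a single relation between the vertical FOV and the viewport size; a hedged standalone sketch (fovXFromFovY is a hypothetical helper, not part of the cgame code):

#include <math.h>

// Horizontal FOV derived from the vertical FOV and the viewport size,
// mirroring the tan/atan2 step at the end of CG_CalcFov (degrees in/out).
static float fovXFromFovY(float fovYDeg, float width, float height)
{
    float y = height / tanf(0.5f * fovYDeg * (float)M_PI / 180.0f);
    return 2.0f * atan2f(width, y) * 180.0f / (float)M_PI;
}
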
Example #27
// angle between this vector and p, in degrees
float FVector::angle(FVector p) {
  float ang = RAD2DEG(acos(((*this)*p)/(this->magnitude()*p.magnitude())));
  //if((p-(*this)).x>=0)ang+=90.0f;
  return ang;
}
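
The same angle computation as a self-contained sketch, with the cosine clamped so nearly parallel inputs cannot produce NaN; this assumes FVector's operator* is the dot product, which the division by both magnitudes implies (angleBetweenDeg is a hypothetical helper):

#include <math.h>

// Angle between two 3D vectors in degrees; the cosine is clamped to
// [-1, 1] so rounding error cannot push acosf out of its domain.
static float angleBetweenDeg(const float a[3], const float b[3])
{
    float dot  = a[0] * b[0] + a[1] * b[1] + a[2] * b[2];
    float magA = sqrtf(a[0] * a[0] + a[1] * a[1] + a[2] * a[2]);
    float magB = sqrtf(b[0] * b[0] + b[1] * b[1] + b[2] * b[2]);
    float c    = fmaxf(-1.0f, fminf(1.0f, dot / (magA * magB)));
    return acosf(c) * 180.0f / (float)M_PI;
}
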
Example #28
void CPlayerRotation::ProcessFlyingZeroG()
{
	bool bEnableGyroVerticalFade = (g_pGameCVars->pl_zeroGEnableGyroFade > 0);
	bool bEnableGyroSpeedFade = (g_pGameCVars->pl_zeroGEnableGyroFade < 2);

	// that's necessary when passing from groundG to normalG
	m_baseQuat = m_viewQuat;
	assert(m_baseQuat.IsValid());
	//m_baseQuat = Quat::CreateSlerp(m_viewQuat,m_player.GetEntity()->GetRotation() * Quat::CreateRotationZ(gf_PI),0.5f);

	Ang3 desiredAngVel(m_deltaAngles.x,m_deltaAngles.y * 0.3f,m_deltaAngles.z);

	// view recoil in zeroG causes the player to rotate
	desiredAngVel.x += m_viewAnglesOffset.x * 0.1f;
	desiredAngVel.z += m_viewAnglesOffset.z * 0.1f;

	// once used, reset it
	m_viewAnglesOffset.Set(0,0,0);

	// gyroscope: the gyroscope just applies the right roll speed to compensate for the rotation; that way, effects like
	// propulsion particles and such can be driven simply from the angularVel
	float rotInertia(g_pGameCVars->pl_zeroGAimResponsiveness);

	if(m_player.GravityBootsOn() && m_stats.gBootsSpotNormal.len2()>0.01f)
	{
		Vec3 vRef(m_baseQuat.GetInverted() * m_stats.gBootsSpotNormal);
		Ang3 alignAngle(0,0,0);
		alignAngle.y = cry_atan2f(vRef.x, vRef.z);
		alignAngle.x = cry_atan2f(vRef.y, vRef.z);

		desiredAngVel.y += alignAngle.y * 0.05f;
		desiredAngVel.x -= alignAngle.x * 0.05f;
	}

	if(m_actions & ACTION_GYROSCOPE && desiredAngVel.y==0)
	{
		// we want to fade out the gyroscopic effect
		Vec3 vRef(m_baseQuat.GetInverted() * m_stats.zeroGUp);
		Ang3 alignAngle(0,0,0);
		alignAngle.y = cry_atan2f(vRef.x,vRef.z);

		float gyroFade = 1.0f;

		if(bEnableGyroVerticalFade)
		{
			float gyroFadeAngleInner = g_pGameCVars->pl_zeroGGyroFadeAngleInner;
			float gyroFadeAngleOuter = g_pGameCVars->pl_zeroGGyroFadeAngleOuter;
			float gyroFadeAngleSpan = gyroFadeAngleOuter - gyroFadeAngleInner;
			float gyroFadeAngleSpanInv = 1.0f / gyroFadeAngleSpan;
			float viewVerticalAlignment = abs(m_viewQuat.GetFwdZ());
			float viewVerticalAngle = RAD2DEG(cry_asinf(viewVerticalAlignment));
			gyroFade = 1.0f - CLAMP((viewVerticalAngle - gyroFadeAngleInner) * gyroFadeAngleSpanInv, 0.0f, 1.0f);
			gyroFade = cry_powf(gyroFade, g_pGameCVars->pl_zeroGGyroFadeExp);
		}

		float speedFade = 1.0f;

		if(bEnableGyroSpeedFade)
		{
			float speed = m_player.GetLastRequestedVelocity().GetLength();
			speedFade = 1.0f - std::min(1.0f, speed / 5.0f);
		}

		desiredAngVel.y += alignAngle.y * speedFade * gyroFade * m_frameTime * g_pGameCVars->pl_zeroGGyroStrength;

		//rotInertia = 3.0f;
	}

	m_absRoll = fabs(desiredAngVel.y);

	Interpolate(m_angularVel,desiredAngVel,rotInertia,m_frameTime);
	Ang3 finalAngle(m_angularVel + m_angularImpulseDelta);

	m_baseQuat *= Quat::CreateRotationZ(finalAngle.z) * Quat::CreateRotationX(finalAngle.x) * Quat::CreateRotationY(finalAngle.y);
	m_baseQuat.NormalizeSafe();

	/*IEntity *pEnt = m_player.GetEntity();
	Vec3 offsetToCenter(Vec3(0,0,m_player.GetStanceInfo(m_player.GetStance())->heightCollider));
	Vec3 finalPos(pEnt->GetWorldTM() * offsetToCenter);
	Quat newBaseQuat(m_baseQuat * Quat::CreateRotationZ(finalAngle.z) * Quat::CreateRotationX(finalAngle.x) * Quat::CreateRotationY(finalAngle.y));
	Vec3 newPos(pEnt->GetWorldPos() + m_baseQuat * offsetToCenter);
	pEnt->SetPos(pEnt->GetWorldPos() + (finalPos - newPos),ENTITY_XFORM_USER);*/

	//CHECKQNAN_MAT33(m_baseMtx);

	m_viewQuat = m_baseQuat;
	m_viewRoll = 0;
	m_upVector = m_baseQuat.GetColumn2();
}
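
The vertical gyro fade above maps the view's vertical angle through an inner/outer window and an exponent; a minimal sketch of that mapping, assuming the same degree-valued cvars (gyroVerticalFade is a hypothetical helper):

#include <math.h>

// Vertical gyro fade: 1 inside the inner angle, 0 beyond the outer angle,
// shaped by an exponent in between (angles in degrees).
static float gyroVerticalFade(float viewVerticalAngleDeg,
                              float innerDeg, float outerDeg, float exponent)
{
    float t = (viewVerticalAngleDeg - innerDeg) / (outerDeg - innerDeg);
    t = fmaxf(0.0f, fminf(1.0f, t));
    return powf(1.0f - t, exponent);
}
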
Example #29
EMovementTransitionState CMovementTransitions::Update(
	const uint8 allowedTransitionFlags,
	const Lineseg& safeLine,
	const CTimeValue runningDuration,
	const bool bHasLockedBodyTarget,
	const Vec3& playerPos,
	const SMovementTransitionsSample& oldSample,
	const SMovementTransitionsSample& newSample,
	const float entitySpeed2D,
	const float entitySpeed2DAvg,
	const SExactPositioningTarget*const pExactPositioningTarget,

	CMovementTransitionsController*const pController,
	CPlayer*const pPlayer,
	CMovementRequest*const pRequest,
	SActorFrameMovementParams*const pMoveParams,
	float*const pJukeTurnRateFraction,
	Vec3*const pBodyTarget,
	const char**const pBodyTargetType ) const
{
	if (!g_pGame->GetCVars()->g_movementTransitions_enable)
		return eMTS_None;

	if (!m_isDataValid)
		return eMTS_None;

	Vec3 targetBodyDirection = (*pBodyTarget - playerPos).GetNormalizedSafe(newSample.bodyDirection);

	const EStance upcomingStance = pController->GetUpcomingStance();
	const EStance stance = (upcomingStance == STANCE_NULL) ? pPlayer->GetStance() : upcomingStance;

	STransitionSelectionParams transParams(
		*this, *pPlayer, *pRequest, playerPos, oldSample, newSample, bHasLockedBodyTarget, targetBodyDirection, safeLine, runningDuration, allowedTransitionFlags, entitySpeed2D, entitySpeed2DAvg, pExactPositioningTarget, stance,
		pMoveParams);

	const STransition* pTransition = NULL;
	int index = -1;
	STransitionMatch bestMatch;
	EMovementTransitionState newState = eMTS_None;

	if (transParams.m_transitionType != eTT_None)
	{
		FindBestMatch(transParams, &pTransition, &index, &bestMatch);

		if (pTransition)
		{
			newState = pTransition->Update(*this, transParams, bestMatch, playerPos, oldSample.moveDirection, newSample.moveDirection, pJukeTurnRateFraction, pBodyTarget, pBodyTargetType, pPlayer, pController);
		}
	}

#ifndef _RELEASE
	{
		bool bSignaled = (newState == eMTS_Requesting_Succeeded);

		// Log
		if (g_pGame->GetCVars()->g_movementTransitions_log && pTransition && bSignaled)
		{
			CRY_ASSERT(index < (int)m_transitions.size());
			CryLog("Transition\tentity=%s\tindex=%i\t%s\t%s", 
				pPlayer->GetEntity()->GetName(), 
				index, 
				pTransition->GetDescription().c_str(),
				transParams.m_bPredicted?"Predicted":"Immediate");
		}

		// Debug
		if (g_pGame->GetCVars()->g_movementTransitions_debug)
		{
			if (MovementTransitionsDebug::s_debug_frame != gEnv->pRenderer->GetFrameID())
			{
				MovementTransitionsDebug::s_debug_frame = gEnv->pRenderer->GetFrameID();
				MovementTransitionsDebug::s_debug_y = 50;
			}

			float dist = transParams.m_transitionDistance;
			if (pRequest->HasMoveTarget())
			{
				dist = sqrtf(pRequest->GetMoveTarget().GetSquaredDistance2D(playerPos));
			}

			gEnv->pRenderer->Draw2dLabel( 8.f, (float)MovementTransitionsDebug::s_debug_y, 1.5f, bSignaled ? MovementTransitionsDebug::s_dbg_color_signaled : MovementTransitionsDebug::s_dbg_color_unsignaled, false, 
				"entity=%s\ttype=%s\tspeed=%s\tdist=%3.2f\tTAngle=%3.2f\tarrivalAngle=%3.2f\ttargTAngle=%3.2f\tjukeAngle=%3.2f\tflags=%d",
					pPlayer->GetEntity()->GetName(),
					MovementTransitionsDebug::GetTransitionTypeName(transParams.m_transitionType),
					MovementTransitionsDebug::GetPseudoSpeedName(transParams.m_pseudoSpeed),
					dist,
					RAD2DEG(transParams.m_travelAngle),
					RAD2DEG(transParams.m_arrivalAngle),
					RAD2DEG(transParams.m_targetTravelAngle),
					RAD2DEG(transParams.m_jukeAngle),
					(const unsigned int)allowedTransitionFlags
				);
			MovementTransitionsDebug::s_debug_y += 14;

			if (pTransition)
			{
				gEnv->pRenderer->Draw2dLabel( 8.f, (float)MovementTransitionsDebug::s_debug_y, 1.5f, bSignaled ? MovementTransitionsDebug::s_dbg_color_signaled : MovementTransitionsDebug::s_dbg_color_unsignaled, false, 
					"Transition\tindex=%i\t%s\tangleDiff=%3.2f", index, pTransition->GetDescription().c_str(), RAD2DEG(bestMatch.angleDifference) );
			}
			MovementTransitionsDebug::s_debug_y += 14;
		}
	}
#endif

	return newState;
}
Example #30
/**
 * Get the horizontal field of view [degrees].
 *
 * @return		the horizontal field of view [degrees]
 */
float PPC::GetHFOV() const {
	return RAD2DEG(2.0f * atan2f((float)w / 2.0f * a.Length(), GetFocalLength()));
}
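
The formula is the usual pinhole relation: half the sensor width over the focal length gives the tangent of half the field of view. A hedged standalone sketch, assuming a.Length() plays the role of the per-pixel horizontal pitch (horizontalFovDeg is a hypothetical helper, not part of the PPC class):

#include <math.h>

// Pinhole-camera horizontal FOV in degrees:
// w = image width in pixels, pixelPitch = physical width of one pixel,
// focalLength = focal length in the same physical units as pixelPitch.
static float horizontalFovDeg(int w, float pixelPitch, float focalLength)
{
    float halfSensorWidth = 0.5f * (float)w * pixelPitch;
    return 2.0f * atan2f(halfSensorWidth, focalLength) * 180.0f / (float)M_PI;
}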