Esempio n. 1
0
void CRagDollInPlaneConstraint::Apply(uint32 nPosIndex)
{
	//find the plane pivot point (the connection between both points forming the first plane)
	LTVector& vPivot = m_pPlaneCenter->m_vPosition[nPosIndex];

	//find the vector that goes from the pivot point to the hinge point
	LTVector vToHinge = m_pHinge->m_vPosition[nPosIndex] - vPivot;
	LTVector vToOther = m_pPlaneOther->m_vPosition[nPosIndex] - vPivot;

	//find the plane normal
	LTVector vPlaneNormal = vToOther.Cross(vToHinge);
	vPlaneNormal.Normalize();

	//build the constraint plane's normal: perpendicular to both the hinge direction
	//and the first plane's normal.  Since the result is re-normalized immediately,
	//m_fNormalScale contributes only its sign here (it selects which side of the
	//plane counts as "in front").
	vPlaneNormal = vToHinge.Cross(vPlaneNormal) * m_fNormalScale;
	vPlaneNormal.Normalize();

	//move the point into the plane
	LTVector& vConstrain = m_pConstrain->m_vPosition[nPosIndex];

	//signed distance of the constrained point from the plane
	float fDot = vPlaneNormal.Dot(vConstrain - vPivot);

	//pull the point back towards the plane, but let it float freely within a
	//band of +/- m_fTolerance around the plane so small offsets are not fought
	if(fDot < -m_fTolerance)
	{
		fDot += m_fTolerance;
		vConstrain -= vPlaneNormal * fDot;
	}
	else if(fDot > m_fTolerance)
	{
		fDot -= m_fTolerance;
		vConstrain -= vPlaneNormal * fDot;
	}

	//success
}
Esempio n. 2
0
bool FindNearestPointOnLine( const LTVector& l0, const LTVector& l1, const LTVector& vPos, LTVector* pvPosNearest )
{
	// Sanity check.

	if( !pvPosNearest )
	{
		return false;
	}

	// Find the line's normal.

	LTVector vUp( 0.f, 1.f, 0.f );
	LTVector vDir = l1 - l0;
	vDir.Normalize();

	LTVector vNormal = vDir.Cross( vUp );
	vNormal.Normalize();

	// Find the nearest intersection point between the point and the line.

	LTVector vRay0 = vPos + ( vNormal * 100000.f );
	LTVector vRay1 = vPos - ( vNormal * 100000.f );

	return ( kRayIntersect_Failure != RayIntersectLineSegment( l0, l1, vRay0, vRay1, true, pvPosNearest ) );
}
Esempio n. 3
0
// Returns true if the AI could plausibly shoot at vTargetOrigin from its
// current weapon position: the AI must exist and hold a weapon, the target
// must not coincide with the weapon position, must lie within the vertical
// aiming FOV, and must not require the AI to turn its back on its current
// target.
bool AIUtil_PositionShootable(CAI* pAI, const LTVector& vTargetOrigin)
{
	if (NULL == pAI 
		|| NULL == pAI->GetAIWeaponMgr()
		|| NULL == pAI->GetAIWeaponMgr()->GetCurrentWeapon())
	{
		return false;
	}

	LTVector vAIWeaponPosition = pAI->GetWeaponPosition(pAI->GetAIWeaponMgr()->GetCurrentWeapon(), false);

	// Bail if the AIs target is in the same position as the weapon.

	if (vTargetOrigin == vAIWeaponPosition)
	{
		return false;
	}

	// Bail if the combat opportunity is too far above or below the AI
	// (must be within the aiming range). 
	//
	// TODO: Determine what a good FOV without hardcoding this value. The
	// selected FOV fixed out cases, but this is animation driven, there
	// is no guarantee this is the ideal value.

	// Take the absolute vertical component so the cone test is symmetric
	// above and below the weapon.
	LTVector vToTargetUnit3D = (vTargetOrigin - vAIWeaponPosition).GetUnit();
	vToTargetUnit3D.y = fabs(vToTargetUnit3D.y);
	float fDotUp = LTVector(0.0f, 1.0f, 0.0f).Dot( vToTargetUnit3D );
	if (fDotUp >= c_fFOV60)
	{
		return false;
	}

	// Bail if the AI has to turn his back on his enemy/target to fire at this
	// position.

	LTVector vDirCombatOp = vTargetOrigin - vAIWeaponPosition;
	vDirCombatOp.y = 0.f;
	vDirCombatOp.Normalize();

	LTVector vDirTargetChar = pAI->GetAIBlackBoard()->GetBBTargetPosition() - vAIWeaponPosition;
	vDirTargetChar.y = 0.f;
	vDirTargetChar.Normalize();

	// Horizontal angle between the direction to the shoot position and the
	// direction to the current target must stay inside the wide FOV.
	float fHorizontalDp = vDirCombatOp.Dot( vDirTargetChar );
	if( fHorizontalDp <= c_fFOV140 )
	{
		return false;
	}

	// Position is shootable.

	return true;
}
Esempio n. 4
0
// Draws a debug arrow from vStart to vEnd: the shaft line plus a small
// triangular head placed at 90% of the arrow's length.
void DebugLineSystem::AddArrow( const LTVector & vStart, const LTVector & vEnd,
								const DebugLine::Color & color /* = Color::White */,
								uint8 nAlpha /* = 255 */ )
{
	const float fHeadSize = 4.0f;
	LTVector vStartToEnd = vEnd - vStart;
	float fLen = vStartToEnd.Mag();
	if( vStartToEnd != LTVector::GetIdentity() )
	{
		vStartToEnd.Normalize();
	}

	AddLine( vStart, vEnd, color, nAlpha);

	// Base of the arrow head, 90% of the way along the shaft.
	LTVector vArrow = vStart + ( ( fLen * 0.9f ) * vStartToEnd );

	LTVector vUp( 0.f, 1.f, 0.f );
	LTVector vNorm;
	if( vStartToEnd != vUp )
	{
		vNorm = vStartToEnd.Cross( vUp );

		// The cross product of two unit vectors is only unit length when they
		// are perpendicular; normalize so the head is always fHeadSize wide
		// regardless of the line's pitch (previously it shrank towards zero
		// as the line approached vertical).
		vNorm.Normalize();
	}
	else {
		// Exactly vertical: any horizontal direction works for the head.
		vNorm = LTVector( 1.f, 0.f, 0.f );
	}

	vNorm *= ( fHeadSize/2.0f );
	AddLine( vArrow - vNorm, vArrow + vNorm, color, nAlpha);

	AddLine( vArrow + vNorm, vEnd, color, nAlpha);
	AddLine( vArrow - vNorm, vEnd, color, nAlpha);
}
Esempio n. 5
0
// Computes how a model should contour itself to a surface.
// vForward  - the model's forward direction.
// vNormal   - the surface normal to contour to; assumed unit length (the
//             sqrt( 1 - y*y ) below relies on that -- TODO confirm callers).
// fOutAmount       - angle (radians) to rotate to become parallel to the plane.
// fOutPitchPercent - how much of that rotation is pitch (forward component).
// fOutRollPercent  - how much of that rotation is roll (sideways component).
void GetContouringInfo( LTVector &vForward, LTVector &vNormal, 
					   float &fOutAmount, float &fOutPitchPercent, float &fOutRollPercent )
{
	// On a perfectly flat surface the normal gives no horizontal direction,
	// so use the model's forward instead.
	LTVector	vPlaneF = (vNormal.y >= 1.0f) ? vForward : vNormal;
	
	vPlaneF.y = 0.0f;
	vPlaneF.Normalize();

	LTRotation	rPlaneRot( vPlaneF, LTVector(0, 1, 0));
	LTVector	vPlaneR = rPlaneRot.Right();
	
	// Calculate how much pitch and roll we should apply...

	fOutPitchPercent	= vForward.Dot( vPlaneF );
	fOutRollPercent		= vForward.Dot( vPlaneR );

	// Figure out the length of the foward vector projected on the xz plane.  This
	// is needed because Euler angles are calculated cummulatively, not just based
	// on the global coordinate axis.

	float fXZLen = (float)sqrt( 1.0f - vNormal.y * vNormal.y );

	// Subtract the pitch from 90 degrees cause we want to be parallel to the plane

	fOutAmount = MATH_HALFPI - (float)atan2( vNormal.y, fXZLen );
}
Esempio n. 6
0
LTVector CAIWeaponMelee::GetFirePosition(CAI* pAI)
{
	// When forcing a hit, fire from the edge of the target's radius so the
	// melee attack is guaranteed to connect.

	if( pAI && m_bForceHit )
	{
		HOBJECT hTarget = pAI->GetAIBlackBoard()->GetBBTargetObject();
		if( IsCharacter( hTarget ) )
		{
			LTVector vTargetPos;
			g_pLTServer->GetObjectPos( hTarget, &vTargetPos );

			// Unit direction from the AI towards its target.
			LTVector vDirToTarget = vTargetPos - pAI->GetPosition();
			vDirToTarget.Normalize();

			// Back off from the target's position by its radius.
			CCharacter* pChar = (CCharacter*)g_pLTServer->HandleToObject( hTarget );
			return vTargetPos - ( vDirToTarget * pChar->GetRadius() );
		}
	}

	// Fall back to the standard fire position.

	return DefaultGetFirePosition(pAI);
}
Esempio n. 7
0
//given a capsule specified with a transform, two points, and a radius, this will approximate how much is submerged
//and distribute that force to the appropriate points on the capsule
//vPt1/vPt2    - the capsule's end points (centers of the end spheres)
//fLength      - distance between vPt1 and vPt2; must be > 0
//WSPlane      - world-space water surface plane
//fVolume, vApplyAt, fSurfaceArea - outputs filled in by ApplyOBBBuoyancy
static bool ApplyCapsuleBuoyancy(const LTVector& vPt1, const LTVector& vPt2, float fLength, float fRadius, 
								 const LTPlane& WSPlane, float& fVolume, LTVector& vApplyAt,
								 float& fSurfaceArea)
{
	//convert the capsule to an OBB and apply it
	
	//determine information about the main axis
	LTVector vMainAxis = vPt2 - vPt1;
	LTASSERT( fLength > 0.0f, "Invalid capsule length." );
	LTVector vUnitAxis = vMainAxis / fLength;

	//we can now build up a rotation given the plane normal and the axis to build our transform
	LTVector vUp = WSPlane.Normal();
	if(fabsf(vUp.Dot(vUnitAxis)) > 0.99f)
	{
		//too close to use, built an arbitrary orthonormal
		vUp = vUnitAxis.BuildOrthonormal();
	}

	//build an orthonormal basis from the capsule axis and the (possibly replaced) up vector
	LTMatrix3x4 mTemp;
	LTVector vRight = vUnitAxis.Cross(vUp);
	vRight.Normalize( );
	LTVector vTrueUp = vRight.Cross( vUnitAxis );
	mTemp.SetBasisVectors(vRight, vTrueUp, vUnitAxis);

	LTRotation rRot;
	rRot.ConvertFromMatrix(mTemp);

	//now we can form our transform centered on the capsule; the half-dims extend
	//by fRadius along the main axis so the end caps are included in the box
	LTRigidTransform tTransform((vPt1 + vPt2) * 0.5f, rRot);
	LTVector vHalfDims(fRadius, fRadius, fLength * 0.5f + fRadius);

	return ApplyOBBBuoyancy(tTransform, vHalfDims, WSPlane, fVolume, vApplyAt, fSurfaceArea);
}
Esempio n. 8
0
bool CAIActionAttackTurret::TargetInFOV( CAI* pAI )
{
	// Sanity check.

	if( !pAI )
	{
		return false;
	}

	// Bail if the AI has no ranged weapon.

	CWeapon* pWeapon = pAI->GetAIWeaponMgr()->GetWeaponOfType( kAIWeaponType_Ranged );
	if( !pWeapon )
	{
		return false;
	}

	// Unit direction from the AI to its target.

	LTVector vDirToTarget = pAI->GetAIBlackBoard()->GetBBTargetPosition() - pAI->GetPosition();
	vDirToTarget.Normalize();

	// The target is in the FOV only if the angle between the weapon's forward
	// and the target direction is under 10 degrees (a 20 degree cone).

	float fFOV20 = cos( DEG2RAD( 10.f ) );
	LTVector vWeaponForward = pAI->GetWeaponForward( pWeapon );
	return ( vDirToTarget.Dot( vWeaponForward ) > fFOV20 );
}
Esempio n. 9
0
// Returns true if vTargetPos is a valid auto-target candidate: within range,
// on screen, and inside the auto-target cone (whose apex is pulled back
// behind the fire position by a console-tunable offset).
bool CAutoTargetMgr::IsPointInCone(const LTVector &vTargetPos)
{
	// convert angle to radians
	float radAngle = (float)(m_fAngle * MATH_PI / 180); //angle;
	
	// divide by 2 because we're taking the half angle left or right of the forward vector
	float cosOfAngle = (float)cos(radAngle/2); //angle / 2.0f;
	
	float fOffset = GetConsoleFloat("AutoTargetOffset",300.0f);

	//check range
	//NOTE: fDist is a squared distance; both range bounds are squared as well.
	float fDist = m_vFirePos.DistSqr(vTargetPos);
	if  (fDist > m_fRangeSqr || fDist < kfMinRangeSqr)
		return false;


	//make sure it's on screen too
	LTVector vecD = vTargetPos - m_vFirePos;
	vecD.Normalize();
	float MaxScreenAngle = (float)cos( DEG2RAD(g_vtFOVYNormal.GetFloat()) / 2.0);
	if (g_pInterfaceResMgr->IsWidescreen())
	{
		MaxScreenAngle = (float)cos( DEG2RAD(g_vtFOVYWide.GetFloat()) / 2.0);
	}

	if (m_vForward.Dot(vecD) < MaxScreenAngle)
	{
		return false;
	}



	//pull the cone's apex back behind the fire position so that targets close
	//to the player still fall inside the cone
	LTVector vNewOrigin = m_vFirePos + (m_vForward * -fOffset);

	LTVector vecDiff = vTargetPos - vNewOrigin;
	vecDiff.Normalize();

	//check the angle
	if (m_vForward.Dot(vecDiff) >= cosOfAngle)
	{
		return true;
	}

	return false;
}
// Chooses between running forward and backing up while attack-moving, turning
// the AI around at most once so it doesn't flip-flop as the player circles it.
void CAIHumanStateAttackMove::SelectMoveAnim()
{
	// Only turn around once.
	// This prevents the AI from flipping out when the players runs around him.

	if( m_bTurnedAround )
	{
		return;
	}

	// Horizontal unit directions to the move destination and to the target.

	LTVector vDestDir = m_vAttackMoveDest - GetAI()->GetPosition();
	vDestDir.y = 0.f;
	vDestDir.Normalize();
	LTVector vTargetDir = GetAI()->GetTarget()->GetVisiblePosition() - GetAI()->GetPosition();
	vTargetDir.y = 0.f;
	vTargetDir.Normalize();

	// Should the AI turn around and face forward?

	if( m_pStrategyFollowPath->GetMovement() == kAP_BackUp )
	{
		// Destination and target are in nearly the same direction: face the
		// target and run instead of backing up.
		LTFLOAT fDot = vDestDir.Dot( vTargetDir );	
		if( fDot > c_fFOV160 )
		{
			GetAI()->FaceTargetRotImmediately();
			m_pStrategyFollowPath->SetMovement( kAP_Run );
			m_bTurnedAround = LTTRUE;
		}
	}

	// Should the AI turn around and face backward?

	else {
		// Destination is nearly opposite the target: face the target and
		// back up towards the destination.
		vDestDir = -vDestDir;
		LTFLOAT fDot = vDestDir.Dot( vTargetDir );	
		if( fDot > c_fFOV160 )
		{
			GetAI()->FaceTargetRotImmediately();
			m_pStrategyFollowPath->SetMovement( kAP_BackUp );
			m_bTurnedAround = LTTRUE;
		}
	}
}
Esempio n. 11
0
// Keeps the constrained point on the positive side of a plane that contains
// the edge Pt1->Pt2 and is perpendicular to the plane through Pt1, Pt2, Pt3.
void CRagDollAbovePlaneOnEdgeConstraint::Apply(uint32 nPosIndex)
{
	const LTVector& vPtInPlane = m_pPt1->m_vPosition[nPosIndex];

	LTVector vEdge = m_pPt2->m_vPosition[nPosIndex] - vPtInPlane;

	//we first off need to caclulate the normal of the plane
	LTVector vNormal = vEdge.Cross(m_pPt3->m_vPosition[nPosIndex] - vPtInPlane);
	vNormal.Normalize();

	//now we need to take that plane, and find the perpindicular plane that passes through the first edge
	//(m_fNormalScale is re-normalized away, so only its sign matters: it picks the facing)
	LTVector vEdgeNormal = vNormal.Cross(vEdge) * m_fNormalScale;
	vEdgeNormal.Normalize();

	//ok, now we see if the point is above the plane
	LTVector& vConstrain = m_pConstrain->m_vPosition[nPosIndex];
	float fDot = vEdgeNormal.Dot(vConstrain - vPtInPlane);

	//below the plane: project the point back up onto it
	if(fDot < 0.0f)
	{
		vConstrain -= fDot * vEdgeNormal;
	}
}
Esempio n. 12
0
// Keeps the constrained point at least m_fOffset above the plane defined by
// the three points Pt1, Pt2, Pt3.
void CRagDollAbovePlane3Constraint::Apply(uint32 nPosIndex)
{
	const LTVector& vPtInPlane = m_pPt1->m_vPosition[nPosIndex];

	//we first off need to caclulate the normal of the plane
	//(m_fNormalScale is re-normalized away, so only its sign matters: it picks the facing)
	LTVector vNormal = (m_pPt2->m_vPosition[nPosIndex] - vPtInPlane).Cross(m_pPt3->m_vPosition[nPosIndex] - vPtInPlane) * m_fNormalScale;
	vNormal.Normalize();

	//ok, now we see if the point is above the plane
	LTVector& vConstrain = m_pConstrain->m_vPosition[nPosIndex];
	float fDot = vNormal.Dot(vConstrain - vPtInPlane) - m_fOffset;

	//below the offset plane: project the point back up onto it
	if(fDot < 0.0f)
	{
		vConstrain -= fDot * vNormal;
	}
}
Esempio n. 13
0
// Intersects the segment [vRay0, vRay1] with this nav-mesh plane.
// Returns true and fills *pvIntersect only when the endpoints lie strictly on
// opposite sides of the plane (an endpoint exactly on the plane is rejected
// by the >= 0 sign test below) and the ray is not parallel to the plane.
bool SAINAVMESHGEN_PLANE::RayIntersectNMGPlane( const LTVector& vRay0, const LTVector& vRay1, LTVector* pvIntersect )
{
	LTVector vNMGPlaneNormal;
	CAINavMeshGen::GetAINavMeshGen()->GetActualNormalFromPool( eNMGPlaneNormal, &vNMGPlaneNormal );

	LTVector vRayDir = vRay1 - vRay0;
	vRayDir.Normalize();

	// Determine if ray endpoints lie on opposite side of the plane.
	// (Plane equation: N . P + D, evaluated at each endpoint.)

	float fSign1 = ( ( vNMGPlaneNormal.x * vRay0.x ) +
					 ( vNMGPlaneNormal.y * vRay0.y ) +
				     ( vNMGPlaneNormal.z * vRay0.z ) + D );

	float fSign2 = ( ( vNMGPlaneNormal.x * vRay1.x ) +
				     ( vNMGPlaneNormal.y * vRay1.y ) +
				     ( vNMGPlaneNormal.z * vRay1.z ) + D );

	if( fSign1 * fSign2 >= 0.f )
	{
		return false;
	}

	// Determine if ray runs parallel to the plane.


	float fDenom = ( ( vNMGPlaneNormal.x * vRayDir.x ) + 
				     ( vNMGPlaneNormal.y * vRayDir.y ) + 
			  	     ( vNMGPlaneNormal.z * vRayDir.z ) );

	if( fDenom == 0.f )
	{
		return false;
	}

	// Calculate the point of intersection between the ray and the plane.
	// d is a world-unit distance along the normalized ray direction.

	float fNum = -( ( vNMGPlaneNormal.x * vRay0.x ) +
				    ( vNMGPlaneNormal.y * vRay0.y ) +
				    ( vNMGPlaneNormal.z * vRay0.z ) + D );

	float d = fNum / fDenom;
	*pvIntersect = ( vRay0 + ( vRayDir * d ) );

	return true;
}
Esempio n. 14
0
// Fills in the per-vertex eye vector and Fresnel alpha for a polygrid.
// The camera position is transformed into the grid's local space once, then
// each vertex gets a normalized eye vector and a Fresnel term looked up from
// a cached table keyed on the grid's IOR and base reflection.
static void GeneratePolyGridFresnelAlphaAndCamera(const LTVector& vViewPos, CPolyGridBumpVertex* pVerts, LTPolyGrid* pGrid, uint32 nNumVerts)
{
	//we need to transform the camera position into our view space
	LTMatrix mInvWorldTrans;

	mInvWorldTrans.Identity();
	mInvWorldTrans.SetTranslation(-pGrid->GetPos());

	LTMatrix mOrientation;
	pGrid->m_Rotation.ConvertToMatrix(mOrientation);

	mInvWorldTrans = mOrientation * mInvWorldTrans;

	LTVector vCameraPos = mInvWorldTrans * vViewPos;

	//now generate the internals of the polygrid
	CPolyGridBumpVertex* pCurrVert	= pVerts;
	CPolyGridBumpVertex* pEnd		= pCurrVert + nNumVerts;

	//determine the fresnel table that we are going to be using
	//(IOR is clamped to just above 1 so the table lookup stays valid)
	const CFresnelTable* pTable = g_FresnelCache.GetTable(LTMAX(1.0003f, pGrid->m_fFresnelVolumeIOR), pGrid->m_fBaseReflection);

	//use a vector from the camera to the center of the grid to base our approximations off of. The further
	//we get to the edges the more likely this error will be, but it is better than another sqrt per vert
	LTVector vToPGPt;

	while(pCurrVert < pEnd)
	{
		//the correct but slow way, so only do it every once in a while
		//if((pCurrVert - g_TriVertList) % 4 == 0)
		{
			vToPGPt = vCameraPos - pCurrVert->m_Vec;
			vToPGPt.Normalize();
		}

		pCurrVert->m_fEyeX = vToPGPt.x;
		pCurrVert->m_fEyeY = vToPGPt.y;
		pCurrVert->m_fEyeZ = vToPGPt.z;

		//OR the fresnel term into the vertex color's alpha
		pCurrVert->m_nColor |= pTable->GetValue(vToPGPt.Dot(pCurrVert->m_vBasisUp));
		++pCurrVert;
	}
}
Esempio n. 15
0
bool AINodeValidatorAIOutsideFOV::Evaluate( uint32 dwFilteredStatusFlags, const LTVector& vAIPos, const LTVector& vNodePos, const LTVector& vNodeForward, bool bIgnoreDir ) const
{
	// Only nodes filtered on kNodeStatus_AIOutsideFOV care where the AI is.

	if( !( dwFilteredStatusFlags & kNodeStatus_AIOutsideFOV ) )
	{
		return true;
	}

	// Direction checks are disabled; nothing to test.

	if( bIgnoreDir )
	{
		return true;
	}

	// The node is valid only when the horizontal direction from the node to
	// the AI lies inside the node's FOV cone.

	LTVector vDirToAI = vAIPos - vNodePos;
	vDirToAI.y = 0.f;
	vDirToAI.Normalize();

	return ( vDirToAI.Dot( vNodeForward ) > m_fFovDp );
}
//----------------------------------------------------------------------------
//              
//	ROUTINE:	CAIHumanStrategyShootStream::UpdateAiming()
//              
//	PURPOSE:	Aim or fire based on the stream timer.  While the timer is
//				expired we aim (and recalculate the next stream once the fire
//				animation unlocks); otherwise we fire, but only once the AI's
//				torso is facing close enough to the target (unless FOV checks
//				are disabled).
//              
//----------------------------------------------------------------------------
/*virtual*/ void CAIHumanStrategyShootStream::UpdateAiming(HOBJECT hTarget)
{
	if ( m_flStreamTime < g_pLTServer->GetTime() )
	{
		// Don't calculate new stream time until finished firing animation.
		if( !GetAnimationContext()->IsLocked() )
		{
			CalculateStreamTime();
		}

		Aim();
	}
	else
	{
		// We're done waiting, fire if we're at a reasonable angle

		if ( m_bIgnoreFOV )
		{
			Fire();
		}
		else
		{
			LTVector vTargetPos;
			g_pLTServer->GetObjectPos(hTarget, &vTargetPos);

			// Horizontal unit direction from the AI to the target.
			LTVector vDir = vTargetPos - GetAI()->GetPosition();
			vDir.y = 0.0f;
			vDir.Normalize();

			// 0.70 ~= cos(45 degrees): keep aiming until the torso is within
			// roughly 45 degrees of the target direction, then fire.
			if ( vDir.Dot(GetAI()->GetTorsoForward()) < 0.70f )
			{
				Aim();
			}
			else
			{
				Fire();
			}
		}
	}
}
Esempio n. 17
0
// ----------------------------------------------------------------------- //
//
//	ROUTINE:	CAutoTargetMgr::InterpolateAim()
//
//	PURPOSE:	Rotate our current aim vector to match our target vector,
//				moving at most the frame's maximum angular velocity along
//				the arc between the two directions.
//
// ----------------------------------------------------------------------- //
void CAutoTargetMgr::InterpolateAim()
{
	//we are currently aimed at m_vCurTarget
	//we want to aim at m_vTarget
	//(dot product of the two unit aim vectors = cosine of the angle between them)
	float fActualToTargetAng = m_vTarget.Dot(m_vCurTarget);

	//get the maximum angle we can move
	float fMaxAngVel = ObjectContextTimer( g_pMoveMgr->GetServerObject( )).GetTimerElapsedS( );
	if (IsMultiplayerGameClient())
	{
		fMaxAngVel *= ClientDB::Instance( ).GetMPAutoTargetSpeed();
	}
	else
	{
		fMaxAngVel *= ClientDB::Instance( ).GetAutoTargetSpeed();
	}
	float fCosMaxAngVel = (float)cos(fMaxAngVel);

	//if we are at 180 degrees difference, much can go wrong, so ensure that we aren't,
	//but if we are, we just want to keep looking in the direction that we currently
	//are
	if(fActualToTargetAng >= fCosMaxAngVel)
	{
		//the look target is within our reach, so just go there
		//(cos(angle) >= cos(max) means the angle itself is <= max)
		m_vCurTarget = m_vTarget;
	}
	else
	{
		//form a right vector that passes through the arc that we are interpolating
		//upon 
		//NOTE(review): the (fActualToTargetAng - 1.0f) divisor appears chosen so the
		//result is a vector in the plane of the two aim directions, perpendicular to
		//m_vCurTarget -- verify before reusing this formula elsewhere.
		LTVector vRight = m_vCurTarget - (m_vTarget - m_vCurTarget) / (fActualToTargetAng - 1.0f);
		vRight.Normalize();

		//now we can get our values based upon that space
		//(rotate by exactly fMaxAngVel within the cur/right basis)
		m_vCurTarget =  fCosMaxAngVel * m_vCurTarget + (float)sin(fMaxAngVel) * vRight;
		m_vCurTarget.Normalize();
		
	}	

}
Esempio n. 18
0
// Creates an exit mark on the far side of a shoot-through surface: casts a
// ray back towards the entry point from just beyond the surface's maximum
// shoot-through thickness, and places a mark where it comes out.
static void CreateServerExitMark(const CLIENTWEAPONFX & theStruct)
{
	SURFACE* pSurf = g_pSurfaceMgr->GetSurface((SurfaceType)theStruct.nSurfaceType);
	if (!pSurf || !pSurf->bCanShootThrough) return;

	int nMaxThickness = pSurf->nMaxShootThroughThickness;
	if (nMaxThickness < 1) return;

	// Determine if there is an "exit" surface...

	IntersectQuery qInfo;
	IntersectInfo iInfo;

    LTVector vDir = theStruct.vPos - theStruct.vFirePos;
	vDir.Normalize();

	// Cast from beyond the surface back towards the impact point, so the
	// first hit is the back face of whatever was shot through.
    qInfo.m_From = theStruct.vPos + (vDir * (LTFLOAT)(nMaxThickness + 1));
	qInfo.m_To   = theStruct.vPos;

	qInfo.m_Flags = INTERSECT_OBJECTS | IGNORE_NONSOLID | INTERSECT_HPOLY;

	SurfaceType eType = ST_UNKNOWN;

    if (g_pLTServer->IntersectSegment(&qInfo, &iInfo))
	{
		eType = GetSurfaceType(iInfo);
		if (ShowsMark(eType))
		{
            LTRotation rNormRot(iInfo.m_Plane.m_Normal, LTVector(0.0f, 1.0f, 0.0f));

			// Nudge the mark slightly along the shot direction so it doesn't
			// z-fight with the exit surface.
			CLIENTWEAPONFX exitStruct = theStruct;
			exitStruct.vPos = iInfo.m_Point + vDir;
			exitStruct.vSurfaceNormal = iInfo.m_Plane.m_Normal;

            CreateServerMark(exitStruct);
		}
	}
}
Esempio n. 19
0
//function that handles the custom rendering
//Renders the tracer as a single camera-facing quad along the tracer ray,
//clipped to the portion of the ray the tracer currently occupies, with
//optional texture cropping at the ends.
void CTracerFX::RenderTracer(ILTCustomRenderCallback* pInterface, const LTRigidTransform& tCamera)
{
	//track our performance
	CTimedSystemBlock TimingBlock(g_tsClientFXTracer);

	//first determine the length, position, and U range for this tracer (this allows for some
	//early outs)
	float fTracerLen	= GetTracerLength();
	float fTracerStart	= m_fRayPosition;
	float fTracerEnd	= fTracerStart - fTracerLen;

	if((fTracerStart <= 0.0f) || (fTracerEnd >= m_fRayLength))
	{
		//the tracer has fully gone through the ray, don't render
		return;
	}

	//now we need to clip the extents to the range [0..ray length], and handle cropping of the texture
	float fUMin = 0.0f;
	float fUMax = 1.0f;

	if(fTracerEnd < 0.0f)
	{
		//adjust the U max if we are cropping
		if(GetProps()->m_bCropTexture)
		{
			fUMax += fTracerEnd / fTracerLen;
		}
		fTracerEnd = 0.0f;
	}
	if(fTracerStart >= m_fRayLength)
	{
		//adjust the U min if we are cropping
		if(GetProps()->m_bCropTexture)
		{
			fUMin += (fTracerStart - m_fRayLength) / fTracerLen;
		}
		fTracerStart = m_fRayLength;
	}

	//setup our vertex declaration
	if(pInterface->SetVertexDeclaration(g_ClientFXVertexDecl.GetTexTangentSpaceDecl()) != LT_OK)
		return;

	//bind a quad index stream
	if(pInterface->BindQuadIndexStream() != LT_OK)
		return;

	//sanity check to ensure that we can at least render a sprite
	LTASSERT(QUAD_RENDER_INDEX_STREAM_SIZE >= 6, "Error: Quad index list is too small to render a tracer");
	LTASSERT(DYNAMIC_RENDER_VERTEX_STREAM_SIZE / sizeof(STexTangentSpaceVert) > 4, "Error: Dynamic vertex buffer size is too small to render a tracer");

	//we need to determine the facing of this tracer. This is formed by creating a plane from the points
	//Camera, Start, and another point on the ray. The plane normal is then the up, the right is the ray
	//direction, and the normal is ray cross plane normal. 
	LTVector vStartToCamera = m_vStartPos - tCamera.m_vPos;
	float fMag = vStartToCamera.Mag( );
	if( fMag < 0.00001f )
	{
		//camera is essentially at the tracer start; fall back to the camera's forward
		vStartToCamera = tCamera.m_rRot.Forward();
	}
	else
	{
		vStartToCamera /= fMag;
	}

	//determine the up vector
	LTVector vUp = vStartToCamera.Cross(m_vDirection);
	vUp.Normalize();

	//now determine the actual normal (doesn't need to be normalized since the vectors are
	//unit length and orthogonal)
	LTVector vNormal = vUp.Cross(m_vDirection);

	//lock down our buffer for rendering
	SDynamicVertexBufferLockRequest LockRequest;
	if(pInterface->LockDynamicVertexBuffer(4, LockRequest) != LT_OK)
		return;

	//fill in our sprite vertices
	STexTangentSpaceVert* pCurrOut = (STexTangentSpaceVert*)LockRequest.m_pData;

	//determine the color of this tracer
	float fUnitLifetime = GetUnitLifetime();
	uint32 nColor = CFxProp_Color4f::ToColor(GetProps()->m_cfcColor.GetValue(fUnitLifetime));

	//calculate the front of the tracer in world space
	LTVector vFront = m_vStartPos + m_vDirection * fTracerStart;
	LTVector vBack	= m_vStartPos + m_vDirection * fTracerEnd;

	//and the thickness vector
	float fThickness = GetProps()->m_ffcThickness.GetValue(fUnitLifetime);
	LTVector vThickness = vUp * (fThickness * 0.5f);

	//fill in the particle vertices
	pCurrOut[0].m_vPos = vFront + vThickness;
	pCurrOut[0].m_vUV.Init(fUMin, 0.0f);
	
	pCurrOut[1].m_vPos = vBack + vThickness;
	pCurrOut[1].m_vUV.Init(fUMax, 0.0f);
	
	pCurrOut[2].m_vPos = vBack - vThickness;
	pCurrOut[2].m_vUV.Init(fUMax, 1.0f);

	pCurrOut[3].m_vPos = vFront - vThickness;
	pCurrOut[3].m_vUV.Init(fUMin, 1.0f);

	//setup the remaining vertex components
	for(uint32 nCurrVert = 0; nCurrVert < 4; nCurrVert++)
	{
		pCurrOut[nCurrVert].m_nPackedColor	= nColor;
		pCurrOut[nCurrVert].m_vNormal		= vNormal;
		pCurrOut[nCurrVert].m_vTangent		= vUp;
		pCurrOut[nCurrVert].m_vBinormal		= m_vDirection;
	}

	//unlock and render the batch
	pInterface->UnlockAndBindDynamicVertexBuffer(LockRequest);
	pInterface->RenderIndexed(	eCustomRenderPrimType_TriangleList, 
								0, 6, LockRequest.m_nStartIndex, 
								0, 4);
}
Esempio n. 20
0
// Intersects the ray from vOrigin towards vDest with the axis-aligned box
// [vBoxMin, vBoxMax].  Returns true and fills *pvIntersection on a hit.
// NOTE: pvIntersection is assumed non-NULL, and may be partially written
// even when the function returns false.
bool RayIntersectBox(const LTVector& vBoxMin,
					  const LTVector& vBoxMax,
					  const LTVector& vOrigin,
					  const LTVector& vDest,
					  LTVector* pvIntersection)
{
	const uint8 kNumDimensions = 3;

	// Calculate direction of ray.

	LTVector vDir = vDest - vOrigin;
	vDir.Normalize();

	// Algorithm taken from Graphics Gems p.736.

	enum EnumQuadrant
	{
		kQuad_Right,
		kQuad_Left,
		kQuad_Middle,
	};

	bool bInside = true;
	EnumQuadrant eQuad[kNumDimensions];
	float fCandidatePlane[kNumDimensions];

	// Find candidate planes: for each axis where the origin is outside the
	// box, the near box face is the only plane the ray could enter through.
	// The vDest checks also reject segments that end before reaching the box.
	uint32 iDim;
	for( iDim=0; iDim < kNumDimensions; ++iDim )
	{
		if( vOrigin[iDim] < vBoxMin[iDim] )
		{
			if( vDest[iDim] < vBoxMin[iDim] )
			{
				return false;
			}

			eQuad[iDim] = kQuad_Left;
			fCandidatePlane[iDim] = vBoxMin[iDim];
			bInside = false;
		}
		else if( vOrigin[iDim] > vBoxMax[iDim] )
		{
			if( vDest[iDim] > vBoxMax[iDim] )
			{
				return false;
			}

			eQuad[iDim] = kQuad_Right;
			fCandidatePlane[iDim] = vBoxMax[iDim];
			bInside = false;
		}
		else {
			eQuad[iDim] = kQuad_Middle;
		}
	}

	// Ray origin is inside volume.

	if( bInside )
	{
		*pvIntersection = vOrigin;
		return true;
	}

	uint32 nWhichPlane = 0;
	float fMaxT[kNumDimensions];

	// Calculate T distances to candidate planes.
	// (vDir is normalized, so T is in world units along the ray.)

	for( iDim=0; iDim < kNumDimensions; ++iDim )
	{
		if( ( eQuad[iDim] != kQuad_Middle ) && ( vDir[iDim] != 0.f ) )
		{
			fMaxT[iDim] = ( fCandidatePlane[iDim] - vOrigin[iDim] ) / vDir[iDim];
		}
		else {
			fMaxT[iDim] = -1.f;
		}
	}

	// Get largest of the maxT's for final choice of intersection.

	for( iDim=1; iDim < kNumDimensions; ++iDim )
	{
		if( fMaxT[nWhichPlane] < fMaxT[iDim] )
		{
			nWhichPlane = iDim;
		}
	}

	// Check final candidate actually inside volume.

	if( fMaxT[nWhichPlane] < 0.f )
	{
		return false;
	}

	// Compute the hit point on the chosen plane and verify the other two
	// coordinates fall within the box's extents.

	for( iDim=0; iDim < kNumDimensions; ++iDim )
	{
		if( nWhichPlane != iDim )
		{
			(*pvIntersection)[iDim] = vOrigin[iDim] + fMaxT[nWhichPlane] * vDir[iDim];
			if( ((*pvIntersection)[iDim] < vBoxMin[iDim]) ||
				((*pvIntersection)[iDim] > vBoxMax[iDim]) )
			{
				return false;
			}
		}
		else {
			(*pvIntersection)[iDim] = fCandidatePlane[iDim];
		}
	}

	return true;
}
Esempio n. 21
0
bool TrimLineSegmentByRadius( float fRadius, LTVector* pv0, LTVector* pv1, bool bTrimV0, bool bTrimV1 )
{
	// Sanity check.

	if( !( pv0 && pv1 ) )
	{
		return false;
	}

	// Do not trim anything.

	if( !( bTrimV0 || bTrimV1 ) )
	{
		return false;
	}

	// Line segment is smaller than the minimum radius or diameter.
	// Using the center or endpoint of the segment is the best we can do.

	LTVector vDir = *pv1 - *pv0;
	float fDiameter = fRadius * 2.f;
	float fDistSqr = pv0->DistSqr( *pv1 );

	if( !bTrimV0 )
	{
		if( fDistSqr <= fRadius * fRadius )
		{
			*pv1 = *pv0;
			return true;
		}
	}
	else if( !bTrimV1 )
	{
		if( fDistSqr <= fRadius * fRadius )
		{
			*pv0 = *pv1;
			return true;
		}
	}
	else if( fDistSqr <= fDiameter * fDiameter )
	{
		*pv0 = *pv0 + ( vDir * 0.5f );
		*pv1 = *pv0;
		return true;
	}

	// Trim the line segment.
	// Trimming by epsilon is a hack that keeps the rest of the AI systems running.
	// Otherwise, we end up with duplicate waypoints.

	vDir.Normalize();
	float fEpsilon = 0.01f;

	if( bTrimV0 )
	{
		*pv0 += vDir * fRadius;
	}
	else {
		*pv0 += vDir * fEpsilon;
	}

	if( bTrimV1 )
	{
		*pv1 -= vDir * fRadius;
	}
	else {
		*pv1 -= vDir * fEpsilon;
	}

	return true;
}
Esempio n. 22
0
// Returns an averaged ground normal under a vehicle by dropping a ray from
// each of the four corners around vPos (offset by vForward/vRight), building
// a plane from each pair of adjacent corner-to-corner edges, and averaging
// the four plane normals.  Corners with no intersection fall back to the
// corner position lowered by the object's half-height (vDims.y).
LTVector GetContouringNormal( LTVector &vPos, LTVector &vDims, LTVector &vForward, LTVector &vRight, HOBJECT *pFilterList )
{
	LTVector	avPt[4];		// points we are casting the rays from
	LTVector	avInterPt[4];	// points of intersection
	LTVector	avNormal[4];	// normals constructed from the points of intersection
	LTVector	avEdge[4];

	// Develop the points we wish to cast rays from...
	// We keep the height from the object incase the vehicle has clipped into the world.

	avPt[0] = vPos + ( vForward ) - ( vRight );	// 0----1
	avPt[1] = vPos + ( vForward ) + ( vRight );	// |    |
	avPt[2] = vPos - ( vForward ) + ( vRight );	// |    |
	avPt[3] = vPos - ( vForward ) - ( vRight );	// 3----2

	// Find the point of intersection that is under each corner...
	// If none was found just use the corner with the height factored in.
	// Then move the point to the origin.

	int i;
	for( i = 0; i < 4; ++i )
	{
		if( !GetIntersectionUnderPoint( avPt[i], pFilterList, avNormal[i], avInterPt[i] ) )
		{
			avInterPt[i] = avPt[i];
			avInterPt[i].y -= vDims.y;
		}

		avInterPt[i] -= vPos;
	}

	// Develop the unit edge vectors that will construct the 4 planes...
	// Edge i runs from intersection point i to the next point around the quad.

	for( i = 0; i < 4; ++i )
	{
		avEdge[i] = ( avInterPt[(i + 1) % 4] - avInterPt[i] ).GetUnit();
	}

	// Find the normal of each plane from its two adjacent edges...
	// (negated so the normals point up out of the ground)

	for( i = 0; i < 4; ++i )
	{
		avNormal[i] = -avEdge[(i + 3) % 4].Cross( avEdge[i] );
		avNormal[i].Normalize();
	}

	// Average the normals...
	
	LTVector vNormal = avNormal[0] + avNormal[1] + avNormal[2] + avNormal[3];
	vNormal.Normalize();

	return vNormal;
}
Esempio n. 23
0
// Finds the position in the nav mesh nearest to vPos that lies just outside
// this link: picks the link edge (reachable by the AI) whose midpoint is
// closest to vPos, projects vPos onto that edge, and pushes the result one
// unit out of the link.  On success fills *pvNavMeshPos and *peNavMeshPoly.
bool AINavMeshLinkPlayer::FindNearestNavMeshPos( CAI* pAI, const LTVector& vPos, LTVector* pvNavMeshPos, ENUM_NMPolyID* peNavMeshPoly )
{
	// Sanity check.

	if( !( pAI && pvNavMeshPos && peNavMeshPoly ) )
	{
		return false;
	}

	// Bail if Link's poly is invalid.

	CAINavMeshPoly* pPoly = g_pAINavMesh->GetNMPoly( m_eNMPolyID );
	if( !pPoly )
	{
		return false;
	}

	// Iterate over Link's edges, searching for edge nearest 
	// to the specified position.

	ENUM_NMPolyID eNeighborPoly;
	CAINavMeshEdge* pEdge;
	CAINavMeshEdge* pEdgeNearest = NULL;
	float fDistSqr;
	float fMinDistSqr = FLT_MAX;
	uint32 cEdges = pPoly->GetNumNMPolyEdges();
	for( uint32 iEdge=0; iEdge < cEdges; ++iEdge )
	{
		// Skip edge if doesn't exist.

		pEdge = pPoly->GetNMPolyEdge( iEdge );
		if( !pEdge )
		{
			continue;
		}

		// Skip edge if AI cannot pathfind to the neighboring poly.

		eNeighborPoly = ( pEdge->GetNMPolyIDA() != m_eNMPolyID ) ? pEdge->GetNMPolyIDA() : pEdge->GetNMPolyIDB();
		if( ( eNeighborPoly == kNMPoly_Invalid ) ||
			( !g_pAIPathMgrNavMesh->HasPath( pAI, pAI->GetCharTypeMask(), eNeighborPoly ) ) )
		{
			continue;
		}

		// Keep track of the edge nearest to the specified position.
		// (Nearest is approximated by distance to the edge midpoint.)

		fDistSqr = vPos.DistSqr( pEdge->GetNMEdgeMidPt() );
		if( fDistSqr < fMinDistSqr )
		{
			fMinDistSqr = fDistSqr;
			pEdgeNearest = pEdge;
		}
	}

	// Bail if no valid edge was found.

	if( !pEdgeNearest )
	{
		return false;
	}

	// Return success.

	if( FindNearestPointOnLine( pEdgeNearest->GetNMEdge0(), pEdgeNearest->GetNMEdge1(), vPos, pvNavMeshPos ) )
	{
		// Push the point a small amount out of the link.
		// NOTE(review): assumes vPos does not coincide with the projected
		// point; otherwise the normalized direction is degenerate.
		
		LTVector vDir = *pvNavMeshPos - vPos;
		vDir.Normalize();
		*pvNavMeshPos += vDir * 1.f;

		*peNavMeshPoly = ( pEdgeNearest->GetNMPolyIDA() != m_eNMPolyID ) ? pEdgeNearest->GetNMPolyIDA() : pEdgeNearest->GetNMPolyIDB();
		return true;
	}

	// No intersection found.

	return false;
}
Esempio n. 24
0
// Tests the segment Point1->Point2 against the object's axis-aligned bounding
// box.  The DO_PLANE_TEST_* macros (defined elsewhere) presumably compute the
// parametric hit on the given face, check the other two coordinates, and
// return true with *pIntersectPt / *pIntersectPlane filled in on a hit --
// confirm against the macro definitions.  Falls through to false when the
// segment never crosses a face plane from outside.
inline bool i_BoundingBoxTest(const LTVector& Point1, const LTVector& Point2, const LTObject *pServerObj, 
    LTVector *pIntersectPt, LTPlane *pIntersectPlane)
{
    float t;
    float testCoords[2];
	const LTVector& min = pServerObj->GetBBoxMin();
	const LTVector& max = pServerObj->GetBBoxMax();

    // Left/Right.
    if (Point1.x < min.x) 
	{
        if (Point2.x < min.x) 
		{
            return false;
        }
    
        DO_PLANE_TEST_X(min.x, x, y, z, -1.0f);
    }
    else if (Point1.x > max.x) 
	{
        if (Point2.x > max.x) 
		{
            return false;
        }

        DO_PLANE_TEST_X(max.x, x, y, z, 1.0f);
    }

    // Top/Bottom.
    if (Point1.y < min.y) 
	{
        if (Point2.y < min.y) 
		{
            return false;
        }
    
        DO_PLANE_TEST_Y(min.y, y, x, z, -1.0f);
    }
    else if (Point1.y > max.y) 
	{
        if (Point2.y > max.y) 
		{
            return false;
        }

        DO_PLANE_TEST_Y(max.y, y, x, z, 1.0f);
    }

    // Front/Back.
    if (Point1.z < min.z) 
	{
        if (Point2.z < min.z) 
		{
            return false;
        }
    
        DO_PLANE_TEST_Z(min.z, z, x, y, -1.0f);
    }
    else if (Point1.z > max.z) 
	{
        if (Point2.z > max.z) 
		{
            return false;
        }

        DO_PLANE_TEST_Z(max.z, z, x, y, 1.0f);
    }


	// If we get here and our hackish backwards compatibility flag is set, we need to check
	// to see if Point1 is completely inside the dims.  The above checks don't catch this case...

	if (g_bCheckIfFromPointIsInsideObject)
	{
		if ( (min.x <= Point1.x && Point1.x <= max.x) &&
			 (min.y <= Point1.y && Point1.y <= max.y) &&
			 (min.z <= Point1.z && Point1.z <= max.z) )
		{
			// Report the start point itself, with a plane facing back along
			// the segment direction.
			LTVector vNormal = (Point1 - Point2);
			vNormal.Normalize();

			LTPlane pPlane;
			pPlane.Init(vNormal, Point1);

			*pIntersectPt = Point1;
			*pIntersectPlane = pPlane;

			return true;
		}
	}

    return false;
}
// Checks every precondition the lunge attack requires: a visible character
// target, the right weapon with ammo, exclusive access to the global lunge
// slot, a valid SmartObject record, a not-too-recent previous lunge, a lunge
// desire fact (which supplies the max range), the target inside the distance
// band, and a straight navigable path toward the target.
// Returns true only when all of these hold.
bool CAIActionAttackLungeUncloaked::ValidateContextPreconditions( CAI* pAI, CAIWorldState& wsWorldStateGoal, bool bIsPlanning )
{
	// Bail if the AI is not targeting a character.
	if( !pAI->HasTarget( kTarget_Character ) )
	{
		return false;
	}

	// Bail if the weapon cannot currently see the target.
	if( !pAI->GetAIBlackBoard()->GetBBTargetVisibleFromWeapon() )
	{
		return false;
	}

	// Bail if the AI lacks a weapon of the required type.
	if( !AIWeaponUtils::HasWeaponType( pAI, GetWeaponType(), bIsPlanning ) )
	{
		return false;
	}

	// Bail if the AI has no ammo for that weapon.
	if( !AIWeaponUtils::HasAmmo( pAI, GetWeaponType(), bIsPlanning ) )
	{
		return false;
	}

	// Only one AI may lunge at a time.  If the recorded lunger has died,
	// clear the stale record and continue; otherwise someone else owns
	// the lunge slot and we must bail.
	CAIWMFact queryLunging;
	queryLunging.SetFactType( kFact_Knowledge );
	queryLunging.SetKnowledgeType( kKnowledge_Lunging );
	CAIWMFact* pLungingFact = g_pAIWorkingMemoryCentral->FindWMFact( queryLunging );
	if( pLungingFact )
	{
		if( !IsDeadAI( pLungingFact->GetSourceObject() ) )
		{
			return false;
		}

		g_pAIWorkingMemoryCentral->ClearWMFacts( queryLunging );
	}

	// Bail if the Action's SmartObject record does not exist.
	AIDB_SmartObjectRecord* pSmartObjectRecord = g_pAIDB->GetAISmartObjectRecord( m_pActionRecord->eSmartObjectID );
	if( !pSmartObjectRecord )
	{
		return false;
	}

	// Bail if someone lunged too recently (global cooldown).
	if( pSmartObjectRecord->fTimeout > 0.f )
	{
		CAIWMFact queryNextTime;
		queryNextTime.SetFactType( kFact_Knowledge );
		queryNextTime.SetKnowledgeType( kKnowledge_NextLungeTime );
		CAIWMFact* pTimeFact = g_pAIWorkingMemoryCentral->FindWMFact( queryNextTime );
		if( pTimeFact && ( pTimeFact->GetTime() > g_pLTServer->GetTime() ) )
		{
			return false;
		}
	}

	// Bail if the AI does not desire to lunge.  The desire fact also
	// encodes the maximum range of the lunge.
	CAIWMFact queryDesire;
	queryDesire.SetFactType( kFact_Desire );
	queryDesire.SetDesireType( kDesire_Lunge );
	CAIWMFact* pDesireFact = pAI->GetAIWorkingMemory()->FindWMFact( queryDesire );
	if( !pDesireFact )
	{
		return false;
	}

	// The target must fall inside the lunge distance band.
	const LTVector vTargetPos = pAI->GetAIBlackBoard()->GetBBTargetPosition();
	const float fDistToTargetSqr = vTargetPos.DistSqr( pAI->GetPosition() );

	// Too close.
	if( fDistToTargetSqr < pSmartObjectRecord->fMinDist * pSmartObjectRecord->fMinDist )
	{
		return false;
	}

	// Too far.
	const float fMaxDist = GetLungeMaxDist( pAI, pDesireFact );
	if( fDistToTargetSqr > fMaxDist * fMaxDist )
	{
		return false;
	}

	// Bail if no straight path exists to a point slightly beyond the
	// target along the lunge direction.
	LTVector vToTarget = vTargetPos - pAI->GetPosition();
	vToTarget.Normalize();
	const LTVector vLungeDest = pAI->GetPosition() + ( vToTarget * ( fMaxDist + 50.f ) );
	if( !g_pAIPathMgrNavMesh->StraightPathExists( pAI, pAI->GetCharTypeMask(), pAI->GetPosition(), vLungeDest, pAI->GetAIBlackBoard()->GetBBTargetReachableNavMeshPoly(), pAI->GetRadius() ) )
	{
		return false;
	}

	// All preconditions satisfied - lunge!
	return true;
}
Esempio n. 26
0
// ----------------------------------------------------------------------- //
//
//	ROUTINE:	CTronPlayerObj::GetDefensePercentage()
//
//	PURPOSE:	How much of the attack damage has our defenese prevented
//
//	NOTES:		There are actually 2 defense percentages.  One is based
//				on timing, i.e. player's reation speed, animation speed,
//				etc.  The other is based on the player's orientation to
//				the incoming projectile.  If the vector parameter is
//				specified, BOTH percentages will be computed and the
//				combined result will be returned.  If the vector is NOT
//				specified, only the timing will be computed.
//
// ----------------------------------------------------------------------- //
// Computes how much of an incoming attack's damage the player's current
// defense prevents, as a percentage.
//
// @param pIncomingProjectilePosition  Optional projectile position.  When
//        supplied, the orientation contribution is computed as well;
//        when null, only the timing contribution is returned.
// @return The combined defense percentage, or 0 when not blocking / the
//         block has expired.
//
// Fixes over the previous revision:
//  - fDefenseTimingPercentage / fDefenseOrientationPercentage were
//    uninitialized; the orientation if/else chain could fall through
//    (fDefenseAngle < MATH_EPSILON, i.e. projectile dead ahead) and the
//    final subtraction then read garbage.  Both are now initialized and
//    the first orientation branch covers angle 0.
//  - removed the unused local fDotProd (the dot product was computed twice).
float CTronPlayerObj::GetDefensePercentage( LTVector const *pIncomingProjectilePosition /*=0*/) const
{
	if ( TRONPLAYEROBJ_NO_DEFEND == m_cDefendType )
	{
		// not blocking
		return 0.0f;
	}

	//
	// Do this in 2 passes.  The first pass will determine
	// the contribution to the defense percentage due to the
	// timing of the player/animations.  The second pass
	// will add the contribution of the player's orientation.
	//
	// Initialized so no code path below can read an indeterminate value.
	float fDefenseTimingPercentage = 0.0f;
	float fDefenseOrientationPercentage = 0.0f;

	// get the ammo record of the weapon we are defending with
	AMMO const *pAmmoData = g_pWeaponMgr->GetAmmo( m_cDefendAmmoId );
	ASSERT( 0 != pAmmoData );
	ASSERT( 0 != pAmmoData->pProjectileFX );

	// get the disc specific data
	DISCCLASSDATA *pDiscData =
		dynamic_cast< DISCCLASSDATA* >(
			pAmmoData->pProjectileFX->pClassData
		);
	ASSERT( 0 != pDiscData );


	//
	// Determine Timing Percentage
	//

	switch ( m_cDefendType )
	{
		case MPROJ_START_SWAT_BLOCK:
		case MPROJ_START_ARM_BLOCK:
		{
			// get the current server time (milliseconds)
			float fCurrentServerTime = g_pLTServer->GetTime() * 1000.0f;

			// make sure we're within the range of the block
			if ( ( static_cast< int >( fCurrentServerTime ) -
			       m_nDefendServerTimeStarted ) > m_nDefendDuration )
			{
				// nope, the block is over
				return 0.0f;
			}

			// Swat and Arm defenses are similar, so fill out these
			// variables uniquely (depending on which case we are
			// handling), then use the common generic math to figure
			// out the answer.
			float fMidpointTime;
			float fMaxStartTime;
			float fMaxEndTime;
			float fStartDefendPercentage;
			float fMaxDefendPercentage;
			float fEndDefendPercentage;

			if ( MPROJ_START_SWAT_BLOCK == m_cDefendType )
			{
				// determine at exactly what time the midpoint takes place
				// NOTE: this is relative time, not absolute
				fMidpointTime = 
					( pDiscData->fSwatDefendMidpoint * m_nDefendDuration );

				// determine at exactly what time the max starts
				// NOTE: this is relative time, not absolute
				fMaxStartTime =
					fMidpointTime - 
					fMidpointTime * pDiscData->fSwatDefendStartMaxDefendPercentage;

				// determine at exactly what time the max ends
				// NOTE: this is relative time, not absolute
				fMaxEndTime =
					fMidpointTime +
					(
						( m_nDefendDuration - fMidpointTime ) *
						pDiscData->fSwatDefendEndMaxDefendPercentage
					);

				// determine the starting defend percentage
				fStartDefendPercentage = pDiscData->fSwatDefendStartDefendPercentage;

				// determine the max defend percentage
				fMaxDefendPercentage = pDiscData->fSwatDefendMaxDefendPercentage;

				// determine the ending defend percentage
				fEndDefendPercentage = pDiscData->fSwatDefendEndDefendPercentage;
			}
			else if ( MPROJ_START_ARM_BLOCK == m_cDefendType )
			{
				// Not implemented yet.  The main question I haven't figured
				// out yet is where does the information come from?
				fMidpointTime = 0.0f;
				fMaxStartTime = 0.0f;
				fMaxEndTime = 0.0f;
				fStartDefendPercentage = 0.0f;
				fMaxDefendPercentage = 0.0f;
				fEndDefendPercentage = 0.0f;
			}

			// determine exactly how much time we've been in the block
			// NOTE: this is relative time, not absolute
			float fBlockTime = 
				fCurrentServerTime - m_nDefendServerTimeStarted;

			if ( ( -MATH_EPSILON <= fBlockTime ) && 
			     ( fBlockTime <= fMaxStartTime ) )
			{
				// somewhere on the uprise: interpolate start -> max
				fDefenseTimingPercentage =
					fStartDefendPercentage + 
						(
							(
								( 
									fMaxDefendPercentage -
									fStartDefendPercentage
								) /
								fMaxStartTime 
							) * 
							fBlockTime
						);
			}
			else if ( ( fMaxStartTime < fBlockTime ) &&
			          ( fBlockTime <= fMaxEndTime ) )
			{
				// within the max range
				fDefenseTimingPercentage = fMaxDefendPercentage;
			}
			else if ( ( fMaxEndTime < fBlockTime ) &&
			          ( fBlockTime <= m_nDefendDuration ) )
			{
				// somewhere on the downfall: interpolate max -> end
				fDefenseTimingPercentage =
					fMaxDefendPercentage - 
						(
							(
								( 
									fMaxDefendPercentage -
									fEndDefendPercentage
								) /
								( m_nDefendDuration - fMaxEndTime )
							) * 
							( fBlockTime - fMaxEndTime )
						);
			}
			else
			{
				// math problem, we should not be here if we are outside
				// the bounds of the defense
				ASSERT( 0 );
				fDefenseTimingPercentage = 0.0f;
			}
		}
		break;


		case MPROJ_START_HOLD_BLOCK:
		{
			// get the current server time (milliseconds)
			float fCurrentServerTime = g_pLTServer->GetTime() * 1000.0f;

			// determine the starting defend percentage
			float fStartDefendPercentage = pDiscData->fHoldDefendStartDefendPercentage;

			// determine the max defend percentage
			float fMaxDefendPercentage = pDiscData->fHoldDefendMaxDefendPercentage;

			// determine exactly how much time we've been in the block
			// NOTE: this is relative time, not absolute
			float fBlockTime = 
				fCurrentServerTime - m_nDefendServerTimeStarted;

			if ( ( -MATH_EPSILON <= fBlockTime ) && 
			     ( fBlockTime <= m_nDefendDuration ) )
			{
				// somewhere on the uprise: interpolate start -> max
				fDefenseTimingPercentage =
					fStartDefendPercentage + 
						(
							(
								( 
									fMaxDefendPercentage -
									fStartDefendPercentage
								) /
								static_cast< float >( m_nDefendDuration )
							) * 
							fBlockTime
						);
			}
			else if ( m_nDefendDuration < fBlockTime )
			{
				// past the ramp: hold at the max
				fDefenseTimingPercentage = fMaxDefendPercentage;
			}
			else
			{
				// math problem, we should not be here if we are outside
				// the bounds of the defense
				ASSERT( 0 );
				fDefenseTimingPercentage = 0.0f;
			}
		}
		break;

		case MPROJ_END_HOLD_BLOCK:
		{
			// get the current server time (milliseconds)
			float fCurrentServerTime = g_pLTServer->GetTime() * 1000.0f;

			// make sure we're within the range of the block
			if ( ( static_cast< int >( fCurrentServerTime ) -
			       m_nDefendServerTimeStarted ) > m_nDefendDuration )
			{
				// nope, the block is over
				return 0.0f;
			}

			// determine the max defend percentage
			float fMaxDefendPercentage = pDiscData->fHoldDefendMaxDefendPercentage;

			// determine the ending defend percentage
			float fEndDefendPercentage = pDiscData->fHoldDefendEndDefendPercentage;

			// determine exactly how much time we've been in the block
			// NOTE: this is relative time, not absolute
			float fBlockTime = 
				fCurrentServerTime - m_nDefendServerTimeStarted;

			// somewhere on the downfall: interpolate max -> end
			fDefenseTimingPercentage =
				fMaxDefendPercentage - 
					(
						(
							( 
								fMaxDefendPercentage -
								fEndDefendPercentage
							) /
							( m_nDefendDuration )
						) * 
						( fBlockTime )
					);
		}
		break;

		default:
		{
			// There is some type of block defined that we
			// are not handling, and we SHOULD be handling it.
			ASSERT( 0 );

			return 0.0f;
		}
		break;
	};


	//TODO: skip this section if the camera position is too old?

	// check if the orientation percentage should be computed
	if ( 0 == pIncomingProjectilePosition )
	{
		// No vector specified, there is no way
		// to compute orientation defense.
		return fDefenseTimingPercentage;
	}

	//
	// Determine Orientation percentage
	//

	// The 3 cases are the same, but they could have different
	// control values.  Figure out what the specific variables
	// are (depending on the specific type of block), then apply
	// the generic equations.

	float fOrientationMinDefendPercentage;
	float fOrientationMaxDefendPercentage;
	float fOrientationDeadZone;
	float fOrientationMaxZone;

	switch ( m_cDefendType )
	{
		case MPROJ_START_SWAT_BLOCK:
		{
			fOrientationMinDefendPercentage  = pDiscData->fSwatDefendOrientationMinDefendPercentage;
			fOrientationMaxDefendPercentage  = pDiscData->fSwatDefendOrientationMaxDefendPercentage;
			fOrientationDeadZone             = MATH_PI - pDiscData->fSwatDefendOrientationDeadZone;
			fOrientationMaxZone              = pDiscData->fSwatDefendOrientationMaxZone;
		}
		break;

		case MPROJ_START_HOLD_BLOCK:
		case MPROJ_END_HOLD_BLOCK:
		{
			fOrientationMinDefendPercentage  = pDiscData->fHoldDefendOrientationMinDefendPercentage;
			fOrientationMaxDefendPercentage  = pDiscData->fHoldDefendOrientationMaxDefendPercentage;
			fOrientationDeadZone             = MATH_PI - pDiscData->fHoldDefendOrientationDeadZone;
			fOrientationMaxZone              = pDiscData->fHoldDefendOrientationMaxZone;
		}
		break;

		case MPROJ_START_ARM_BLOCK:
		{
			// Not implemented yet.  The main question I haven't figured
			// out yet is where does the information come from?
			fOrientationMinDefendPercentage  = 0.0f;
			fOrientationMaxDefendPercentage  = 0.0f;
			fOrientationDeadZone             = 0.0f;
			fOrientationMaxZone              = 0.0f;
		}
		break;

		default:
		{
			// There is some type of block defined that we
			// are not handling, and we SHOULD be handling it.
			ASSERT( 0 );

			return 0.0f;
		}
		break;
	};

	LTRESULT ltResult;
	LTVector vDefendPos;
	LTVector vDefendPosToProjectile;

	// get the player's position
	ltResult = g_pLTServer->GetObjectPos( m_hObject, &vDefendPos );
	ASSERT( LT_OK == ltResult );

	// REMOVE THIS CODE FOR FINAL RELEASE, BUT LEAVE FOR TESTING MULTIPLAYER
	// print a warning if the camera offset data is stale
	if ( TRONPLAYEROBJ_CLIENT_CAMERA_TIME_OLD_THRESHOLD < ( g_pLTServer->GetTime() - m_nClientCameraOffsetTimeReceivedMS ) )
	{
		g_pLTServer->CPrint( "Client Camera Offset time is low, possible lag\n" );
		g_pLTServer->CPrint( "   condition that will affect defensive accuracy.\n" );
		g_pLTServer->CPrint( "   Currnt value is %5.3f units old.\n", ( g_pLTServer->GetTime() - m_nClientCameraOffsetTimeReceivedMS ) );

	}
	
	// find a unit vector from us (player position plus the client camera
	// offset) to the projectile, flattened into the XZ plane
	vDefendPosToProjectile  = *pIncomingProjectilePosition -
	                         ( m_vClientCameraOffset +
	                           vDefendPos );
	vDefendPosToProjectile.y = 0;
	vDefendPosToProjectile.Normalize();

	// determine a forward vector representing the direction the
	// player is facing, also flattened into the XZ plane
	LTRotation vPlayerViewOrientation;
	ltResult = g_pLTServer->GetObjectRotation( m_hObject, &vPlayerViewOrientation );
	ASSERT( LT_OK == ltResult );

	LTVector vPlayerViewForward = vPlayerViewOrientation.Forward();
	vPlayerViewForward.y = 0;
	vPlayerViewForward.Normalize();

	// find the angle between the facing direction and the projectile
	// direction (ltacosf returns a value in [0, PI])
	float fDefenseAngle = ltacosf( vPlayerViewForward.Dot( vDefendPosToProjectile ) );

	if ( fDefenseAngle <= fOrientationMaxZone )
	{
		// it's within the max zone (including a projectile dead ahead)
		fDefenseOrientationPercentage = fOrientationMaxDefendPercentage;
	}
	else if ( ( fOrientationMaxZone < fDefenseAngle) && ( fDefenseAngle <= fOrientationDeadZone ) )
	{
		// it's within the dropoff range
		fDefenseOrientationPercentage = 
			fOrientationMaxDefendPercentage +
			( fDefenseAngle - fOrientationMaxZone ) *
				( fOrientationMinDefendPercentage - fOrientationMaxDefendPercentage ) /
				( fOrientationDeadZone - fOrientationMaxZone );
	}
	else if ( fOrientationDeadZone <= fDefenseAngle)
	{
		// it's within the dead zone
		fDefenseOrientationPercentage = 0.0f;
	}

	//
	// Final Defense Result
	//
	// NOTE(review): the orientation contribution is SUBTRACTED from the
	// timing contribution -- presumably the orientation term is a penalty;
	// confirm against the design spec.

	float fFinalDefensePercentage = fDefenseTimingPercentage -
	                                fDefenseOrientationPercentage;
	return fFinalDefensePercentage;
}
Esempio n. 27
0
//performs a ray intersection. This will return false if nothing is hit, or true if something
//is. If something is hit, it will fill out the intersection property and the alignment
//vector according to the properties that the user has setup. If an object is hit, it will
//fill out the hit object, and specify the output transform relative to the hit object's space
//
// @param vObjPos      origin of the effect object
// @param vObjForward  direction to cast along
// @param hOutObj      [out] object that was hit, or NULL for the world/no hit
// @param tOutTrans    [out] transform of the intersection (object space when hOutObj is set)
//
// FIX: the object-space conversion previously computed
// tOutTrans.GetInverse() * tOutTrans (identity times the offset) and never
// used the fetched object transform; it now uses tObjTrans.GetInverse().
bool CCreateRayFX::DetermineIntersection(	const LTVector& vObjPos, const LTVector& vObjForward, 
											HOBJECT& hOutObj, LTRigidTransform& tOutTrans)
{
	//default our output parameters to reasonable values
	hOutObj = NULL;
	tOutTrans.Init();

	//perform a ray intersection from our position along our Y axis and see if we hit anything.
	//If we do, create the effect there facing along the specified vector, randomly twisted

	//find the starting and ending points
	LTVector vStart = vObjPos + vObjForward * GetProps()->m_fMinDist;
	LTVector vEnd   = vObjPos + vObjForward * GetProps()->m_fMaxDist;

	//we now need to perform an intersection using these endpoints and see if we hit anything
	IntersectQuery	iQuery;
	IntersectInfo	iInfo;

	iQuery.m_Flags		= INTERSECT_HPOLY | IGNORE_NONSOLID;
	iQuery.m_FilterFn	= NULL;
	iQuery.m_pUserData	= NULL;
	iQuery.m_From		= vStart;
	iQuery.m_To			= vEnd;

	if( !g_pLTClient->IntersectSegment( iQuery, &iInfo ) )
	{
		//we didn't intersect anything, don't create an effect
		return false;
	}

	//determine if we hit the sky
	if(IsSkyPoly(iInfo.m_hPoly))
	{
		//never create an effect on the sky
		return false;
	}

	//we now need to determine the normal of intersection
	LTVector vHitNormal = iInfo.m_Plane.Normal();

	//we hit something, so we can now at least determine the point of intersection
	//(pushed off the surface by the configured offset)
	tOutTrans.m_vPos = iInfo.m_Point + vHitNormal * GetProps()->m_fOffset;

	//the primary vector we wish to align to
	LTVector vAlignment;

	//determine what our dominant axis should be
	switch (GetProps()->m_eAlignment)
	{
	default:
	case CCreateRayProps::eAlign_ToSource:
		//face back toward the ray source
		vAlignment = -vObjForward;
		break;
	case CCreateRayProps::eAlign_Normal:
		//face along the surface normal
		vAlignment = vHitNormal;
		break;
	case CCreateRayProps::eAlign_Outgoing:
		//reflect the incoming direction about the surface normal
		vAlignment = vObjForward - (2.0f * vObjForward.Dot(vHitNormal)) * vHitNormal;
		vAlignment.Normalize();
		break;
	case CCreateRayProps::eAlign_ToViewer:
		{
			//face toward the current camera
			LTVector vCameraPos;
			g_pLTClient->GetObjectPos(m_pFxMgr->GetCamera(), &vCameraPos);
			vAlignment = vCameraPos - tOutTrans.m_vPos;
			vAlignment.Normalize();
		}
		break;
	}

	//and generate a randomly twisted orientation around our dominant axis
	tOutTrans.m_rRot = LTRotation(vAlignment, LTVector(0.0f, 1.0f, 0.0f));
	tOutTrans.m_rRot.Rotate(vAlignment, GetRandom(0.0f, MATH_CIRCLE));

	//now if we hit an object, make sure to store that object, and convert the transform into
	//the object's space
	if(iInfo.m_hObject && (g_pLTClient->Physics()->IsWorldObject(iInfo.m_hObject) == LT_NO))
	{
		//store this as our hit object
		hOutObj = iInfo.m_hObject;

		//and convert the transform into that object's space
		LTRigidTransform tObjTrans;
		g_pLTClient->GetObjectTransform(hOutObj, &tObjTrans);
		tOutTrans = tObjTrans.GetInverse() * tOutTrans;
	}

	//success
	return true;
}
Esempio n. 28
0
// Activates a random number of lightning bolts once the emission delay has
// elapsed, picking an end position from either an attractor or a random
// point inside the omni-directional radius, then lays out each bolt's path
// sections with random perturbation toward that end position.
//
// @param tmFrameTime  seconds elapsed this frame, accumulated into the
//                     emission timer.
//
// FIX: the bolt loop condition previously used the comma operator
// (iter != end(), nBolt < nActiveBolts), which discarded the end-iterator
// check and could dereference past the end of m_lstBolts when
// nActiveBolts exceeded the pool size.  It now uses &&.
void CLightningFX::EmitBolts( float tmFrameTime )
{
	// Make sure enough time between emissions has passed...
	m_tmElapsedEmission += tmFrameTime;

	if( m_fDelay < m_tmElapsedEmission )
	{
		LTransform		lTrans;
		LTVector		vAttractorPos;
		ILTModel		*pModelLT = m_pLTClient->GetModelLT();

		uint32	nActiveBolts = GetRandom( (int)GetProps()->m_nMinNumBolts, (int)GetProps()->m_nMaxNumBolts );
		uint32	nBolt;

		bool	bCanUseAttractors = (m_lstAttractors.size() > 0);
		bool	bCanUseRadius = (GetProps()->m_fOmniDirectionalRadius >= 1.0f);

		CLightningBolt *pBolt = LTNULL;
		LightningBolts::iterator iter;

		// Guard both limits: stop at the end of the bolt pool even if
		// nActiveBolts is larger than the number of pooled bolts.
		for( nBolt = 0, iter = m_lstBolts.begin(); (iter != m_lstBolts.end()) && (nBolt < nActiveBolts); ++iter, ++nBolt )
		{
			pBolt = *iter;
			
			pBolt->m_fWidth = GetRandom( GetProps()->m_fMinBoltWidth, GetProps()->m_fMaxBoltWidth );
			pBolt->m_fLifetime = GetRandom( GetProps()->m_fMinLifetime, GetProps()->m_fMaxLifetime );
			pBolt->m_tmElapsed = 0.0f;
			pBolt->m_bActive = true;
			
			// Grab the position of the object to compensate for offset
		
			if( m_hTarget )
			{
				m_pLTClient->GetObjectPos( m_hTarget, &vAttractorPos );
			}
			else
			{
				vAttractorPos = m_vTargetPos;
			}

			// Decide if we should use an attractor or radius for the end pos...
			
			if( bCanUseAttractors && (!bCanUseRadius || GetRandom(0,1)) )
			{
				uint8	nIndex = GetRandom( 0, (int)(m_lstAttractors.size()) - 1 );
				CAttractor cAttractor = m_lstAttractors[nIndex];

				if( cAttractor.GetTransform( lTrans, true ) == LT_OK )
				{
					vAttractorPos = lTrans.m_Pos;
				}
			}	
			else if( bCanUseRadius )
			{
				// Pick a random point inside the omni-directional radius...
				LTVector vRandomPos;
				vRandomPos.x = GetRandom( -1.0f, 1.0f );
				vRandomPos.y = GetRandom( -1.0f, 1.0f );
				vRandomPos.z = GetRandom( -1.0f, 1.0f );
				
				vRandomPos.Normalize();
				vRandomPos *= GetRandom( -GetProps()->m_fOmniDirectionalRadius, GetProps()->m_fOmniDirectionalRadius );

				vAttractorPos = m_vPos + vRandomPos;

				// ...and clip it against the world so bolts end on geometry.
				IntersectQuery	iQuery;
				IntersectInfo	iInfo;

				iQuery.m_From	= m_vPos;
				iQuery.m_To		= vAttractorPos;

				if( m_pLTClient->IntersectSegment( &iQuery, &iInfo ))
				{
					vAttractorPos = iInfo.m_Point;
				}
			}

			
			LTVector vNew = m_vPos;
			LTVector vDir = vAttractorPos - vNew;
						
			float fStep = vDir.Length() / (float)pBolt->m_nNumSegments;
			float fPerturb = GetRandom( GetProps()->m_fMinPerturb, GetProps()->m_fMaxPerturb );
			
			vDir.Normalize();
			LTRotation rRot = LTRotation( vDir, LTVector( 0.0f, 1.0f, 0.0f ));
				
			CLinkListNode<PT_TRAIL_SECTION> *pNode = pBolt->m_collPathPts.GetHead();
			while( pNode )
			{
				pNode->m_Data.m_vPos = vNew;
				pNode->m_Data.m_tmElapsed = 0.0f;
				pNode->m_Data.m_vBisector.Init();
												
				// Add in some perturb going in the direction of the attractor pos for the next section...
				
				vNew +=	(rRot.Forward() * fStep );
				vNew += (rRot.Up() * GetRandom( -fPerturb, fPerturb ));
				vNew += (rRot.Right() * GetRandom( -fPerturb, fPerturb ));

				// Make sure the last section goes to the end pos...

				if( !pNode->m_pNext )
					pNode->m_Data.m_vPos = vAttractorPos;

				pNode = pNode->m_pNext;
			}
		}

		// Decide when the next emission will be...

		m_tmElapsedEmission = 0.0f;
		m_fDelay = GetRandom( GetProps()->m_fMinDelay, GetProps()->m_fMaxDelay );
	}
}
Esempio n. 29
0
// Initializes the model FX: builds an orientation from the (possibly
// overridden) target normal, creates the model object with the configured
// model/skins/render styles, and sets up its animation tracker.
//
// @param pClientDE  client interface (forwarded to the base class)
// @param pBaseData  per-instance creation data (target normal, flags, parent)
// @param pProps     shared effect properties
// @return true on success, false if base init or object creation fails.
//
// FIX: this bool-returning function previously mixed in LTFALSE/LTTRUE;
// it now returns false/true consistently with its own early return.
bool CLTBModelFX::Init(ILTClient *pClientDE, FX_BASEDATA *pBaseData, const CBaseFXProps *pProps)
{
	// Perform base class initialisation

	if (!CBaseFX::Init(pClientDE, pBaseData, pProps)) 
		return false;

	// Use the "target" Normal instead, if one was specified...

	LTVector vNorm = GetProps()->m_vNorm;

	if( pBaseData->m_vTargetNorm.LengthSquared() > MATH_EPSILON )
	{
		vNorm = pBaseData->m_vTargetNorm;
		vNorm.Normalize();
	}

	// Develop the Right and Up vectors based off the Forward...
	// (pick a different reference axis when the normal is straight up/down,
	// where the Y-axis cross product would be degenerate)

	LTVector vR, vU;
	if( (1.0f == vNorm.y) || (-1.0f == vNorm.y) )
	{
		vR = LTVector( 1.0f, 0.0f, 0.0f ).Cross( vNorm );
	}
	else
	{
		vR = LTVector( 0.0f, 1.0f, 0.0f ).Cross( vNorm );
	}

	vU = vNorm.Cross( vR );
	m_rNormalRot = LTRotation( vNorm, vU );


	ObjectCreateStruct ocs;

	// Combine the direction we would like to face with our parents rotation...

	if( m_hParent )
	{
		m_pLTClient->GetObjectRotation( m_hParent, &ocs.m_Rotation );
	}
	else
	{
		ocs.m_Rotation = m_rCreateRot;
	}

	ocs.m_Rotation = ocs.m_Rotation * m_rNormalRot;	

	ocs.m_ObjectType		= OT_MODEL;
	ocs.m_Flags				|= pBaseData->m_dwObjectFlags |	(GetProps()->m_bShadow ? FLAG_SHADOW : 0 );
	ocs.m_Flags2			|= pBaseData->m_dwObjectFlags2;
	
	// Calculate the position with the offset in 'local' coordinate space...

	LTMatrix mMat;
	ocs.m_Rotation.ConvertToMatrix( mMat );

	m_vPos = ocs.m_Pos = m_vCreatePos + (mMat * GetProps()->m_vOffset);

	
	SAFE_STRCPY( ocs.m_Filename, GetProps()->m_szModelName );
	
	SAFE_STRCPY( ocs.m_SkinNames[0], GetProps()->m_szSkinName[0] );
	SAFE_STRCPY( ocs.m_SkinNames[1], GetProps()->m_szSkinName[1] );
	SAFE_STRCPY( ocs.m_SkinNames[2], GetProps()->m_szSkinName[2] );
	SAFE_STRCPY( ocs.m_SkinNames[3], GetProps()->m_szSkinName[3] );
	SAFE_STRCPY( ocs.m_SkinNames[4], GetProps()->m_szSkinName[4] );
	
	SAFE_STRCPY( ocs.m_RenderStyleNames[0], GetProps()->m_szRenderStyle[0] );
	SAFE_STRCPY( ocs.m_RenderStyleNames[1], GetProps()->m_szRenderStyle[1] );
	SAFE_STRCPY( ocs.m_RenderStyleNames[2], GetProps()->m_szRenderStyle[2] );
	SAFE_STRCPY( ocs.m_RenderStyleNames[3], GetProps()->m_szRenderStyle[3] );

	m_hObject = m_pLTClient->CreateObject(&ocs);
	if( !m_hObject ) 
		return false;

	ILTModel		*pLTModel = m_pLTClient->GetModelLT();
	ANIMTRACKERID	nTracker;
	
	pLTModel->GetMainTracker( m_hObject, nTracker );

	//setup the animation if the user specified one
	if( strlen(GetProps()->m_szAnimName) > 0)
	{
		//ok, we need to set this
		HMODELANIM hAnim = m_pLTClient->GetAnimIndex(m_hObject, GetProps()->m_szAnimName);

		if(hAnim != INVALID_MODEL_ANIM)
		{
			//ok, lets set this animation
			pLTModel->SetCurAnim(m_hObject, nTracker, hAnim);
			pLTModel->ResetAnim(m_hObject, nTracker);
		}
	}
	//disable looping on this animation (so we can actually stop!)
	pLTModel->SetLooping(m_hObject, nTracker, false);

	// Setup the initial data needed to override the models animation length...
	if( GetProps()->m_bOverrideAniLength )
	{
		uint32 nAnimLength;

		pLTModel->GetCurAnimLength( m_hObject, nTracker, nAnimLength );
		pLTModel->SetCurAnimTime( m_hObject, nTracker, 0 );

		// Fall back to the effect lifespan when no explicit length is given.
		float fAniLength = (GetProps()->m_fAniLength < MATH_EPSILON) ? GetProps()->m_tmLifespan : GetProps()->m_fAniLength;
		
		// Guard against dividing by a (near) zero animation length.
		if( fAniLength >= MATH_EPSILON || fAniLength <= -MATH_EPSILON )
			m_fAniRate = (nAnimLength * 0.001f) / fAniLength;

		pLTModel->SetAnimRate( m_hObject, nTracker, m_fAniRate );
	}

	// Success !!

	return true;
}
Esempio n. 30
0
// Configures the movement-cone constraints of a tracked node: builds an
// orthonormal basis from the cone axis/up pair, clamps and precomputes the
// discomfort/threshold cone tangents, and stores the angular speed limit.
//
// @param ID                 handle of the tracked node to configure
// @param vMovConeAxis       forward axis of the movement cone
// @param vMovConeUp         up reference for the cone's basis
// @param fXDiscomfortAngle  X half-angle (radians) where discomfort begins
// @param fYDiscomfortAngle  Y half-angle (radians) where discomfort begins
// @param fXMaxAngle         X half-angle (radians) hard limit (clamped < 90)
// @param fYMaxAngle         Y half-angle (radians) hard limit (clamped < 90)
// @param fMaxAngVel         maximum angular velocity (absolute value used)
// @return false if ID is invalid, true otherwise.
//
// FIX: the degenerate-basis checks now use the absolute dot product so an
// up vector ANTI-parallel to the forward axis (dot near -1, which also
// produces a zero cross product) is rejected, not just the parallel case.
bool CTrackedNodeMgr::SetNodeConstraints(	HTRACKEDNODE ID,
							const LTVector& vMovConeAxis, 
							const LTVector& vMovConeUp,
							float fXDiscomfortAngle,
							float fYDiscomfortAngle,
							float fXMaxAngle,
							float fYMaxAngle,
							float fMaxAngVel
						)
{
	//sanity checks
	if(!CheckValidity(ID))
		return false;

	//ok, we have a valid ID, so let us setup the parameters
	CTrackedNode* pNode = (CTrackedNode*)ID;

	//see if the up and forward vectors are valid
	LTVector vForward	= vMovConeAxis;
	LTVector vUp		= vMovConeUp;

	//ensure proper scale
	vForward.Normalize();
	vUp.Normalize();

	//ensure they form a valid space (and not a plane) -- use the absolute
	//dot so both parallel and anti-parallel vectors are treated as invalid
	if((float)fabs(vUp.Dot(vForward)) > 0.99f)
	{
		//not valid, we need to try a different up, our preference is the world up
		vUp.Init(0.0f, 1.0f, 0.0f);

		if((float)fabs(vUp.Dot(vForward)) > 0.99f)
		{
			//ok, forward is already taking the up....so, tilt us back
			vUp.Init(0.0f, 0.0f, -1.0f);
		}
	}

	//now generate the right, and ensure orthogonality
	LTVector vRight = vForward.Cross(vUp);
	vUp = vRight.Cross(vForward);

	vRight.Normalize();
	vUp.Normalize();

	//setup this as the basis space (transpose of a rotation = its inverse)
	pNode->m_mInvTargetTransform.SetBasisVectors(&vRight, &vUp, &vForward);
	pNode->m_mInvTargetTransform.Transpose();

	//we need to make sure that their angular constraints are valid (meaning that they are positive and
	//less than 90 deg)
	fXMaxAngle			= LTCLAMP(fXMaxAngle,			0.0f, DEG2RAD(89.0f));
	fYMaxAngle			= LTCLAMP(fYMaxAngle,			0.0f, DEG2RAD(89.0f));
	fXDiscomfortAngle	= LTCLAMP(fXDiscomfortAngle,	0.0f, fXMaxAngle);
	fYDiscomfortAngle	= LTCLAMP(fYDiscomfortAngle,	0.0f, fYMaxAngle);

	//now precompute the tangent of those values (used for finding the height of the cone created which
	//is used in the threshold determination code)
	pNode->m_fTanXDiscomfort = (float)tan(fXDiscomfortAngle);
	pNode->m_fTanYDiscomfort = (float)tan(fYDiscomfortAngle);
	pNode->m_fTanXThreshold  = (float)tan(fXMaxAngle);
	pNode->m_fTanYThreshold  = (float)tan(fYMaxAngle);

	//handle setting up the maximum angular velocity
	pNode->m_fMaxAngVel		= (float)fabs(fMaxAngVel);

	//and we are ready for primetime
	return true;
}