// Constrains the rag-doll point m_pConstrain to lie (within m_fTolerance) on the
// plane spanned by the hinge axis, for the position buffer slot nPosIndex.
// The plane is rebuilt each call from three live points: the plane center (pivot),
// the hinge point, and a third "other" point.
void CRagDollInPlaneConstraint::Apply(uint32 nPosIndex)
{
	//find the plane pivot point (the connection between both points forming the first plane)
	LTVector& vPivot = m_pPlaneCenter->m_vPosition[nPosIndex];

	//find the vector that goes from the pivot point to the hinge point
	LTVector vToHinge = m_pHinge->m_vPosition[nPosIndex] - vPivot;
	LTVector vToOther = m_pPlaneOther->m_vPosition[nPosIndex] - vPivot;

	//find the plane normal
	LTVector vPlaneNormal = vToOther.Cross(vToHinge);
	vPlaneNormal.Normalize();

	// re-cross with the hinge axis so the final normal is perpendicular to the hinge.
	// NOTE(review): m_fNormalScale is applied BEFORE the normalize, so only its sign
	// (which side of the plane is constrained toward) can matter — confirm it is ±1.
	vPlaneNormal = vToHinge.Cross(vPlaneNormal) * m_fNormalScale;
	vPlaneNormal.Normalize();

	//move the point into the plane
	LTVector& vConstrain = m_pConstrain->m_vPosition[nPosIndex];

	// signed distance of the constrained point from the plane
	float fDot = vPlaneNormal.Dot(vConstrain - vPivot);

	// only correct the part of the distance that exceeds the tolerance band,
	// on whichever side the point has drifted
	if(fDot < -m_fTolerance)
	{
		fDot += m_fTolerance;
		vConstrain -= vPlaneNormal * fDot;
	}
	else if(fDot > m_fTolerance)
	{
		fDot -= m_fTolerance;
		vConstrain -= vPlaneNormal * fDot;
	}

	//success
}
// Returns true when the AI's target lies inside the ranged weapon's firing cone.
// Returns false when the AI is invalid, has no ranged weapon, or the target is
// outside the cone.
bool CAIActionAttackTurret::TargetInFOV( CAI* pAI )
{
	// Sanity check.
	if( !pAI )
	{
		return false;
	}

	// Bail if the AI has no ranged weapon to aim with.
	CWeapon* pWeapon = pAI->GetAIWeaponMgr()->GetWeaponOfType( kAIWeaponType_Ranged );
	if( !pWeapon )
	{
		return false;
	}

	// Unit direction from the AI to its target.
	LTVector vToTarget = pAI->GetAIBlackBoard()->GetBBTargetPosition() - pAI->GetPosition();
	vToTarget.Normalize();

	// The target is in the FOV when the angle between the weapon's forward and
	// the target direction is under 10 degrees (a 20 degree total cone).
	const float fCosHalfFOV = cos( DEG2RAD( 10.f ) );
	return vToTarget.Dot( pAI->GetWeaponForward( pWeapon ) ) > fCosHalfFOV;
}
// Quick sphere-vs-query-segment rejection test for a sphere at pVector with the
// given radius. Relies on globals precomputed by the caller from the current
// query (g_pCurQuery): g_V (segment direction), g_VTimesInvVV / g_VPTimesInvVV
// (terms of the projection divided by V.V), and g_LineLen — TODO confirm setup site.
// Returns true when the sphere may intersect the segment.
inline bool i_QuickSphereTest2(const LTVector &pVector, const float fRadius)
{
	// Find the closest point to the line.
	// Here's the equation for t:
	// P = point1
	// V = point2 - point1 (ie: direction vector)
	// S = point you're testing
	// t = parametric (P + Vt)
	// t = -(-VS + VP) / VV
	//dirVec = *pPoint2 - *pPoint1;
	//t = pServerObj->m_Pos.Dot(dirVec) - dirVec.Dot(*pPoint1);
	//t /= dirVec.Dot(dirVec);
	float t = g_VTimesInvVV.Dot(pVector) - g_VPTimesInvVV;

	// reject when the closest approach lies beyond either segment end (padded by radius)
	if (t < -fRadius || t > (g_LineLen + fRadius))
	{
		return false;
	}

	// Now see if it's within range.
	LTVector vecTo = g_pCurQuery->m_From + g_V * t - pVector;
	return vecTo.MagSqr() < (fRadius * fRadius);
}
// Quick sphere-vs-query-segment rejection test for a server object, using the
// object's position and bounding radius. Same math as i_QuickSphereTest2 but the
// sphere comes from the object. Depends on the same precomputed query globals.
// Returns true when the object's bounding sphere may intersect the segment.
inline bool i_QuickSphereTest(const LTObject *pServerObj)
{
	// Find the closest point to the line.
	// Here's the equation for t:
	// P = point1
	// V = point2 - point1 (ie: direction vector)
	// S = point you're testing
	// t = parametric (P + Vt)
	// t = -(-VS + VP) / VV
	//dirVec = *pPoint2 - *pPoint1;
	//t = pServerObj->m_Pos.Dot(dirVec) - dirVec.Dot(*pPoint1);
	//t /= dirVec.Dot(dirVec);
	float t = g_VTimesInvVV.Dot(pServerObj->GetPos()) - g_VPTimesInvVV;

	//cache this radius since it is a virtual function call and can be somewhat expensive on some
	//object types
	float fRadius = pServerObj->GetRadius();

	// reject when the closest approach lies beyond either segment end (padded by radius)
	if (t < -fRadius || t > (g_LineLen + fRadius))
	{
		return false;
	}

	// Now see if it's within range.
	LTVector vecTo = g_pCurQuery->m_From + g_V * t - pServerObj->GetPos();
	return vecTo.MagSqr() < pServerObj->GetRadiusSquared();
}
// Computes how much a character should pitch/roll to contour to the ground.
//   vForward          - character's forward vector (assumed unit length — TODO confirm)
//   vNormal           - ground plane normal (assumed unit length — TODO confirm)
//   fOutAmount        - total tilt angle (radians) needed to lie parallel to the plane
//   fOutPitchPercent  - fraction of the tilt to apply as pitch
//   fOutRollPercent   - fraction of the tilt to apply as roll
void GetContouringInfo( LTVector &vForward, LTVector &vNormal, float &fOutAmount,
						float &fOutPitchPercent, float &fOutRollPercent )
{
	// A straight-up normal has no horizontal slope direction, so fall back to
	// the character's forward vector to define the plane frame.
	LTVector vPlaneF = (vNormal.y >= 1.0f) ? vForward : vNormal;
	vPlaneF.y = 0.0f;
	vPlaneF.Normalize();

	LTRotation rPlaneRot( vPlaneF, LTVector(0, 1, 0));
	LTVector vPlaneR = rPlaneRot.Right();

	// Calculate how much pitch and roll we should apply...
	fOutPitchPercent = vForward.Dot( vPlaneF );
	fOutRollPercent = vForward.Dot( vPlaneR );

	// Figure out the length of the foward vector projected on the xz plane. This
	// is needed because Euler angles are calculated cummulatively, not just based
	// on the global coordinate axis.
	// FIX: clamp the radicand at zero. If vNormal.y carries floating-point error
	// slightly above 1.0, sqrt of a negative value would produce NaN and poison
	// all three outputs.
	float fXZSqr = 1.0f - vNormal.y * vNormal.y;
	float fXZLen = (fXZSqr > 0.0f) ? (float)sqrt( fXZSqr ) : 0.0f;

	// Subtract the pitch from 90 degrees cause we want to be parallel to the plane
	fOutAmount = MATH_HALFPI - (float)atan2( vNormal.y, fXZLen );
}
//given a capsule specified with a transform, two points, and a radius, this will approximate how much is submerged //and distribute that force to the appropriate points on the capsule static bool ApplyCapsuleBuoyancy(const LTVector& vPt1, const LTVector& vPt2, float fLength, float fRadius, const LTPlane& WSPlane, float& fVolume, LTVector& vApplyAt, float& fSurfaceArea) { //convert the capsule to an OBB and apply it //determine information about the main axis LTVector vMainAxis = vPt2 - vPt1; LTASSERT( fLength > 0.0f, "Invalid capsule length." ); LTVector vUnitAxis = vMainAxis / fLength; //we can now build up a rotation given the plane normal and the axis to build our transform LTVector vUp = WSPlane.Normal(); if(fabsf(vUp.Dot(vUnitAxis)) > 0.99f) { //too close to use, built an arbitrary orthonormal vUp = vUnitAxis.BuildOrthonormal(); } LTMatrix3x4 mTemp; LTVector vRight = vUnitAxis.Cross(vUp); vRight.Normalize( ); LTVector vTrueUp = vRight.Cross( vUnitAxis ); mTemp.SetBasisVectors(vRight, vTrueUp, vUnitAxis); LTRotation rRot; rRot.ConvertFromMatrix(mTemp); //now we can form our transform LTRigidTransform tTransform((vPt1 + vPt2) * 0.5f, rRot); LTVector vHalfDims(fRadius, fRadius, fLength * 0.5f + fRadius); return ApplyOBBBuoyancy(tTransform, vHalfDims, WSPlane, fVolume, vApplyAt, fSurfaceArea); }
// Returns LTTRUE when the scanner can see hObject: the object must pass the
// team filter (in cooperative assault), be within visual range, lie inside the
// scanner's FOV cone, and be the first thing hit by a line-of-sight ray.
// ofn is the intersection filter used for the visibility ray.
LTBOOL CScanner::CanSeeObject(ObjectFilterFn ofn, HOBJECT hObject)
{
	_ASSERT(hObject);
	if (!hObject) return LTFALSE;

	// in team games, ignore players that are not on the filtered team
	if (g_pGameServerShell->GetGameType() == COOPERATIVE_ASSAULT &&
		m_nPlayerTeamFilter && IsPlayer(hObject))
	{
		CPlayerObj* pPlayer = (CPlayerObj*)g_pLTServer->HandleToObject(hObject);
		if (pPlayer->GetTeamID() != m_nPlayerTeamFilter)
			return LTFALSE;
	}

	LTVector vPos;
	g_pLTServer->GetObjectPos(hObject, &vPos);

	LTVector vDir;
	vDir = vPos - GetScanPosition();

	// NOTE(review): a squared magnitude is compared against m_fVisualRange —
	// this assumes the member already stores a squared range; verify at the
	// member's assignment site.
	if (VEC_MAGSQR(vDir) >= m_fVisualRange)
	{
		return LTFALSE;
	}

	vDir.Norm();

	// FOV check: compare the normalized direction against the scanner's forward.
	// m_fFOV is presumably the cosine of the half-angle — TODO confirm.
	LTRotation rRot = GetScanRotation();
	LTVector vUp, vRight, vForward;
	g_pLTServer->GetRotationVectors(&rRot, &vUp, &vRight, &vForward);

	LTFLOAT fDp = vDir.Dot(vForward);
	if (fDp < m_fFOV)
	{
		return LTFALSE;
	}

	// See if we can see the position in question
	IntersectQuery IQuery;
	IntersectInfo IInfo;
	VEC_COPY(IQuery.m_From, GetScanPosition());
	VEC_COPY(IQuery.m_To, vPos);
	IQuery.m_Flags = INTERSECT_OBJECTS | IGNORE_NONSOLID;
	IQuery.m_FilterFn = ofn;

	// visible only if the first solid thing the ray hits is the object itself
	if (g_pLTServer->IntersectSegment(&IQuery, &IInfo))
	{
		if (IInfo.m_hObject == hObject)
		{
			return LTTRUE;
		}
	}

	return LTFALSE;
}
void CAIHumanStateAttackMove::SelectMoveAnim() { // Only turn around once. // This prevents the AI from flipping out when the players runs around him. if( m_bTurnedAround ) { return; } LTVector vDestDir = m_vAttackMoveDest - GetAI()->GetPosition(); vDestDir.y = 0.f; vDestDir.Normalize(); LTVector vTargetDir = GetAI()->GetTarget()->GetVisiblePosition() - GetAI()->GetPosition(); vTargetDir.y = 0.f; vTargetDir.Normalize(); // Should the AI turn around and face forward? if( m_pStrategyFollowPath->GetMovement() == kAP_BackUp ) { LTFLOAT fDot = vDestDir.Dot( vTargetDir ); if( fDot > c_fFOV160 ) { GetAI()->FaceTargetRotImmediately(); m_pStrategyFollowPath->SetMovement( kAP_Run ); m_bTurnedAround = LTTRUE; } } // Should the AI turn around and face backward? else { vDestDir = -vDestDir; LTFLOAT fDot = vDestDir.Dot( vTargetDir ); if( fDot > c_fFOV160 ) { GetAI()->FaceTargetRotImmediately(); m_pStrategyFollowPath->SetMovement( kAP_BackUp ); m_bTurnedAround = LTTRUE; } } }
// Returns true when vTargetOrigin is a position the AI can plausibly shoot at:
// not coincident with the weapon, within the vertical aiming range, and not
// requiring the AI to turn its back on its current target.
bool AIUtil_PositionShootable(CAI* pAI, const LTVector& vTargetOrigin)
{
	// Need a valid AI with a weapon manager and a current weapon.
	if (NULL == pAI || NULL == pAI->GetAIWeaponMgr() || NULL == pAI->GetAIWeaponMgr()->GetCurrentWeapon())
	{
		return false;
	}

	LTVector vWeaponPos = pAI->GetWeaponPosition(pAI->GetAIWeaponMgr()->GetCurrentWeapon(), false);

	// Bail if the AIs target is in the same position as the weapon.
	if (vTargetOrigin == vWeaponPos)
	{
		return false;
	}

	// Bail if the combat opportunity is too far above or below the AI
	// (must be within the aiming range).
	//
	// TODO: Determine what a good FOV without hardcoding this value. The
	// selected FOV fixed out cases, but this is animation driven, there
	// is no guarantee this is the ideal value.
	// (The dot of the unit direction with straight-up is just the absolute
	// value of its y component.)
	LTVector vToTargetUnit = (vTargetOrigin - vWeaponPos).GetUnit();
	float fDotUp = (float)fabs(vToTargetUnit.y);
	if (fDotUp >= c_fFOV60)
	{
		return false;
	}

	// Bail if the AI has to turn his back on his enemy/target to fire at this
	// position. Both directions are flattened to the horizontal plane first.
	LTVector vDirToPosition = vTargetOrigin - vWeaponPos;
	vDirToPosition.y = 0.f;
	vDirToPosition.Normalize();

	LTVector vDirToTargetChar = pAI->GetAIBlackBoard()->GetBBTargetPosition() - vWeaponPos;
	vDirToTargetChar.y = 0.f;
	vDirToTargetChar.Normalize();

	// Position is shootable only when the horizontal angle stays inside the cone.
	return vDirToPosition.Dot( vDirToTargetChar ) > c_fFOV140;
}
// Keeps the constrained rag-doll point above (by m_fOffset) the plane defined
// by three other points, for the position buffer slot nPosIndex.
void CRagDollAbovePlane3Constraint::Apply(uint32 nPosIndex)
{
	// Pt1 anchors the plane.
	const LTVector& vAnchor = m_pPt1->m_vPosition[nPosIndex];

	// Build the plane normal from the two edge vectors. m_fNormalScale is
	// applied before normalization, so it selects which side counts as "above".
	LTVector vEdgeA = m_pPt2->m_vPosition[nPosIndex] - vAnchor;
	LTVector vEdgeB = m_pPt3->m_vPosition[nPosIndex] - vAnchor;
	LTVector vPlaneNormal = vEdgeA.Cross(vEdgeB) * m_fNormalScale;
	vPlaneNormal.Normalize();

	// Project the point back up onto the offset plane when it falls below it.
	LTVector& vPoint = m_pConstrain->m_vPosition[nPosIndex];
	float fDist = vPlaneNormal.Dot(vPoint - vAnchor) - m_fOffset;
	if(fDist < 0.0f)
	{
		vPoint -= fDist * vPlaneNormal;
	}
}
// Plays a fly-by whoosh at the point on the bullet's path closest to the camera,
// but only when the camera is too far away to hear the fire or impact sounds and
// close enough (g_cvarFlyByRadius) to the bullet's line of travel.
void CWeaponFX::PlayBulletFlyBySound()
{
	if (!m_pWeapon || !m_pAmmo) return;

	// only projectile-less (hitscan) vector ammo produces fly-bys
	if (m_pAmmo->eType != VECTOR) return;

	// Camera pos
	HOBJECT hCamera = g_pGameClientShell->GetCamera();
	LTVector vPos;
	g_pLTClient->GetObjectPos(hCamera, &vPos);

	// We only play the flyby sound if we won't hear an impact or
	// fire sound...
	LTVector vDist = m_vFirePos - vPos;
	if (vDist.Mag() < m_pWeapon->nFireSoundRadius) return;

	if (m_pAmmo->pImpactFX)
	{
		vDist = m_vPos - vPos;
		if (vDist.Mag() < m_pAmmo->pImpactFX->nSoundRadius) return;
	}

	// See if the camera is close enough to the bullet path to hear the
	// bullet...
	LTFLOAT fRadius = g_cvarFlyByRadius.GetFloat();

	// project the camera onto the bullet ray; vBulletDir is then the offset
	// from the camera to its closest point on the path
	LTVector vDir = m_vDir;
	const LTVector vRelativePos = vPos - m_vFirePos;
	const LTFLOAT fRayDist = vDir.Dot(vRelativePos);
	LTVector vBulletDir = (vDir*fRayDist - vRelativePos);

	const LTFLOAT fDistSqr = vBulletDir.MagSqr();
	if (fDistSqr < fRadius*fRadius)
	{
		// play at the closest point on the path rather than at the camera
		vPos += vBulletDir;
		g_pClientSoundMgr->PlaySoundFromPos(vPos, "Guns\\Snd\\flyby.wav",
			g_cvarFlyBySoundRadius.GetFloat(), SOUNDPRIORITY_MISC_LOW);
	}
}
// Fills in per-vertex eye vectors and Fresnel alpha for a polygrid (water)
// vertex batch. The camera position is moved into the grid's object space,
// then each vertex gets a normalized to-camera vector and a Fresnel table
// lookup OR'd into its color.
static void GeneratePolyGridFresnelAlphaAndCamera(const LTVector& vViewPos, CPolyGridBumpVertex* pVerts, LTPolyGrid* pGrid, uint32 nNumVerts)
{
	//we need to transform the camera position into our view space
	LTMatrix mInvWorldTrans;
	mInvWorldTrans.Identity();
	mInvWorldTrans.SetTranslation(-pGrid->GetPos());

	LTMatrix mOrientation;
	pGrid->m_Rotation.ConvertToMatrix(mOrientation);
	mInvWorldTrans = mOrientation * mInvWorldTrans;

	LTVector vCameraPos = mInvWorldTrans * vViewPos;

	//now generate the internals of the polygrid
	CPolyGridBumpVertex* pCurrVert = pVerts;
	CPolyGridBumpVertex* pEnd = pCurrVert + nNumVerts;

	//determine the fresnel table that we are going to be using
	// (IOR is clamped to a minimum of 1.0003 — vacuum/air boundary)
	const CFresnelTable* pTable = g_FresnelCache.GetTable(LTMAX(1.0003f, pGrid->m_fFresnelVolumeIOR), pGrid->m_fBaseReflection);

	//use a vector from the camera to the center of the grid to base our approximations off of. The further
	//we get to the edges the more likely this error will be, but it is better than another sqrt per vert
	LTVector vToPGPt;

	while(pCurrVert < pEnd)
	{
		//the correct but slow way, so only do it every once in a while
		// NOTE(review): the subsampling condition below is commented out, so the
		// "slow" per-vertex normalize currently runs for EVERY vertex.
		//if((pCurrVert - g_TriVertList) % 4 == 0)
		{
			vToPGPt = vCameraPos - pCurrVert->m_Vec;
			vToPGPt.Normalize();
		}

		pCurrVert->m_fEyeX = vToPGPt.x;
		pCurrVert->m_fEyeY = vToPGPt.y;
		pCurrVert->m_fEyeZ = vToPGPt.z;

		// fold the Fresnel term into the vertex color (presumably the alpha
		// channel — TODO confirm GetValue's packing)
		pCurrVert->m_nColor |= pTable->GetValue(vToPGPt.Dot(pCurrVert->m_vBasisUp));

		++pCurrVert;
	}
}
// Validates that the AI stands inside the node's horizontal FOV cone.
// Returns false only when the AIOutsideFOV test is requested, direction is not
// ignored, and the AI lies outside the cone; true otherwise.
bool AINodeValidatorAIOutsideFOV::Evaluate( uint32 dwFilteredStatusFlags, const LTVector& vAIPos, const LTVector& vNodePos, const LTVector& vNodeForward, bool bIgnoreDir ) const
{
	// Test not requested — node is acceptable.
	if( !( dwFilteredStatusFlags & kNodeStatus_AIOutsideFOV ) )
	{
		return true;
	}

	// Caller asked to skip the directional check.
	if( bIgnoreDir )
	{
		return true;
	}

	// Flattened unit direction from the node to the AI.
	LTVector vToAI = vAIPos - vNodePos;
	vToAI.y = 0.f;
	vToAI.Normalize();

	// Inside the cone when the projection onto the node's forward exceeds the
	// configured FOV dot threshold.
	return vToAI.Dot( vNodeForward ) > m_fFovDp;
}
//given a point on the screen, it will calculate the point on the edit grid that should //be used for the brush LTVector CRVTrackerDrawPoly::CalcCurrMouseVert(CPoint& point) { LTVector vFinalVec; // Move the current poly's last vertex. // Invalidate the largest rect from where it was, its current position, and the previous vertex. if( m_bVertSnap ) { CVertRef Vert = m_pView->GetClosestVert( point, false, NULL, false ); if( Vert.IsValid() ) { //we now have a vertex, but we need to make sure that it intersects the edit grid, //so we want to shoot a ray straight towards the grid from the vertex and find //out where it hits. That will then be our point. //find out the distance of this point away from the grid CReal fDist = m_pView->EditGrid().Normal().Dot(Vert()) - m_pView->EditGrid().Dist(); //now move it onto the grid vFinalVec = Vert() - m_pView->EditGrid().Normal() * fDist; } } else { LTVector vIntersection; if( m_pView->GetVertexFromPoint(point, vIntersection) && (m_pView->DrawingBrush().m_Points > 0) ) { if( !m_pView->IsPerspectiveViewType( )) { LTVector vOffset; vOffset = m_pView->EditGrid( ).Forward( ); vOffset = m_pView->EditGrid( ).Forward( ) * vOffset.Dot( m_pView->GetRegion()->m_vMarker ); vIntersection += vOffset; } vFinalVec = vIntersection; } } return vFinalVec; }
// Returns LTTRUE when the scanner has an unobstructed view of vPos: the point
// must be within visual range, inside the FOV cone, and the line-of-sight
// segment must hit nothing. ofn filters the intersection test.
LTBOOL CScanner::CanSeePos(ObjectFilterFn ofn, const LTVector& vPos)
{
	// Range check.
	// NOTE(review): squared magnitude vs. m_fVisualRange — assumes the member
	// stores a squared range; verify at its assignment site.
	LTVector vToPos = vPos - GetScanPosition();
	if (VEC_MAGSQR(vToPos) >= m_fVisualRange)
	{
		return LTFALSE;
	}

	vToPos.Norm();

	// FOV check against the scanner's forward vector.
	LTRotation rRot = GetScanRotation();
	LTVector vUp, vRight, vForward;
	g_pLTServer->GetRotationVectors(&rRot, &vUp, &vRight, &vForward);

	if (vToPos.Dot(vForward) < m_fFOV)
	{
		return LTFALSE;
	}

	// See if we can see the position in question: visible only when nothing
	// solid blocks the segment.
	IntersectQuery IQuery;
	IntersectInfo IInfo;
	VEC_COPY(IQuery.m_From, GetScanPosition());
	VEC_COPY(IQuery.m_To, vPos);
	IQuery.m_Flags = INTERSECT_OBJECTS | IGNORE_NONSOLID;
	IQuery.m_FilterFn = ofn;

	return g_pLTServer->IntersectSegment(&IQuery, &IInfo) ? LTFALSE : LTTRUE;
}
//determines if this polygon is concave or not bool CEditPoly::IsConcave() { uint32 nNumPts = NumVerts(); if(nNumPts <= 3) { return false; } //get the normal for this polygon LTVector vNormal = Normal(); //now go through every edge uint32 nPrevPt = nNumPts - 1; for(uint32 nCurrPt = 0; nCurrPt < nNumPts; nPrevPt = nCurrPt, nCurrPt++) { //build the edge normal LTVector vEdge = m_pBrush->m_Points[Index(nCurrPt)] - m_pBrush->m_Points[Index(nPrevPt)]; //find the normal LTVector vEdgeNorm = vNormal.Cross(vEdge); vEdgeNorm.Norm(); //now run through all the other points for(uint32 nTestPt = 0; nTestPt < nNumPts; nTestPt++) { //ignore the points on the edge if((nTestPt == nCurrPt) || (nTestPt == nPrevPt)) continue; //see if it is on the correct side if(vEdgeNorm.Dot(m_pBrush->m_Points[Index(nTestPt)] - m_pBrush->m_Points[Index(nCurrPt)]) > 0.001f) { return true; } } } return false; }
//---------------------------------------------------------------------------- // // ROUTINE: CAIHumanStrategyShootStream::UpdateAiming() // // PURPOSE: // //---------------------------------------------------------------------------- /*virtual*/ void CAIHumanStrategyShootStream::UpdateAiming(HOBJECT hTarget) { if ( m_flStreamTime < g_pLTServer->GetTime() ) { // Don't calculate new stream time until finished firing animation. if( !GetAnimationContext()->IsLocked() ) { CalculateStreamTime(); } Aim(); } else { // We're done waiting, fire if we're at a reasonable angle if ( m_bIgnoreFOV ) { Fire(); } else { LTVector vTargetPos; g_pLTServer->GetObjectPos(hTarget, &vTargetPos); LTVector vDir = vTargetPos - GetAI()->GetPosition(); vDir.y = 0.0f; vDir.Normalize(); if ( vDir.Dot(GetAI()->GetTorsoForward()) < 0.70f ) { Aim(); } else { Fire(); } } } }
// Validates that the node is not behind the AI. Returns false when the AI is
// invalid or (when the test is requested) the node lies behind the AI's
// forward direction.
bool AINodeValidatorAIBackToNode::Evaluate( uint32 dwFilteredStatusFlags, CAI* pAI, const LTVector& vNodePos ) const
{
	// Sanity check.
	if( !pAI )
	{
		return false;
	}

	// Test not requested — node is acceptable.
	if( !( dwFilteredStatusFlags & kNodeStatus_AIBackToNode ) )
	{
		return true;
	}

	// The node is valid only when it projects non-negatively onto the AI's
	// forward vector (i.e. it is not behind the AI).
	LTVector vToNode = vNodePos - pAI->GetPosition();
	return vToNode.Dot( pAI->GetForwardVector() ) >= 0.f;
}
// Keeps the constrained rag-doll point on the positive side of the plane that
// contains the Pt1->Pt2 edge and is perpendicular to the triangle Pt1,Pt2,Pt3,
// for the position buffer slot nPosIndex.
void CRagDollAbovePlaneOnEdgeConstraint::Apply(uint32 nPosIndex)
{
	// Pt1 anchors everything; Pt1->Pt2 is the edge the constraint plane runs through.
	const LTVector& vAnchor = m_pPt1->m_vPosition[nPosIndex];
	LTVector vEdgeDir = m_pPt2->m_vPosition[nPosIndex] - vAnchor;

	// Normal of the triangle formed by the three points.
	LTVector vTriNormal = vEdgeDir.Cross(m_pPt3->m_vPosition[nPosIndex] - vAnchor);
	vTriNormal.Normalize();

	// Perpendicular plane passing through the edge. m_fNormalScale is applied
	// before normalization, so it selects which side counts as "above".
	LTVector vConstraintNormal = vTriNormal.Cross(vEdgeDir) * m_fNormalScale;
	vConstraintNormal.Normalize();

	// Push the point back onto the plane when it has slipped behind it.
	LTVector& vPoint = m_pConstrain->m_vPosition[nPosIndex];
	float fDist = vConstraintNormal.Dot(vPoint - vAnchor);
	if(fDist < 0.0f)
	{
		vPoint -= fDist * vConstraintNormal;
	}
}
// Validates that the threat is not blocking the AI's path to the node.
// Returns false only when the threat-blocking test is requested, the threat is
// within the "too close" range, and the threat lies roughly along the
// direction to the node.
bool AINodeValidatorPathBlocked::Evaluate( uint32 dwFilteredStatusFlags, const LTVector& vNodePos, const LTVector& vAIPos, const LTVector& vThreatPos ) const
{
	// Test not requested — node is acceptable.
	if( !( dwFilteredStatusFlags & kNodeStatus_ThreatBlockingPath ) )
	{
		return true;
	}

	// Threat far enough away that it cannot block the path.
	if ( vAIPos.DistSqr(vThreatPos) >= g_pAIDB->GetAIConstantsRecord()->fThreatTooCloseDistanceSqr )
	{
		return true;
	}

	// Blocked when the threat direction and node direction roughly coincide
	// (dot above the 60-degree threshold).
	LTVector vToThreat = vThreatPos - vAIPos;
	LTVector vToNode = vNodePos - vAIPos;
	return vToThreat.Dot( vToNode ) <= c_fFOV60;
}
// Per-frame update for a bouncing debris chunk: integrates gravity, moves the
// chunk, reflects its velocity off whatever the world segment test hits (with
// 30% energy loss), and plays/cleans up the impact sound.
bool CLTBBouncyChunkFX::Update(float tmFrameTime)
{
	// Base class update first
	if (!CBaseFX::Update(tmFrameTime))
		return false;

	// release the impact sound handle once it has finished playing
	if ((m_hImpactSound) && (m_pLTClient->IsDone(m_hImpactSound)))
	{
		m_pLTClient->SoundMgr()->KillSound(m_hImpactSound);
		m_hImpactSound = NULL;
	}

	// Set the object scale
	LTVector vScale(m_scale, m_scale, m_scale);
	m_pLTClient->SetObjectScale(m_hBouncyChunk, &vScale);

	LTVector vCur;
	m_pLTClient->GetObjectPos(m_hBouncyChunk, &vCur);

	// Compute the new position of the chunk
	LTVector vNew = vCur;
	vNew += m_vVel * tmFrameTime;

	// simple Euler integration of gravity
	m_vVel += GetProps()->m_vGravity * tmFrameTime;

	// Move the object and collide against the world
	ClientIntersectQuery ciq;
	ClientIntersectInfo cii;
	ciq.m_From = vCur;
	ciq.m_To = vNew;

	if (m_pLTClient->IntersectSegment(&ciq, &cii))
	{
		// nudge off the surface along the normal to avoid re-penetration
		vNew = cii.m_Point + cii.m_Plane.m_Normal;
		vCur = vNew;

		// Compute the reflected velocity: R = 2N(N.L) - L with L = -velocity,
		// then renormalize and keep 70% of the incoming speed
		LTVector N = cii.m_Plane.m_Normal;
		LTVector L = m_vVel;
		L.x = -L.x;
		L.y = -L.y;
		L.z = -L.z;

		LTVector vReflected = N * 2.0f;
		vReflected *= (N.Dot(L));
		vReflected -= L;
		vReflected.Norm();
		vReflected *= (m_vVel.Mag() * 0.7f);

		m_vVel = vReflected;

		const char *sImpactSound = GetProps()->m_sImpactSound;

		// a leading '.' in the sound name means "no sound" — presumably a
		// sentinel used by the FX property system; TODO confirm
		if (sImpactSound[0] != '.')
		{
			// Play the bounce sound
			PlaySoundInfo psi;
			memset(&psi, 0, sizeof(PlaySoundInfo));
			psi.m_dwFlags = PLAYSOUND_GETHANDLE | PLAYSOUND_CTRL_VOL | PLAYSOUND_CLIENT | PLAYSOUND_TIME | PLAYSOUND_3D | PLAYSOUND_REVERB;
			psi.m_nVolume = 50;
			strcpy(psi.m_szSoundName, GetProps()->m_sImpactSound);
			psi.m_nPriority = 0;
			psi.m_vPosition = m_vPos;
			psi.m_fInnerRadius = 100;
			psi.m_fOuterRadius = 300;

			// only one impact sound at a time
			if (!m_hImpactSound)
			{
				if (m_pLTClient->SoundMgr()->PlaySound(&psi, m_hImpactSound) == LT_OK)
				{
					m_hImpactSound = psi.m_hSound;
				}
			}
		}
	}

	m_pLTClient->SetObjectPos(m_hBouncyChunk, &vNew);
	m_pLTClient->SetObjectColor(m_hBouncyChunk, m_red, m_green, m_blue, m_alpha);
	m_pLTClient->SetObjectPos(m_hObject, &vNew);

	// Success !!
	return true;
}
// Initializes the connection between two adjacent AI spatial volumes:
// computes the 2D (xz) intersection rectangle, picks a connection height from
// the vertical overlap of the two volumes, derives the connection line, its
// perpendicular (pointing into pThis), and splits the line into 48-unit
// "gates" used for traffic occupancy. Returns LTFALSE when the shared edge is
// shorter than one gate (invalid neighbor).
LTBOOL AISpatialNeighbor::Init(AISpatialRepresentation* pThis, AISpatialRepresentation* pNeighbor)
{
	m_pVolume = pNeighbor;

	// Compute the 2d intersection of the two volumes, and compute important
	// things about the geometry of the connection

	LTVector vFrontLeft(0,0,0);
	LTVector vFrontRight(0,0,0);
	LTVector vBackLeft(0,0,0);
	LTVector vBackRight(0,0,0);

	// intersection rectangle corners in the xz plane (y filled in later)
	vFrontLeft.x = Max<LTFLOAT>(pThis->GetFrontTopLeft().x, pNeighbor->GetFrontTopLeft().x);
	vFrontLeft.z = Min<LTFLOAT>(pThis->GetFrontTopLeft().z, pNeighbor->GetFrontTopLeft().z);

	vFrontRight.x = Min<LTFLOAT>(pThis->GetFrontTopRight().x, pNeighbor->GetFrontTopRight().x);
	vFrontRight.z = Min<LTFLOAT>(pThis->GetFrontTopRight().z, pNeighbor->GetFrontTopRight().z);

	vBackLeft.x = Max<LTFLOAT>(pThis->GetBackTopLeft().x, pNeighbor->GetBackTopLeft().x);
	vBackLeft.z = Max<LTFLOAT>(pThis->GetBackTopLeft().z, pNeighbor->GetBackTopLeft().z);

	vBackRight.x = Min<LTFLOAT>(pThis->GetBackTopRight().x, pNeighbor->GetBackTopRight().x);
	vBackRight.z = Max<LTFLOAT>(pThis->GetBackTopRight().z, pNeighbor->GetBackTopRight().z);

	// We know connection position (the center of the intersection) easily.
	m_vConnectionPos = (vFrontLeft+vFrontRight+vBackLeft+vBackRight)/4.0f;

	// We need y for vertical movement
	// shorthand for the top/bottom heights of volume A (pThis) and B (pNeighbor)
#define _A_b pThis->GetFrontBottomRight().y
#define _A_t pThis->GetFrontTopRight().y
#define _B_b pNeighbor->GetFrontBottomRight().y
#define _B_t pNeighbor->GetFrontTopRight().y

	// case analysis over the vertical overlap of the two volumes: pick the
	// midpoint of the overlapping span (or the shared boundary when one volume
	// sits entirely above the other)
	if ( (_A_t >= _B_t) && (_A_t >= _B_b) && (_A_b >= _B_t) && (_A_b >= _B_b) )
	{
		m_vConnectionPos.y = _A_b; // or _B_t
	}
	else if ( (_A_t <= _B_t) && (_A_t <= _B_b) && (_A_b <= _B_t) && (_A_b <= _B_b) )
	{
		m_vConnectionPos.y = _A_t; // or _B_b
	}
	else if ( (_A_t >= _B_t) && (_A_t >= _B_b) && (_A_b <= _B_t) && (_A_b >= _B_b) )
	{
		m_vConnectionPos.y = (_A_b + _B_t)/2.0f;
	}
	else if ( (_A_t <= _B_t) && (_A_t >= _B_b) && (_A_b <= _B_t) && (_A_b <= _B_b) )
	{
		m_vConnectionPos.y = (_A_t + _B_b)/2.0f;
	}
	else if ( (_A_t >= _B_t) && (_A_t >= _B_b) && (_A_b <= _B_t) && (_A_b <= _B_b) )
	{
		m_vConnectionPos.y = (_B_b + _B_t)/2.0f;
	}
	else if ( (_A_t <= _B_t) && (_A_t >= _B_b) && (_A_b <= _B_t) && (_A_b >= _B_b) )
	{
		m_vConnectionPos.y = (_A_b + _A_t)/2.0f;
	}
	else
	{
		// no case matched — flag with an impossible height and report the error
		m_vConnectionPos.y = -float(INT_MAX);
		DANGER(g_pLTServer, blong);
	}

	// Find the endpoints of the line across the connection, and the vector perpendicular to this
	// (which diagonal of the intersection rect to use depends on which of the
	// neighbor's corners lie inside this volume)
	if ( pThis->InsideMasked(pNeighbor->GetFrontTopLeft(), eAxisAll) ||
		 pThis->InsideMasked(pNeighbor->GetBackTopRight(), eAxisAll) ||
		 pThis->InsideMasked(pNeighbor->GetFrontBottomLeft(), eAxisAll) ||
		 pThis->InsideMasked(pNeighbor->GetBackBottomRight(), eAxisAll) )
	{
		m_avConnectionEndpoints[0] = vFrontRight + LTVector(0, m_vConnectionPos.y, 0);
		m_avConnectionEndpoints[1] = vBackLeft + LTVector(0, m_vConnectionPos.y, 0);

		m_vConnectionPerpDir = vFrontRight - vBackLeft;

		m_vConnectionDir = m_avConnectionEndpoints[1] - m_avConnectionEndpoints[0];
		m_vConnectionDir.y = 0.0f;
		m_fConnectionLength = VEC_MAG(m_vConnectionDir);
		m_vConnectionDir.Normalize();
	}
	else
	{
		m_avConnectionEndpoints[0] = vFrontLeft + LTVector(0, m_vConnectionPos.y, 0);
		m_avConnectionEndpoints[1] = vBackRight + LTVector(0, m_vConnectionPos.y, 0);

		m_vConnectionPerpDir = vFrontLeft - vBackRight;

		m_vConnectionDir = m_avConnectionEndpoints[1] - m_avConnectionEndpoints[0];
		m_vConnectionDir.y = 0.0f;
		m_fConnectionLength = VEC_MAG(m_vConnectionDir);
		m_vConnectionDir.Normalize();
	}

	m_vConnectionMidpoint = m_avConnectionEndpoints[0] + ( m_vConnectionDir * ( m_fConnectionLength * 0.5f ) );

	// swap x and z to rotate the direction 90 degrees in the xz plane
	// (presumably valid because the connection is axis-aligned — see RoundVector below)
	LTFLOAT fTemp = m_vConnectionPerpDir[0];
	m_vConnectionPerpDir[0] = m_vConnectionPerpDir[2];
	m_vConnectionPerpDir[2] = fTemp;
	m_vConnectionPerpDir.Normalize();

	// Ensure that perp dir is axis-aligned.
	RoundVector( m_vConnectionPerpDir );

	// Make sure it points into this volume
	LTVector vThisCenter = (pThis->GetFrontTopLeft()+pThis->GetBackTopRight())/2.0f;
	LTVector vThisCenterDir = vThisCenter - m_vConnectionPos;
	vThisCenterDir.y = 0;
	vThisCenterDir.Normalize();

	if ( vThisCenterDir.Dot(m_vConnectionPerpDir) < 0.0f )
	{
		m_vConnectionPerpDir = -m_vConnectionPerpDir;
	}

	// one gate per 48 units of connection length
	m_cGates = (uint32)(m_fConnectionLength/48.0f);

	// Check for invalid neighbors.
	if(m_cGates == 0)
	{
		AIError("Volume has Invalid Neighbor %s -> %s. Connection < 48 units!", pThis->GetName(), pNeighbor->GetName() );
		return LTFALSE;
	}

	// all gates start unoccupied
	m_vecfGateOccupancy.resize(m_cGates);
	for ( uint32 iGate = 0 ; iGate < m_cGates ; iGate++ )
	{
		m_vecfGateOccupancy[iGate] = 0.0f;
	}

	// classify the connection orientation and which side of it the neighbor sits on
	if( m_avConnectionEndpoints[0].z == m_avConnectionEndpoints[1].z )
	{
		m_eVolumeConnectionType = eVolumeConnectionTypeHorizontal;
		if( m_pVolume->GetCenter().z < m_vConnectionPos.z )
		{
			m_eVolumeConnectionLocation = eVolumeConnectionLocationFront;
		}
		else
		{
			m_eVolumeConnectionLocation = eVolumeConnectionLocationBack;
		}
	}
	else
	{
		m_eVolumeConnectionType = eVolumeConnectionTypeVertical;
		if( m_pVolume->GetCenter().x < m_vConnectionPos.x )
		{
			m_eVolumeConnectionLocation = eVolumeConnectionLocationRight;
		}
		else
		{
			m_eVolumeConnectionLocation = eVolumeConnectionLocationLeft;
		}
	}

	//g_pLTServer->CPrint("cxn @ %f,%f,%f in %f,%f,%f : %f,%f,%f",
	//	EXPANDVEC(m_vConnectionPos), EXPANDVEC(vThisCenter), EXPANDVEC(m_vConnectionPerpDir));

	return LTTRUE;
}
//called to handle updating of a batch of particles given the appropriate properties
//
// Parameters:
//   tmFrame       - frame time step (seconds — TODO confirm units)
//   vGravity      - gravity acceleration applied to particle velocities
//   fFrictionCoef - per-second velocity damping base (raised to the time step)
//   tObjTrans     - current transform of the owning object
//
// Iterates the particle list in reverse. Special "batch marker" particles do
// not render; when one expires it switches the gravity/friction/time-step
// values used for the particles that follow it in the iteration. The particle
// AABB is accumulated along the way and pushed to the renderer at the end.
void CParticleSystemGroup::UpdateParticles(float tmFrame, const LTVector& vGravity, float fFrictionCoef, const LTRigidTransform& tObjTrans)
{
	LTASSERT(m_pProps, "Error: Called UpdateParticles on an uninitialized particle group");

	//track our performance
	CTimedSystemBlock TimingBlock(g_tsClientFXParticles);

	//get an iterator to our list of particles
	CParticleReverseIterator itParticles = m_Particles.GetReverseIterator();

	//bail if we have no particles
	if(itParticles.IsDone())
		return;

	//find the coefficient of restitution to use for these particles in case they bounce
	float fCOR = m_pProps->m_fBounceStrength;

	//do our particles have infinite lifetime?
	bool bInfiniteLife = m_pProps->m_bInfiniteLife;

	//initialize our particle bounding box to extreme extents
	static const float kfInfinity = FLT_MAX;
	LTVector vMin = LTVector(kfInfinity, kfInfinity, kfInfinity);
	LTVector vMax = LTVector(-kfInfinity, -kfInfinity, -kfInfinity);

	LTVector vDefaultGravity = vGravity * tmFrame;
	float fDefaultFriction = powf(fFrictionCoef, tmFrame);

	//the current gravity and friction for us to use
	LTVector vCurrGravity = vDefaultGravity;
	float fCurrFriction = fDefaultFriction;
	float fCurrUpdateTime = tmFrame;

	//we now need to handle updating the particles. For performance reasons, this is broken apart
	//into two update loops, one that handles bouncing/splat, another that doesn't
	if(m_nNumRayTestParticles == 0)
	{
		//this is the non-bouncing update loop
		while(!itParticles.IsDone())
		{
			SParticle* pParticle = (SParticle*)itParticles.GetParticle();

			//update the lifetime
			pParticle->m_fLifetime -= fCurrUpdateTime;

			// Check for expiration
			if( pParticle->m_fLifetime <= 0.0f )
			{
				if(pParticle->m_nUserData & PARTICLE_BATCH_MARKER)
				{
					//restore our defaults
					if(pParticle->m_nUserData & PARTICLE_DEFAULT_BATCH)
					{
						//restore our defaults
						vCurrGravity = vDefaultGravity;
						fCurrFriction = fDefaultFriction;
						fCurrUpdateTime = tmFrame;
					}
					else
					{
						//compute new values for us to use
						vCurrGravity = vGravity * pParticle->m_fTotalLifetime;
						fCurrFriction = powf(fFrictionCoef, pParticle->m_fTotalLifetime);
						fCurrUpdateTime = pParticle->m_fTotalLifetime;
					}

					//do the direct remove (we know batch markers don't have bounce or splat)
					itParticles = m_Particles.RemoveParticle(itParticles);
					continue;
				}
				else if(bInfiniteLife)
				{
					//this particle has died, but resurrect it since it lives forever
					pParticle->m_fLifetime = pParticle->m_fTotalLifetime - fmodf(-pParticle->m_fLifetime, pParticle->m_fTotalLifetime);
				}
				else
				{
					//remove the dead particle (can do direct version since we know we don't have splat or bounce)
					itParticles = m_Particles.RemoveParticle(itParticles);
					continue;
				}
			}

			// Give the particle an update

			//update the velocity, applying gravity and friction
			pParticle->m_Velocity = pParticle->m_Velocity * fCurrFriction + vCurrGravity;
			pParticle->m_Pos += pParticle->m_Velocity * fCurrUpdateTime;

			// Update the angle if appropriate
			pParticle->m_fAngle += pParticle->m_fAngularVelocity * fCurrUpdateTime;

			//extend the bounding box
			vMin.Min(pParticle->m_Pos);
			vMax.Max(pParticle->m_Pos);

			//streaking particles also extend the box by their trailing point
			if(m_pProps->m_bStreak)
			{
				LTVector vStreakPt = pParticle->m_Pos - pParticle->m_Velocity * m_pProps->m_fStreakScale;
				vMin.Min(vStreakPt);
				vMax.Max(vStreakPt);
			}

			//and move onto the next particle
			itParticles.Prev();
		}
	}
	else
	{
		//this is the bouncing/splat update loop
		IntersectQuery iQuery;
		IntersectInfo iInfo;

		//get the main world that we are going to test against
		HOBJECT hMainWorld = g_pLTClient->GetMainWorldModel();

		//cache the inverse object transform
		LTRigidTransform tInvObjTrans = tObjTrans.GetInverse();

		while(!itParticles.IsDone())
		{
			SParticle* pParticle = (SParticle*)itParticles.GetParticle();

			//update the lifetime
			pParticle->m_fLifetime -= fCurrUpdateTime;

			// Check for expiration
			if( pParticle->m_fLifetime <= 0.0f )
			{
				if(pParticle->m_nUserData & PARTICLE_BATCH_MARKER)
				{
					//restore our defaults
					if(pParticle->m_nUserData & PARTICLE_DEFAULT_BATCH)
					{
						//restore our defaults
						vCurrGravity = vDefaultGravity;
						fCurrFriction = fDefaultFriction;
						fCurrUpdateTime = tmFrame;
					}
					else
					{
						//compute new values for us to use
						vCurrGravity = vGravity * pParticle->m_fTotalLifetime;
						fCurrFriction = powf(fFrictionCoef, pParticle->m_fTotalLifetime);
						fCurrUpdateTime = pParticle->m_fTotalLifetime;
					}

					//do the direct remove (we know batch markers don't have bounce or splat)
					itParticles = m_Particles.RemoveParticle(itParticles);
					continue;
				}
				else if(bInfiniteLife)
				{
					//this particle has died, but resurrect it since it lives forever
					pParticle->m_fLifetime = pParticle->m_fTotalLifetime - fmodf(-pParticle->m_fLifetime, pParticle->m_fTotalLifetime);
				}
				else
				{
					//remove the dead particle (can't do direct version since it might have splat or bounce)
					itParticles = RemoveParticle(itParticles);
					continue;
				}
			}

			// Give the particle an update

			//update the velocity, applying gravity and friction
			pParticle->m_Velocity = pParticle->m_Velocity * fCurrFriction + vCurrGravity;

			// Update the angle if appropriate
			pParticle->m_fAngle += pParticle->m_fAngularVelocity * fCurrUpdateTime;

			//determine where the particle should be moving to
			LTVector vDestPos = pParticle->m_Pos + pParticle->m_Velocity * fCurrUpdateTime;

			//we now need to compute the new position of the particle
			if(pParticle->m_nUserData & (PARTICLE_BOUNCE | PARTICLE_SPLAT))
			{
				LTVector vParticlePos = pParticle->m_Pos;
				LTVector vParticleDest = vDestPos;

				//do all intersections in world space
				if(m_pProps->m_bObjectSpace)
				{
					tObjTrans.Transform(pParticle->m_Pos, vParticlePos);
					tObjTrans.Transform(vDestPos, vParticleDest);
				}

				iQuery.m_From = vParticlePos;
				iQuery.m_To = vParticleDest;
				if( g_pLTClient->IntersectSegmentAgainst( iQuery, &iInfo, hMainWorld ) )
				{
					//handle bounce
					if(pParticle->m_nUserData & PARTICLE_BOUNCE)
					{
						//move our particle to the position of the intersection, but offset based upon
						//the normal slightly to avoid tunnelling
						vDestPos = iInfo.m_Point + iInfo.m_Plane.m_Normal * 0.1f;

						//and handle transforming back into object space if appropriate
						if(m_pProps->m_bObjectSpace)
						{
							vDestPos = tInvObjTrans * vDestPos;
						}

						LTVector& vVel = pParticle->m_Velocity;

						LTVector vNormal = iInfo.m_Plane.m_Normal;
						if(m_pProps->m_bObjectSpace)
						{
							vNormal = tInvObjTrans.m_rRot.RotateVector(vNormal);
						}

						//reflect the velocity over the normal
						vVel -= vNormal * (2.0f * vVel.Dot(vNormal));

						//apply the coefficient of restitution
						vVel *= fCOR;
					}

					//handle splat
					if(pParticle->m_nUserData & PARTICLE_SPLAT)
					{
						//alright, we now need to create a splat effect

						//create a random rotation around the plane that we hit
						LTRotation rSplatRot(iInfo.m_Plane.m_Normal, LTVector(0.0f, 1.0f, 0.0f));
						rSplatRot.Rotate(iInfo.m_Plane.m_Normal, GetRandom(0.0f, MATH_TWOPI));
						LTRigidTransform tSplatTrans(iInfo.m_Point, rSplatRot);

						//now handle if we hit an object, we need to convert spaces and set that as our parent
						if(iInfo.m_hObject)
						{
							//convert the transform into a relative object space transform
							LTRigidTransform tHitObjTrans;
							g_pLTClient->GetObjectTransform(iInfo.m_hObject, &tHitObjTrans);

							tSplatTrans = tHitObjTrans.GetInverse() * tSplatTrans;
						}

						//and create the actual new object
						CLIENTFX_CREATESTRUCT CreateStruct("", 0, iInfo.m_hObject, tSplatTrans);
						CreateNewFX(m_pFxMgr, m_pProps->m_pszSplatEffect, CreateStruct, true);

						//we need to kill the particle
						itParticles = RemoveParticle(itParticles);
						continue;
					}
				}
			}

			//move the particle to the destination position that we calculated
			pParticle->m_Pos = vDestPos;

			//and update our extents box to match accordingly
			vMin.Min(pParticle->m_Pos);
			vMax.Max(pParticle->m_Pos);

			if(m_pProps->m_bStreak)
			{
				LTVector vStreakPt = pParticle->m_Pos - pParticle->m_Velocity * m_pProps->m_fStreakScale;
				vMin.Min(vStreakPt);
				vMax.Max(vStreakPt);
			}

			//and move onto our next particle
			itParticles.Prev();
		}
	}

	//handle the case where we didn't hit any particles and therefore need to clear out our min and
	//max (note we only check one component for speed)
	if(vMin.x == kfInfinity)
	{
		vMin = tObjTrans.m_vPos;
		vMax = tObjTrans.m_vPos;
	}

	//expand the bounding box out by the largest particle size times the square root of two
	//to handle rotating particles that are at 45 degrees
	float fExpandAmount = m_pProps->m_fMaxParticlePadding;
	vMin -= LTVector(fExpandAmount, fExpandAmount, fExpandAmount);
	vMax += LTVector(fExpandAmount, fExpandAmount, fExpandAmount);

	//handle the case of when the particles are in object space, in such a case, we need to convert
	//the AABB from object space to an AABB in world space
	if(m_pProps->m_bObjectSpace)
	{
		//transform the object-space AABB to a world space AABB by projecting the dims onto the object basis vectors
		LTVector vRight, vUp, vForward;
		tObjTrans.m_rRot.GetVectors(vRight, vUp, vForward);

		LTVector vObjHalfDims = (vMax - vMin) * 0.5f;
		LTVector vWorldHalfDims;
		vWorldHalfDims.x = vObjHalfDims.Dot(LTVector(fabsf(vRight.x), fabsf(vUp.x), fabsf(vForward.x)));
		vWorldHalfDims.y = vObjHalfDims.Dot(LTVector(fabsf(vRight.y), fabsf(vUp.y), fabsf(vForward.y)));
		vWorldHalfDims.z = vObjHalfDims.Dot(LTVector(fabsf(vRight.z), fabsf(vUp.z), fabsf(vForward.z)));

		LTVector vObjCenter = (vMin + vMax) * 0.5f;
		LTVector vWorldCenter;
		tObjTrans.Transform(vObjCenter, vWorldCenter);

		//save the transformed results
		vMin = vWorldCenter - vWorldHalfDims;
		vMax = vWorldCenter + vWorldHalfDims;
	}

	//update the visibility box of the object
	g_pLTClient->GetCustomRender()->SetVisBoundingBox(m_hCustomRender, vMin - tObjTrans.m_vPos, vMax - tObjTrans.m_vPos);

	//we also need to update the transform of our object to follow
	g_pLTClient->SetObjectTransform(m_hCustomRender, tObjTrans);
}
// Wrap the textures, starting at a poly index.
//
// Recursively flood-fills the texture space of pPoly onto its untouched
// neighbors: each neighbor's P/Q/O texture basis is derived by rotating this
// poly's basis across the shared edge, so the texture appears continuous
// across the surface.  vWrapDir is the preferred walk direction; cExtents
// accumulates the min/max U/V encountered over every wrapped poly.
// Touched flags (m_bTouched) must be cleared by the caller beforehand.
void CRVTrackerTextureWrap::WrapTexture(CTWPolyInfo *pPoly, const CVector &vWrapDir, CTextExtents &cExtents) const
{
	// Mark this poly as wrapped
	pPoly->m_bTouched = TRUE;

	CTexturedPlane& Texture = pPoly->m_pPoly->GetTexture(GetCurrTexture());

	// Get the texture space (O = origin offset, P/Q = U/V axis vectors)
	LTVector vWrapO = Texture.GetO();
	LTVector vWrapP = Texture.GetP();
	LTVector vWrapQ = Texture.GetQ();

	// Get the texture offset projections
	float fWrapOdotP = vWrapO.Dot(vWrapP);
	float fWrapOdotQ = vWrapO.Dot(vWrapQ);

	// Update the texturing extents with the UV of each edge's first point
	for (uint32 nExtentLoop = 0; nExtentLoop < pPoly->m_aEdges.GetSize(); ++nExtentLoop)
	{
		LTVector vEdgePt = pPoly->m_aEdges[nExtentLoop]->m_aPt[0];

		float fCurU = vWrapP.Dot(vEdgePt) - fWrapOdotP;
		float fCurV = vWrapQ.Dot(vEdgePt) - fWrapOdotQ;

		cExtents.m_fMinU = LTMIN(fCurU, cExtents.m_fMinU);
		cExtents.m_fMaxU = LTMAX(fCurU, cExtents.m_fMaxU);
		cExtents.m_fMinV = LTMIN(fCurV, cExtents.m_fMinV);
		cExtents.m_fMaxV = LTMAX(fCurV, cExtents.m_fMaxV);
	}

	CMoArray<uint32> aNeighbors;
	CMoArray<float> aDots;

	// Insert the neighbors into a list in dot-product order (descending, so
	// the neighbor most aligned with the walk direction is wrapped first)
	for (uint32 nNeighborLoop = 0; nNeighborLoop < pPoly->m_aNeighbors.GetSize(); ++nNeighborLoop)
	{
		CTWPolyInfo *pNeighbor = pPoly->m_aNeighbors[nNeighborLoop];
		// Skip edges that don't have a neighbor
		if (!pNeighbor)
			continue;
		// Skip neighbors that are already wrapped
		if (pNeighbor->m_bTouched)
			continue;
		// Get our dot product
		float fCurDot = vWrapDir.Dot(pPoly->m_aEdges[nNeighborLoop]->m_Plane.m_Normal);
		// 0.707 ~= cos(45deg): optionally refuse to walk across edges that
		// bend too far away from the walk direction
		if ((m_bRestrictWalkDir) && (fCurDot < 0.707f))
			continue;
		// Mark this neighbor as touched (to avoid later polygons pushing it onto the stack)
		pNeighbor->m_bTouched = TRUE;
		// Insert it into the list
		// NOTE(review): nInsertLoop is used AFTER the loop below — this
		// relies on legacy MSVC for-scope (/Zc:forScope-); under standard
		// C++ the variable would be out of scope here.  Confirm build flags
		// before porting.
		for (uint32 nInsertLoop = 0; nInsertLoop < aNeighbors.GetSize(); ++nInsertLoop)
		{
			if (fCurDot > aDots[nInsertLoop])
				break;
		}
		aDots.Insert(nInsertLoop, fCurDot);
		aNeighbors.Insert(nInsertLoop, nNeighborLoop);
	}

	// Recurse through its neighbors, best-aligned first
	for (uint32 nWrapLoop = 0; nWrapLoop < aNeighbors.GetSize(); ++nWrapLoop)
	{
		CTWPolyInfo *pNeighbor = pPoly->m_aNeighbors[aNeighbors[nWrapLoop]];
		CTWEdgeInfo *pEdge = pPoly->m_aEdges[aNeighbors[nWrapLoop]];

		//////////////////////////////////////////////////////////////////////////////
		// Wrap this neighbor

		// Create a matrix representing the basis of the polygon in relation to this edge
		LTMatrix mPolyBasis;
		mPolyBasis.SetTranslation(0.0f, 0.0f, 0.0f);
		mPolyBasis.SetBasisVectors(&pEdge->m_vDir, &pPoly->m_pPoly->m_Plane.m_Normal, &pEdge->m_Plane.m_Normal);

		// Create a new basis for the neighbor polygon
		LTMatrix mNeighborBasis;
		LTVector vNeighborForward;
		vNeighborForward = pNeighbor->m_pPoly->m_Plane.m_Normal.Cross(pEdge->m_vDir);
		// Just to be sure..
		vNeighborForward.Norm();
		mNeighborBasis.SetTranslation(0.0f, 0.0f, 0.0f);
		mNeighborBasis.SetBasisVectors(&pEdge->m_vDir, &pNeighbor->m_pPoly->m_Plane.m_Normal, &vNeighborForward);

		// Create a rotation matrix from here to there (~ is matrix transpose,
		// i.e. the inverse of an orthonormal basis)
		LTMatrix mRotation;
		mRotation = mNeighborBasis * ~mPolyBasis;

		// Rotate the various vectors into the neighbor's plane
		LTVector vNewP;
		LTVector vNewQ;
		LTVector vNewDir;
		mRotation.Apply3x3(vWrapP, vNewP);
		mRotation.Apply3x3(vWrapQ, vNewQ);
		mRotation.Apply3x3(vWrapDir, vNewDir);

		// Rotate the texture basis if we're following a path
		if (m_nWrapStyle == k_WrapPath)
		{
			LTVector vNeighborEdgeDir;
			if (GetSimilarEdgeDir(pNeighbor, vNewDir, vNeighborEdgeDir, 0.707f))
			{
				LTMatrix mRotatedNeighbor;
				LTVector vNeighborRight;
				vNeighborRight = vNeighborEdgeDir.Cross(pNeighbor->m_pPoly->m_Plane.m_Normal);
				vNeighborRight.Norm();
				// Make sure we're pointing the right way...
				if (vNeighborRight.Dot(pEdge->m_vDir) < 0.0f)
					vNeighborRight = -vNeighborRight;
				mRotatedNeighbor.SetTranslation(0.0f, 0.0f, 0.0f);
				mRotatedNeighbor.SetBasisVectors(&vNeighborRight, &pNeighbor->m_pPoly->m_Plane.m_Normal, &vNeighborEdgeDir);

				// Build a basis based on an edge from the current polygon
				LTVector vBestPolyEdge;
				GetSimilarEdgeDir(pPoly, vWrapDir, vBestPolyEdge);
				LTVector vPolyRight = vBestPolyEdge.Cross(pNeighbor->m_pPoly->m_Plane.m_Normal);
				vPolyRight.Norm();
				// Make sure we're pointing the right way...
				if (vPolyRight.Dot(pEdge->m_vDir) < 0.0f)
					vPolyRight = -vPolyRight;

				// Build the poly edge matrix
				LTMatrix mPolyEdgeBasis;
				mPolyEdgeBasis.SetTranslation(0.0f, 0.0f, 0.0f);
				mPolyEdgeBasis.SetBasisVectors(&vPolyRight, &pNeighbor->m_pPoly->m_Plane.m_Normal, &vBestPolyEdge);

				// Get a matrix from here to there
				LTMatrix mRotator;
				mRotator = mRotatedNeighbor * ~mPolyEdgeBasis;

				// Rotate the texture basis
				mRotator.Apply3x3(vNewP);
				mRotator.Apply3x3(vNewQ);

				// And use the new edge as the new direction
				vNewDir = vNeighborEdgeDir;
			}

			// Remove skew from vNewP/vNewQ: re-orthogonalize Q against P
			// (Gram-Schmidt step) while preserving both magnitudes
			if ((float)fabs(vNewP.Dot(vNewQ)) > 0.001f)
			{
				float fMagP = vNewP.Mag();
				float fMagQ = vNewQ.Mag();
				vNewQ *= 1.0f / fMagQ;
				vNewP -= vNewQ * vNewQ.Dot(vNewP);
				vNewP.Norm(fMagP);
				vNewQ *= fMagQ;
			}
		}

		// Get the first edge point..
		CVector vEdgePt = pEdge->m_aPt[0];

		// Calculate the texture coordinate at this point (in the old basis)
		float fWrapU = vWrapP.Dot(vEdgePt) - fWrapOdotP;
		float fWrapV = vWrapQ.Dot(vEdgePt) - fWrapOdotQ;

		// Build the new offset so the shared edge point keeps the same UV
		// in the neighbor's rotated basis
		float fNewOdotP = vNewP.Dot(vEdgePt) - fWrapU;
		float fNewOdotQ = vNewQ.Dot(vEdgePt) - fWrapV;
		LTVector vNewO;
		vNewO.Init();
		float fNewPMag = vNewP.MagSqr();
		if (fNewPMag > 0.0f)
			vNewO += vNewP * (fNewOdotP / fNewPMag);
		float fNewQMag = vNewQ.MagSqr();
		if (fNewQMag > 0.0f)
			vNewO += vNewQ * (fNewOdotQ / fNewQMag);

		pNeighbor->m_pPoly->SetTextureSpace(GetCurrTexture(), vNewO, vNewP, vNewQ);

		// Recurse into this neighbor
		WrapTexture(pNeighbor, vNewDir, cExtents);
	}
}
// Top-level segment intersection entry point.
//
// Primes the module-level globals consumed by the per-node callbacks (e.g.
// i_QuickSphereTest2 reads g_VTimesInvVV / g_VPTimesInvVV / g_LineLen), then
// walks the world tree.  Returns true and fills *pInfo if anything was hit.
// NOTE: not reentrant — all intermediate state lives in file-scope globals.
bool i_IntersectSegment(IntersectQuery *pQuery, IntersectInfo *pInfo, WorldTree *pWorldTree)
{
	float InvVV, VP, testMag;

	++g_nIntersectCalls;
	CountAdder cTicks_Intersect(&g_Ticks_Intersect);

	// Init..
	g_pCurQuery = pQuery;
	g_pIntersection = LTNULL;
	g_pWorldIntersection = LTNULL;
	g_hWorldPoly = INVALID_HPOLY;
	g_hModelNode = INVALID_MODEL_NODE;
	g_bProcessNonSolid = !(pQuery->m_Flags & IGNORE_NONSOLID);
	g_bProcessObjects = !!(pQuery->m_Flags & INTERSECT_OBJECTS);
	g_bCheckIfFromPointIsInsideObject = !!(pQuery->m_Flags & CHECK_FROM_POINT_INSIDE_OBJECTS);
	g_bProcessModelObbs = !!(pQuery->m_Flags & INTERSECT_MODELOBBS);
	// Anything farther than the full segment length (plus slack) is ignored
	g_IntersectionBestDistSqr = (pQuery->m_From - pQuery->m_To).MagSqr() + 1.0f;

	// Precalculate stuff to totally accelerate i_QuickSphereTest.
	g_VOrigin = pQuery->m_From;
	g_V = pQuery->m_To - pQuery->m_From;

	// Calc Direction
	g_VDir = g_V.Unit();
	g_LineLen = g_V.Mag();
	g_V /= g_LineLen;	// g_V is now unit length (NaN/INF if the segment was degenerate)

	// Was it too short?  After normalization MagSqr() should be ~1; a value
	// outside [0.5, 2] means the segment was degenerate (zero-length), so the
	// division above produced garbage — bail out.
	testMag = g_V.MagSqr();
	if (testMag < 0.5f || testMag > 2.0f)
	{
		return false;
	}

	// Precompute the parametric-projection terms used by the sphere rejects:
	// t = V.S * (1/V.V) - V.P * (1/V.V)   (V is unit here, so InvVV ~= 1,
	// but it is kept for exactness)
	VP = g_V.Dot(pQuery->m_From);
	InvVV = 1.0f / g_V.MagSqr();
	g_VTimesInvVV = g_V * InvVV;
	g_VPTimesInvVV = VP * InvVV;

	// Choose the poly-level intersection routine up front
	if (pQuery->m_Flags & INTERSECT_HPOLY)
	{
		g_FindIntersectionsFn = i_FindIntersectionsHPoly;
	}
	else
	{
		g_FindIntersectionsFn = i_FindIntersections;
	}

	// Start at the world tree.
	pWorldTree->IntersectSegment((LTVector*)&pQuery->m_From, (LTVector*)&pQuery->m_To, i_ISCallback, LTNULL);

	// If an object was hit, use it!
	if (g_pIntersection)
	{
		pInfo->m_Point = g_IntersectionPos;
		pInfo->m_Plane = g_IntersectionPlane;
		pInfo->m_hObject = (HOBJECT)g_pIntersection;
		pInfo->m_hPoly = g_hWorldPoly;
		pInfo->m_hNode = g_hModelNode;
		// Surface flags are only available for world (BSP) hits
		if (g_pWorldIntersection)
		{
			pInfo->m_SurfaceFlags = g_pWorldIntersection->m_pPoly->GetSurface()->m_TextureFlags;
		}
		else
		{
			pInfo->m_SurfaceFlags = 0;
		}
		return true;
	}
	else
	{
		return false;
	}
}
// ----------------------------------------------------------------------- // // // ROUTINE: CTronPlayerObj::GetDefensePercentage() // // PURPOSE: How much of the attack damage has our defenese prevented // // NOTES: There are actually 2 defense percentages. One is based // on timing, i.e. player's reation speed, animation speed, // etc. The other is based on the player's orientation to // the incoming projectile. If the vector parameter is // specified, BOTH percentages will be computed and the // combined result will be returned. If the vector is NOT // specified, only the timing will be computed. // // ----------------------------------------------------------------------- // float CTronPlayerObj::GetDefensePercentage( LTVector const *pIncomingProjectilePosition /*=0*/) const { if ( TRONPLAYEROBJ_NO_DEFEND == m_cDefendType ) { // not blocking return 0.0f; } // // Do this in 2 passes. The first pass willl determine // the contribution to the defense percentage due to the // timing of the player/animations. The second pass // will add the contribution of the player's orientation. 
// float fDefenseTimingPercentage; float fDefenseOrientationPercentage; // get the weapon AMMO const *pAmmoData = g_pWeaponMgr->GetAmmo( m_cDefendAmmoId ); ASSERT( 0 != pAmmoData ); ASSERT( 0 != pAmmoData->pProjectileFX ); // get the ammo specific data DISCCLASSDATA *pDiscData = dynamic_cast< DISCCLASSDATA* >( pAmmoData->pProjectileFX->pClassData ); ASSERT( 0 != pDiscData ); // // Determine Timing Percentage // switch ( m_cDefendType ) { case MPROJ_START_SWAT_BLOCK: case MPROJ_START_ARM_BLOCK: { // get the current server time float fCurrentServerTime = g_pLTServer->GetTime() * 1000.0f; // make sure we're within the range of the block if ( ( static_cast< int >( fCurrentServerTime ) - m_nDefendServerTimeStarted ) > m_nDefendDuration ) { // nope, the block is over return 0.0f; } // Swat and Arm defenses are similar, so fill out these // variables uniquely (depending on which case we are // handling), then use the common generic math to figure // out the answer. float fMidpointTime; float fMaxStartTime; float fMaxEndTime; float fStartDefendPercentage; float fMaxDefendPercentage; float fEndDefendPercentage; if ( MPROJ_START_SWAT_BLOCK == m_cDefendType ) { // determine at exactly what time the midpoint takes place // NOTE: this is relative time, not absolute fMidpointTime = ( pDiscData->fSwatDefendMidpoint * m_nDefendDuration ); // determine at exactly what time the max starts // NOTE: this is relative time, not absolute fMaxStartTime = fMidpointTime - fMidpointTime * pDiscData->fSwatDefendStartMaxDefendPercentage; // determine at exactly what time the max ends // NOTE: this is relative time, not absolute fMaxEndTime = fMidpointTime + ( ( m_nDefendDuration - fMidpointTime ) * pDiscData->fSwatDefendEndMaxDefendPercentage ); // determine the starting defend percentage fStartDefendPercentage = pDiscData->fSwatDefendStartDefendPercentage; // detecmine the max defend percentage fMaxDefendPercentage = pDiscData->fSwatDefendMaxDefendPercentage; // determine the ending defend 
percentage fEndDefendPercentage = pDiscData->fSwatDefendEndDefendPercentage; } else if ( MPROJ_START_ARM_BLOCK == m_cDefendType ) { // Not implemented yet. The main question I haven't figured // out yet is where does the information come from? fMidpointTime = 0.0f; fMaxStartTime = 0.0f; fMaxEndTime = 0.0f; fStartDefendPercentage = 0.0f; fMaxDefendPercentage = 0.0f; fEndDefendPercentage = 0.0f; } // determine at exactly how much time we've been in the block // NOTE: this is relative time, not absolute float fBlockTime = fCurrentServerTime - m_nDefendServerTimeStarted; if ( ( -MATH_EPSILON <= fBlockTime ) && ( fBlockTime <= fMaxStartTime ) ) { // somewhere on the uprise fDefenseTimingPercentage = fStartDefendPercentage + ( ( ( fMaxDefendPercentage - fStartDefendPercentage ) / fMaxStartTime ) * fBlockTime ); } else if ( ( fMaxStartTime < fBlockTime ) && ( fBlockTime <= fMaxEndTime ) ) { // within the max range fDefenseTimingPercentage = fMaxDefendPercentage; } else if ( ( fMaxEndTime < fBlockTime ) && ( fBlockTime <= m_nDefendDuration ) ) { // somewhere on the downfall fDefenseTimingPercentage = fMaxDefendPercentage - ( ( ( fMaxDefendPercentage - fEndDefendPercentage ) / ( m_nDefendDuration - fMaxEndTime ) ) * ( fBlockTime - fMaxEndTime ) ); } else { // math problem, we should be here if we are outside // the bounds of the defense ASSERT( 0 ); fDefenseTimingPercentage = 0.0f; } } break; case MPROJ_START_HOLD_BLOCK: { // get the current server time float fCurrentServerTime = g_pLTServer->GetTime() * 1000.0f; // determine the starting defend percentage float fStartDefendPercentage = pDiscData->fHoldDefendStartDefendPercentage; // determine the max defend percentage float fMaxDefendPercentage = pDiscData->fHoldDefendMaxDefendPercentage; // determine at exactly how much time we've been in the block // NOTE: this is relative time, not absolute float fBlockTime = fCurrentServerTime - m_nDefendServerTimeStarted; if ( ( -MATH_EPSILON <= fBlockTime ) && ( fBlockTime <= 
m_nDefendDuration ) ) { // somewhere on the uprise fDefenseTimingPercentage = fStartDefendPercentage + ( ( ( fMaxDefendPercentage - fStartDefendPercentage ) / static_cast< float >( m_nDefendDuration ) ) * fBlockTime ); } else if ( m_nDefendDuration < fBlockTime ) { // within the max range fDefenseTimingPercentage = fMaxDefendPercentage; } else { // math problem, we should be here if we are outside // the bounds of the defense ASSERT( 0 ); fDefenseTimingPercentage = 0.0f; } } break; case MPROJ_END_HOLD_BLOCK: { // get the current server time float fCurrentServerTime = g_pLTServer->GetTime() * 1000.0f; // make sure we're within the range of the block if ( ( static_cast< int >( fCurrentServerTime ) - m_nDefendServerTimeStarted ) > m_nDefendDuration ) { // nope, the block is over return 0.0f; } // detecmine the max defend percentage float fMaxDefendPercentage = pDiscData->fHoldDefendMaxDefendPercentage; // determine the ending defend percentage float fEndDefendPercentage = pDiscData->fHoldDefendEndDefendPercentage; // determine at exactly how much time we've been in the block // NOTE: this is relative time, not absolute float fBlockTime = fCurrentServerTime - m_nDefendServerTimeStarted; // somewhere on the downfall fDefenseTimingPercentage = fMaxDefendPercentage - ( ( ( fMaxDefendPercentage - fEndDefendPercentage ) / ( m_nDefendDuration ) ) * ( fBlockTime ) ); } break; default: { // There is some type of block defined that we // are not handling, and we SHOULD be handling it. ASSERT( 0 ); return 0.0f; } break; }; //TODO: skip this section of the camera position is too old? // check if the oriention percentage should be computed if ( 0 == pIncomingProjectilePosition ) { // No vector specifed, there is no way // to compute orientation defense. return fDefenseTimingPercentage; } // // Determine Orientation percentage // // The 3 cases are the same, but they could have different // control values. 
Figure out what the specific variables // are (depending on the specific type of block), then apply // the generic equations. float fOrientationMinDefendPercentage; float fOrientationMaxDefendPercentage; float fOrientationDeadZone; float fOrientationMaxZone; switch ( m_cDefendType ) { case MPROJ_START_SWAT_BLOCK: { fOrientationMinDefendPercentage = pDiscData->fSwatDefendOrientationMinDefendPercentage; fOrientationMaxDefendPercentage = pDiscData->fSwatDefendOrientationMaxDefendPercentage; fOrientationDeadZone = MATH_PI - pDiscData->fSwatDefendOrientationDeadZone; fOrientationMaxZone = pDiscData->fSwatDefendOrientationMaxZone; } break; case MPROJ_START_HOLD_BLOCK: case MPROJ_END_HOLD_BLOCK: { fOrientationMinDefendPercentage = pDiscData->fHoldDefendOrientationMinDefendPercentage; fOrientationMaxDefendPercentage = pDiscData->fHoldDefendOrientationMaxDefendPercentage; fOrientationDeadZone = MATH_PI - pDiscData->fHoldDefendOrientationDeadZone; fOrientationMaxZone = pDiscData->fHoldDefendOrientationMaxZone; } break; case MPROJ_START_ARM_BLOCK: { // Not implemented yet. The main question I haven't figured // out yet is where does the information come from? fOrientationMinDefendPercentage = 0.0f; fOrientationMaxDefendPercentage = 0.0f; fOrientationDeadZone = 0.0f; fOrientationMaxZone = 0.0f; } break; default: { // There is some type of block defined that we // are not handling, and we SHOULD be handling it. 
ASSERT( 0 ); return 0.0f; } break; }; LTRESULT ltResult; LTVector vDefendPos; LTVector vDefendPosToProjectile; // get the player's position ltResult = g_pLTServer->GetObjectPos( m_hObject, &vDefendPos ); ASSERT( LT_OK == ltResult ); // REMOVE THIS CODE FOR FINAL RELEASE, BUT LEAVE FOR TESTING MULTIPLAYER // print a warning if the time is new enough if ( TRONPLAYEROBJ_CLIENT_CAMERA_TIME_OLD_THRESHOLD < ( g_pLTServer->GetTime() - m_nClientCameraOffsetTimeReceivedMS ) ) { g_pLTServer->CPrint( "Client Camera Offset time is low, possible lag\n" ); g_pLTServer->CPrint( " condition that will affect defensive accuracy.\n" ); g_pLTServer->CPrint( " Currnt value is %5.3f units old.\n", ( g_pLTServer->GetTime() - m_nClientCameraOffsetTimeReceivedMS ) ); } // add the camera offset vDefendPos += m_vClientCameraOffset; // find a unit vector from us to the projectile vDefendPosToProjectile = *pIncomingProjectilePosition - ( m_vClientCameraOffset + vDefendPos ); vDefendPosToProjectile.y = 0; vDefendPosToProjectile.Normalize(); // determine a forward vector reprensenting the direction the // player is facing LTRotation vPlayerViewOrientation; ltResult = g_pLTServer->GetObjectRotation( m_hObject, &vPlayerViewOrientation ); ASSERT( LT_OK == ltResult ); LTVector vPlayerViewForward = vPlayerViewOrientation.Forward(); vPlayerViewForward.y = 0; vPlayerViewForward.Normalize(); float fDotProd = vPlayerViewForward.Dot( vDefendPosToProjectile ); // find the angle between the two vectors float fDefenseAngle = ltacosf( vPlayerViewForward.Dot( vDefendPosToProjectile ) ); if ( ( MATH_EPSILON <= fDefenseAngle) && ( fDefenseAngle <= fOrientationMaxZone ) ) { // it's within the max zone fDefenseOrientationPercentage = fOrientationMaxDefendPercentage; } else if ( ( fOrientationMaxZone < fDefenseAngle) && ( fDefenseAngle <= fOrientationDeadZone ) ) { // it's within the dropoff range fDefenseOrientationPercentage = fOrientationMaxDefendPercentage + ( fDefenseAngle - fOrientationMaxZone ) * ( 
fOrientationMinDefendPercentage - fOrientationMaxDefendPercentage ) / ( fOrientationDeadZone - fOrientationMaxZone ); } else if ( fOrientationDeadZone <= fDefenseAngle) { // it's within the dead zone fDefenseOrientationPercentage = 0.0f; } // // Final Defense Result // float fFinalDefensePercentage = fDefenseTimingPercentage - fDefenseOrientationPercentage; return fFinalDefensePercentage; }
//performs a ray intersection. This will return false if nothing is hit, or true if something //is. If something is hit, it will fill out the intersection property and the alignment //vector according to the properties that the user has setup. If an object is hit, it will //fill out the hit object, and specify the output transform relative to the hit object's space bool CCreateRayFX::DetermineIntersection( const LTVector& vObjPos, const LTVector& vObjForward, HOBJECT& hOutObj, LTRigidTransform& tOutTrans) { //default our output parameters to reasonable values hOutObj = NULL; tOutTrans.Init(); //perform a ray intersection from our position along our Y axis and see if we hit anything. //If we do, create the effect there facing along the specified vector, randomly twisted //find the starting and ending points LTVector vStart = vObjPos + vObjForward * GetProps()->m_fMinDist; LTVector vEnd = vObjPos + vObjForward * GetProps()->m_fMaxDist; //we now need to perform an intersection using these endpoints and see if we hit anything IntersectQuery iQuery; IntersectInfo iInfo; iQuery.m_Flags = INTERSECT_HPOLY | IGNORE_NONSOLID; iQuery.m_FilterFn = NULL; iQuery.m_pUserData = NULL; iQuery.m_From = vStart; iQuery.m_To = vEnd; if( !g_pLTClient->IntersectSegment( iQuery, &iInfo ) ) { //we didn't intersect anything, don't create an effect return false; } //determine if we hit the sky if(IsSkyPoly(iInfo.m_hPoly)) { //never create an effect on the sky return false; } //we now need to determine the normal of intersection LTVector vHitNormal = iInfo.m_Plane.Normal(); //we hit something, so we can now at least determine the point of intersection tOutTrans.m_vPos = iInfo.m_Point + vHitNormal * GetProps()->m_fOffset; //the primary vector we wish to align to LTVector vAlignment; //determine what our dominant axis should be switch (GetProps()->m_eAlignment) { default: case CCreateRayProps::eAlign_ToSource: vAlignment = -vObjForward; break; case CCreateRayProps::eAlign_Normal: vAlignment = 
vHitNormal; break; case CCreateRayProps::eAlign_Outgoing: vAlignment = vObjForward - (2.0f * vObjForward.Dot(vHitNormal)) * vHitNormal; vAlignment.Normalize(); break; case CCreateRayProps::eAlign_ToViewer: { LTVector vCameraPos; g_pLTClient->GetObjectPos(m_pFxMgr->GetCamera(), &vCameraPos); vAlignment = vCameraPos - tOutTrans.m_vPos; vAlignment.Normalize(); } break; } //and generate a randomly twisted orientation around our dominant axis tOutTrans.m_rRot = LTRotation(vAlignment, LTVector(0.0f, 1.0f, 0.0f)); tOutTrans.m_rRot.Rotate(vAlignment, GetRandom(0.0f, MATH_CIRCLE)); //now if we hit an object, make sure to store that object, and convert the transform into //the object's space if(iInfo.m_hObject && (g_pLTClient->Physics()->IsWorldObject(iInfo.m_hObject) == LT_NO)) { //store this as our hit object hOutObj = iInfo.m_hObject; //and convert the transform into that object's space LTRigidTransform tObjTrans; g_pLTClient->GetObjectTransform(hOutObj, &tObjTrans); tOutTrans = tOutTrans.GetInverse() * tOutTrans; } //success return true; }
static void d3d_DrawRotatableSprite(const ViewParams& Params, SpriteInstance *pInstance, SharedTexture *pShared) { if(!d3d_SetTexture(pShared, 0, eFS_SpriteTexMemory)) return; float fWidth = (float)((RTexture*)pShared->m_pRenderData)->GetBaseWidth(); float fHeight = (float)((RTexture*)pShared->m_pRenderData)->GetBaseHeight(); //cache the object position LTVector vPos = pInstance->GetPos(); LTMatrix mRotation; d3d_SetupTransformation(&vPos, (float*)&pInstance->m_Rotation, &pInstance->m_Scale, &mRotation); //get our basis vectors LTVector vRight, vUp, vForward; mRotation.GetBasisVectors(&vRight, &vUp, &vForward); //scale the vectors to be the appropriate size vRight *= fWidth; vUp *= fHeight; // Setup the points. RGBColor Color; d3d_GetSpriteColor(pInstance, &Color); uint32 nColor = Color.color; CSpriteVertex SpriteVerts[4]; SpriteVerts[0].SetupVert(vPos + vUp - vRight, nColor, 0.0f, 0.0f); SpriteVerts[1].SetupVert(vPos + vUp + vRight, nColor, 1.0f, 0.0f); SpriteVerts[2].SetupVert(vPos + vRight - vUp, nColor, 1.0f, 1.0f); SpriteVerts[3].SetupVert(vPos - vRight - vUp, nColor, 0.0f, 1.0f); //figure out our final vertices to use CSpriteVertex *pPoints; uint32 nPoints; CSpriteVertex ClippedSpriteVerts[40 + 5]; if(pInstance->m_ClipperPoly != INVALID_HPOLY) { if(!d3d_ClipSprite(pInstance, pInstance->m_ClipperPoly, &pPoints, &nPoints, ClippedSpriteVerts)) { return; } } else { pPoints = SpriteVerts; nPoints = 4; } if((pInstance->m_Flags & FLAG_SPRITEBIAS) && !(pInstance->m_Flags & FLAG_REALLYCLOSE)) { //adjust the points for(uint32 nCurrPt = 0; nCurrPt < nPoints; nCurrPt++) { //get the sprite vertex that we are modifying LTVector& vPt = SpriteVerts[nCurrPt].m_Vec; //find a point relative to the viewer position LTVector vPtRelCamera = vPt - Params.m_Pos; //determine the distance from the camera float fZ = vPtRelCamera.Dot(Params.m_Forward); if(fZ <= NEARZ) continue; //find the bias, up to, but not including the near plane float fBiasDist = SPRITE_POSITION_ZBIAS; if((fZ + 
fBiasDist) < NEARZ) fBiasDist = NEARZ - fZ; //now adjust our vectors accordingly so that we can move it forward //but have it be the same size float fScale = 1 + fBiasDist / fZ; vPt = Params.m_Right * vPtRelCamera.Dot(Params.m_Right) * fScale + Params.m_Up * vPtRelCamera.Dot(Params.m_Up) * fScale + (fZ + fBiasDist) * Params.m_Forward + Params.m_Pos; } } LTEffectImpl* pEffect = (LTEffectImpl*)LTEffectShaderMgr::GetSingleton().GetEffectShader(pInstance->m_nEffectShaderID); if(pEffect) { pEffect->UploadVertexDeclaration(); ID3DXEffect* pD3DEffect = pEffect->GetEffect(); if(pD3DEffect) { RTexture* pTexture = (RTexture*)pShared->m_pRenderData; pD3DEffect->SetTexture("texture0", pTexture->m_pD3DTexture); i_client_shell->OnEffectShaderSetParams(pEffect, NULL, NULL, LTShaderDeviceStateImp::GetSingleton()); UINT nPasses = 0; pD3DEffect->Begin(&nPasses, 0); for(UINT i = 0; i < nPasses; ++i) { pD3DEffect->BeginPass(i); D3D_CALL(PD3DDEVICE->DrawPrimitiveUP(D3DPT_TRIANGLEFAN, nPoints-2, pPoints, sizeof(CSpriteVertex))); pD3DEffect->EndPass(); } pD3DEffect->End(); } } else { D3D_CALL(PD3DDEVICE->SetVertexShader(NULL)); D3D_CALL(PD3DDEVICE->SetFVF(SPRITEVERTEX_FORMAT)); D3D_CALL(PD3DDEVICE->DrawPrimitiveUP(D3DPT_TRIANGLEFAN, nPoints-2, pPoints, sizeof(CSpriteVertex))); } d3d_DisableTexture(0); }
bool CTrackedNodeMgr::SetNodeConstraints( HTRACKEDNODE ID, const LTVector& vMovConeAxis, const LTVector& vMovConeUp, float fXDiscomfortAngle, float fYDiscomfortAngle, float fXMaxAngle, float fYMaxAngle, float fMaxAngVel ) { //sanity checks if(!CheckValidity(ID)) return false; //ok, we have a valid ID, so let us setup the parameters CTrackedNode* pNode = (CTrackedNode*)ID; //see if the up and forward vectors are valid LTVector vForward = vMovConeAxis; LTVector vUp = vMovConeUp; //ensure proper scale vForward.Normalize(); vUp.Normalize(); //ensure they form a valid space (and not a plane) if(vUp.Dot(vForward) > 0.99f) { //not valid, we need to try a different up, our preference is the world up vUp.Init(0.0f, 1.0f, 0.0f); if(vUp.Dot(vForward) > 0.99f) { //ok, forward is already taking the up....so, tilt us back vUp.Init(0.0f, 0.0f, -1.0f); } } //now generate the right, and ensure orthogonality LTVector vRight = vForward.Cross(vUp); vUp = vRight.Cross(vForward); vRight.Normalize(); vUp.Normalize(); //setup this as the basis space pNode->m_mInvTargetTransform.SetBasisVectors(&vRight, &vUp, &vForward); pNode->m_mInvTargetTransform.Transpose(); //we need to make sure that their angular constraints are valid (meaning that they are positive and //less than 90 deg) fXMaxAngle = LTCLAMP(fXMaxAngle, 0.0f, DEG2RAD(89.0f)); fYMaxAngle = LTCLAMP(fYMaxAngle, 0.0f, DEG2RAD(89.0f)); fXDiscomfortAngle = LTCLAMP(fXDiscomfortAngle, 0.0f, fXMaxAngle); fYDiscomfortAngle = LTCLAMP(fYDiscomfortAngle, 0.0f, fYMaxAngle); //now precompute the tangent of those values (used for finding the height of the cone created which //is used in the threshold determination code) pNode->m_fTanXDiscomfort = (float)tan(fXDiscomfortAngle); pNode->m_fTanYDiscomfort = (float)tan(fYDiscomfortAngle); pNode->m_fTanXThreshold = (float)tan(fXMaxAngle); pNode->m_fTanYThreshold = (float)tan(fYMaxAngle); //handle setting up the maximum angular velocity pNode->m_fMaxAngVel = (float)fabs(fMaxAngVel); //and we are ready 
for primetime return true; }
// ----------------------------------------------------------------------- //
//
//	ROUTINE:	CLightCycleMgr::UpdateLightCycleTrail
//
//	PURPOSE:	Updates a light cyclist's trail with the cyclist's new
//				position.  Trails are axis-aligned polylines in the XZ
//				plane: while the cyclist keeps moving along the same axis
//				the last point is slid forward in place; when the forward
//				vector flips axis (a 90-degree turn) the last point is
//				snapped to the corner and a new point is appended.
//
//	RETURNS:	false if pCyclist is null or the requested trail doesn't
//				exist, true otherwise.
//
// ----------------------------------------------------------------------- //
bool CLightCycleMgr::UpdateLightCycleTrail(LIGHT_CYCLIST* pCyclist, LTVector& vPos, int nTrailID)
{
	ASSERT(pCyclist);
	if(!pCyclist)
		return false;

	LIGHT_CYCLE_TRAIL_POINT newPoint;
	LIGHT_CYCLE_TRAIL_POINT *pOldPoint;

	CreateTrailPointFromVector(newPoint,vPos);

	// See which trail we're dealing with
	LIGHT_CYCLE_TRAIL* pTrail;
	if(nTrailID == CURRENT_TRAIL_ID)
	{
		pTrail = pCyclist->pCurTrail;
	}
	else
	{
		pTrail = pCyclist->FindTrail((uint16)nTrailID);
	}

	ASSERT(pTrail);
	if(!pTrail)
	{
		// We tried to update a non-existant trail
		return false;
	}

	int nPoints = pTrail->collTrailPoints.size();
	if(nPoints > 0)
	{
		// Get the last point
		pOldPoint = &(pTrail->collTrailPoints[nPoints-1]);

		// Save the old forward vector
		LTVector vOldForward = pCyclist->vForward;

		// Compute the forward vector from the last trail point to the new position
		pCyclist->vForward.x = newPoint.GetX() - pOldPoint->GetX();
		pCyclist->vForward.y = newPoint.GetY() - pOldPoint->GetY();
		pCyclist->vForward.z = newPoint.GetZ() - pOldPoint->GetZ();
		pCyclist->vForward.Normalize();

		if(nPoints > 1)
		{
			// Check for a turn: trails are axis-aligned, so a turn makes the
			// old and new forward vectors perpendicular (|dot| ~= 0)
			float fDot = (float)(fabs(vOldForward.Dot(pCyclist->vForward)));

			// Check for a turn
			if(fDot <= MATH_EPSILON)
			{
				// We have a turn, so we have to do two things:
				// 1) Change our last point into the actual turn point
				// 2) Add on our current location point
				LIGHT_CYCLE_TRAIL_POINT turnPoint;

				// Check to see which axis we were travelling on
				if(((float)fabs(vOldForward.z)) > MATH_EPSILON)
				{
					// We were travelling in the Z direction and now we've turned
					// Set the new forward vector (snap to the +/- X axis)
					if(newPoint.GetX() > pOldPoint->GetX())
					{
						pCyclist->vForward.x = 1.0f;
					}
					else
					{
						pCyclist->vForward.x = -1.0f;
					}
					pCyclist->vForward.z = 0.0f;

					// Now calculate the turn point
					/*
						o = old point, n = new point, t = turn point

						 | o
						 | |\
						z| t--n
						 |
						 +--------
							x
					*/
					// Turn point is the old X and new Z
					turnPoint.SetX(pOldPoint->GetX());
					turnPoint.SetZ(newPoint.GetZ());
				}
				else
				{
					// We were travelling in the X direction and now we've turned
					// Set the new forward vector (snap to the +/- Z axis)
					if(newPoint.GetZ() > pOldPoint->GetZ())
					{
						pCyclist->vForward.z = 1.0f;
					}
					else
					{
						pCyclist->vForward.z = -1.0f;
					}
					pCyclist->vForward.x = 0.0f;

					// Now calculate the turn point
					/*
						o = old point, n = new point, t = turn point

						 | o--t
						 |  \ |
						z|   \|
						 |    n
						 +-------
							x
					*/
					// Turn point is the old Z, and new X
					turnPoint.SetX(newPoint.GetX());
					turnPoint.SetZ(pOldPoint->GetZ());
				}
				turnPoint.SetY(newPoint.GetY());

				if(pOldPoint->GetY() > (newPoint.GetY() + MATH_EPSILON))
				{
					// We shouldn't be going up/down ramps!
					ASSERT(FALSE);
				}

				// Change the last point to the turn point
				pOldPoint->SetX(turnPoint.GetX());
				pOldPoint->SetY(turnPoint.GetY());
				pOldPoint->SetZ(turnPoint.GetZ());

				// Now add on our current location point
				pTrail->collTrailPoints.push_back(newPoint);

				/************************ TEMP */
				// Debug-only dump of the full point list — remove before ship
				// Let's walk his list of points
				std::vector<LIGHT_CYCLE_TRAIL_POINT>::iterator iter;
				// Walk the list
				int nPoint = 0;
				g_pLTServer->CPrint("\n****\n");
				for(iter=pTrail->collTrailPoints.begin();iter!=pTrail->collTrailPoints.end();iter++)
				{
					g_pLTServer->CPrint("Point %d: <%f, %f, %f>\n",nPoint,iter->GetX(),iter->GetY(),iter->GetZ());
					nPoint++;
				}
				g_pLTServer->CPrint("****\n");
				/************************ TEMP */
			}
			else
			{
				// We're continuing in the same direction,
				// so we'll just change the last point to our location
				pOldPoint->SetX(newPoint.GetX());
				pOldPoint->SetY(newPoint.GetY());
				pOldPoint->SetZ(newPoint.GetZ());
			}
		}
		else
		{
			// We only have one point on our list (the starting point)
			// so add a new one
			pTrail->collTrailPoints.push_back(newPoint);
		}
	}
	else
	{
		// We shouldn't ever have no points when we update
		ASSERT(FALSE);
	}

	return true;
}