Example #1
//-----------------------------------------------------------------------------
// Level init, shutdown
//-----------------------------------------------------------------------------
void CClientLeafSystem::LevelInitPreEntity()
{
	MEM_ALLOC_CREDIT();

	m_Renderables.EnsureCapacity( 1024 );
	m_RenderablesInLeaf.EnsureCapacity( 1024 );
	m_ShadowsInLeaf.EnsureCapacity( 256 );
	m_ShadowsOnRenderable.EnsureCapacity( 256 );
	m_DirtyRenderables.EnsureCapacity( 256 );

	// Add all the leaves we'll need
	int leafCount = engine->LevelLeafCount();
	m_Leaf.EnsureCapacity( leafCount );

	ClientLeaf_t newLeaf;
	newLeaf.m_FirstElement = m_RenderablesInLeaf.InvalidIndex();
	newLeaf.m_FirstShadow = m_ShadowsInLeaf.InvalidIndex();
	newLeaf.m_FirstDetailProp = 0;
	newLeaf.m_DetailPropCount = 0;
	newLeaf.m_DetailPropRenderFrame = -1;
	while ( --leafCount >= 0 )
	{
		m_Leaf.AddToTail( newLeaf );
	}
}
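The pattern here is reserve-then-fill: EnsureCapacity() grows the storage once, so the AddToTail() loop never reallocates. A minimal sketch of the same idea using std::vector (reserve ≈ EnsureCapacity, push_back ≈ AddToTail); the ClientLeaf struct and its defaults below are illustrative stand-ins, not the engine's types:

#include <vector>

struct ClientLeaf
{
	int firstElement = -1;		// stand-in for m_RenderablesInLeaf.InvalidIndex()
	int firstShadow = -1;
	int firstDetailProp = 0;
	int detailPropCount = 0;
	int detailPropRenderFrame = -1;
};

// Reserve once, then append leafCount copies of the template value;
// no reallocation happens inside the loop.
void InitLeaves( std::vector<ClientLeaf> &leaves, int leafCount )
{
	leaves.reserve( leafCount );
	ClientLeaf newLeaf;
	while ( --leafCount >= 0 )
	{
		leaves.push_back( newLeaf );
	}
}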
Example #2
//-----------------------------------------------------------------------------
//
//-----------------------------------------------------------------------------
void CDmeSingleIndexedComponent::Intersection( const CDmeSingleIndexedComponent &rhs )
{
	const int nLhs = Count();
	const int nRhs = rhs.Count();

	int l = 0;
	int r = 0;

	if ( IsComplete() )
	{
		// TODO
		Assert( 0 );
	}
	else
	{
		CUtlVector< int > newComponents;
		newComponents.EnsureCapacity( nLhs );
		CUtlVector< float > newWeights;
		newWeights.EnsureCapacity( nLhs );

		while ( l < nLhs || r < nRhs )
		{
			// In LHS but not RHS
			while ( l < nLhs && ( r >= nRhs || m_Components[ l ] < rhs.m_Components[ r ] ) )
			{
				++l;
			}

			// In RHS but not LHS
			while ( r < nRhs && ( l >= nLhs || m_Components[ l ] > rhs.m_Components[ r ] ) )
			{
				++r;
			}

			// In Both LHS & RHS
			while ( l < nLhs && r < nRhs && m_Components[ l ] == rhs.m_Components[ r ] )
			{
				newComponents.AddToTail( m_Components[ l ] );
				newWeights.AddToTail( m_Weights[ l ] );
				++l;
				++r;
			}
		}

		m_Components.CopyArray( newComponents.Base(), newComponents.Count() );
		m_Weights.CopyArray( newWeights.Base(), newWeights.Count() );
	}

	m_CompleteCount.Set( 0 );
	m_bComplete.Set( false );
}
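Intersection() assumes both component lists are sorted and walks them with two cursors, keeping only the indices present in both sides along with the left-hand weight. A minimal sketch of the same two-pointer merge over std::vector; the function and parameter names are illustrative:

#include <vector>

// Keep only the indices that appear in both sorted lists, carrying the LHS weight.
void IntersectSorted( std::vector<int> &comps, std::vector<float> &weights,
                      const std::vector<int> &rhs )
{
	std::vector<int> newComps;
	std::vector<float> newWeights;
	newComps.reserve( comps.size() );
	newWeights.reserve( comps.size() );

	size_t l = 0, r = 0;
	while ( l < comps.size() && r < rhs.size() )
	{
		if ( comps[l] < rhs[r] )
		{
			++l;					// only in LHS: drop
		}
		else if ( comps[l] > rhs[r] )
		{
			++r;					// only in RHS: drop
		}
		else
		{
			newComps.push_back( comps[l] );		// in both: keep index and LHS weight
			newWeights.push_back( weights[l] );
			++l;
			++r;
		}
	}
	comps.swap( newComps );
	weights.swap( newWeights );
}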
Example #3
bool CRunTimeKeyValuesStringTable::ReadStringTable( int numStrings, CUtlBuffer& buf )
{
	Assert( m_Strings.Count() == 0 );

	CUtlVector< int > offsets;
	offsets.EnsureCapacity( numStrings );

	offsets.CopyArray( (int *)( buf.PeekGet() ), numStrings );

	// Skip over data
	buf.SeekGet( CUtlBuffer::SEEK_HEAD, buf.TellGet() + numStrings * sizeof( int ) );

	int stringSize = buf.GetInt();
	
	// Read in the string table
	m_Strings.EnsureCapacity( numStrings );
	int i;
	for ( i = 0 ; i < numStrings; ++i )
	{
		m_Strings.AddToTail( (const char *)buf.PeekGet( offsets[ i ] ) );
	}

	buf.SeekGet( CUtlBuffer::SEEK_HEAD, buf.TellGet() + stringSize );

	return true;
}
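ReadStringTable() reads an offset table, skips past it, reads the pool size, and then stores pointers into the buffer's string pool rather than copying the strings out. A minimal sketch of parsing that layout from a raw byte blob, assuming (as the PeekGet() calls after GetInt() suggest) that each offset is relative to the start of the pool; names here are illustrative:

#include <cstdint>
#include <cstring>
#include <string>
#include <vector>

// Blob layout assumed: [numStrings int32 offsets][int32 poolSize][packed '\0'-terminated strings].
std::vector<std::string> ParseStringTable( const uint8_t *data, int numStrings )
{
	const int32_t *offsets = reinterpret_cast<const int32_t *>( data );
	const uint8_t *cursor = data + numStrings * sizeof( int32_t );

	int32_t poolSize = 0;
	std::memcpy( &poolSize, cursor, sizeof( poolSize ) );
	cursor += sizeof( poolSize );

	const char *pool = reinterpret_cast<const char *>( cursor );
	std::vector<std::string> strings;
	strings.reserve( numStrings );
	for ( int i = 0; i < numStrings; ++i )
	{
		strings.emplace_back( pool + offsets[i] );	// each offset points at a terminated string
	}
	(void)poolSize;	// a real reader would validate each offset against poolSize
	return strings;
}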
Example #4
	// build the final pool
	void GetTableAndPool( CUtlVector< unsigned int > &offsets, CUtlBuffer &buffer )
	{
		offsets.Purge();
		buffer.Purge();

		offsets.EnsureCapacity( m_StringMap.GetNumStrings() );
		buffer.EnsureCapacity( m_nOffset );

		unsigned int currentOffset = 0;
		for ( int i = 0; i < m_StringMap.GetNumStrings(); i++ )
		{
			offsets.AddToTail( currentOffset );

			const char *pString = m_StringMap.String( i );
			buffer.Put( pString, strlen( pString ) + 1 ); 

			currentOffset += strlen( pString ) + 1;
		}
		Assert( currentOffset == m_nOffset );

		// align string pool to end on dword boundary
		while ( buffer.TellMaxPut() & 0x03 )
		{
			buffer.PutChar( '\0' );
			m_nOffset++;
		}
	}
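The final loop pads the pool so the next section starts on a 4-byte boundary. A minimal sketch of the same padding over a std::vector<char> pool; equivalently, the aligned size is ( size + 3 ) & ~3:

#include <vector>

// Pad a byte pool with '\0' until its length is a multiple of 4,
// mirroring the while ( buffer.TellMaxPut() & 0x03 ) loop above.
void PadToDword( std::vector<char> &pool )
{
	while ( pool.size() & 0x03 )
	{
		pool.push_back( '\0' );
	}
}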
Example #5
void ResizeAnimationLayerCallback( void *pStruct, int offsetToUtlVector, int len )
{
	C_BaseAnimatingOverlay *pEnt = (C_BaseAnimatingOverlay*)pStruct;
	CUtlVector < CAnimationLayer > *pVec = &pEnt->m_AnimOverlay;
	CUtlVector< CInterpolatedVar< CAnimationLayer > > *pVecIV = &pEnt->m_iv_AnimOverlay;
	
	Assert( (char*)pVec - (char*)pEnt == offsetToUtlVector );
	Assert( pVec->Count() == pVecIV->Count() );
	Assert( pVec->Count() <= C_BaseAnimatingOverlay::MAX_OVERLAYS );
	
	int diff = len - pVec->Count();
	if ( diff != 0 )
	{
		// remove all entries
		for ( int i=0; i < pVec->Count(); i++ )
		{
			pEnt->RemoveVar( &pVec->Element( i ) );
		}

		pEnt->InvalidatePhysicsRecursive( BOUNDS_CHANGED );

		// adjust vector sizes
		if ( diff > 0 )
		{
			for ( int i = 0; i < diff; ++i )
			{
				int j = pVec->AddToTail( );
				(*pVec)[j].SetOwner( pEnt );
			}
			pVecIV->AddMultipleToTail( diff );
		}
		else
		{
			pVec->RemoveMultiple( len, -diff );
			pVecIV->RemoveMultiple( len, -diff );
		}

		// Rebind all the variables in the ent's list.
		for ( int i=0; i < len; i++ )
		{
			IInterpolatedVar *pWatcher = &pVecIV->Element( i );
			pWatcher->SetDebugName( s_m_iv_AnimOverlayNames[i] );
			pEnt->AddVar( &pVec->Element( i ), pWatcher, LATCH_ANIMATION_VAR, true );
		}
	}

	// FIXME: need to set historical values of nOrder in pVecIV to MAX_OVERLAY

	// Ensure capacity
	pVec->EnsureCapacity( len );

	int nNumAllocated = pVec->NumAllocated();

	// This is important to do because EnsureCapacity doesn't actually call the constructors
	// on the elements, but we need them to be initialized, otherwise it'll have out-of-range
	// values which will piss off the datatable encoder.
	UtlVector_InitializeAllocatedElements( pVec->Base() + pVec->Count(), nNumAllocated - pVec->Count() );
}
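The closing comment matters: EnsureCapacity() only reserves raw storage and does not construct elements, which is why the allocated-but-unused slots are initialized by hand. std::vector draws the same line between reserve() and resize(), shown in a minimal sketch below (the Layer struct and its default are illustrative); the difference is that std::vector never exposes unconstructed slots, so it has no equivalent of UtlVector_InitializeAllocatedElements to call:

#include <cstdio>
#include <vector>

struct Layer
{
	int order;
	Layer() : order( 15 ) {}	// illustrative default, standing in for an engine constant
};

int main()
{
	std::vector<Layer> v;
	v.reserve( 8 );		// like EnsureCapacity: allocates storage, runs no constructors
	std::printf( "after reserve: size=%zu\n", v.size() );	// size is still 0

	v.resize( 8 );		// like AddMultipleToTail: elements are constructed
	std::printf( "after resize:  size=%zu order=%d\n", v.size(), v[0].order );
	return 0;
}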
Example #6
//-----------------------------------------------------------------------------
// Purpose: Generate a list of files matching a mask
//-----------------------------------------------------------------------------
int CScriptLib::FindFiles( char* pFileMask, bool bRecurse, CUtlVector<fileList_t> &fileList )
{
	char	dirPath[MAX_PATH];
	char	pattern[MAX_PATH];
	char	extension[MAX_PATH];

	// get path only
	strcpy( dirPath, pFileMask );
	V_StripFilename( dirPath );

	// get pattern only
	V_FileBase( pFileMask, pattern, sizeof( pattern ) );
	V_ExtractFileExtension( pFileMask, extension, sizeof( extension ) );
	if ( extension[0] )
	{
		strcat( pattern, "." );
		strcat( pattern, extension );
	}

	if ( !bRecurse )
	{
		GetFileList( dirPath, pattern, fileList );
	}
	else
	{
		// recurse and get the tree
		CUtlVector< fileList_t > tempList;
		CUtlVector< CUtlString > dirList;
		RecurseFileTree_r( dirPath, 0, dirList );
		for ( int i=0; i<dirList.Count(); i++ )
		{
			// iterate each directory found
			tempList.Purge();
			tempList.EnsureCapacity( dirList.Count() );

			GetFileList( dirList[i].String(), pattern, tempList );

			int start = fileList.AddMultipleToTail( tempList.Count() );
			for ( int j=0; j<tempList.Count(); j++ )
			{
				fileList[start+j] = tempList[j];
			}
		}	
	}

	return fileList.Count();
}
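With bRecurse set, FindFiles() first collects the directory tree and then runs the pattern match per directory. A minimal sketch of the same job using std::filesystem (C++17), matching on extension only rather than a full wildcard mask; the function name and parameters are illustrative:

#include <filesystem>
#include <string>
#include <vector>

namespace fs = std::filesystem;

// Walk 'root' recursively and collect every regular file whose extension matches 'ext' (e.g. ".vcd").
std::vector<std::string> FindFilesRecursive( const fs::path &root, const std::string &ext )
{
	std::vector<std::string> files;
	for ( const auto &entry : fs::recursive_directory_iterator( root ) )
	{
		if ( entry.is_regular_file() && entry.path().extension() == ext )
		{
			files.push_back( entry.path().string() );
		}
	}
	return files;
}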
Example #7
void CBSPTreeData::Init( ISpatialQuery* pBSPTree )
{
	Assert( pBSPTree );
	m_pBSPTree = pBSPTree;

	m_Handles.EnsureCapacity( 1024 );
	m_LeafElements.EnsureCapacity( 1024 );
	m_HandleLeafList.EnsureCapacity( 1024 );

	// Add all the leaves we'll need
	int leafCount = m_pBSPTree->LeafCount();
	m_Leaf.EnsureCapacity( leafCount );

	Leaf_t newLeaf;
	newLeaf.m_FirstElement = m_LeafElements.InvalidIndex();
	while ( --leafCount >= 0 )
	{
		m_Leaf.AddToTail( newLeaf );
	}
}
Example #8
//-----------------------------------------------------------------------------
//
//-----------------------------------------------------------------------------
void CDmeSingleIndexedComponent::Add( const CDmeSingleIndexedComponent &rhs )
{
	int nLhs = Count();
	const int nRhs = rhs.Count();

	int l = 0;
	int r = 0;

	if ( IsComplete() )
	{
		if ( rhs.IsComplete() && nRhs > nLhs )
		{
			m_CompleteCount.Set( nRhs );
		}
		else
		{
			while ( r < nRhs )
			{
				if ( rhs.m_Components[ r ] >= nLhs )
				{
					// Got one that's greater than the complete count of this one

					CUtlVector< int > newComponents;
					newComponents.EnsureCapacity( nLhs + nRhs - r );

					CUtlVector< float > newWeights;
					newWeights.EnsureCapacity( nLhs  + nRhs - r );

					GetComponents( newComponents, newWeights );

					while ( r < nRhs )
					{
						newComponents.AddToTail( rhs.m_Components[ r ] );
						newWeights.AddToTail( rhs.m_Weights[ r ] );
						++r;
					}

					m_Components.CopyArray( newComponents.Base(), newComponents.Count() );
					m_Weights.CopyArray( newWeights.Base(), newWeights.Count() );

					m_CompleteCount.Set( 0 );
					m_bComplete.Set( false );

					break;
				}

				++r;
			}
		}
	}
	else
	{
		CUtlVector< int > newComponents;
		newComponents.EnsureCapacity( nLhs + nRhs * 0.5 );	// Just an estimate assuming 50% of the components in rhs aren't in lhs
		CUtlVector< float > newWeights;
		newWeights.EnsureCapacity( nLhs  + nRhs * 0.5 );	// Just an estimate

		while ( l < nLhs || r < nRhs )
		{
			while ( l < nLhs && ( r >= nRhs || m_Components[ l ] < rhs.m_Components[ r ] ) )
			{
				newComponents.AddToTail( m_Components[ l ] );
				newWeights.AddToTail( m_Weights[ l ] );
				++l;
			}

			// In RHS but not LHS
			while ( r < nRhs && ( l >= nLhs || m_Components[ l ] > rhs.m_Components[ r ] ) )
			{
				newComponents.AddToTail( rhs.m_Components[ r ] );
				newWeights.AddToTail( rhs.m_Weights[ r ] );
				++r;
			}

			// In Both LHS & RHS
			while ( l < nLhs && r < nRhs && m_Components[ l ] == rhs.m_Components[ r ] )
			{
				newComponents.AddToTail( m_Components[ l ] );
				newWeights.AddToTail( m_Weights[ l ] );
				++l;
				++r;
			}
		}

		m_Components.CopyArray( newComponents.Base(), newComponents.Count() );
		m_Weights.CopyArray( newWeights.Base(), newWeights.Count() );
	}

	m_CompleteCount.Set( 0 );
	m_bComplete.Set( false );
}
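Add() is the union counterpart of the Intersection() example above: the same two-cursor walk over sorted lists, but every index from either side survives, and on a tie the left-hand weight wins. A minimal sketch with std::vector; names are illustrative:

#include <vector>

// Merge two sorted index lists into their union; when an index appears in both,
// keep the left-hand side's weight, matching the "In Both LHS & RHS" branch above.
void UnionSorted( std::vector<int> &comps, std::vector<float> &weights,
                  const std::vector<int> &rhsComps, const std::vector<float> &rhsWeights )
{
	std::vector<int> outComps;
	std::vector<float> outWeights;
	outComps.reserve( comps.size() + rhsComps.size() );
	outWeights.reserve( comps.size() + rhsComps.size() );

	size_t l = 0, r = 0;
	while ( l < comps.size() || r < rhsComps.size() )
	{
		if ( r >= rhsComps.size() || ( l < comps.size() && comps[l] < rhsComps[r] ) )
		{
			outComps.push_back( comps[l] );			// only in LHS (or RHS exhausted)
			outWeights.push_back( weights[l] );
			++l;
		}
		else if ( l >= comps.size() || rhsComps[r] < comps[l] )
		{
			outComps.push_back( rhsComps[r] );		// only in RHS (or LHS exhausted)
			outWeights.push_back( rhsWeights[r] );
			++r;
		}
		else
		{
			outComps.push_back( comps[l] );			// in both: one copy, LHS weight
			outWeights.push_back( weights[l] );
			++l;
			++r;
		}
	}
	comps.swap( outComps );
	weights.swap( outWeights );
}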
Example #9
void BlendCorners( CCoreDispInfo **ppListBase, int listSize )
{
	CUtlVector<int> nbCornerVerts;

	for ( int iDisp=0; iDisp < listSize; iDisp++ )
	{
		CCoreDispInfo *pDisp = ppListBase[iDisp];

		int iNeighbors[512];
		int nNeighbors = GetAllNeighbors( pDisp, iNeighbors );

		// Make sure we have room for all the neighbors.
		nbCornerVerts.RemoveAll();
		nbCornerVerts.EnsureCapacity( nNeighbors );
		nbCornerVerts.AddMultipleToTail( nNeighbors );
		
		// For each corner.
		for ( int iCorner=0; iCorner < 4; iCorner++ )
		{
			// Has it been touched?
			CVertIndex cornerVert = pDisp->GetCornerPointIndex( iCorner );
			int iCornerVert = pDisp->VertIndexToInt( cornerVert );
			const Vector &vCornerVert = pDisp->GetVert( iCornerVert );

			// For each displacement sharing this corner..
			Vector vAverage = pDisp->GetNormal( iCornerVert );

			for ( int iNeighbor=0; iNeighbor < nNeighbors; iNeighbor++ )
			{
				int iNBListIndex = iNeighbors[iNeighbor];
				CCoreDispInfo *pNeighbor = ppListBase[iNBListIndex];
				
				// Find out which vert it is on the neighbor.
				int iNBCorner = FindNeighborCornerVert( pNeighbor, vCornerVert );
				if ( iNBCorner == -1 )
				{
					nbCornerVerts[iNeighbor] = -1; // remove this neighbor from the list.
				}
				else
				{
					CVertIndex viNBCornerVert = pNeighbor->GetCornerPointIndex( iNBCorner );
					int iNBVert = pNeighbor->VertIndexToInt( viNBCornerVert );
					nbCornerVerts[iNeighbor] = iNBVert;
					vAverage += pNeighbor->GetNormal( iNBVert );
				}
			}


			// Blend all the neighbor normals with this one.
			VectorNormalize( vAverage );
			pDisp->SetNormal( iCornerVert, vAverage );

#if defined( USE_SCRATCHPAD )
			ScratchPad_DrawArrowSimple( 
				g_pPad, 
				pDisp->GetVert( iCornerVert ), 
				pDisp->GetNormal( iCornerVert ), 
				Vector( 0, 0, 1 ),
				25 );
#endif

			for ( int iNeighbor=0; iNeighbor < nNeighbors; iNeighbor++ )
			{
				int iNBListIndex = iNeighbors[iNeighbor];
				if ( nbCornerVerts[iNeighbor] == -1 )
					continue;

				CCoreDispInfo *pNeighbor = ppListBase[iNBListIndex];
				pNeighbor->SetNormal( nbCornerVerts[iNeighbor], vAverage );
			}
		}
	}
}
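The corner blend itself is plain vector averaging: sum the corner normal with every neighbor's normal at the shared vertex, renormalize, and write the result back to all of them. A minimal sketch of that step (the Vec3 type is a stand-in for the engine's Vector):

#include <cmath>

struct Vec3 { float x, y, z; };

// Sum the contributing normals and renormalize, as the VectorNormalize() call above does.
Vec3 BlendNormals( const Vec3 *normals, int count )
{
	Vec3 sum = { 0.0f, 0.0f, 0.0f };
	for ( int i = 0; i < count; ++i )
	{
		sum.x += normals[i].x;
		sum.y += normals[i].y;
		sum.z += normals[i].z;
	}
	float len = std::sqrt( sum.x * sum.x + sum.y * sum.y + sum.z * sum.z );
	if ( len > 0.0f )
	{
		sum.x /= len;
		sum.y /= len;
		sum.z /= len;
	}
	return sum;
}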
Example #10
//-----------------------------------------------------------------------------
// A Scene image file contains all the compiled .XCD
//-----------------------------------------------------------------------------
bool CSceneImage::CreateSceneImageFile( CUtlBuffer &targetBuffer, char const *pchModPath, bool bLittleEndian, bool bQuiet, ISceneCompileStatus *pStatus )
{
	CUtlVector<fileList_t>	vcdFileList;
	CUtlSymbolTable			vcdSymbolTable( 0, 32, true );

	Msg( "\n" );

	// get all the VCD files according to the search paths
	char searchPaths[512];
	g_pFullFileSystem->GetSearchPath( "GAME", false, searchPaths, sizeof( searchPaths ) );
	char *pPath = strtok( searchPaths, ";" );
	while ( pPath )
	{
		int currentCount = vcdFileList.Count();

		char szPath[MAX_PATH];
		V_ComposeFileName( pPath, "scenes/*.vcd", szPath, sizeof( szPath ) );

		scriptlib->FindFiles( szPath, true, vcdFileList );

		Msg( "Scenes: Searching '%s' - Found %d scenes.\n", szPath, vcdFileList.Count() - currentCount );

		pPath = strtok( NULL, ";" );
	}

	if ( !vcdFileList.Count() )
	{
		Msg( "Scenes: No Scene Files found!\n" );
		return false;
	}

	// iterate and convert all the VCD files
	bool bGameIsTF = V_stristr( pchModPath, "\\tf" ) != NULL;
	for ( int i=0; i<vcdFileList.Count(); i++ )
	{
		const char *pFilename = vcdFileList[i].fileName.String();
		const char *pSceneName = V_stristr( pFilename, "scenes\\" );
		if ( !pSceneName )
		{
			continue;
		}

		if ( !bLittleEndian && bGameIsTF && V_stristr( pSceneName, "high\\" ) )
		{
			continue;
		}

		// process files in order they would be found in search paths
		// i.e. skipping later processed files that match an earlier conversion
		UtlSymId_t symbol = vcdSymbolTable.Find( pSceneName );
		if ( symbol == UTL_INVAL_SYMBOL )
		{
			vcdSymbolTable.AddString( pSceneName );

			pStatus->UpdateStatus( pFilename, bQuiet, i, vcdFileList.Count() );

			if ( !CreateTargetFile_VCD( pFilename, "", false, bLittleEndian ) )
			{
				Error( "CreateSceneImageFile: Failed on '%s' conversion!\n", pFilename );
			}


		}
	}

	if ( !g_SceneFiles.Count() )
	{
		// nothing to do
		return true;
	}

	Msg( "Scenes: Finalizing %d unique scenes.\n", g_SceneFiles.Count() );


	// get the string pool
	CUtlVector< unsigned int > stringOffsets;
	CUtlBuffer stringPool;
	g_ChoreoStringPool.GetTableAndPool( stringOffsets, stringPool );

	if ( !bQuiet )
	{
		Msg( "Scenes: String Table: %d bytes\n", stringOffsets.Count() * sizeof( int ) );
		Msg( "Scenes: String Pool: %d bytes\n", stringPool.TellMaxPut() );
	}

	// first header, then lookup table, then string pool blob
	int stringPoolStart = sizeof( SceneImageHeader_t ) + stringOffsets.Count() * sizeof( int );
	// then directory
	int sceneEntryStart = stringPoolStart + stringPool.TellMaxPut();
	// then variable sized summaries
	int sceneSummaryStart = sceneEntryStart + g_SceneFiles.Count() * sizeof( SceneImageEntry_t );
	// then variable sized compiled binary scene data
	int sceneDataStart = 0;

	// construct header
	SceneImageHeader_t imageHeader = { 0 };
	imageHeader.nId = SCENE_IMAGE_ID;
	imageHeader.nVersion = SCENE_IMAGE_VERSION;
	imageHeader.nNumScenes = g_SceneFiles.Count();
	imageHeader.nNumStrings = stringOffsets.Count();
	imageHeader.nSceneEntryOffset = sceneEntryStart;
	if ( !bLittleEndian )
	{
		imageHeader.nId = BigLong( imageHeader.nId );
		imageHeader.nVersion = BigLong( imageHeader.nVersion );
		imageHeader.nNumScenes = BigLong( imageHeader.nNumScenes );
		imageHeader.nNumStrings = BigLong( imageHeader.nNumStrings );
		imageHeader.nSceneEntryOffset = BigLong( imageHeader.nSceneEntryOffset );
	}
	targetBuffer.Put( &imageHeader, sizeof( imageHeader ) );

	// header is immediately followed by string table and pool
	for ( int i = 0; i < stringOffsets.Count(); i++ )
	{
		unsigned int offset = stringPoolStart + stringOffsets[i];
		if ( !bLittleEndian )
		{
			offset = BigLong( offset );
		}
		targetBuffer.PutInt( offset );
	}
	Assert( stringPoolStart == targetBuffer.TellMaxPut() );
	targetBuffer.Put( stringPool.Base(), stringPool.TellMaxPut() );

	// construct directory
	CUtlSortVector< SceneImageEntry_t, CSceneImageEntryLessFunc > imageDirectory;
	imageDirectory.EnsureCapacity( g_SceneFiles.Count() );

	// build directory
	// directory is linear sorted by filename checksum for later binary search
	for ( int i = 0; i < g_SceneFiles.Count(); i++ )
	{
		SceneImageEntry_t imageEntry = { 0 };

		// name needs to be normalized for deterministic later CRC name calc
		// calc crc based on scenes\anydir\anyscene.vcd
		char szCleanName[MAX_PATH];
		V_strncpy( szCleanName, g_SceneFiles[i].fileName.String(), sizeof( szCleanName ) );
		V_strlower( szCleanName );
		V_FixSlashes( szCleanName );
		char *pName = V_stristr( szCleanName, "scenes\\" );
		if ( !pName )
		{
			// must have scenes\ in filename
			Error( "CreateSceneImageFile: Unexpected lack of scenes prefix on %s\n", g_SceneFiles[i].fileName.String() );
		}

		CRC32_t crcFilename = CRC32_ProcessSingleBuffer( pName, strlen( pName ) );
		imageEntry.crcFilename = crcFilename;

		// temp store an index to its file, fixup later, necessary to access post sort
		imageEntry.nDataOffset = i;
		if ( imageDirectory.Find( imageEntry ) != imageDirectory.InvalidIndex() )
		{
			// filename checksums must be unique or runtime binary search would be bogus
			Error( "CreateSceneImageFile: Unexpected filename checksum collision!\n" );
		}		

		imageDirectory.Insert( imageEntry );
	}

	// determine sort order and start of data after dynamic summaries
	CUtlVector< int > writeOrder;
	writeOrder.EnsureCapacity( g_SceneFiles.Count() );
	sceneDataStart = sceneSummaryStart;
	for ( int i = 0; i < imageDirectory.Count(); i++ )
	{
		// reclaim offset, indicates write order of scene file
		int iScene = imageDirectory[i].nDataOffset;
		writeOrder.AddToTail( iScene );

		// march past each variable sized summary to determine start of scene data
		int numSounds = g_SceneFiles[iScene].soundList.Count();
		sceneDataStart += sizeof( SceneImageSummary_t ) + ( numSounds - 1 ) * sizeof( int );
	}

	// finalize and write directory
	Assert( sceneEntryStart == targetBuffer.TellMaxPut() );
	int nSummaryOffset = sceneSummaryStart;
	int nDataOffset = sceneDataStart;
	for ( int i = 0; i < imageDirectory.Count(); i++ )
	{
		int iScene = writeOrder[i];

		imageDirectory[i].nDataOffset = nDataOffset;
		imageDirectory[i].nDataLength = g_SceneFiles[iScene].compiledBuffer.TellMaxPut();
		imageDirectory[i].nSceneSummaryOffset = nSummaryOffset;
		if ( !bLittleEndian )
		{
			imageDirectory[i].crcFilename = BigLong( imageDirectory[i].crcFilename );
			imageDirectory[i].nDataOffset = BigLong( imageDirectory[i].nDataOffset );
			imageDirectory[i].nDataLength = BigLong( imageDirectory[i].nDataLength );
			imageDirectory[i].nSceneSummaryOffset = BigLong( imageDirectory[i].nSceneSummaryOffset );
		}
		targetBuffer.Put( &imageDirectory[i], sizeof( SceneImageEntry_t ) );

		int numSounds = g_SceneFiles[iScene].soundList.Count();
		nSummaryOffset += sizeof( SceneImageSummary_t ) + (numSounds - 1) * sizeof( int );

		nDataOffset += g_SceneFiles[iScene].compiledBuffer.TellMaxPut();
	}

	// finalize and write summaries
	Assert( sceneSummaryStart == targetBuffer.TellMaxPut() );
	for ( int i = 0; i < imageDirectory.Count(); i++ )
	{
		int iScene = writeOrder[i];
		int msecs = g_SceneFiles[iScene].msecs;
		int soundCount = g_SceneFiles[iScene].soundList.Count();
		if ( !bLittleEndian )
		{
			msecs = BigLong( msecs );
			soundCount = BigLong( soundCount );
		}
		targetBuffer.PutInt( msecs );
		targetBuffer.PutInt( soundCount );
		for ( int j = 0; j < g_SceneFiles[iScene].soundList.Count(); j++ )
		{
			int soundId = g_SceneFiles[iScene].soundList[j];
			if ( !bLittleEndian )
			{
				soundId = BigLong( soundId );
			}
			targetBuffer.PutInt( soundId );
		}
	}

	// finalize and write data
	Assert( sceneDataStart == targetBuffer.TellMaxPut() );
	for ( int i = 0; i < imageDirectory.Count(); i++ )
	{	
		int iScene = writeOrder[i];
		targetBuffer.Put( g_SceneFiles[iScene].compiledBuffer.Base(), g_SceneFiles[iScene].compiledBuffer.TellMaxPut() );
	}

	if ( !bQuiet )
	{
		Msg( "Scenes: Final size: %.2f MB\n", targetBuffer.TellMaxPut() / (1024.0f * 1024.0f ) );
	}

	// cleanup
	g_SceneFiles.Purge();

	return true;
}
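The image file is written strictly in the order the offsets are computed: header, string offset table, string pool, directory entries, per-scene summaries (variable-sized, sizeof( SceneImageSummary_t ) + ( numSounds - 1 ) * sizeof( int ) each), then the compiled scene data. A minimal sketch of that offset arithmetic, with the sizes passed in as plain parameters rather than the engine's structs:

#include <cstddef>

struct SceneImageLayout
{
	int stringPoolStart;
	int sceneEntryStart;
	int sceneSummaryStart;
	int sceneDataStart;
};

// Mirror the offset computation above: each section starts where the previous one ends.
SceneImageLayout ComputeLayout( int headerSize, int numStrings, int stringPoolBytes,
                                int numScenes, int entrySize, int totalSummaryBytes )
{
	SceneImageLayout out;
	out.stringPoolStart = headerSize + numStrings * (int)sizeof( int );	// header, then lookup table
	out.sceneEntryStart = out.stringPoolStart + stringPoolBytes;		// then the string pool blob
	out.sceneSummaryStart = out.sceneEntryStart + numScenes * entrySize;	// then the directory
	out.sceneDataStart = out.sceneSummaryStart + totalSummaryBytes;		// then the compiled scene data
	return out;
}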