//! Convert this batch's triangle list into a single stitched triangle strip
//! using NvTriStrip, writing the result into pNewBatch.
//! Only implemented for the Win32 build; other platforms break into the debugger.
void TriangleBatch::Stripify( TriangleBatch& pNewBatch )
{
#if GD_PLATFORM == GD_PLATFORM_WIN32
    GD_ASSERT( mIndices );

    PrimitiveGroup* strip;
    UInt16 numGroups;

    // Tune the stripper for a GeForce1/2-sized vertex cache and ask for one
    // single stitched strip (no separate triangle lists).
    SetCacheSize(CACHESIZE_GEFORCE1_2);
    SetStitchStrips(true);
    SetMinStripSize(0);
    SetListsOnly(false);

    // Stripify!
    GenerateStrips( mIndices, mIndicesCount, &strip, &numGroups );
    GD_ASSERT( numGroups == 1 );    // stitching on + lists off => exactly one group

    // Copy the result in our triangle batch.
    pNewBatch.Allocate( TriangleStrip, strip->numIndices );
    memcpy( pNewBatch.GetIndices(), strip->indices, strip->numIndices*sizeof(UInt16) );

    // GenerateStrips allocates the group array with new[]; NvTriStrip's
    // contract is that the caller releases it with delete[] (the
    // PrimitiveGroup destructor frees its own index array). The previous
    // cleanup was commented out, leaking the strip on every call.
    delete[] strip;
#else
    debugBreak();
#endif
}
//! Re-strip the accumulated geometry through NvTriStrip.
//! Currently disabled: the body is compiled out under #if 0 and the entry
//! point asserts unconditionally.
void AtlasOldMesher::optimize()
{
   AssertISV(false, "This is probably broken right now - BJG");

#if 0
   // Shove everything through nvTriStrip
   SetCacheSize(24);

   // first, stripify our geometry...
   PrimitiveGroup *pg;
   U16 pgCount;

   GenerateStrips(mIndices.address(), mIndices.size(), &pg, &pgCount, AtlasOldActivationHeightfield::smDoChecks);

   // We're lazy.
   AssertISV(pgCount == 1, "AtlasOldMesher::optimize - Got unexpectedly complex geometry from NVTriStrip! (a)");
   AssertISV(pg->type == PT_STRIP, "AtlasOldMesher::optimize - Got unexpectedly complex geometry from NVTriStrip! (b)");

   // Remap indices! BJGTODO - how am I supposed to interpet the results?
   /*
   PrimitiveGroup *pgRemapped;
   RemapIndices(pg, pgCount, mVerts.size(), &pgRemapped);
   */

   // Ok, let's suck this stuff back in.
   // NOTE: reserve() only pre-allocates capacity; it does not change the
   // size, so the old `mIndices[i] = ...` writes were out of bounds on the
   // just-cleared vector. Append instead.
   mIndices.clear();
   mIndices.reserve(pg->numIndices);
   for(S32 i=0; i<pg->numIndices; i++)
      mIndices.push_back(pg->indices[i]);

   // And clean up the memory from the stripper.
   delete[] pg;
#endif
}
//~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ // TreeCache::CacheResize(): // void TreeCache::CacheResize (uint size) { // If cache size decreases, simply reset it if (CacheSize > size) { SetCacheSize(size); return; } cachedTreeT* oldCache = Cache; uint oldSize = CacheSize; #ifdef WINCE Cache = (cachedTreeT*) my_Tcl_Alloc( sizeof(cachedTreeT [size])); #else Cache = new cachedTreeT [size]; #endif CacheSize = size; // Clear all the filters and nodes so they dont contain garbage: for (uint i=0; i < size; i++) { cachedTreeT * ctree = &(Cache[i]); ctree->cfilter = NULL; for (uint count = 0; count < MAX_TREE_NODES; count++) { initTreeNode (&(ctree->tree.node[count])); } } // copy old data to new Cache for (uint i=0; i < oldSize; i++) { cachedTreeT * ctree = &(oldCache[i]); Cache[i] = *ctree; } }
void nvStripWrap(std::vector<Primitive> & faces, std::vector<U16> & indices, S32 cacheSize) { // set stripper parameters...we'll just set some default ones SetCacheSize(cacheSize); SetStitchStrips(true); // engine can handle this, so let it SetListsOnly(false); // engine can handle this, so let it SetMinStripSize(0); // engine can handle this, so let it // main strip loop... U32 start, end, i; std::vector<Primitive> someStrips; std::vector<Primitive> retStrips; std::vector<U16> someIndices; std::vector<U16> retIndices; for (start = 0; start<faces.size(); start=end) { for (end=start; end<faces.size() && faces[start].type==faces[end].type; end++) ; // copy start to end faces into new list -- this is so we end up doing less copying // down the road (when we are doing the look ahead simulation) someStrips.clear(); someIndices.clear(); for (i=start;i<end;i++) { someIndices.push_back(indices[faces[i].firstElement + 0]); someIndices.push_back(indices[faces[i].firstElement + 1]); someIndices.push_back(indices[faces[i].firstElement + 2]); } U32 matIndex = faces[start].type ^ (Primitive::Triangles|Primitive::Strip); nvMakeStrips(someStrips,someIndices,cacheSize,matIndex); // now move strips and indices into larger list S32 startStrips = retStrips.size(); retStrips.resize(startStrips+someStrips.size()); S32 startIndices = retIndices.size(); retIndices.resize(startIndices+someIndices.size()); memcpy(&retStrips[startStrips],&someStrips[0],someStrips.size()*sizeof(Primitive)); memcpy(&retIndices[startIndices],&someIndices[0],someIndices.size()*sizeof(U16)); // now adjust start of new strips for (i=startStrips; i<retStrips.size(); i++) retStrips[i].firstElement += startIndices; } indices = retIndices; faces = retStrips; }
void optimizeFaces(int index) { assert(index >= 0 && index < LOD_AMOUNT); int faceAmount = faces[index].size(); unsigned short *oldIndices = new unsigned short[faceAmount * 3]; for(int i = 0; i < faceAmount; ++i) for(int j = 0; j < 3; ++j) { int oldIndex = faces[index][i].getVertexIndex(j); assert(oldIndex >= 0 && oldIndex < int(vertices.size())); oldIndices[i * 3 + j] = oldIndex; } PrimitiveGroup *primitiveGroup = 0; unsigned short groupAmount = 0; SetCacheSize(CACHESIZE_GEFORCE3); SetListsOnly(true); GenerateStrips(oldIndices, faceAmount * 3, &primitiveGroup, &groupAmount); faces[index].resize(primitiveGroup->numIndices / 3); { for(unsigned int i = 0; i < primitiveGroup->numIndices / 3; ++i) for(int j = 0; j < 3; ++j) { int newIndex = primitiveGroup->indices[i * 3 + j]; assert(newIndex >= 0 && newIndex < int(vertices.size())); faces[index][i].setVertexIndex(j, newIndex); } } delete[] oldIndices; delete[] primitiveGroup; }
void xrStripify (xr_vector<u16> &indices, xr_vector<u16> &perturb, int iCacheSize, int iMinStripLength) { SetCacheSize (iCacheSize); SetMinStripSize (iMinStripLength); SetListsOnly (true); // Generate strips xr_vector<PrimitiveGroup> PGROUP; GenerateStrips (&*indices.begin(),(u32)indices.size(),PGROUP); R_ASSERT (PGROUP.size()==1); R_ASSERT (PGROUP[0].type==PT_LIST); if (indices.size()!=PGROUP[0].numIndices) throw "Stripify failed."; // Remap indices xr_vector<PrimitiveGroup> xPGROUP; RemapIndices (PGROUP,u16(perturb.size()),xPGROUP); R_ASSERT (xPGROUP.size()==1); R_ASSERT (xPGROUP[0].type==PT_LIST); // Build perturberation table for(u32 index = 0; index < PGROUP[0].numIndices; index++) { u16 oldIndex = PGROUP[0].indices [index]; int newIndex = xPGROUP[0].indices [index]; R_ASSERT(oldIndex<(int)perturb.size()); R_ASSERT(newIndex<(int)perturb.size()); perturb[newIndex] = oldIndex; } // Copy indices Memory.mem_copy (&*indices.begin(),xPGROUP[0].indices,(u32)indices.size()*sizeof(u16)); // Release memory xPGROUP.clear (); PGROUP.clear (); }
//-----------------------------------------------------------------------------
// TreeCache::ReadFile():
//   Load a previously saved tree cache from disk (<fname> + TREEFILE_SUFFIX).
//   Does nothing (returns OK) if the cache already holds entries.
//   Returns ERROR_FileOpen if the file cannot be opened, ERROR_Corrupt if
//   the magic number does not match, OK otherwise.
//   CAUTION: the #ifdef WINCE below splits a single `if` statement across
//   the two preprocessor branches -- both branches open the same block that
//   is closed after #endif. Edit with care.
errorT TreeCache::ReadFile (const char * fname)
{
    // Only read the file if the cache is empty:
    if (NumInUse > 0) { return OK; }
#ifdef WINCE
    // WinCE build: all file I/O goes through Tcl channels instead of stdio.
    /*FILE * */Tcl_Channel fp;
    fileNameT fullname;
    strCopy (fullname, fname);
    strAppend (fullname, TREEFILE_SUFFIX);
    //fp = fopen (fullname, "rb");
    fp = mySilent_Tcl_OpenFileChannel(NULL, fullname, "r", 0666);
    if (fp == NULL) { return ERROR_FileOpen; }
    // Raw binary mode: no encoding conversion, no newline translation.
    my_Tcl_SetChannelOption(NULL, fp, "-encoding", "binary");
    my_Tcl_SetChannelOption(NULL, fp, "-translation", "binary");
    uint magic = readFourBytes (fp);
    if (magic != TREEFILE_MAGIC) {
        //fclose (fp);
        my_Tcl_Close(NULL, fp);
#else
    FILE * fp;
    fileNameT fullname;
    strCopy (fullname, fname);
    strAppend (fullname, TREEFILE_SUFFIX);
    fp = fopen (fullname, "rb");
    if (fp == NULL) { return ERROR_FileOpen; }
    uint magic = readFourBytes (fp);
    if (magic != TREEFILE_MAGIC) {
        fclose (fp);
#endif
        // Shared tail of the bad-magic branch opened in both #ifdef arms.
        return ERROR_Corrupt;
    }
    readTwoBytes (fp);   // Scid Version; unused

    // Header: cache size and usage counters.
    uint cacheSize = readFourBytes (fp);
    SetCacheSize (cacheSize);
    // NOTE(review): NumInUse is read straight from the file and is never
    // validated against the size just allocated by SetCacheSize() -- a
    // corrupt or truncated file could make the loop below write past
    // Cache[]. Confirm the writer guarantees NumInUse <= cacheSize.
    NumInUse = readFourBytes (fp);
    LowestTotal = readFourBytes (fp);
    LowestTotalIndex = readFourBytes(fp);

    // Read each cached tree entry: side to move, board, move nodes, filter.
    for (uint count=0; count < NumInUse; count++) {
        cachedTreeT * ctree = &(Cache[count]);
        ctree->toMove = readOneByte (fp);
        for (squareT sq=0; sq < 64; sq++) {
            ctree->board[sq] = readOneByte (fp);
        }
        // Read the moves:
        ctree->tree.moveCount = readFourBytes (fp);
        ctree->tree.totalCount = readFourBytes (fp);
        uint numMoves = ctree->tree.moveCount;
        // NOTE(review): numMoves is not clamped to MAX_TREE_NODES before
        // filling tree.node[] -- confirm the file writer enforces this bound.
        for (uint i=0; i < numMoves; i++) {
            // Read this move node:
            treeNodeT * tnode = &(ctree->tree.node[i]);
            readSimpleMove (fp, &(tnode->sm));
            readString (fp, tnode->san, 8);
            for (uint res = 0; res < 4; res++) {
                tnode->freq[res] = readFourBytes (fp);
            }
            tnode->total = readFourBytes (fp);
            tnode->score = readFourBytes (fp);
            tnode->ecoCode = readTwoBytes (fp);
            tnode->eloCount = readFourBytes (fp);
            tnode->eloSum = readFourBytes (fp);
            tnode->perfCount = readFourBytes (fp);
            tnode->perfSum = readFourBytes (fp);
            tnode->yearCount = readFourBytes (fp);
            tnode->yearSum = readFourBytes (fp);
        }
        // Read the compressed filter:
        ctree->cfilter = new CompressedFilter;
        ctree->cfilter->ReadFromFile (fp);
    }
#ifdef WINCE
    my_Tcl_Close(NULL, fp);
#else
    fclose (fp);
#endif
    return OK;
}
//-----------------------------------------------------------------------------
// CMusikLibrary::Load():
//   Open (creating if necessary) the SQLite library database, create all
//   tables/indexes/triggers/views, optionally convert an old-format
//   database, and apply user preferences. Returns true if the database was
//   opened successfully, false otherwise.
bool CMusikLibrary::Load()
{
	wxString sFilename = MUSIK_DB_FILENAME;
	//--- look for database.. if need be, create it and create tables ---//
	static const char *szCreateVersionQuery =
		"CREATE TABLE IF NOT EXISTS version ( "
		"name, major, majorsub ,minor, minorsub "
		" );";
	//--- create the tables ---//
	static const char *szCreateSongTableQuery =
		"CREATE TABLE IF NOT EXISTS songs ( "
		"songid INTEGER PRIMARY KEY, "
		"format INTEGER, "
		"vbr INTEGER, "
		"filename TEXT NOT NULL UNIQUE, "
		"artist TEXT, "
		"title TEXT, "
		"album TEXT, "
		"tracknum INTEGER, "
		"year INTEGER, "
		"genre TEXT, "
		"rating INTEGER, "
		"bitrate INTEGER, "
		"lastplayed REAL, "
		"notes TEXT, "
		"timesplayed INTEGER , "
		"duration INTEGER, "
		"timeadded REAL, "
		"filesize INTEGER, "
		"dirty INTEGER, "
		"modified REAL"
		" );";
	const char* szCreateSongTableIdxQuery =
		"CREATE INDEX IF NOT EXISTS songs_title_idx on songs (title);"
		"CREATE UNIQUE INDEX IF NOT EXISTS songs_filename_idx on songs (filename);"
		"CREATE INDEX IF NOT EXISTS songs_artist_idx on songs (artist);"
		"CREATE INDEX IF NOT EXISTS songs_album_idx on songs (album);"
		"CREATE INDEX IF NOT EXISTS songs_genre_idx on songs (genre);"
		"CREATE INDEX IF NOT EXISTS songs_year_idx on songs (year);"
		// "CREATE INDEX IF NOT EXISTS songs_tracknum_idx on songs (tracknum);"
		"CREATE INDEX IF NOT EXISTS songs_artist_album_tracknum_idx on songs (artist collate nocase,album collate nocase,tracknum);"
		"CREATE INDEX IF NOT EXISTS songs_timeadded_idx on songs (timeadded);"
		"CREATE INDEX IF NOT EXISTS songs_lastplayed_idx on songs (lastplayed);"
		;
	static const char *szCreateSongHistoryQuery =
		"CREATE TABLE IF NOT EXISTS songhistory ( "
		"songid number(10), "
		"date_played timestamp , "
		"percent_played number(10),"
		"selected_by_user number(1) "
		" );";
	static const char* szCreateSongHistoryTableIdxQuery =
		"CREATE INDEX IF NOT EXISTS songhistory_songid_idx on songhistory (songid);"
		"CREATE INDEX IF NOT EXISTS songhistory_date_played_idx on songhistory (date_played);"
		"CREATE INDEX IF NOT EXISTS songhistory_percent_played_idx on songhistory (percent_played);"
		;
	wxLogNull lognull;   // suppress wx error popups during file checks
	Shutdown();
	// Convert only when the new-format file is absent but the old one exists.
	bool bConvertFromDB2 = !wxFileExists(sFilename) && wxFileExists(MUSIK_DB_FILENAME2);
	m_pDB.reset(new MusikDb_Sqlite3());
	if(m_pDB.get() && m_pDB->Open(sFilename))
	{
#ifdef __WXMSW__
		m_pDB->Exec("PRAGMA page_size=4096;");
#endif
		m_pDB->Exec("PRAGMA temp_store = \"memory\";");
		// always create table, if it exists an error will be returned by Exec(), but we dont care.
		m_pDB->Exec( szCreateVersionQuery);
		m_pDB->Exec( szCreateSongTableQuery);
		m_pDB->Exec( szCreateSongHistoryQuery);
		if(bConvertFromDB2)
			ConvertFromDB2();
		// Indexes are created after a possible conversion so bulk inserts
		// are not slowed down by index maintenance.
		m_pDB->Exec( szCreateSongTableIdxQuery);
		m_pDB->Exec( szCreateSongHistoryTableIdxQuery);
		// Cascade-delete song history when a song row is removed.
		// NOTE(review): no "IF NOT EXISTS" here, so this Exec() fails on
		// every load after the first -- harmless given errors are ignored,
		// but worth confirming that is intentional.
		m_pDB->Exec(
			"CREATE TRIGGER song_deleted_trigger DELETE ON songs "
			"BEGIN "
			"DELETE FROM songhistory WHERE songid = old.songid;"
			"END;");
		//m_pDB->Exec( "PRAGMA synchronous = OFF;");
		SetCacheSize(wxGetApp().Prefs.nDBCacheSize);
		CheckVersion();
		SetAutoDjFilter(wxGetApp().Prefs.sAutoDjFilter );
		// Views of albums with enough total playtime (> 25 minutes).
		m_pDB->Exec(
			"CREATE VIEW valid_albums as select album,artist,most_lastplayed from ("
			"select album,artist,sum(duration) as sum_duration,max(lastplayed+0) as most_lastplayed "
			"from songs where album != '' group by album) where sum_duration > 1500000;" );
		m_pDB->Exec(
			"CREATE VIEW autodj_albums as select album,artist,most_lastplayed from ("
			"select album,artist,sum(duration) as sum_duration,max(lastplayed+0) as most_lastplayed "
			"from autodj_songs where album != '' group by album) where sum_duration > 1500000;" );
		if(m_pMasterLibrary == NULL)
			m_pDB->SetBusyHandler(m_pBusyHandler.get());
		return true;
	}
	return false;
}
// Rebuild the cached flat-string form of this path in charset `cs`,
// joining the stored components with `splitter`, and remember which
// charset/splitter the cache was built for.
void FSPath::MakeCache( int cs, unicode_t splitter )
{
	ASSERT( data.count() >= 0 );

	if ( Count() == 1 && data[0].IsEmpty() ) // i.e. the path is just "/"
	{
		if ( cs == CS_UNICODE )
		{
			SetCacheSize( 2 * sizeof( unicode_t ) );
			( ( unicode_t* )cache.data() )[0] = splitter;
			( ( unicode_t* )cache.data() )[1] = 0;
		}
		else
		{
			SetCacheSize( 2 );
			cache[0] = char( splitter & 0xFF );
			cache[1] = 0;
		}

		cacheCs = cs;
		cacheSplitter = splitter;
		return;
	}

	int i, l;

	if ( cs == CS_UNICODE )
	{
		// First pass: total length of all components...
		for ( i = l = 0; i < data.count(); i++ )
		{
			l += unicode_strlen( ( unicode_t* )data[i].Get( cs ) );
		}

		l += data.count() - 1; // ...plus the separators between them

		if ( l < 0 ) { l = 0; }

		// +1 for the terminating NUL.
		SetCacheSize( ( l + 1 )*sizeof( unicode_t ) );
		unicode_t* p = ( unicode_t* ) cache.data();

		// Second pass: copy each component, splitter-separated.
		for ( i = 0; i < data.count(); i++ )
		{
			const void* v = data[i].Get( cs );
			l = unicode_strlen( ( unicode_t* )v );

			if ( l ) { memcpy( p, v, l * sizeof( unicode_t ) ); }

			p += l;

			if ( i + 1 < data.count() ) { *( p++ ) = splitter; } /////////////// !!!
		}

		*p++ = 0;
	}
	else
	{
		// Same two-pass scheme for single-byte charsets.
		for ( i = l = 0; i < data.count(); i++ )
		{
			l += strlen( ( char* )data[i].Get( cs ) );
		}

		l += data.count() - 1; // separators

		if ( l < 0 ) { l = 0; }

		SetCacheSize( ( l + 1 )*sizeof( char ) );
		char* p = cache.data();

		for ( i = 0; i < data.count(); i++ )
		{
			const void* v = data[i].Get( cs );
			l = strlen( ( char* )v );

			// strcpy also writes a NUL at p[l]; it is overwritten by the
			// splitter or the final terminator below, so the buffer size
			// of l+1 is still sufficient.
			if ( l ) { strcpy( p, ( char* )v ); }

			p += l;

			if ( i + 1 < data.count() ) { *( p++ ) = char( splitter & 0xFF ); } /////////////// !!!
		}

		*p++ = 0;
	}

	cacheCs = cs;
	cacheSplitter = splitter;
}