/**
 * Downloads the feeds file at feedsFileURL and indexes every RSS feed it
 * lists into the supplied database.  An HTTP 3xx response is handled by
 * recursing on the redirect target; otherwise the index hashset and the
 * seen-articles vector are initialized and each feed line is processed.
 */
static void BuildIndices(rssDatabase *db, const char *feedsFileURL)
{
  url feedsURL;
  urlconnection conn;

  URLNewAbsolute(&feedsURL, feedsFileURL);
  URLConnectionNew(&conn, &feedsURL);

  if (conn.responseCode / 100 == 3) {
    // Redirected: chase the new location.
    BuildIndices(db, conn.newUrl);
  } else {
    streamtokenizer lineTokenizer;
    char feedURLBuffer[2048];

    HashSetNew(&db->indices, sizeof(rssIndexEntry), kNumIndexEntryBuckets,
               IndexEntryHash, IndexEntryCompare, IndexEntryFree);
    VectorNew(&db->previouslySeenArticles, sizeof(rssNewsArticle), NewsArticleFree, 0);

    STNew(&lineTokenizer, conn.dataStream, kNewLineDelimiters, true);
    // Each line of the feeds file looks like "<title>: <feed-url>".  Skip
    // everything through the colon (and any whitespace right after it),
    // then index whatever URL token remains on the line.
    while (STSkipUntil(&lineTokenizer, ":") != EOF) {
      STSkipOver(&lineTokenizer, ": ");
      STNextToken(&lineTokenizer, feedURLBuffer, sizeof(feedURLBuffer));
      ProcessFeed(db, feedURLBuffer);
    }
    printf("\n");
    STDispose(&lineTokenizer);
  }

  URLConnectionDispose(&conn);
  URLDispose(&feedsURL);
}
int main(int argc, char **argv) { Welcome(kWelcomeTextFile); hashset stopWords; BuildStopWordsHashset(&stopWords, kDefaultStopWordsFile); hashset wordHash; HashSetNew(&wordHash, sizeof(currWord), 10007, WordHashFn, WordCompare, WordFree); hashset articlesSeen; HashSetNew(&articlesSeen, sizeof(article), 10007, ArticleHashFn, ArticleCompare, ArticleFree); BuildIndices((argc == 1) ? kDefaultFeedsFile : argv[1], &stopWords, &wordHash, &articlesSeen); QueryIndices(&stopWords, &wordHash, &articlesSeen); HashSetDispose(&stopWords); HashSetDispose(&wordHash); HashSetDispose(&articlesSeen); return 0; }
int main(int argc, char **argv) { static const char *stopwordFilename = "/home/compilers/media/assn-4-rss-news-search-data/stop-words.txt"; static const int kStopwordBuckets = 1009; static const int kIndexNumBuckets = 10007; rssData allData; HashSetNew(&allData.stopwords, sizeof(char*), kStopwordBuckets, StringHash, StringCmp, StringFree); HashSetNew(&allData.indices, sizeof(indexEntry), kIndexNumBuckets, IndexHash, IndexCmp, IndexFree); // this vector VectorNew(&allData.explored, sizeof(article), ArticleFree, 10); Welcome(kWelcomeTextFile); ReadStopwords(&allData.stopwords, stopwordFilename); BuildIndices((argc == 1) ? kDefaultFeedsFile : argv[1], &allData ); int hcount = HashSetCount(&allData.indices); printf("hcount: %d\n", hcount); printf("Finished BuildIndices\n"); QueryIndices(&allData); return 0; }
/**
 * Pulls down the feeds file at feedsFileURL and indexes every feed listed
 * in it, accumulating results into *data.  HTTP 3xx responses are followed
 * by recursing on the redirect URL.
 */
static void BuildIndices(const char *feedsFileURL, rssFeedData *data)
{
  url u;
  urlconnection conn;

  URLNewAbsolute(&u, feedsFileURL);
  URLConnectionNew(&conn, &u);

  if (conn.responseCode / 100 == 3) {
    // Redirection: retry against the location the server handed back.
    BuildIndices(conn.newUrl, data);
  } else {
    streamtokenizer lineTokenizer;
    char documentURL[2048];

    STNew(&lineTokenizer, conn.dataStream, kNewLineDelimiters, true);
    // Lines have the form "<title>: <url>"; discard everything through the
    // colon (plus trailing whitespace) and feed the remaining URL token on.
    while (STSkipUntil(&lineTokenizer, ":") != EOF) {
      STSkipOver(&lineTokenizer, ": ");
      STNextToken(&lineTokenizer, documentURL, sizeof(documentURL));
      ProcessFeed(documentURL, data);
    }
    printf("\n");
    STDispose(&lineTokenizer);
  }

  URLConnectionDispose(&conn);
  URLDispose(&u);
}
// Runs one index-tuning pass over the given table: first analyze the table
// to decide which indices it should have, then build the chosen indices.
// NOTE(review): the analyze-then-build split is inferred from the original
// comments; confirm against Analyze()/BuildIndices() definitions.
void IndexTuner::IndexTuneHelper(storage::DataTable* table) {
  // Add required indices: analysis step that records which indices are desired.
  Analyze(table);
  // Build desired indices: materialize whatever the analysis selected.
  BuildIndices(table);
}
// Builds all render data for this patch from scratch: vertices, side
// geometry, index buffers, blend data, and water, in that order.
// NOTE(review): ordering appears significant — the Update() path in this
// class notes that blends depend on both vertex and index data — so keep
// this sequence intact.
void CPatchRData::Build()
{
	BuildVertices();
	BuildSides();
	BuildIndices();
	BuildBlends();
	BuildWater();
}
int main(int argc, char **argv) { const char *feedsFileName = (argc == 1) ? kDefaultFeedsFile : argv[1]; rssDatabase db; initThreadsData(&db); //InitThreadPackage(false); Welcome(kWelcomeTextFile); LoadStopWords(&db.stopWords, kDefaultStopWordsFile); BuildIndices(&db, feedsFileName); cleanThreadData(&db); QueryIndices(&db); pthread_exit(NULL); return 0; }
// Refreshes the cached simulation pointer and, if any update flags are set,
// rebuilds all of this patch's render data and clears the flags.  The
// rebuild is currently all-or-nothing (see TODO below).
void CPatchRData::Update(CSimulation2* simulation)
{
	m_Simulation = simulation;
	if (m_UpdateFlags!=0)
	{
		// TODO,RC 11/04/04 - need to only rebuild necessary bits of renderdata rather
		// than everything; it's complicated slightly because the blends are dependent
		// on both vertex and index data
		BuildVertices();
		BuildSides();
		BuildIndices();
		BuildBlends();
		BuildWater();
		m_UpdateFlags=0;
	}
}
/**
 * Entry point: initializes the three hashsets (stop words, previously seen
 * articles, per-word counts), builds indices from the feeds file (default
 * or argv[1]), sorts the word counts, answers queries, and disposes of all
 * three sets before returning.
 */
int main(int argc, char **argv)
{
  hashset stopWords;
  hashset prevSeenArticles;
  hashset wordCounts;

  loadStopWords(&stopWords);
  initPrevSeenArticles(&prevSeenArticles);
  initWordCount(&wordCounts);

  Welcome(kWelcomeTextFile);

  const char *feedsFile = (argc == 1) ? kDefaultFeedsFile : argv[1];
  BuildIndices(feedsFile, &stopWords, &prevSeenArticles, &wordCounts);
  wordCountSort(&wordCounts);
  QueryIndices(&stopWords, &wordCounts);

  HashSetDispose(&stopWords);
  HashSetDispose(&prevSeenArticles);
  HashSetDispose(&wordCounts);
  return 0;
}
/**
 * Entry point: greets the user, builds the shared rssFeedData structure,
 * loads stop words, indexes every feed listed in the feeds file (default
 * URL or argv[1]), answers queries, and disposes the data structure.
 */
int main(int argc, char **argv)
{
  const char *feedsFileURL = kDefaultFeedsFileURL;
  if (argc != 1)
    feedsFileURL = argv[1];

  Welcome(kWelcomeTextURL);

  rssFeedData rssFData;
  CreateDataStructure(&rssFData);
  LoadStopWords(kDefaultStopWordsURL, &rssFData);
  //void *found = HashSetLookup(&(rssFData.stopWords), &smstr);
  BuildIndices(feedsFileURL, &rssFData);
  //HashSetMap(&(rssFData.articles), ArticleMap, NULL);
  QueryIndices(&rssFData);
  DisposeDataStructure(&rssFData);
  return 0;
}
/** * Function: main * -------------- * Serves as the entry point of the full RSS News Feed Aggregator. * * @param argc the number of tokens making up the shell command invoking the * application. It should be either 1 or 2--2 when the used wants to * specify what flat text file should be used to source all of the * RSS feeds. * @param argv the array of one of more tokens making up the command line invoking * the application. The 0th token is ignored, and the 1st one, if present, * is taken to be the path identifying where the list of RSS feeds is. * @return always 0 if it main returns normally (although there might be exit(n) calls * within the code base that end the program abnormally) */ int main(int argc, char **argv) { const char *feedsFileURL = (argc == 1) ? kDefaultFeedsFileURL : argv[1]; Welcome(kWelcomeTextURL); rssFeedData data; CreateDataStructure(&data); LoadStopWords(kDefaultStopWordsURL,&data.stopWords); BuildIndices(feedsFileURL, &data); // tests // HashSetMap(&data.stopWords, StringMap, NULL); // HashSetMap(&data.articles, ArticleMap, NULL); // HashSetMap(&data.indices, IndexMap, NULL); QueryIndices(&data); DisposeData(&data); return 0; }
// Converts this USkeletalMesh into the engine-neutral CSkeletalMesh form:
// copies bounds/origin/scale, converts either the base mesh or each LOD
// model into CSkelMeshLod entries, then copies the skeleton and attachment
// sockets and finalizes the mesh.  guard/unguard are presumably the
// project's error-context scope macros — TODO confirm.
void USkeletalMesh::ConvertMesh()
{
	guard(USkeletalMesh::ConvertMesh);

	CSkeletalMesh *Mesh = new CSkeletalMesh(this);
	ConvertedMesh = Mesh;

	// Copy whole-mesh properties; CVT() converts between the source and
	// target math/vector types.
	Mesh->BoundingBox = BoundingBox;
	Mesh->BoundingSphere = BoundingSphere;
	Mesh->RotOrigin = RotOrigin;
	Mesh->MeshScale = CVT(MeshScale);
	Mesh->MeshOrigin = CVT(MeshOrigin);

	Mesh->Lods.Empty(LODModels.Num());

#if DEBUG_SKELMESH
	appPrintf(" Base : Points[%d] Wedges[%d] Influences[%d] Faces[%d]\n",
		Points.Num(), Wedges.Num(), VertInfluences.Num(), Triangles.Num() );
#endif

	// some games has troubles with LOD models ...
#if TRIBES3
	if (GetGame() == GAME_Tribes3) goto base_mesh;
#endif
#if SWRC
	if (GetGame() == GAME_RepCommando) goto base_mesh;
#endif

	// No LOD models (or a game forced past them): convert the base mesh.
	if (!LODModels.Num())
	{
	base_mesh:
		guard(ConvertBaseMesh);

		// create CSkelMeshLod from base mesh
		CSkelMeshLod *Lod = new (Mesh->Lods) CSkelMeshLod;
		Lod->NumTexCoords = 1;
		Lod->HasNormals = false;
		Lod->HasTangents = false;
		if (Points.Num() && Wedges.Num() && VertInfluences.Num())
		{
			InitSections(*Lod);
			ConvertWedges(*Lod, Points, Wedges, VertInfluences);
			BuildIndices(*Lod);
		}
		else
		{
			appPrintf("ERROR: bad base mesh\n");
		}
		// Skip LOD conversion entirely once the base mesh is in place.
		goto skeleton;

		unguard;
	}

	// convert LODs
	for (int lod = 0; lod < LODModels.Num(); lod++)
	{
		guard(ConvertLod);

		const FStaticLODModel &SrcLod = LODModels[lod];
#if DEBUG_SKELMESH
		appPrintf(" Lod %d: Points[%d] Wedges[%d] Influences[%d] Faces[%d] Rigid(Indices[%d] Verts[%d]) Smooth(Indices[%d] Verts[%d] Stream[%d])\n",
			lod, SrcLod.Points.Num(), SrcLod.Wedges.Num(), SrcLod.VertInfluences.Num(), SrcLod.Faces.Num(),
			SrcLod.RigidIndices.Indices.Num(), SrcLod.VertexStream.Verts.Num(),
			SrcLod.SmoothIndices.Indices.Num(), SrcLod.SkinPoints.Num(),
			SrcLod.SkinningData.Num()
		);
#endif
		// if (SrcLod.Faces.Num() == 0 && SrcLod.SmoothSections.Num() > 0)
		// continue;

		CSkelMeshLod *Lod = new (Mesh->Lods) CSkelMeshLod;
		Lod->NumTexCoords = 1;
		Lod->HasNormals = false;
		Lod->HasTangents = false;

		if (IsCorrectLOD(SrcLod))
		{
			InitSections(*Lod);
			ConvertWedges(*Lod, SrcLod.Points, SrcLod.Wedges, SrcLod.VertInfluences);
			BuildIndicesForLod(*Lod, SrcLod);
		}
		else
		{
			appPrintf("WARNING: bad LOD#%d mesh, switching to base\n", lod);
			if (lod == 0)
			{
				// First LOD is unusable: throw away all LODs and fall back
				// to converting the base mesh instead.
				Mesh->Lods.Empty();
				goto base_mesh;
			}
			else
			{
				// A later LOD is unusable: drop it and stop converting LODs.
				Mesh->Lods.RemoveAt(lod);
				break;
			}
		}
		unguard;
	}

skeleton:
	// copy skeleton
	guard(ProcessSkeleton);
	Mesh->RefSkeleton.Empty(RefSkeleton.Num());
	for (int i = 0; i < RefSkeleton.Num(); i++)
	{
		const FMeshBone &B = RefSkeleton[i];
		CSkelMeshBone *Dst = new (Mesh->RefSkeleton) CSkelMeshBone;
		Dst->Name = B.Name;
		Dst->ParentIndex = B.ParentIndex;
		Dst->Position = CVT(B.BonePos.Position);
		Dst->Orientation = CVT(B.BonePos.Orientation);
	}
	unguard; // ProcessSkeleton

	// copy sockets
	int NumSockets = AttachAliases.Num();
	Mesh->Sockets.Empty(NumSockets);
	for (int i = 0; i < NumSockets; i++)
	{
		CSkelMeshSocket *DS = new (Mesh->Sockets) CSkelMeshSocket;
		DS->Name = AttachAliases[i];
		DS->Bone = AttachBoneNames[i];
		DS->Transform = CVT(AttachCoords[i]);
	}

	Mesh->FinalizeMesh();

	unguard;
}