TEST(TQQueueTest, Shuffle) {
  try {
    TQQueue<TInt> Q(64, -1);
    ASSERT_TRUE(Q.Empty());
    for (int i = 0; i < 200; i++) {
      Q.Push(i);
      ASSERT_EQ(Q.Len(), i+1);
      ASSERT_EQ(Q.Front(), 0);
      ASSERT_EQ(Q.Back(), i);
      ASSERT_EQ(Q[i], i);
    }
    TRnd Rnd;
    Q.Shuffle(Rnd);
    // sorting the shuffled contents must recover the original 0..199 sequence
    TIntV QVec;
    Q.GetSubValVec(0, 199, QVec);
    QVec.Sort(true);
    ASSERT_EQ(QVec.Len(), 200);
    for (int i = 0; i < 200; i++) {
      ASSERT_EQ(QVec[i], i);
    }
  } catch (PExcept& Except) {
    printf("Error: %s", Except->GetStr().CStr()); // CStr(): %s needs a C string, not a TStr
    throw Except;
  }
}
void TBlobBs::GenBlockLenV(TIntV& BlockLenV){
  BlockLenV.Clr();
  // all powers of two up to the maximum exponent
  for (int P2Exp = 0; P2Exp < TB4Def::MxP2Exp; P2Exp++){
    BlockLenV.Add(TInt(TB4Def::GetP2(P2Exp)));
  }
  EAssert(int(BlockLenV.Last()) < 2000000000);
  // progressively coarser linear steps: 10s, 100s, 1000s, ...
  for (int Len = 10; Len < 100; Len += 10){ BlockLenV.Add(Len); }
  for (int Len = 100; Len < 10000; Len += 100){ BlockLenV.Add(Len); }
  for (int Len = 10000; Len < 100000; Len += 1000){ BlockLenV.Add(Len); }
  for (int Len = 100000; Len < 1000000; Len += 25000){ BlockLenV.Add(Len); }
  for (int Len = 1000000; Len < 10000000; Len += 1000000){ BlockLenV.Add(Len); }
  for (int Len = 10000000; Len < 100000000; Len += 10000000){ BlockLenV.Add(Len); }
  BlockLenV.Sort();
}
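// For context, a minimal standalone sketch (plain C++, not the TBlobBs API;
// RoundUpToBlockLen is a hypothetical helper) of how a sorted block-length
// table like the one GenBlockLenV builds is typically used: round a requested
// blob size up to the smallest block that fits, via binary search.
#include <algorithm>
#include <cassert>
#include <vector>

int RoundUpToBlockLen(const std::vector<int>& BlockLenV, int ReqLen) {
  // BlockLenV must be sorted ascending; return -1 if no block is big enough
  std::vector<int>::const_iterator It =
    std::lower_bound(BlockLenV.begin(), BlockLenV.end(), ReqLen);
  return (It == BlockLenV.end()) ? -1 : *It;
}

void TestRoundUpToBlockLen() {
  std::vector<int> BlockLenV;
  for (int Len = 10; Len < 100; Len += 10) { BlockLenV.push_back(Len); }
  assert(RoundUpToBlockLen(BlockLenV, 42) == 50);   // rounds up to the next block
  assert(RoundUpToBlockLen(BlockLenV, 1000) == -1); // larger than any block
}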
int main(int argc, char *argv[]) {
  TStr BaseString = "/lfs/1/tmp/curis/week/QBDB.bin";
  TFIn BaseFile(BaseString);
  TQuoteBase *QB = new TQuoteBase;
  TDocBase *DB = new TDocBase;
  QB->Load(BaseFile);
  DB->Load(BaseFile);
  TIntV QuoteIds;
  QB->GetAllQuoteIds(QuoteIds);
  const int NumQuotes = QuoteIds.Len();
  // bucket quote strings by their number of peaks
  THash<TInt, TStrSet> PeakCounts;
  for (int i = 0; i < NumQuotes; i++) {
    TQuote CurQuote;
    if (QB->GetQuote(QuoteIds[i], CurQuote)) {
      TVec<TSecTm> Peaks;
      CurQuote.GetPeaks(DB, Peaks);
      TStr QuoteString;
      CurQuote.GetParsedContentString(QuoteString);
      TStrSet StringSet;
      if (PeakCounts.IsKey(Peaks.Len())) {
        StringSet = PeakCounts.GetDat(Peaks.Len());
      }
      StringSet.AddKey(QuoteString);
      PeakCounts.AddDat(Peaks.Len(), StringSet);
    }
  }
  // report the buckets in increasing order of peak count
  TIntV PeakCountKeys;
  PeakCounts.GetKeyV(PeakCountKeys);
  PeakCountKeys.Sort(true);
  for (int i = 0; i < PeakCountKeys.Len(); i++) {
    TStrSet CurSet = PeakCounts.GetDat(PeakCountKeys[i]);
    if (CurSet.Len() > 0) {
      printf("QUOTES WITH %d PEAKS\n", PeakCountKeys[i].Val);
      printf("#########################################\n");
      for (TStrSet::TIter l = CurSet.BegI(); l < CurSet.EndI(); l++) {
        printf("%s\n", l.GetKey().CStr());
      }
      printf("\n");
    }
  }
  delete QB;
  delete DB;
  return 0;
}
// vector benchmark with integer values
void VecBench(const int& n) {
  TIntV Vec;
  float ft0, ft1;
  int x;
  int i;
  int Found;
  int NotFound;
  int Id;

  // build the vector
  ft0 = GetCPUTime();
  for (i = 0; i < n; i++) {
    x = (int) (drand48() * 100000000);
    Vec.Add(x);
  }
  printf("vec : size %d\n", Vec.Len());
  ft1 = GetCPUTime();
  printf("vec : %7.3fs inserting %d numbers\n", ft1-ft0, i);

  // sort the vector
  ft0 = GetCPUTime();
  Vec.Sort();
  printf("vec : size %d\n", Vec.Len());
  ft1 = GetCPUTime();
  printf("vec : %7.3fs sorting %d numbers\n", ft1-ft0, i);

  // search the sorted vector with binary search
  ft0 = GetCPUTime();
  Found = 0;
  NotFound = 0;
  for (i = 0; i < n; i++) {
    x = (int) (drand48() * 100000000);
    Id = Vec.IsInBin(x);
    if (Id == 0) { NotFound++; } else { Found++; }
  }
  printf("vec : found %d, notfound %d\n", Found, NotFound);
  ft1 = GetCPUTime();
  printf("vec : %7.3fs searching %d numbers\n", ft1-ft0, i);
}
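// A minimal driver for the benchmark above, assuming it is linked against
// SNAP's base library (which provides TIntV and GetCPUTime). srand48 seeds
// drand48 so successive runs draw the same numbers.
#include <stdlib.h>

int main(int, char *[]) {
  srand48(1);        // fixed seed for reproducible timings
  VecBench(1000000); // build, sort, and binary-search one million ints
  return 0;
}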
void TGStatVec::SaveTxt(const TStr& FNmPref, const TStr& Desc) const {
  FILE *F = fopen(TStr::Fmt("growth.%s.tab", FNmPref.CStr()).CStr(), "wt");
  fprintf(F, "# %s\n", Desc.CStr());
  fprintf(F, "# %s", TTmInfo::GetTmUnitStr(TmUnit).CStr());
  // collect the union of statistics present in any snapshot
  TIntSet StatValSet;
  for (int i = 0; i < Len(); i++) {
    for (int v = gsvNone; v < gsvMx; v++) {
      if (At(i)->HasVal(TGStatVal(v))) { StatValSet.AddKey(v); }
    }
  }
  TIntV StatValV;
  StatValSet.GetKeyV(StatValV);
  StatValV.Sort();
  // header row: one column per statistic, then the time stamp
  for (int sv = 0; sv < StatValV.Len(); sv++) {
    fprintf(F, "\t%s", TGStat::GetValStr(TGStatVal(StatValV[sv].Val)).CStr());
  }
  fprintf(F, "\tTime\n"); // tab was missing: "Time" ran into the last column name
  for (int i = 0; i < Len(); i++) {
    const TGStat& G = *At(i);
    for (int sv = 0; sv < StatValV.Len(); sv++) {
      fprintf(F, "%g\t", G.GetVal(TGStatVal(StatValV[sv].Val)));
    }
    fprintf(F, "%s\n", G.GetTmStr().CStr());
  }
  fclose(F);
}
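// A sketch of the resulting tab-separated layout ("\t" marks a tab; stat
// names and values are hypothetical, since the actual columns depend on which
// TGStatVal entries the snapshots carry):
//
//   # My growth experiment
//   # Seconds \t Nodes \t Edges \t Time
//   100 \t 250 \t 12:00:00
//   200 \t 510 \t 13:00:00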
void TTable::Unique(TStr Col){
  if (!ColTypeMap.IsKey(Col)) { TExcept::Throw("no such column " + Col); }
  TIntV RemainingRows = TIntV(NumValidRows, 0); // capacity hint; starts empty
  // group by the given column (keys) and keep only the first row for each key
  switch (GetColType(Col)) {
    case INT: {
      THash<TInt,TIntV> T; // can't really estimate the size of T for constructor hinting
      GroupByIntCol(Col, T, TIntV(0), true);
      for (THash<TInt,TIntV>::TIter it = T.BegI(); it < T.EndI(); it++) {
        RemainingRows.Add(it->Dat[0]);
      }
      break;
    }
    case FLT: {
      THash<TFlt,TIntV> T;
      GroupByFltCol(Col, T, TIntV(0), true);
      for (THash<TFlt,TIntV>::TIter it = T.BegI(); it < T.EndI(); it++) {
        RemainingRows.Add(it->Dat[0]);
      }
      break;
    }
    case STR: {
      THash<TStr,TIntV> T;
      GroupByStrCol(Col, T, TIntV(0), true);
      for (THash<TStr,TIntV>::TIter it = T.BegI(); it < T.EndI(); it++) {
        RemainingRows.Add(it->Dat[0]);
      }
      break;
    }
  }
  // With the current implementation of GroupByX, RemainingRows is already
  // sorted: GroupByX returns a hash table T: X --> TIntV, and if key X1
  // appears before key X2 in T, then T(X1)[0] <= T(X2)[0]. Since that
  // invariant may not hold for every implementation, we sort defensively here.
  RemainingRows.Sort();
  KeepSortedRows(RemainingRows);
}
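// The core idea behind Unique, stripped of the TTable machinery: keep the
// index of the first row seen for each distinct key. A standalone sketch in
// plain C++ (hypothetical names, not the TTable API); it also shows why the
// surviving row indices come out already sorted.
#include <string>
#include <unordered_set>
#include <vector>

std::vector<int> FirstRowPerKey(const std::vector<std::string>& KeyCol) {
  std::unordered_set<std::string> Seen;
  std::vector<int> Remaining;
  for (int RowI = 0; RowI < (int) KeyCol.size(); RowI++) {
    if (Seen.insert(KeyCol[RowI]).second) { // true iff the key was new
      Remaining.push_back(RowI);
    }
  }
  return Remaining; // sorted by construction: rows are scanned in order
}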
void TGreedyAlg::runGreedyAlgorithm() {
  outputGraph = TKColourNet::New();
  for (THash<TInt, TNodeInfo>::TIter NI = nodeNmH.BegI(); NI < nodeNmH.EndI(); NI++) {
    outputGraph->AddNode(NI.GetKey(), TKColourNode());
    printf("Added node %d to output graph\n", (int) NI.GetKey());
  }
  // for each node i
  for (THash<TInt, TNodeInfo>::TIter NI = nodeNmH.BegI(); NI < nodeNmH.EndI(); NI++) {
    int nodeI = NI.GetKey();
    printf("***** Considering node i: %d *****\n", nodeI);
    // initialise the set of unaccounted cascades U
    TVec<TCascade> cascades = cascadeV;
    // initialise the parental neighbourhood
    TIntV parentalNeighbourhood;
    bool uselessUnaccountedCascadesLeft = false;
    while ((cascades.Len() != 0) && (!uselessUnaccountedCascadesLeft)) {
      // find the node j that could possibly have infected node i in the
      // largest number of unaccounted cascades
      int argmax = -1;
      int maxNoCascades = 0;
      for (THash<TInt, TNodeInfo>::TIter LNI = nodeNmH.BegI(); LNI < nodeNmH.EndI(); LNI++) {
        int nodeJ = LNI.GetKey();
        if (nodeI == nodeJ) { continue; }
        int countPotentialNoCascades = 0;
        for (int c = 0; c < cascades.Len(); c++) {
          TCascade cascade = cascades[c];
          if (cascade.IsNode(nodeJ) && cascade.IsNode(nodeI)) {
            // j counts as a potential parent if it was observed exactly one
            // time step before i
            if (cascade.GetTm(nodeJ) == (cascade.GetTm(nodeI) - 1)) {
              countPotentialNoCascades++;
            }
          }
        }
        if (countPotentialNoCascades > maxNoCascades) {
          maxNoCascades = countPotentialNoCascades;
          argmax = nodeJ;
        }
      }
      if (argmax == -1) {
        // no candidate parent accounts for any remaining cascade
        uselessUnaccountedCascadesLeft = true;
      } else {
        // add the arg max (k) to the set of parental neighbours
        parentalNeighbourhood.Add(argmax);
        // remove the cascades that k accounts for; note the looser condition
        // here (k observed any time before i, not exactly one step before)
        TIntV cascadesToRemove;
        for (int c = 0; c < cascades.Len(); c++) {
          TCascade cascade = cascades[c];
          if (cascade.IsNode(argmax) && cascade.IsNode(nodeI)) {
            if (cascade.GetTm(nodeI) > cascade.GetTm(argmax)) {
              cascadesToRemove.Add(c);
            }
          }
        }
        cascadesToRemove.Sort();
        // deleting in ascending order shifts each later target left by one
        for (int i = 0; i < cascadesToRemove.Len(); i++) {
          cascades.Del(cascadesToRemove[i] - i);
        }
      }
    }
    // add an edge to node i from each of its parents
    for (int i = 0; i < parentalNeighbourhood.Len(); i++) {
      int srcNodeId = parentalNeighbourhood[i];
      int dstNodeId = nodeI;
      outputGraph->AddEdge(srcNodeId, dstNodeId);
    }
  }
}
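// The index-shifted deletion above (cascades.Del(cascadesToRemove[i] - i))
// only works because the indices are sorted ascending: each deletion shifts
// every later target one position to the left. A minimal standalone check of
// the idiom in plain C++:
#include <cassert>
#include <vector>

void TestShiftedErase() {
  std::vector<int> V;
  for (int x = 10; x <= 50; x += 10) { V.push_back(x); } // {10,20,30,40,50}
  const int ToRemove[] = {1, 3}; // must be sorted ascending
  for (int I = 0; I < 2; I++) {
    // earlier erases have shifted this target left by I positions
    V.erase(V.begin() + (ToRemove[I] - I));
  }
  assert(V.size() == 3 && V[0] == 10 && V[1] == 30 && V[2] == 50);
}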