void Octree::Raycast(Vector<RaycastResult>& result, const Ray& ray, unsigned short nodeFlags, float maxDistance, unsigned layerMask)
{
    PROFILE(OctreeRaycast);

    result.Clear();
    CollectNodes(result, &root, ray, nodeFlags, maxDistance, layerMask);
    Sort(result.Begin(), result.End(), CompareRaycastResults);
}
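// Usage sketch (illustrative, not from the original source). The node-flag and
// layer-mask arguments below are just "match everything" placeholder values; the
// real named constants for them are not shown in this snippet.
void ExampleRaycast(Octree& octree, const Ray& ray)
{
    Vector<RaycastResult> results;
    octree.Raycast(results, ray, 0xffff, 100.0f, 0xffffffff);
    // Raycast() sorts the hits with CompareRaycastResults before returning, so
    // results[0] (when results.Size() > 0) is the first hit in that ordering.
    if (results.Size())
    {
        // Handle the nearest hit here.
    }
}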
void Scene::SetVarNamesAttr(const String& value)
{
    Vector<String> varNames = value.Split(';');

    varNames_.Clear();
    for (Vector<String>::ConstIterator i = varNames.Begin(); i != varNames.End(); ++i)
        varNames_[*i] = *i;
}
// Test the const iterator.
void PrintVector2(const Vector<int>& v)
{
    Vector<int>::ConstIterator it = v.Begin();
    for (; it != v.End(); ++it)
    {
        cout << *it << " ";
    }
    cout << endl;
}
/// Set all animation tracks.
void Animation::SetTracks(const Vector<AnimationTrack>& tracks)
{
    tracks_.Clear();

    for (Vector<AnimationTrack>::ConstIterator itr = tracks.Begin(); itr != tracks.End(); ++itr)
    {
        tracks_[itr->name_] = *itr;
    }
}
void UEventObserver::NativeRemoveMatch(
    /* [in] */ const String& matchStr)
{
    AutoLock lock(sMatchesMutex);

    Vector<String>::Iterator it;
    for (it = sMatches.Begin(); it != sMatches.End(); ++it) {
        if ((*it).Equals(matchStr)) {
            sMatches.Erase(it);
            break; // only remove first occurrence
        }
    }
}
/*---------------------------------------------------------------------------------------------- Read Unicode data field out of the column icol, and set prgchData to point at the result. The caller supplies a buffer prgccBuf of size cchMaxBuf into which a small result can be placed, and also a Vector<wchar> into which a larger result will be placed. Note: call from inside try/catch block; may throw exceptions. ----------------------------------------------------------------------------------------------*/ void VwRsOdbcDa::ReadUnicode(SQLHSTMT hstmt, int icol, wchar * prgchBuf, int cchMaxBuf, Vector<wchar> & vchData, wchar * & prgchData, long & cchRet) { long cbData; RETCODE rc; prgchData = prgchBuf; // by default (short strings) data is returned in the buffer. // Read the formatting data. rc = CheckSqlRc(SQLGetData(hstmt, (unsigned short)(icol), SQL_C_WCHAR, prgchBuf, cchMaxBuf * 2, &cbData)); if (cbData < 0) { // NULL cbData = 0; } // REVIEW ShonK (JohnT): Should we check the SQLSTATE for 01004? If so, how? if (rc == SQL_SUCCESS_WITH_INFO) { vchData.Clear(); // forget anything from previous property do { if ((uint)cbData > (uint)cchMaxBuf * 2) cbData = cchMaxBuf * 2 - 2; //-2 allows for terminating null) vchData.Replace(vchData.Size(), vchData.Size(), prgchBuf, cbData / 2); } while ((rc = CheckSqlRc(SQLGetData(hstmt, (unsigned short)(icol), SQL_C_WCHAR, prgchBuf, cchMaxBuf * 2, &cbData))) != SQL_NO_DATA); cbData = vchData.Size() * 2; prgchData = vchData.Begin(); } else if (rc != SQL_SUCCESS) { CheckHr(WarnHr(E_UNEXPECTED)); // TOxDO JohnT: throw exception of some sort... } Assert(prgchData == vchData.Begin() && cbData == vchData.Size() * 2 || prgchData == prgchBuf && cbData <= cchMaxBuf * 2); cchRet = cbData / 2; }
/*---------------------------------------------------------------------------------------------- Read a binary data field out of the column icol, and set prgbData to point at the result. The caller supplies a buffer prgbBuf of size cbMaxBuf into which a small result can be placed, and also a Vector<byte> into which a larger result will be placed. Note: call from inside try/catch block; may throw exceptions. ----------------------------------------------------------------------------------------------*/ void VwRsOdbcDa::ReadBinary(SQLHSTMT hstmt, int icol, byte * prgbBuf, int cbMaxBuf, Vector<byte> & vbData, byte * & prgbData, long & cbRet) { long cbData; RETCODE rc; prgbData = prgbBuf; // by default (short strings) data is returned in the buffer. // Read the formatting data. rc = CheckSqlRc(SQLGetData(hstmt, (unsigned short)(icol), SQL_C_BINARY, prgbBuf, cbMaxBuf, &cbData)); if (cbData < 0) { // Null field; return 0 length cbData = 0; } // REVIEW ShonK (JohnT): Should we check the SQLSTATE for 01004? If so, how? if (rc == SQL_SUCCESS_WITH_INFO) { vbData.Clear(); // in case reused from an earlier call do { if ((uint)cbData > (uint)cbMaxBuf) cbData = cbMaxBuf; // Do NOT subtract 1, binary data is not null terminated. vbData.Replace(vbData.Size(), vbData.Size(), prgbBuf, cbData); } while ((rc = CheckSqlRc(SQLGetData(hstmt, (unsigned short)(icol), SQL_C_BINARY, prgbBuf, cbMaxBuf, &cbData))) != SQL_NO_DATA); cbData = vbData.Size(); prgbData = vbData.Begin(); } else if (rc != SQL_SUCCESS) { CheckHr(WarnHr(E_UNEXPECTED)); } Assert(prgbData == vbData.Begin() && cbData == vbData.Size() || prgbData == prgbBuf && cbData <= cbMaxBuf); cbRet = cbData; }
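// Caller sketch for ReadBinary/ReadUnicode (illustrative only; ExampleReadBinaryColumn
// is a hypothetical helper member, not part of the original source). The contract is
// the one described above and used by VwRsOdbcDa::Load further below: a small stack
// buffer covers the common short case, the Vector is filled only when the driver
// reports more data, and prgbData ends up pointing at whichever storage holds the bytes.
void VwRsOdbcDa::ExampleReadBinaryColumn(SQLHSTMT hstmt, int icol)
{
    const int kcbMaxData = 1000;   // size of the small on-stack buffer
    byte rgbData[kcbMaxData];      // short values are returned here
    Vector<byte> vbData;           // long values spill into this vector instead
    byte * prgbData;               // set by ReadBinary to rgbData or vbData.Begin()
    long cbData;                   // number of valid bytes at prgbData (0 for NULL fields)

    ReadBinary(hstmt, icol, rgbData, kcbMaxData, vbData, prgbData, cbData);
    // Use prgbData[0 .. cbData-1] here; no need to care which buffer was used.
}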
void InputContext::ReleaseAllKeys()
{
    Vector<Key> keysToRelease;

    // Double-buffer the to-be-released keys into a temporary list first, since invoking the trigger
    // function will add new entries to newKeyEvents.
    for(auto iter = heldKeysBuffered.Begin(); iter != heldKeysBuffered.End(); ++iter)
        keysToRelease.Push(iter->first_);
    for(auto iter = newKeyEvents.Begin(); iter != newKeyEvents.End(); ++iter)
        keysToRelease.Push(iter->first_);
    for(auto iter = keysToRelease.Begin(); iter != keysToRelease.End(); ++iter)
        TriggerKeyReleaseEvent(*iter);
}
bool Console::PopulateInterpreter() { interpreters_->RemoveAllItems(); EventReceiverGroup* group = context_->GetEventReceivers(E_CONSOLECOMMAND); if (!group || group->receivers_.Empty()) return false; Vector<String> names; for (unsigned i = 0; i < group->receivers_.Size(); ++i) { Object* receiver = group->receivers_[i]; if (receiver) names.Push(receiver->GetTypeName()); } Sort(names.Begin(), names.End()); unsigned selection = M_MAX_UNSIGNED; for (unsigned i = 0; i < names.Size(); ++i) { const String& name = names[i]; if (name == commandInterpreter_) selection = i; Text* text = new Text(context_); text->SetStyle("ConsoleText"); text->SetText(name); interpreters_->AddItem(text); } const IntRect& border = interpreters_->GetPopup()->GetLayoutBorder(); interpreters_->SetMaxWidth(interpreters_->GetListView()->GetContentElement()->GetWidth() + border.left_ + border.right_); bool enabled = interpreters_->GetNumItems() > 1; interpreters_->SetEnabled(enabled); interpreters_->SetFocusMode(enabled ? FM_FOCUSABLE_DEFOCUSABLE : FM_NOTFOCUSABLE); if (selection == M_MAX_UNSIGNED) { selection = 0; commandInterpreter_ = names[selection]; } interpreters_->SetSelection(selection); return true; }
void WorkspaceWork::ScanWorkspace() { Workspace wspc; if(main.GetCount()) wspc.Scan(main); actualpackage.Clear(); actualfileindex = -1; filelist.Clear(); package.Clear(); Vector<String> pks; speed.Clear(); for(int i = 0; i < wspc.package.GetCount(); i++) { pks.Add(wspc.package.GetKey(i)); speed.Add(wspc.GetPackage(i).optimize_speed); } if(sort && wspc.GetCount()) { PackageOrder po; po.mainpath = PackagePath(pks[0]); IndexSort(pks.Begin() + 1, pks.End(), speed.Begin() + 1, po); } for(int i = 0; i < wspc.package.GetCount(); i++) { String pk = pks[i]; Font fnt = ListFont(); if(i == 0) fnt.Bold(); PackageInfo pi = GetPackageInfo(pk); if(pi.bold) fnt.Bold(); if(pi.italic) fnt.Italic(); package.Add(pk, Null, fnt, Nvl(pi.ink, SColorText()), false, 0, Null, SColorMark); } if(!organizer) { if(main.GetCount()) package.Add(prjaux, IdeImg::PrjAux(), ListFont(), Magenta); package.Add(ideaux, IdeImg::IdeAux(), ListFont(), Magenta); package.Add(tempaux, IdeImg::TempAux(), ListFont(), Magenta); if(main.GetCount()) package.Add(METAPACKAGE, IdeImg::Meta(), ListFont(), Red); } package.SetCursor(0); SyncErrorPackages(); }
bool XPathQuery::SetQuery(const String& queryString, const String& variableString, bool bind)
{
    if (!variableString.Empty())
    {
        Clear();
        variables_ = new pugi::xpath_variable_set();

        // Parse the variable string having format "name1:type1,name2:type2,..." where type is one of "Bool", "Float", "String", "ResultSet"
        Vector<String> vars = variableString.Split(',');

        for (Vector<String>::ConstIterator i = vars.Begin(); i != vars.End(); ++i)
        {
            Vector<String> tokens = i->Trimmed().Split(':');
            if (tokens.Size() != 2)
                continue;

            pugi::xpath_value_type type;
            if (tokens[1] == "Bool")
                type = pugi::xpath_type_boolean;
            else if (tokens[1] == "Float")
                type = pugi::xpath_type_number;
            else if (tokens[1] == "String")
                type = pugi::xpath_type_string;
            else if (tokens[1] == "ResultSet")
                type = pugi::xpath_type_node_set;
            else
                return false;

            if (!variables_->add(tokens[0].CString(), type))
                return false;
        }
    }

    queryString_ = queryString;

    if (bind)
        Bind();

    return true;
}
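// Usage sketch (illustrative; the query text, variable names and ExampleBindXPathVariables
// itself are made up). The variable string follows the "name1:type1,name2:type2,..."
// format parsed above, with types Bool, Float, String or ResultSet, and the XPath
// expression refers to the variables with the usual '$name' syntax.
bool ExampleBindXPathVariables(XPathQuery& query)
{
    return query.SetQuery("/scene/node[@name = $nodeName and @enabled = $isEnabled]",
        "nodeName:String,isEnabled:Bool", true);
}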
unsigned WorkQueue::RemoveWorkItems(const Vector<SharedPtr<WorkItem> >& items)
{
    MutexLock lock(queueMutex_);
    unsigned removed = 0;

    for (Vector<SharedPtr<WorkItem> >::ConstIterator i = items.Begin(); i != items.End(); ++i)
    {
        List<WorkItem*>::Iterator j = queue_.Find(i->Get());
        if (j != queue_.End())
        {
            List<SharedPtr<WorkItem> >::Iterator k = workItems_.Find(*i);
            if (k != workItems_.End())
            {
                queue_.Erase(j);
                ReturnToPool(*k);
                workItems_.Erase(k);
                ++removed;
            }
        }
    }

    return removed;
}
static Boolean IsMatch(
    /* [in] */ const char* buffer,
    /* [in] */ Int32 length)
{
    AutoLock lock(sMatchesMutex);

    Vector<String>::Iterator it;
    for (it = sMatches.Begin(); it != sMatches.End(); ++it) {
        String match = *it;

        // Consider all zero-delimited fields of the buffer.
        const char* field = buffer;
        const char* end = buffer + length + 1;
        do {
            if (strstr(field, match.string())) {
                Logger::V("Matched uevent message with pattern: %s", match.string());
                return TRUE;
            }
            field += strlen(field) + 1;
        } while (field != end);
    }
    return FALSE;
}
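// Illustrative input for IsMatch (values invented): a uevent message is a block of
// NUL-terminated fields packed into one buffer, which is what the
// "field += strlen(field) + 1" walk above iterates over.
static void ExampleIsMatch()
{
    // Two fields: "change@/devices/virtual/switch/h2w" and "ACTION=change".
    static const char buffer[] = "change@/devices/virtual/switch/h2w\0ACTION=change";
    // length excludes the final terminating NUL, matching the loop's
    // "end = buffer + length + 1" bound.
    Boolean matched = IsMatch(buffer, static_cast<Int32>(sizeof(buffer) - 1));
    (void)matched;
}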
void Run(const Vector<String>& arguments) { if (arguments.Size() < 2) ErrorExit("Usage: PackageTool <directory to process> <package name> [basepath]\n"); const String& dirName = arguments[0]; const String& packageName = arguments[1]; if (arguments.Size() > 2) basePath_ = AddTrailingSlash(arguments[2]); PrintLine("Scanning directory " + dirName + " for files"); // Get the file list recursively Vector<String> fileNames; fileSystem_->ScanDir(fileNames, dirName, "*.*", SCAN_FILES, true); if (!fileNames.Size()) ErrorExit("No files found"); // Check for extensions to ignore for (unsigned i = fileNames.Size() - 1; i < fileNames.Size(); --i) { String extension = GetExtension(fileNames[i]); for (unsigned j = 0; ignoreExtensions_[j].Length(); ++j) { if (extension == ignoreExtensions_[j]) { fileNames.Erase(fileNames.Begin() + i); break; } } } for (unsigned i = 0; i < fileNames.Size(); ++i) ProcessFile(fileNames[i], dirName); WritePackageFile(packageName, dirName); }
Length View::GetGlyphs( GlyphInfo* glyphs, Vector2* glyphPositions, GlyphIndex glyphIndex, Length numberOfGlyphs ) const { Length numberOfLaidOutGlyphs = 0u; if( mImpl->mVisualModel ) { // If ellipsis is enabled, the number of glyphs the layout engine has laid out may be less than 'numberOfGlyphs'. // Check the last laid out line to know if the layout engine elided some text. const Length numberOfLines = mImpl->mVisualModel->mLines.Count(); if( numberOfLines > 0u ) { const LineRun& lastLine = *( mImpl->mVisualModel->mLines.Begin() + ( numberOfLines - 1u ) ); // If ellipsis is enabled, calculate the number of laid out glyphs. // Otherwise use the given number of glyphs. if( lastLine.ellipsis ) { numberOfLaidOutGlyphs = lastLine.glyphRun.glyphIndex + lastLine.glyphRun.numberOfGlyphs; } else { numberOfLaidOutGlyphs = numberOfGlyphs; } // Retrieve from the visual model the glyphs and positions. mImpl->mVisualModel->GetGlyphs( glyphs, glyphIndex, numberOfLaidOutGlyphs ); mImpl->mVisualModel->GetGlyphPositions( glyphPositions, glyphIndex, numberOfLaidOutGlyphs ); // Get the lines for the given range of glyphs. // The lines contain the alignment offset which needs to be added to the glyph's position. LineIndex firstLine = 0u; Length numberOfLines = 0u; mImpl->mVisualModel->GetNumberOfLines( glyphIndex, numberOfLaidOutGlyphs, firstLine, numberOfLines ); Vector<LineRun> lines; lines.Resize( numberOfLines ); LineRun* lineBuffer = lines.Begin(); mImpl->mVisualModel->GetLinesOfGlyphRange( lineBuffer, glyphIndex, numberOfLaidOutGlyphs ); // Get the first line for the given glyph range. LineIndex lineIndex = firstLine; LineRun* line = lineBuffer + lineIndex; // Index of the last glyph of the line. GlyphIndex lastGlyphIndexOfLine = line->glyphRun.glyphIndex + line->glyphRun.numberOfGlyphs - 1u; // Add the alignment offset to the glyph's position. for( Length index = 0u; index < numberOfLaidOutGlyphs; ++index ) { ( *( glyphPositions + index ) ).x += line->alignmentOffset; if( lastGlyphIndexOfLine == index ) { // Get the next line. ++lineIndex; if( lineIndex < numberOfLines ) { line = lineBuffer + lineIndex; lastGlyphIndexOfLine = line->glyphRun.glyphIndex + line->glyphRun.numberOfGlyphs - 1u; } } } if( 1u == numberOfLaidOutGlyphs ) { // not a point try to do ellipsis with only one laid out character. return numberOfLaidOutGlyphs; } if( lastLine.ellipsis ) { // firstPenX, penY and firstPenSet are used to position the ellipsis glyph if needed. float firstPenX = 0.f; // Used if rtl text is elided. float penY = 0.f; bool firstPenSet = false; // Add the ellipsis glyph. bool inserted = false; float removedGlypsWidth = 0.f; Length numberOfRemovedGlyphs = 0u; GlyphIndex index = numberOfLaidOutGlyphs - 1u; // The ellipsis glyph has to fit in the place where the last glyph(s) is(are) removed. while( !inserted ) { const GlyphInfo& glyphToRemove = *( glyphs + index ); if( 0u != glyphToRemove.fontId ) { // i.e. The font id of the glyph shaped from the '\n' character is zero. // Need to reshape the glyph as the font may be different in size. const GlyphInfo& ellipsisGlyph = mImpl->mFontClient.GetEllipsisGlyph( mImpl->mFontClient.GetPointSize( glyphToRemove.fontId ) ); if( !firstPenSet ) { const Vector2& position = *( glyphPositions + index ); // Calculates the penY of the current line. It will be used to position the ellipsis glyph. penY = position.y + glyphToRemove.yBearing; // Calculates the first penX which will be used if rtl text is elided. 
firstPenX = position.x - glyphToRemove.xBearing; if( firstPenX < -ellipsisGlyph.xBearing ) { // Avoids to exceed the bounding box when rtl text is elided. firstPenX = -ellipsisGlyph.xBearing; } removedGlypsWidth = -ellipsisGlyph.xBearing; firstPenSet = true; } removedGlypsWidth += std::min( glyphToRemove.advance, ( glyphToRemove.xBearing + glyphToRemove.width ) ); // Calculate the width of the ellipsis glyph and check if it fits. const float ellipsisGlyphWidth = ellipsisGlyph.width + ellipsisGlyph.xBearing; if( ellipsisGlyphWidth < removedGlypsWidth ) { GlyphInfo& glyphInfo = *( glyphs + index ); Vector2& position = *( glyphPositions + index ); position.x -= ( 0.f > glyphInfo.xBearing ) ? glyphInfo.xBearing : 0.f; // Replace the glyph by the ellipsis glyph. glyphInfo = ellipsisGlyph; // Change the 'x' and 'y' position of the ellipsis glyph. if( position.x > firstPenX ) { position.x = firstPenX + removedGlypsWidth - ellipsisGlyphWidth; } position.x += ellipsisGlyph.xBearing; position.y = penY - ellipsisGlyph.yBearing; inserted = true; } } if( !inserted ) { if( index > 0u ) { --index; } else { // No space for the ellipsis. inserted = true; } ++numberOfRemovedGlyphs; } } // 'Removes' all the glyphs after the ellipsis glyph. numberOfLaidOutGlyphs -= numberOfRemovedGlyphs; } } } return numberOfLaidOutGlyphs; }
int main() { #ifdef _MSC_VER _CrtSetDbgFlag(_CRTDBG_ALLOC_MEM_DF | _CRTDBG_LEAK_CHECK_DF); #endif printf("Size of String: %d\n", sizeof(String)); printf("Size of Vector: %d\n", sizeof(Vector<int>)); printf("Size of List: %d\n", sizeof(List<int>)); printf("Size of HashMap: %d\n", sizeof(HashMap<int, int>)); printf("Size of RefCounted: %d\n", sizeof(RefCounted)); { printf("\nTesting AutoPtr assignment\n"); AutoPtr<Test> ptr1(new Test); AutoPtr<Test> ptr2; ptr2 = ptr1; } { printf("\nTesting AutoPtr copy construction\n"); AutoPtr<Test> ptr1(new Test); AutoPtr<Test> ptr2(ptr1); } { printf("\nTesting AutoPtr detaching\n"); AutoPtr<Test> ptr1(new Test); // We would now have a memory leak if we don't manually delete the object Test* object = ptr1.Detach(); delete object; } { printf("\nTesting AutoPtr inside a vector\n"); Vector<AutoPtr<Test> > vec; printf("Filling vector\n"); for (size_t i = 0; i < 4; ++i) vec.Push(new Test()); printf("Clearing vector\n"); vec.Clear(); } { printf("\nTesting SharedPtr\n"); SharedPtr<TestRefCounted> ptr1(new TestRefCounted); SharedPtr<TestRefCounted> ptr2(ptr1); printf("Number of refs: %d\n", ptr1.Refs()); } { printf("\nTesting WeakPtr\n"); TestRefCounted* object = new TestRefCounted; WeakPtr<TestRefCounted> ptr1(object); WeakPtr<TestRefCounted> ptr2(ptr1); printf("Number of weak refs: %d expired: %d\n", ptr1.WeakRefs(), ptr1.IsExpired()); ptr2.Reset(); delete object; printf("Number of weak refs: %d expired: %d\n", ptr1.WeakRefs(), ptr1.IsExpired()); } { printf("\nTesting Vector\n"); HiresTimer t; Vector<int> vec; SetRandomSeed(0); for (size_t i = 0; i < NUM_ITEMS; ++i) vec.Push(Rand()); int sum = 0; int count = 0; for (auto it = vec.Begin(); it != vec.End(); ++it) { sum += *it; ++count; } int usec = (int)t.ElapsedUSec(); printf("Size: %d capacity: %d\n", vec.Size(), vec.Capacity()); printf("Counted vector items %d, sum: %d\n", count, sum); printf("Processing took %d usec\n", usec); } { printf("\nTesting List\n"); HiresTimer t; List<int> list; SetRandomSeed(0); for (size_t i = 0; i < NUM_ITEMS; ++i) list.Push(Rand()); int sum = 0; int count = 0; for (auto it = list.Begin(); it != list.End(); ++it) { sum += *it; ++count; } int usec = (int)t.ElapsedUSec(); printf("Size: %d\n", list.Size()); printf("Counted list items %d, sum: %d\n", count, sum); printf("Processing took %d usec\n", usec); printf("\nTesting List insertion\n"); List<int> list2; List<int> list3; for (int i = 0; i < 10; ++i) list3.Push(i); list2.Insert(list2.End(), list3); for (auto it = list2.Begin(); it != list2.End(); ++it) printf("%d ", *it); printf("\n"); } { printf("\nTesting String\n"); HiresTimer t; String test; for (size_t i = 0; i < NUM_ITEMS/4; ++i) test += "Test"; String test2; test2.AppendWithFormat("Size: %d capacity: %d\n", test.Length(), test.Capacity()); printf(test2.CString()); test2 = test2.ToUpper(); printf(test2.CString()); test2.Replace("SIZE:", "LENGTH:"); printf(test2.CString()); int usec = (int)t.ElapsedUSec(); printf("Processing took %d usec\n", usec); } { printf("\nTesting HashSet\n"); HiresTimer t; size_t found = 0; unsigned sum = 0; HashSet<int> testHashSet; srand(0); found = 0; sum = 0; printf("Insert, search and iteration, %d keys\n", NUM_ITEMS); for (size_t i = 0; i < NUM_ITEMS; ++i) { int number = (rand() & 32767); testHashSet.Insert(number); } for (int i = 0; i < 32768; ++i) { if (testHashSet.Find(i) != testHashSet.End()) ++found; } for (auto it = testHashSet.Begin(); it != testHashSet.End(); ++it) sum += *it; int usec = (int)t.ElapsedUSec(); printf("Set size and sum: %d 
%d\n", testHashSet.Size(), sum); printf("Processing took %d usec\n", usec); } { printf("\nTesting HashMap\n"); HashMap<int, int> testHashMap; for (int i = 0; i < 10; ++i) testHashMap.Insert(MakePair(i, rand() & 32767)); printf("Keys: "); Vector<int> keys = testHashMap.Keys(); for (size_t i = 0; i < keys.Size(); ++i) printf("%d ", keys[i]); printf("\n"); printf("Values: "); Vector<int> values = testHashMap.Values(); for (size_t i = 0; i < values.Size(); ++i) printf("%d ", values[i]); printf("\n"); } return 0; }
String Shader::NormalizeDefines(const String& defines)
{
    Vector<String> definesVec = defines.ToUpper().Split(' ');
    Sort(definesVec.Begin(), definesVec.End());
    return String::Joined(definesVec, " ");
}
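// Behavior sketch (values illustrative): defines are upper-cased, split on spaces,
// sorted and re-joined, so differently ordered define strings normalize to the same
// key. Shown via an existing Shader instance because this snippet does not say
// whether NormalizeDefines is static.
void ExampleNormalizeDefines(Shader& shader)
{
    String a = shader.NormalizeDefines("shadow pointlight");
    String b = shader.NormalizeDefines("POINTLIGHT SHADOW");
    // Both a and b are "POINTLIGHT SHADOW".
}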
int UtcDaliVectorAcidTest(void) { tet_infoline("Testing multiple Dali::Vector's"); // create multiple vectors Vector< std::pair< float, float > > pairvector; DALI_TEST_EQUALS( ZERO, pairvector.Count(), TEST_LOCATION ); DALI_TEST_EQUALS( ZERO, pairvector.Capacity(), TEST_LOCATION ); Vector< double > doublevector; DALI_TEST_EQUALS( ZERO, doublevector.Count(), TEST_LOCATION ); DALI_TEST_EQUALS( ZERO, doublevector.Capacity(), TEST_LOCATION ); Vector< int* > intptrvector; DALI_TEST_EQUALS( ZERO, intptrvector.Count(), TEST_LOCATION ); DALI_TEST_EQUALS( ZERO, intptrvector.Capacity(), TEST_LOCATION ); Vector< Dali::Actor* > actorptrvector; DALI_TEST_EQUALS( ZERO, actorptrvector.Count(), TEST_LOCATION ); DALI_TEST_EQUALS( ZERO, actorptrvector.Capacity(), TEST_LOCATION ); Vector< long > longvector; DALI_TEST_EQUALS( ZERO, longvector.Count(), TEST_LOCATION ); DALI_TEST_EQUALS( ZERO, longvector.Capacity(), TEST_LOCATION ); Vector< char > charvector; DALI_TEST_EQUALS( ZERO, charvector.Count(), TEST_LOCATION ); DALI_TEST_EQUALS( ZERO, charvector.Capacity(), TEST_LOCATION ); // add items static unsigned int acidCount = 10000; int* ptr = NULL; for( unsigned int i = 0; i < acidCount; ++i ) { pairvector.PushBack( std::make_pair( i, i ) ); doublevector.PushBack( (double)i ); intptrvector.PushBack( (int*)ptr ); actorptrvector.PushBack( (Dali::Actor*)ptr ); longvector.PushBack( (long)i ); charvector.PushBack( (char)i ); } DALI_TEST_EQUALS( acidCount, pairvector.Count(), TEST_LOCATION ); std::size_t pairCapacity = pairvector.Capacity(); DALI_TEST_EQUALS( acidCount, doublevector.Count(), TEST_LOCATION ); std::size_t doubleCapacity = doublevector.Capacity(); DALI_TEST_EQUALS( acidCount, intptrvector.Count(), TEST_LOCATION ); std::size_t intptrCapacity = intptrvector.Capacity(); DALI_TEST_EQUALS( acidCount, actorptrvector.Count(), TEST_LOCATION ); std::size_t actorptrCapacity = actorptrvector.Capacity(); DALI_TEST_EQUALS( acidCount, longvector.Count(), TEST_LOCATION ); std::size_t longCapacity = longvector.Capacity(); DALI_TEST_EQUALS( acidCount, charvector.Count(), TEST_LOCATION ); std::size_t charCapacity = charvector.Capacity(); tet_printf("Dali::Vector< pair > capacity after %d pushbacks is %d", acidCount, pairCapacity ); tet_printf("Dali::Vector< double > capacity after %d pushbacks is %d", acidCount, doubleCapacity ); tet_printf("Dali::Vector< int* > capacity after %d pushbacks is %d", acidCount, intptrCapacity ); tet_printf("Dali::Vector< Actor* > capacity after %d pushbacks is %d", acidCount, actorptrCapacity ); tet_printf("Dali::Vector< long > capacity after %d pushbacks is %d", acidCount, longCapacity ); tet_printf("Dali::Vector< char > capacity after %d pushbacks is %d", acidCount, charCapacity ); // erase items for( unsigned int i = 0; i < acidCount; ++i ) { pairvector.Erase( pairvector.Begin() + ( i % pairvector.Count() ) ); doublevector.Erase( doublevector.Begin() + ( i % doublevector.Count() ) ); intptrvector.Erase( intptrvector.Begin() + ( i % intptrvector.Count() ) ); actorptrvector.Erase( actorptrvector.Begin() + ( i % actorptrvector.Count() ) ); longvector.Erase( longvector.Begin() + ( i % longvector.Count() ) ); charvector.Erase( charvector.Begin() + ( i % charvector.Count() ) ); } DALI_TEST_EQUALS( ZERO, pairvector.Count(), TEST_LOCATION ); DALI_TEST_EQUALS( pairCapacity, pairvector.Capacity(), TEST_LOCATION ); DALI_TEST_EQUALS( ZERO, doublevector.Count(), TEST_LOCATION ); DALI_TEST_EQUALS( doubleCapacity, doublevector.Capacity(), TEST_LOCATION ); DALI_TEST_EQUALS( ZERO, intptrvector.Count(), 
TEST_LOCATION ); DALI_TEST_EQUALS( intptrCapacity, intptrvector.Capacity(), TEST_LOCATION ); DALI_TEST_EQUALS( ZERO, actorptrvector.Count(), TEST_LOCATION ); DALI_TEST_EQUALS( actorptrCapacity, actorptrvector.Capacity(), TEST_LOCATION ); DALI_TEST_EQUALS( ZERO, longvector.Count(), TEST_LOCATION ); DALI_TEST_EQUALS( longCapacity, longvector.Capacity(), TEST_LOCATION ); DALI_TEST_EQUALS( ZERO, charvector.Count(), TEST_LOCATION ); DALI_TEST_EQUALS( charCapacity, charvector.Capacity(), TEST_LOCATION ); END_TEST; }
void Run(const Vector<String>& arguments) { if (arguments.Size() < 2) ErrorExit( "Usage: PackageTool <directory to process> <package name> [basepath] [options]\n" "\n" "Options:\n" "-c Enable package file LZ4 compression\n" "-q Enable quiet mode\n" "\n" "Basepath is an optional prefix that will be added to the file entries.\n\n" "Alternative output usage: PackageTool <output option> <package name>\n" "Output option:\n" "-i Output package file information\n" "-l Output file names (including their paths) contained in the package\n" "-L Similar to -l but also output compression ratio (compressed package file only)\n" ); const String& dirName = arguments[0]; const String& packageName = arguments[1]; bool isOutputMode = arguments[0].Length() == 2 && arguments[0][0] == '-'; if (arguments.Size() > 2) { for (unsigned i = 2; i < arguments.Size(); ++i) { if (arguments[i][0] != '-') basePath_ = AddTrailingSlash(arguments[i]); else { if (arguments[i].Length() > 1) { switch (arguments[i][1]) { case 'c': compress_ = true; break; case 'q': quiet_ = true; break; default: ErrorExit("Unrecognized option"); } } } } } if (!isOutputMode) { if (!quiet_) PrintLine("Scanning directory " + dirName + " for files"); // Get the file list recursively Vector<String> fileNames; fileSystem_->ScanDir(fileNames, dirName, "*.*", SCAN_FILES, true); if (!fileNames.Size()) ErrorExit("No files found"); // Check for extensions to ignore for (unsigned i = fileNames.Size() - 1; i < fileNames.Size(); --i) { String extension = GetExtension(fileNames[i]); for (unsigned j = 0; j < ignoreExtensions_[j].Length(); ++j) { if (extension == ignoreExtensions_[j]) { fileNames.Erase(fileNames.Begin() + i); break; } } } for (unsigned i = 0; i < fileNames.Size(); ++i) ProcessFile(fileNames[i], dirName); WritePackageFile(packageName, dirName); } else { SharedPtr<PackageFile> packageFile(new PackageFile(context_, packageName)); bool outputCompressionRatio = false; switch (arguments[0][1]) { case 'i': PrintLine("Number of files: " + String(packageFile->GetNumFiles())); PrintLine("File data size: " + String(packageFile->GetTotalDataSize())); PrintLine("Package size: " + String(packageFile->GetTotalSize())); PrintLine("Checksum: " + String(packageFile->GetChecksum())); PrintLine("Compressed: " + String(packageFile->IsCompressed() ? "yes" : "no")); break; case 'L': if (!packageFile->IsCompressed()) ErrorExit("Invalid output option: -L is applicable for compressed package file only"); outputCompressionRatio = true; // Fallthrough case 'l': { const HashMap<String, PackageEntry>& entries = packageFile->GetEntries(); for (HashMap<String, PackageEntry>::ConstIterator i = entries.Begin(); i != entries.End();) { HashMap<String, PackageEntry>::ConstIterator current = i++; String fileEntry(current->first_); if (outputCompressionRatio) { unsigned compressedSize = (i == entries.End() ? packageFile->GetTotalSize() - sizeof(unsigned) : i->second_.offset_) - current->second_.offset_; fileEntry.AppendWithFormat("\tin: %u\tout: %u\tratio: %f", current->second_.size_, compressedSize, compressedSize ? 1.f * current->second_.size_ / compressedSize : 0.f); } PrintLine(fileEntry); } } break; default: ErrorExit("Unrecognized output option"); } } }
int UtcDaliVectorAsserts(void) { tet_infoline("Testing Dali::Vector< int* > exception handling"); // empty vector Vector< int* > pointervector; try { int* value = NULL; pointervector[ 1 ] = value; tet_printf("Assertion expected, but not occurred at %s\n", TEST_LOCATION ); tet_result(TET_FAIL); } catch(Dali::DaliException& e) { DALI_TEST_PRINT_ASSERT( e ); DALI_TEST_ASSERT( e, "VectorBase::mData", TEST_LOCATION ); } catch(...) { tet_printf("Assertion test failed - wrong Exception\n" ); tet_result(TET_FAIL); } try { int* value = NULL; value = pointervector[ 0 ]; (void)value; // to "use" the value tet_printf("Assertion expected, but not occurred at %s\n", TEST_LOCATION ); tet_result(TET_FAIL); } catch(Dali::DaliException& e) { DALI_TEST_PRINT_ASSERT( e ); DALI_TEST_ASSERT( e, "VectorBase::mData", TEST_LOCATION ); } catch(...) { tet_printf("Assertion test failed - wrong Exception\n" ); tet_result(TET_FAIL); } Vector< int* >::Iterator iter = pointervector.Begin(); if( iter != pointervector.End() ) { tet_result(TET_FAIL); } try { pointervector.Erase( pointervector.Begin() ); tet_printf("Assertion expected, but not occurred at %s\n", TEST_LOCATION ); tet_result(TET_FAIL); } catch(Dali::DaliException& e) { DALI_TEST_PRINT_ASSERT( e ); DALI_TEST_ASSERT( e, "(iterator < End()) && (iterator >= Begin())", TEST_LOCATION ); } catch(...) { tet_printf("Assertion test failed - wrong Exception\n" ); tet_result(TET_FAIL); } iter = pointervector.Begin(); if( iter != pointervector.End() ) { tet_result(TET_FAIL); } try { pointervector.Remove( pointervector.Begin() ); tet_printf("Assertion expected, but not occurred at %s\n", TEST_LOCATION ); tet_result(TET_FAIL); } catch(Dali::DaliException& e) { DALI_TEST_PRINT_ASSERT( e ); DALI_TEST_ASSERT( e, "(iterator < End()) && (iterator >= Begin())", TEST_LOCATION ); } catch(...) { tet_printf("Assertion test failed - wrong Exception\n" ); tet_result(TET_FAIL); } iter = pointervector.Begin(); if( iter != pointervector.End() ) { tet_result(TET_FAIL); } // reserve 0 space pointervector.Reserve( 0 ); iter = pointervector.Begin(); if( iter != pointervector.End() ) { tet_result(TET_FAIL); } // reserve 1 space pointervector.Reserve( 1 ); iter = pointervector.Begin(); if( iter != pointervector.End() ) { tet_result(TET_FAIL); } try { int* value = NULL; pointervector[ 1 ] = value; tet_printf("Assertion expected, but not occurred at %s\n", TEST_LOCATION ); tet_result(TET_FAIL); } catch(Dali::DaliException& e) { DALI_TEST_PRINT_ASSERT( e ); DALI_TEST_ASSERT( e, "index < VectorBase::Count()", TEST_LOCATION ); } catch(...) { tet_printf("Assertion test failed - wrong Exception\n" ); tet_result(TET_FAIL); } try { int* value = pointervector[ 1 ]; (void)value; // to "use" the value tet_printf("Assertion expected, but not occurred at %s\n", TEST_LOCATION ); tet_result(TET_FAIL); } catch(Dali::DaliException& e) { DALI_TEST_PRINT_ASSERT( e ); DALI_TEST_ASSERT( e, "index < VectorBase::Count()", TEST_LOCATION ); } catch(...) { tet_printf("Assertion test failed - wrong Exception\n" ); tet_result(TET_FAIL); } END_TEST; }
int UtcDaliVectorInsert01(void) { tet_infoline( "Testing Dali::Vector< int* >Insert(Iterator, Element)" ); // Test order of array inserted-into: Vector< unsigned int > orderedVector; orderedVector.PushBack( 9u ); for( unsigned int i = 8u; i <= 8u; --i ) { orderedVector.Insert( orderedVector.Begin(), i ); DALI_TEST_EQUALS( 10u - i, orderedVector.Count(), TEST_LOCATION ); DALI_TEST_EQUALS( i, orderedVector[0u], TEST_LOCATION ); } for( unsigned int i = 0u; i < 10u; ++i ) { DALI_TEST_EQUALS( i, orderedVector[i], TEST_LOCATION ); } // Test insertion out of range in non-empty array throws: try { orderedVector.Insert( orderedVector.Begin() + 99u, 99u ); tet_printf( "Assertion expected, but not occurred at %s\n", TEST_LOCATION ); tet_result( TET_FAIL ); } catch( Dali::DaliException& e ) { DALI_TEST_PRINT_ASSERT( e ); DALI_TEST_ASSERT( e, "( at <= End() ) && ( at >= Begin() )", TEST_LOCATION ); } catch( ... ) { tet_printf( "Assertion test failed - wrong Exception\n" ); tet_result( TET_FAIL ); } try { orderedVector.Insert( orderedVector.Begin() - 1u, 99u ); tet_printf( "Assertion expected, but not occurred at %s\n", TEST_LOCATION ); tet_result( TET_FAIL ); } catch( Dali::DaliException& e ) { DALI_TEST_PRINT_ASSERT( e ); DALI_TEST_ASSERT( e, "( at <= End() ) && ( at >= Begin() )", TEST_LOCATION ); } catch( ... ) { tet_printf( "Assertion test failed - wrong Exception\n" ); tet_result( TET_FAIL ); } // Test insertion part-way through a largish array retains ordering: // Build vector with hole in sequence: Vector< unsigned int > longerVector; const unsigned int insertionPoint = 131571u; const unsigned int finalLength = 262143u; for( unsigned int i = 0u; i < insertionPoint; ++i ) { longerVector.PushBack( i ); } for( unsigned int i = insertionPoint; i < finalLength; ++i ) { longerVector.PushBack( i + 1 ); } // Fill the hole in the sequence: longerVector.Insert( longerVector.Begin() + insertionPoint, insertionPoint ); // Check the sequence is monotonically increasing by one every time: for( unsigned int i = 0u; i <= finalLength; ++i ) { DALI_TEST_EQUALS( i, longerVector[i], TEST_LOCATION ); } // Insert into an empty vector Vector< unsigned int > vector; vector.Insert( vector.End(), orderedVector.Begin(), orderedVector.End() ); for( unsigned int i = 0u; i < 10u; ++i ) { DALI_TEST_EQUALS( i, vector[i], TEST_LOCATION ); } vector.Clear(); vector.Insert( vector.Begin(), orderedVector.Begin(), orderedVector.End() ); for( unsigned int i = 0u; i < 10u; ++i ) { DALI_TEST_EQUALS( i, vector[i], TEST_LOCATION ); } // Insert nothing. 
vector.Insert( vector.Begin(), orderedVector.Begin(), orderedVector.Begin() ); for( unsigned int i = 0u; i < 10u; ++i ) { DALI_TEST_EQUALS( i, vector[i], TEST_LOCATION ); } vector.Insert( vector.Begin() + 5, vector.Begin() + 5, vector.Begin() + 5 ); for( unsigned int i = 0u; i < 10u; ++i ) { DALI_TEST_EQUALS( i, vector[i], TEST_LOCATION ); } // AutoInsert vector.Clear(); vector.PushBack( 0u ); vector.PushBack( 1u ); vector.PushBack( 2u ); vector.PushBack( 3u ); vector.Insert( vector.Begin() + 2, vector.Begin(), vector.End() ); DALI_TEST_EQUALS( 8u, vector.Count(), TEST_LOCATION ); DALI_TEST_EQUALS( 0u, vector[0u], TEST_LOCATION ); DALI_TEST_EQUALS( 1u, vector[1u], TEST_LOCATION ); DALI_TEST_EQUALS( 0u, vector[2u], TEST_LOCATION ); DALI_TEST_EQUALS( 1u, vector[3u], TEST_LOCATION ); DALI_TEST_EQUALS( 2u, vector[4u], TEST_LOCATION ); DALI_TEST_EQUALS( 3u, vector[5u], TEST_LOCATION ); DALI_TEST_EQUALS( 2u, vector[6u], TEST_LOCATION ); DALI_TEST_EQUALS( 3u, vector[7u], TEST_LOCATION ); END_TEST; }
void LocalSlaveProcess::Open(const char *command, const char *envptr) { SVRLOG("LocalSlaveProcess::Open(" << command << ")"); Kill(); while(*command && (byte)*command <= ' ') command++; #ifdef PLATFORM_WIN32 HANDLE hOutputReadTmp, hInputRead; HANDLE hInputWriteTmp, hOutputWrite; HANDLE hErrorWrite; SECURITY_ATTRIBUTES sa; sa.nLength = sizeof(SECURITY_ATTRIBUTES); sa.lpSecurityDescriptor = NULL; sa.bInheritHandle = TRUE; HANDLE hp = GetCurrentProcess(); CreatePipe(&hOutputReadTmp, &hOutputWrite, &sa, 0); DuplicateHandle(hp, hOutputWrite, hp, &hErrorWrite, 0, TRUE, DUPLICATE_SAME_ACCESS); CreatePipe(&hInputRead, &hInputWriteTmp, &sa, 0); DuplicateHandle(hp, hOutputReadTmp, hp, &hOutputRead, 0, FALSE, DUPLICATE_SAME_ACCESS); DuplicateHandle(hp, hInputWriteTmp, hp, &hInputWrite, 0, FALSE, DUPLICATE_SAME_ACCESS); CloseHandle(hOutputReadTmp); CloseHandle(hInputWriteTmp); PROCESS_INFORMATION pi; STARTUPINFO si; ZeroMemory(&si, sizeof(STARTUPINFO)); si.cb = sizeof(STARTUPINFO); si.dwFlags = STARTF_USESTDHANDLES | STARTF_USESHOWWINDOW; si.wShowWindow = SW_HIDE; si.hStdInput = hInputRead; si.hStdOutput = hOutputWrite; si.hStdError = hErrorWrite; int n = (int)strlen(command) + 1; Buffer<char> cmd(n); memcpy(cmd, command, n); bool h = CreateProcess(NULL, cmd, &sa, &sa, TRUE, NORMAL_PRIORITY_CLASS, (void *)envptr, NULL, &si, &pi); SVRLOG("CreateProcess " << (h ? "succeeded" : "failed")); CloseHandle(hErrorWrite); CloseHandle(hInputRead); CloseHandle(hOutputWrite); if(h) { hProcess = pi.hProcess; CloseHandle(pi.hThread); } else { Free(); throw Exc(NFormat("Error running process: %s\nCommand: %s", GetErrorMessage(GetLastError()), command)); } #endif #ifdef PLATFORM_POSIX // parse command line for execve cmd_buf.Alloc(strlen(command) + 1); char *cmd_out = cmd_buf; const char *p = command; const char *b = p; while(*p && (byte)*p > ' ') if(*p++ == '\"') while(*p && *p++ != '\"') ; const char *app = cmd_out; args.Add(cmd_out); memcpy(cmd_out, b, p - b); cmd_out += p - b; *cmd_out++ = '\0'; while(*p) if((byte)*p <= ' ') p++; else { args.Add(cmd_out); while(*p && (byte)*p > ' ') if(*p == '\\') { if(*++p) *cmd_out++ = *p++; } else if(*p == '\"') { p++; while(*p && *p != '\"') if(*p == '\\') { if(*++p) *cmd_out++ = *p++; } else *cmd_out++ = *p++; if(*p == '\"') p++; } else *cmd_out++ = *p++; *cmd_out++ = '\0'; } args.Add(NULL); String app_full = GetFileOnPath(app, getenv("PATH"), true); if(IsNull(app_full)) throw Exc(Format("Cannot find executable '%s'\n", app)); if(pipe(rpipe) || pipe(wpipe)) throw Exc(NFormat(t_("pipe() error; error code = %d"), errno)); SVRLOG("\nLocalSlaveProcess::Open"); SVRLOG("rpipe[" << rpipe[0] << ", " << rpipe[1] << "]"); SVRLOG("wpipe[" << wpipe[0] << ", " << wpipe[1] << "]"); pid = fork(); SVRLOG("\tfork, pid = " << (int)pid << ", getpid = " << (int)getpid()); if(pid < 0) throw Exc(NFormat(t_("fork() error; error code = %d"), errno)); if(pid) { // parent process; clear child pipe endpoints SVRLOG("parent process - continue"); // rpipe[0] = wpipe[1] = -1; return; } SVRLOG("child process - execute application"); // rpipe[1] = wpipe[0] = -1; if(dup2(rpipe[0], 0) < 0) { // stdin SVRLOG("dup2(stdin) error: " << errno << ", " << strerror(errno)); } if(dup2(wpipe[1], 1) < 0) { // stdout SVRLOG("dup2(stdout) error: " << errno << ", " << strerror(errno)); } if(dup2(wpipe[1], 2) < 0) { // stderr SVRLOG("dup2(stderr) error: " << errno << ", " << strerror(errno)); } #if DO_SVRLOG SVRLOG(args.GetCount() << "arguments:"); for(int a = 0; a < args.GetCount(); a++) SVRLOG("[" << a << "]: <" << 
(args[a] ? args[a] : "NULL") << ">"); #endif//DO_SVRLOG SVRLOG("running execve, app = " << app << ", #args = " << args.GetCount()); const char *from = envptr; Vector<const char *> env; while(*from) { env.Add(from); from += strlen(from) + 1; } env.Add(NULL); execve(app_full, args.Begin(), (char *const *)env.Begin()); SVRLOG("execve failed, errno = " << errno); printf("Error running '%s', error code %d\n", command, errno); exit(-errno); #endif }
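// Illustrative envptr layout (values invented): the environment-parsing loop above
// expects a block of NUL-terminated "NAME=value" strings ending with an extra NUL,
// which is why it stops at *from == '\0' and advances by strlen(from) + 1 each time.
// For this block the loop would produce env = { "PATH=/usr/bin", "HOME=/home/user", NULL }.
static const char example_envptr[] = "PATH=/usr/bin\0HOME=/home/user\0";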
operator tchar *() { return v.Begin(); }
tchar *Begin() { return v.Begin(); }
operator const char *() const { return v.Begin(); }
const tchar *Begin() const { return v.Begin(); }
/*---------------------------------------------------------------------------------------------- Load data into the cache from the record set defined by hstmt, according to the specs in prgocs/cocs. Columns with m_icolID = 0 give properties of hvoBase. Load properties of at most crowMax objects; this may only be used if there is no vector property being loaded, since we could not be sure of having a complete record of the value of a vector without loading the next row. If crowMax is zero, load everything. Note: call from inside try/catch block; may throw exceptions. Note that prgocs[i] describes the column which ODBC indexes as [i+1]. ----------------------------------------------------------------------------------------------*/ void VwRsOdbcDa::Load(SQLHSTMT hstmt, OdbcColSpec * prgocs, int cocs, HVO hvoBase, int crowMax) { AssertArray(prgocs, cocs); Assert((uint)cocs <= (uint) 200); // limit because of size of rghvoBaseIds Assert(crowMax >= 0); ITsStrFactoryPtr qtsf; qtsf.CreateInstance(CLSID_TsStrFactory); ITsPropsFactoryPtr qtpf; qtpf.CreateInstance(CLSID_TsPropsFactory); // Block of variables for binary fields Vector<byte> vbData; // used to buffer data from binary fields const int kcbMaxData = 1000; // amount of binary data to read in one go byte rgbData[kcbMaxData]; // buffer for short binary data fields long cbData; // how many bytes in prgbData hold valid data byte * prgbData; // points to rgbData or vbData.Begin(), as appropriate // Similar block for Unicode text Vector<wchar> vchData; const int kcchMaxData = 1000; wchar rgchData[kcchMaxData]; long cchData; wchar * prgchData; Vector<HVO> vhvo; // accumulate objects for sequence property int nrows = 0; if (crowMax == 0) crowMax = INT_MAX; HVO rghvoBaseIds[200]; int icolVec = -1; // index of (one and only) column of type koctObjVec int hvoVecBase; // object that is base of vector property while (CheckSqlRc(SQLFetch(hstmt)) != SQL_NO_DATA) { // We have a record. for (int icol = 0; icol < cocs; icol++) { int nVal; HVO hvoVal; ITsStringPtr qtssVal; // TOxDO JohnT: fill this in... HVO hvoCurBase; // object whose property we will read. if (prgocs[icol].m_icolID == 0) hvoCurBase = hvoBase; else { // Must refer to a previous column; use <= because m_icolID is 1-based, so // if equal to i, it refers to the immediate previous column. Assert(prgocs[icol].m_icolID <= icol); hvoCurBase = rghvoBaseIds[prgocs[icol].m_icolID - 1]; } switch (prgocs[icol].m_oct) { default: Assert(false); ThrowHr(WarnHr(E_UNEXPECTED)); case koctInt: CheckSqlRc(SQLGetData(hstmt, (unsigned short)(icol + 1), SQL_C_SLONG, &nVal, 4, NULL)); CacheIntProp(hvoCurBase, prgocs[icol].m_tag, nVal); break; case koctUnicode: ReadUnicode(hstmt, icol + 1, rgchData, kcchMaxData, vchData, prgchData, cchData); CacheUnicodeProp(hvoCurBase, prgocs[icol].m_tag, prgchData, cchData); break; case koctString: case koctMlsAlt: case koctMltAlt: // Next column must give format; both are for the same property ReadUnicode(hstmt, icol + 1, rgchData, kcchMaxData, vchData, prgchData, cchData); if (koctMltAlt != prgocs[icol].m_oct) { Assert(icol < cocs - 1 && prgocs[icol + 1].m_oct == koctFmt); Assert(prgocs[icol].m_tag == prgocs[icol + 1].m_tag); // Leave the data in prgchData and cchData, to be processed next iteration // when we read the format. break; } // A MS alt without a FMT column, use the specified writing system both for the string // formatting and to indicate the alternative. 
CheckHr(qtsf->MakeStringRgch(prgchData, cchData, prgocs[icol].m_ws, &qtssVal)); CacheStringAlt(hvoCurBase, prgocs[icol].m_tag, prgocs[icol].m_ws, qtssVal); break; case koctFmt: // Previous column must be string or multistring; we have already checked same tag. Assert(icol > 0 && (prgocs[icol - 1].m_oct == koctString || prgocs[icol - 1].m_oct == koctMlsAlt)); ReadBinary(hstmt, icol + 1, rgbData, kcbMaxData, vbData, prgbData, cbData); int cbDataInt; cbDataInt = cbData; int cchDataInt; cchDataInt = cchData; if (cchDataInt == 0 && cbDataInt == 0) CheckHr(qtsf->MakeStringRgch(NULL, 0, prgocs[icol - 1].m_ws, &qtssVal)); else CheckHr(qtsf->DeserializeStringRgch(prgchData, &cchDataInt, prgbData, &cbDataInt, &qtssVal)); if (prgocs[icol - 1].m_oct == koctString) { CacheStringProp(hvoCurBase, prgocs[icol].m_tag, qtssVal); } else { CacheStringAlt(hvoCurBase, prgocs[icol].m_tag, prgocs[icol - 1].m_ws, qtssVal); } break; case koctObj: case koctBaseId: long nIndicator; CheckSqlRc(SQLGetData(hstmt, (unsigned short)(icol + 1), SQL_C_SLONG, &hvoVal, 4, &nIndicator)); // Treat null as zero. if (nIndicator == SQL_NULL_DATA) hvoVal = 0; if (prgocs[icol].m_oct == koctObj) CacheObjProp(hvoCurBase, prgocs[icol].m_tag, hvoVal); rghvoBaseIds[icol] = hvoVal; break; case koctObjVec: CheckSqlRc(SQLGetData(hstmt, (unsigned short)(icol + 1), SQL_C_SLONG, &hvoVal, 4, NULL)); rghvoBaseIds[icol] = hvoVal; // See if there has been a change in the base column, if so, record value and // start a new one. if (icolVec < 0) { // First iteration, ignore previous object icolVec = icol; hvoVecBase = hvoCurBase; } else { // Only one vector column allowed! Assert(icolVec == icol); if (hvoVecBase != hvoCurBase) { // Started a new vector! Record the old one CacheVecProp(hvoVecBase, prgocs[icolVec].m_tag, vhvo.Begin(), vhvo.Size()); // clear the list out and note new base object vhvo.Clear(); hvoVecBase = hvoCurBase; } } vhvo.Push(hvoVal); break; case koctTtp: ReadBinary(hstmt, icol + 1, rgbData, kcbMaxData, vbData, prgbData, cbData); if (cbData > 0) // otherwise field is null, cache nothing { cbDataInt = cbData; ITsTextPropsPtr qttp; qtpf->DeserializePropsRgb(prgbData, &cbDataInt, &qttp); CacheUnknown(hvoCurBase, prgocs[icol].m_tag, qttp); } break; } } // Stop if we have processed the requested number of rows. nrows++; if (nrows >= crowMax) break; } // If we are processing a vector, we need to fill in the last occurrence if (icolVec >= 0) { CacheVecProp(hvoVecBase, prgocs[icolVec].m_tag, vhvo.Begin(), vhvo.Size()); } }
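// Column-spec sketch for Load() (illustrative only: ExampleLoadStrings is a hypothetical
// helper member, kflidAbbr is a made-up field tag, and OdbcColSpec is assumed to be a
// plain struct whose relevant members are the ones Load() reads above). Column 1 supplies
// object ids (koctBaseId); columns 2 and 3 load a formatted string of those objects, so
// the koctFmt column repeats the koctString column's tag, as Load() asserts.
void VwRsOdbcDa::ExampleLoadStrings(SQLHSTMT hstmt, HVO hvoBase, int ws, int kflidAbbr)
{
    OdbcColSpec rgocs[3];
    rgocs[0].m_icolID = 0; rgocs[0].m_oct = koctBaseId; rgocs[0].m_tag = 0;         rgocs[0].m_ws = 0;
    rgocs[1].m_icolID = 1; rgocs[1].m_oct = koctString; rgocs[1].m_tag = kflidAbbr; rgocs[1].m_ws = ws;
    rgocs[2].m_icolID = 1; rgocs[2].m_oct = koctFmt;    rgocs[2].m_tag = kflidAbbr; rgocs[2].m_ws = ws;

    Load(hstmt, rgocs, 3, hvoBase, 0);   // crowMax == 0: load every fetched row
}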
void Main() { int n; string h = "tst"; Vector<string> key; for(int i = 0; i < 1000000; i++) { char h[50]; sprintf(h, "%.10f", (double) i / 7); key.Add(h); } std::random_shuffle(key.Begin(), key.End()); for(int i = 0; i < 5; i++) cout << key[i] << '\n'; Vector<String> skey, skey2; for(int i = 0; i < 1000000; i++) { skey.Add(key[i].c_str()); skey2.Add(key[i].c_str()); } /* cout << "ADDING ELEMENTS TO VECTOR\n\n"; StopTime(); for(n = 0; n < 100; n++) { vector<int> v; for(int i = 0; i < 50000; i++) v.push_back(i); } cout << "Adding 50000 elements to vector<int> done in " << StopTime() / 100 << " s\n"; StopTime(); for(n = 0; n < 100; n++) { Vector<int> v; for(int i = 0; i < 50000; i++) v.Add(i); } cout << "Adding 50000 elements to Vector<int> done in " << StopTime() / 100 << " s\n"; PrintLine(); StopTime(); for(n = 0; n < 10; n++) { vector<string> v; for(int i = 0; i < 50000; i++) v.push_back(key[i]); } cout << "Adding 50000 elements to vector<string> done in " << StopTime() / 10 << " s\n"; StopTime(); for(n = 0; n < 10; n++) { Vector<string> v; for(int i = 0; i < 50000; i++) v.Add(key[i]); } cout << "Adding 50000 elements to Vector<string> done in " << StopTime() / 10 << " s\n"; PrintLine(); StopTime(); for(n = 0; n < 1000; n++) { vector< vector<int> > v; for(int i = 0; i < 100; i++) { vector<int> x; for(int q = 0; q < 100; q++) x.push_back(q); v.push_back(x); } } cout << "Adding 100x100 elements to vector< vector<int> > done in " << StopTime() << " ms\n"; StopTime(); for(n = 0; n < 1000; n++) { vector< vector<int> > v; for(int i = 0; i < 100; i++) { vector<int> x; v.push_back(x); vector<int>& w = v.back(); for(int q = 0; q < 100; q++) w.push_back(q); } } cout << "Adding 100x100 elements to vector< vector<int> > (smart version) done in " << StopTime() << " ms\n"; StopTime(); for(n = 0; n < 1000; n++) { Vector< Vector<int> > v; for(int i = 0; i < 100; i++) { Vector<int>& w = v.Add(); for(int q = 0; q < 100; q++) w.Add(q); } } cout << "Adding 100x100 elements to Vector< Vector<int> > done in " << StopTime() << " ms\n"; PrintLine(); StopTime(); for(n = 0; n < 1000; n++) { vector< vector<string> > v; for(int i = 0; i < 100; i++) { vector<string> x; for(int q = 0; q < 100; q++) x.push_back(key[q]); v.push_back(x); } } cout << "Adding 100x100 elements to vector< vector<string> > done in " << StopTime() << " ms\n"; StopTime(); for(n = 0; n < 1000; n++) { vector< vector<string> > v; for(int i = 0; i < 100; i++) { vector<string> x; v.push_back(x); vector<string>& w = v.back(); for(int q = 0; q < 100; q++) w.push_back(key[q]); } } cout << "Adding 100x100 elements to vector< vector<string> > (smart version) done in " << StopTime() << " ms\n"; StopTime(); for(n = 0; n < 1000; n++) { Vector< Vector<string> > v; for(int i = 0; i < 100; i++) { Vector<string>& w = v.Add(); for(int q = 0; q < 100; q++) w.Add(key[q]); } } cout << "Adding 100x100 elements to Vector< Vector<string> > done in " << StopTime() << " ms\n"; PrintLine(); */ // return; /* StopTime(); { for(int i = 0; i < 1000; i++) { int i; deque<int> d; for(i = 0; i < 50000; i++) d.push_front(i); for(i = 0; i < 50000; i++) { d.pop_back(); d.push_front(i); } for(i = 0; i < 50000; i++) d.pop_back(); } } cout << "Passing 150000 elements through deque<int> done in " << StopTime() << " ms\n"; StopTime(); { for(int i = 0; i < 1000; i++) { int i; BiVector<int> d; for(i = 0; i < 50000; i++) d.AddHead(i); for(i = 0; i < 50000; i++) { d.DropTail(); d.AddHead(i); } for(i = 0; i < 50000; i++) d.DropTail(); } } cout << "Passing 150000 elements through BiVector<int> 
done in " << StopTime() << " ms\n"; */ cout << "\n\nMAP BENCHMARKS\n\n"; /* { Vector<int> key; for(int i = 0; i < 1000000; i++) key.Add(i); std::random_shuffle(key.Begin(), key.End()); cout << "\nVectorMap<int, int> benchmark "; benchmark_VectorMap(key, 1000000); cout << "\nmap<int, int> benchmark "; benchmark_map(key, 1000000); PrintLine(); cout << "\nVectorMap<int, int> benchmark "; benchmark_VectorMap(key, 100000); cout << "\nmap<int, int> benchmark "; benchmark_map(key, 100000); } PrintLine(); */ { cout << "\nVectorMap<string, int> benchmark "; benchmark_VectorMap(key, key, 1000000); cout << "\nVectorMap<String, int> benchmark "; benchmark_VectorMap(skey, skey2, 1000000); cout << "\nmap<string, int> benchmark "; benchmark_map(key, 1000000); PrintLine(); cout << "\nVectorMap<string, int> benchmark "; benchmark_VectorMap(key, key, 100000); cout << "\nVectorMap<String, int> benchmark "; benchmark_VectorMap(skey, skey2, 100000); cout << "\nmap<string, int> benchmark "; benchmark_map(key, 100000); } getchar(); }
/*---------------------------------------------------------------------------------------------- Get all the MONOlingual formatted string fields in the database. The standard metadata cache does not provide the information we need, so we find all the class/field pairs whose values are String or BigString. @param podc Pointer to data base command object. @param vstuClass Reference to output class names (parallel to vstuField). @param vstuField Reference to output field names. @param rgnFlidsToIgnore array of flids that we don't want to put in output (defaults to empty) @param cFlidsToIgnore size of rgnFlidsToIgnore ----------------------------------------------------------------------------------------------*/ void DbStringCrawler::GetFieldsForTypes(IOleDbCommand * podc, const int * rgnTypes, int cTypes, Vector<StrUni> & vstuClass, Vector<StrUni> & vstuField, const int * rgflidToIgnore, const int cflidToIgnore) { AssertPtr(podc); AssertArray(rgnTypes, cTypes); AssertArray(rgflidToIgnore, cflidToIgnore); if (!cTypes) return; StrUni stuCmd; ComBool fMoreRows; ComBool fIsNull; unsigned long cbSpaceTaken; Vector<wchar> vchFieldName; Vector<wchar> vchClassName; int cchFieldName; int cchClassName; StrUni stu; stuCmd.Assign(L"SELECT f.Name, c.Name FROM Field$ f" L" JOIN Class$ c ON c.id = f.Class WHERE f.Type IN ("); for (int i = 0; i < cTypes; ++i) { if (i != 0) stuCmd.FormatAppend(L",%d", rgnTypes[i]); else stuCmd.FormatAppend(L"%d", rgnTypes[i]); } stuCmd.Append(L")"); if (cflidToIgnore) { stuCmd.Append(L" AND NOT f.Id IN ("); for (int i = 0; i < cflidToIgnore; ++i) { if (i != 0) stuCmd.FormatAppend(L",%d", rgflidToIgnore[i]); else stuCmd.FormatAppend(L"%d", rgflidToIgnore[i]); } stuCmd.Append(L")"); } CheckHr(podc->ExecCommand(stuCmd.Bstr(), knSqlStmtSelectWithOneRowset)); CheckHr(podc->GetRowset(0)); CheckHr(podc->NextRow(&fMoreRows)); vchFieldName.Resize(100); vchClassName.Resize(100); while (fMoreRows) { CheckHr(podc->GetColValue(1, reinterpret_cast<BYTE *>(vchFieldName.Begin()), vchFieldName.Size() * isizeof(wchar), &cbSpaceTaken, &fIsNull, 0)); cchFieldName = cbSpaceTaken / isizeof(wchar); Assert(cbSpaceTaken == cchFieldName * sizeof(wchar)); if (cchFieldName >= vchFieldName.Size()) { vchFieldName.Resize(cchFieldName + 1); CheckHr(podc->GetColValue(1, reinterpret_cast<BYTE *>(vchFieldName.Begin()), vchFieldName.Size() * isizeof(wchar), &cbSpaceTaken, &fIsNull, 0)); cchFieldName = cbSpaceTaken / isizeof(wchar); Assert(cchFieldName < vchFieldName.Size()); } CheckHr(podc->GetColValue(2, reinterpret_cast<BYTE *>(vchClassName.Begin()), vchClassName.Size() * isizeof(wchar), &cbSpaceTaken, &fIsNull, 0)); cchClassName = cbSpaceTaken / isizeof(wchar); Assert(cbSpaceTaken == cchClassName * sizeof(wchar)); if (cchClassName >= vchClassName.Size()) { vchClassName.Resize(cchClassName + 1); CheckHr(podc->GetColValue(2, reinterpret_cast<BYTE *>(vchClassName.Begin()), vchClassName.Size() * isizeof(wchar), &cbSpaceTaken, &fIsNull, 0)); cchClassName = cbSpaceTaken / isizeof(wchar); Assert(cchClassName < vchClassName.Size()); } stu.Assign(vchFieldName.Begin(), cchFieldName); vstuField.Push(stu); stu.Assign(vchClassName.Begin(), cchClassName); vstuClass.Push(stu); CheckHr(podc->NextRow(&fMoreRows)); } }
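// Usage sketch (illustrative; ExampleGetStringFields is a hypothetical helper member and
// the type codes / ignored flid are made-up numbers). With these inputs the statement
// assembled above reads:
//   SELECT f.Name, c.Name FROM Field$ f JOIN Class$ c ON c.id = f.Class
//   WHERE f.Type IN (13,14) AND NOT f.Id IN (5016)
void DbStringCrawler::ExampleGetStringFields(IOleDbCommand * podc)
{
    static const int rgnTypes[] = { 13, 14 };     // hypothetical String/BigString type codes
    static const int rgflidIgnore[] = { 5016 };   // hypothetical field id to exclude
    Vector<StrUni> vstuClass;
    Vector<StrUni> vstuField;

    GetFieldsForTypes(podc, rgnTypes, 2, vstuClass, vstuField, rgflidIgnore, 1);
    // vstuClass[i] / vstuField[i] now name each class/field pair whose values are
    // monolingual formatted strings.
}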
int UtcDaliVectorDoubleRemove(void) { tet_infoline("Testing Dali::Vector<double>::Remove"); Vector< double > vector; DALI_TEST_EQUALS( ZERO, vector.Count(), TEST_LOCATION ); vector.PushBack( 11.1 ); vector.PushBack( 22.2 ); vector.PushBack( 33.3 ); vector.PushBack( 44.4 ); DALI_TEST_EQUALS( static_cast<Dali::VectorBase::SizeType>(4), vector.Count(), TEST_LOCATION ); DALI_TEST_EQUALS( vector[ 0 ], 11.1, TEST_LOCATION ); DALI_TEST_EQUALS( vector[ 1 ], 22.2, TEST_LOCATION ); DALI_TEST_EQUALS( vector[ 2 ], 33.3, TEST_LOCATION ); DALI_TEST_EQUALS( vector[ 3 ], 44.4, TEST_LOCATION ); Vector< double >::Iterator res = std::find( vector.Begin(), vector.End(), 22.2 ); DALI_TEST_EQUALS( 22.2, *res, TEST_LOCATION ); vector.Remove( res ); res = std::find( vector.Begin(), vector.End(), 22.2 ); DALI_TEST_EQUALS( vector.End(), res, TEST_LOCATION ); DALI_TEST_EQUALS( static_cast<Dali::VectorBase::SizeType>(3), vector.Count(), TEST_LOCATION ); DALI_TEST_EQUALS( vector[ 0 ], 11.1, TEST_LOCATION ); DALI_TEST_EQUALS( vector[ 1 ], 44.4, TEST_LOCATION ); DALI_TEST_EQUALS( vector[ 2 ], 33.3, TEST_LOCATION ); vector.Remove( vector.End() - 1 ); DALI_TEST_EQUALS( static_cast<Dali::VectorBase::SizeType>(2), vector.Count(), TEST_LOCATION ); DALI_TEST_EQUALS( vector[ 0 ], 11.1, TEST_LOCATION ); DALI_TEST_EQUALS( vector[ 1 ], 44.4, TEST_LOCATION ); vector.Remove( vector.Begin() ); DALI_TEST_EQUALS( static_cast<Dali::VectorBase::SizeType>(1), vector.Count(), TEST_LOCATION ); DALI_TEST_EQUALS( vector[ 0 ], 44.4, TEST_LOCATION ); try { // illegal erase, one past the end vector.Remove( vector.Begin() + 1 ); tet_result(TET_FAIL); } catch( Dali::DaliException& e ) { DALI_TEST_PRINT_ASSERT( e ); DALI_TEST_ASSERT( e, "(iterator < End()) && (iterator >= Begin())", TEST_LOCATION ); } catch( ... ) { tet_printf("Assertion test failed - wrong Exception\n" ); tet_result(TET_FAIL); } DALI_TEST_EQUALS( static_cast<Dali::VectorBase::SizeType>(1), vector.Count(), TEST_LOCATION ); DALI_TEST_EQUALS( vector[ 0 ], 44.4, TEST_LOCATION ); vector.Remove( vector.Begin() ); DALI_TEST_EQUALS( ZERO, vector.Count(), TEST_LOCATION ); try { // illegal erase, one before the beginning vector.Remove( vector.Begin() - 1 ); tet_result(TET_FAIL); } catch( Dali::DaliException& e ) { DALI_TEST_PRINT_ASSERT( e ); DALI_TEST_ASSERT( e, "(iterator < End()) && (iterator >= Begin())", TEST_LOCATION ); } catch( ... ) { tet_printf("Assertion test failed - wrong Exception\n" ); tet_result(TET_FAIL); } END_TEST; }