void HashMapTest::testClear() { HashMap<int, std::string> hashMap; hashMap.put(1, "one"); hashMap.put(3, "three"); hashMap.put(2, "two"); hashMap.clear(); CPPUNIT_ASSERT_EQUAL_MESSAGE("Clear failed to reset size", 0, hashMap.size()); for (int i = 0; i < 125; i++) { CPPUNIT_ASSERT_THROW_MESSAGE( "Failed to clear all elements", hashMap.get(i), NoSuchElementException); } // Check clear on a large loaded map of Integer keys HashMap<int, std::string> map; for (int i = -32767; i < 32768; i++) { map.put(i, "foobar"); } map.clear(); CPPUNIT_ASSERT_EQUAL_MESSAGE("Failed to reset size on large integer map", 0, map.size()); for (int i = -32767; i < 32768; i++) { CPPUNIT_ASSERT_THROW_MESSAGE( "Failed to clear all elements", map.get(i), NoSuchElementException); } }
// Rebuilds |values| so it holds exactly the nodes in |live|, each mapped to a
// default-constructed AbstractValue. Any previous contents are discarded.
static void setLiveValues(HashMap<Node*, AbstractValue>& values, HashSet<Node*>& live)
{
    values.clear();
    for (Node* liveNode : live)
        values.add(liveNode, AbstractValue());
}
// Replaces the contents of |permissions| with a snapshot of the provider's
// per-origin notification permissions.
void WebNotificationManagerProxy::populateCopyOfNotificationPermissions(HashMap<String, bool>& permissions)
{
    RefPtr<ImmutableDictionary> providerPermissions = m_provider.notificationPermissions();

    permissions.clear();
    RefPtr<ImmutableArray> origins = providerPermissions->keys();
    for (size_t index = 0; index < origins->size(); ++index) {
        WebString* origin = origins->at<WebString>(index);
        const String& originString = origin->string();
        permissions.set(originString, providerPermissions->get<WebBoolean>(originString)->value());
    }
}
// clear() on a non-empty map must leave it empty.
void testClear()
{
    // Seed a single entry so the map is observably non-empty first.
    map->put((char*)"one", 1);
    CPPUNIT_ASSERT_EQUAL(map->empty(), false);

    // Clearing must remove every entry.
    map->clear();
    CPPUNIT_ASSERT_EQUAL(map->empty(), true);
}
void HashMapTest::testIsEmpty() { HashMap<int, std::string> hashMap; CPPUNIT_ASSERT_MESSAGE("Returned false for new map", hashMap.isEmpty()); hashMap.put(1, "1"); CPPUNIT_ASSERT_MESSAGE("Returned true for non-empty", !hashMap.isEmpty()); hashMap.clear(); CPPUNIT_ASSERT_MESSAGE("Returned false for cleared map", hashMap.isEmpty()); }
// Reads lines from stdin until a line that is empty after stripping spaces.
// For each line, counts occurrences of every substring (spaces removed) by
// interning substrings in the global hash table `tabla` as SString chains
// (each node extends an already-interned prefix by one character), then prints
// the best repeat count found for each substring length that repeats.
// NOTE(review): relies on globals lectura/lecturaSE/tams/tabla declared
// elsewhere in this file; interned SString nodes are never freed between
// iterations — deliberate leak, typical for contest code.
int main() {
    SString *inicial = new SString(NULL, '\0');  // sentinel node: the empty prefix
    while(true) {
        cin.getline(lectura, 1010);
        int tam = strlen(lectura);
        int act = 0;
        // Strip spaces into lecturaSE. The loop runs to tam + 1 so the
        // terminating '\0' (which is != ' ') is copied too, keeping lecturaSE
        // NUL-terminated. Every tams[] slot written here is reset to 0.
        for(int i = 0; i < tam + 1; i++) {
            if(lectura[i] != ' ') {
                tams[act] = 0;
                lecturaSE[act++] = lectura[i];
            }
        }
        tam = strlen(lecturaSE);
        if(tam == 0)
            return 0;  // blank (all-space) line terminates the program
        tabla.clear();
        // Enumerate all substrings lecturaSE[i..j], reusing the interned
        // prefix [i..j-1] as the parent of each extension.
        for(int i = 0; i < tam; i++) {
            SString *anterior = inicial;
            for(int j = i; j < tam; j++) {
                SString nuevo(anterior, lecturaSE[j]);
                SString **enTabla = tabla.lookupKey(&nuevo);
                if(enTabla == NULL) {
                    // First occurrence: intern a heap copy with count 1.
                    SString *nuevaEntrada = new SString(nuevo.anterior, nuevo.ultima);
                    tabla[nuevaEntrada] = 1;
                    anterior = nuevaEntrada;
                } else {
                    // Repeat: bump the count and track the best count per
                    // substring length (j - i + 1).
                    int nuevoV = ++tabla[*enTabla];
                    anterior = *enTabla;
                    if(nuevoV > tams[j - i + 1])
                        tams[j - i + 1] = nuevoV;
                }
            }
        }
        // Report lengths whose best substring occurs more than once.
        for(int i = 0; i < tam; i++) {
            if(tams[i] > 1)
                cout << tams[i] << endl;
        }
        cout << endl;
    }
}
// Tears down all tracked extensions when the page goes away: each record is
// flagged Removed, its WK reference released, and the map emptied, after
// which the final state report is sent out.
void DOMWindowExtensionBasic::willDestroyPage(WKBundleRef, WKBundlePageRef)
{
    typedef HashMap<WKBundleDOMWindowExtensionRef, int>::iterator RecordIterator;

    RecordIterator record = m_extensionToRecordMap.begin();
    RecordIterator done = m_extensionToRecordMap.end();
    for (; record != done; ++record) {
        updateExtensionStateRecord(record->key, Removed);
        WKRelease(record->key);
    }
    m_extensionToRecordMap.clear();

    sendExtensionStateMessage();
    sendBundleMessage("TestComplete");
}
// Fetches server descriptors for (application, serverName) on this node from
// the registry. On success the global g_serverInfoHashmap cache is refreshed
// with the returned descriptors; on failure `result` carries an error message
// and an empty vector is returned.
vector<ServerDescriptor> ServerFactory::getServerFromRegistry( const string& application, const string& serverName, string& result)
{
    LOG->debug()<<"get server from registry ["<< application <<"."<<serverName<<"]"<<endl;

    vector<ServerDescriptor> vServerDescriptor;
    try
    {
        RegistryPrx _pRegistryPrx = AdminProxy::getInstance()->getRegistryProxy();
        if(!_pRegistryPrx)
        {
            // NOTE(review): "coun't" is a typo in the runtime message; left
            // untouched here since it is program output, not a comment.
            result = "coun't get the proxy of registry.";
            LOG->error()<<result<< endl;
            return vServerDescriptor;
        }

        vServerDescriptor =_pRegistryPrx->getServers( application, serverName,_tPlatformInfo.getNodeName());

        // Clear the cache — only when the query was unfiltered (both
        // application and serverName empty) and actually returned data.
        if( vServerDescriptor.size()> 0 && application == "" && serverName == "")
        {
            g_serverInfoHashmap.clear();
            LOGINFO("hashmap clear ok "<<endl);
        }

        // Rebuild the cache from the freshly fetched descriptors.
        for(unsigned i = 0; i < vServerDescriptor.size(); i++)
        {
            ServerInfo tServerInfo;
            tServerInfo.application = vServerDescriptor[i].application;
            tServerInfo.serverName = vServerDescriptor[i].serverName;
            g_serverInfoHashmap.set(tServerInfo,vServerDescriptor[i]);
            LOGINFO("hashmap set ok "<<tServerInfo.application<<"."<<tServerInfo.serverName<<endl);
        }
    }
    catch (exception &e)
    {
        result = "ServerFactory::getServerFromRegistry exception:";
        result = result + e.what();
        LOG->error()<<result<< endl;
    }

    return vServerDescriptor;
}
// Contest driver: processes test cases until a length of -1 is read. Each case
// feeds n constraints (left, right, "odd"/"even" by first letter) to the
// global add() helper (defined elsewhere in this file — presumably a
// union-find consistency check; TODO confirm); prints the index of the first
// contradictory constraint, or n if all are consistent.
int main ()
{
#ifndef ONLINE_JUDGE
    freopen ("in.txt", "r", stdin);
    freopen ("out.txt", "w", stdout);
#endif
    for (;;) {
        map.clear();  // reset global state between test cases
        int len, n;
        bool ans = false;
        scanf("%d\n", &len);
        if(len == -1)
            return 0;  // sentinel: end of input
        scanf("%d\n", &n);
        for (int i = 0; i < n; ++i) {
            int left, right;
            char s[20];
            scanf("%d %d %s\n", &left, &right, s);
            // Keep consuming input even after the answer is known.
            if(ans)
                continue;
            if(left > right)
                swap(left, right);
            // s[0]=='o' distinguishes "odd" from "even".
            if(! add(left, right, s[0]=='o')) {
                ans = true;
                printf("%d\n", i);  // first inconsistent constraint
            }
        }
        if(!ans)
            printf("%d\n", n);
    }
    return 0;
}
// Serializes all recorded heap snapshots to a single JSON document (format
// version 1). Nodes are filtered through |allowNodeCallback|; edges whose
// endpoints were filtered out are dropped. Class names and edge names are
// emitted as index tables to keep the document compact. Note that this
// consumes builder state: the index maps are cleared and m_edges is
// rewritten (pointers replaced by identifiers) and sorted in place.
String HeapSnapshotBuilder::json(std::function<bool (const HeapSnapshotNode&)> allowNodeCallback)
{
    VM& vm = m_profiler.vm();
    DeferGCForAWhile deferGC(vm.heap);

    // Build a node to identifier map of allowed nodes to use when serializing edges.
    HashMap<JSCell*, unsigned> allowedNodeIdentifiers;

    // Build a list of used class names.
    HashMap<const char*, unsigned> classNameIndexes;
    classNameIndexes.set("<root>", 0);
    unsigned nextClassNameIndex = 1;

    // Build a list of used edge names.
    HashMap<UniquedStringImpl*, unsigned> edgeNameIndexes;
    unsigned nextEdgeNameIndex = 0;

    StringBuilder json;

    auto appendNodeJSON = [&] (const HeapSnapshotNode& node) {
        // Let the client decide if they want to allow or disallow certain nodes.
        if (!allowNodeCallback(node))
            return;

        allowedNodeIdentifiers.set(node.cell, node.identifier);

        // Intern the class name; reuse the index if it was seen before.
        auto result = classNameIndexes.add(node.cell->classInfo()->className, nextClassNameIndex);
        if (result.isNewEntry)
            nextClassNameIndex++;
        unsigned classNameIndex = result.iterator->value;

        // A non-string cell with no global object is considered internal.
        bool isInternal = false;
        if (!node.cell->isString()) {
            Structure* structure = node.cell->structure(vm);
            isInternal = !structure || !structure->globalObject();
        }

        // <nodeId>, <sizeInBytes>, <className>, <optionalInternalBoolean>
        json.append(',');
        json.appendNumber(node.identifier);
        json.append(',');
        json.appendNumber(node.cell->estimatedSizeInBytes());
        json.append(',');
        json.appendNumber(classNameIndex);
        json.append(',');
        json.append(isInternal ? '1' : '0');
    };

    bool firstEdge = true;
    auto appendEdgeJSON = [&] (const HeapSnapshotEdge& edge) {
        if (!firstEdge)
            json.append(',');
        firstEdge = false;

        // <fromNodeId>, <toNodeId>, <edgeTypeIndex>, <edgeExtraData>
        json.appendNumber(edge.from.identifier);
        json.append(',');
        json.appendNumber(edge.to.identifier);
        json.append(',');
        json.appendNumber(edgeTypeToNumber(edge.type));
        json.append(',');
        switch (edge.type) {
        case EdgeType::Property:
        case EdgeType::Variable: {
            // Named edges carry an index into the edgeNames table.
            auto result = edgeNameIndexes.add(edge.u.name, nextEdgeNameIndex);
            if (result.isNewEntry)
                nextEdgeNameIndex++;
            unsigned edgeNameIndex = result.iterator->value;
            json.appendNumber(edgeNameIndex);
            break;
        }
        case EdgeType::Index:
            json.appendNumber(edge.u.index);
            break;
        default:
            // No data for this edge type.
            json.append('0');
            break;
        }
    };

    json.append('{');

    // version
    json.appendLiteral("\"version\":1");

    // nodes
    json.append(',');
    json.appendLiteral("\"nodes\":");
    json.append('[');
    json.appendLiteral("0,0,0,0"); // <root>
    for (HeapSnapshot* snapshot = m_profiler.mostRecentSnapshot(); snapshot; snapshot = snapshot->previous()) {
        for (auto& node : snapshot->m_nodes)
            appendNodeJSON(node);
    }
    json.append(']');

    // node class names
    json.append(',');
    json.appendLiteral("\"nodeClassNames\":");
    json.append('[');
    Vector<const char *> orderedClassNames(classNameIndexes.size());
    for (auto& entry : classNameIndexes)
        orderedClassNames[entry.value] = entry.key;
    classNameIndexes.clear();
    bool firstClassName = true;
    for (auto& className : orderedClassNames) {
        if (!firstClassName)
            json.append(',');
        firstClassName = false;
        json.appendQuotedJSONString(className);
    }
    orderedClassNames.clear();
    json.append(']');

    // Process edges.
    // Replace pointers with identifiers.
    // Remove any edges that we won't need.
    m_edges.removeAllMatching([&] (HeapSnapshotEdge& edge) {
        // If the from cell is null, this means a <root> edge.
        if (!edge.from.cell)
            edge.from.identifier = 0;
        else {
            auto fromLookup = allowedNodeIdentifiers.find(edge.from.cell);
            if (fromLookup == allowedNodeIdentifiers.end())
                return true;
            edge.from.identifier = fromLookup->value;
        }

        if (!edge.to.cell)
            edge.to.identifier = 0;
        else {
            auto toLookup = allowedNodeIdentifiers.find(edge.to.cell);
            if (toLookup == allowedNodeIdentifiers.end())
                return true;
            edge.to.identifier = toLookup->value;
        }

        return false;
    });
    allowedNodeIdentifiers.clear();
    m_edges.shrinkToFit();

    // Sort edges based on from identifier.
    std::sort(m_edges.begin(), m_edges.end(), [&] (const HeapSnapshotEdge& a, const HeapSnapshotEdge& b) {
        return a.from.identifier < b.from.identifier;
    });

    // edges
    json.append(',');
    json.appendLiteral("\"edges\":");
    json.append('[');
    for (auto& edge : m_edges)
        appendEdgeJSON(edge);
    json.append(']');

    // edge types
    json.append(',');
    json.appendLiteral("\"edgeTypes\":");
    json.append('[');
    json.appendQuotedJSONString(edgeTypeToString(EdgeType::Internal));
    json.append(',');
    json.appendQuotedJSONString(edgeTypeToString(EdgeType::Property));
    json.append(',');
    json.appendQuotedJSONString(edgeTypeToString(EdgeType::Index));
    json.append(',');
    json.appendQuotedJSONString(edgeTypeToString(EdgeType::Variable));
    json.append(']');

    // edge names
    json.append(',');
    json.appendLiteral("\"edgeNames\":");
    json.append('[');
    Vector<UniquedStringImpl*> orderedEdgeNames(edgeNameIndexes.size());
    for (auto& entry : edgeNameIndexes)
        orderedEdgeNames[entry.value] = entry.key;
    edgeNameIndexes.clear();
    bool firstEdgeName = true;
    for (auto& edgeName : orderedEdgeNames) {
        if (!firstEdgeName)
            json.append(',');
        firstEdgeName = false;
        json.appendQuotedJSONString(edgeName);
    }
    orderedEdgeNames.clear();
    json.append(']');

    json.append('}');
    return json.toString();
}
// Exercises HashMap<int, int> end-to-end: insert, duplicate insert, find,
// iteration (mutable and const), erase by key and by iterator, clear, copy
// construction, assignment, and operator[]. The assertions form an ordered
// state machine — each step depends on the map state left by the previous one.
void hashMapInt()
{
    HashMap<int, int> m;
    //insert
    assert("HashMap::insert()", m.insert(2, 5).second);
    assert("HashMap::insert()", m.insert(3, 6).second);
    assert("HashMap::insert()", m.insert(4, 7).second);
    assert("HashMap::size()", m.size() == 3);
    //dupe insert: must fail and return the existing (unchanged) entry
    Pair<HashMap<int, int>::Iterator, bool> res = m.insert(4, 8);
    assert("HashMap::insert()", !res.second);
    assert("HashMap::insert()", res.first != m.end());
    assert("HashMap::insert()", res.first->first == 4);
    assert("HashMap::insert()", res.first->second == 7);
    //find
    HashMap<int, int>::Iterator itr = m.find(3);
    assert("HashMap::find()", itr != m.end());
    assert("HashMap::find()", itr->first == 3);
    assert("HashMap::find()", itr->second == 6);
    //iterate: three advances over three elements must reach end()
    itr = m.begin();
    assert("HashMap::begin()", itr != m.end());
    ++itr;
    itr++;
    ++itr;
    assert("HashMap::Iterator", itr == m.end());
    //const iterator
    HashMap<int, int>::ConstIterator citr = m.find(2);
    assert("cHashMap::find()", citr != m.end());
    assert("cHashMap::find()", citr->first == 2);
    assert("cHashMap::find()", citr->second == 5);
    citr = m.begin();
    assert("cHashMap::begin()", citr != m.end());
    ++citr;
    citr++;
    ++citr;
    assert("cHashMap::Iterator", citr == m.end());
    //erase
    assert("HashMap::erase()", m.erase(3));
    assert("HashMap::erase()", m.size() == 2);
    //copy constructor
    HashMap<int, int> copy(m);
    assert("HashMap::HashMap(HashMap)", copy.size() == 2);
    //erase by iterator
    m.erase(m.find(2));
    assert("HashMap::erase(Iterator)", m.size() == 1);
    //clear
    m.clear();
    assert("HashMap::clear()", m.size() == 0);
    itr = m.find(4);
    assert("HashMap::clear()", itr == m.end());
    assert("HashMap::clear()", m.begin() == m.end());
    //assignment operator: restores the two entries saved in `copy`
    m = copy;
    assert("HashMap::operator=", m.size() == 2);
    //square bracket operator: insert via [] must not disturb existing entries
    m[9] = 10;
    assert("HashMap::operator[]", m[2] == 5);
}
// String-keyed twin of hashMapInt(): exercises HashMap<String, String> through
// the same ordered sequence — insert, duplicate insert, find, iteration,
// erase, copy, clear, assignment, operator[].
void hashMap()
{
    HashMap<String, String> m;
    //insert
    assert("HashMap::insert()", m.insert("foo", "F00").second);
    assert("HashMap::insert()", m.insert("bar", "B4R").second);
    assert("HashMap::insert()", m.insert("baz", "B4Z").second);
    assert("HashMap::size()", m.size() == 3);
    //dupe insert: must fail and return the existing (unchanged) entry
    Pair<HashMap<String, String>::Iterator, bool> res = m.insert("foo", "whatev");
    assert("HashMap::insert()", !res.second);
    assert("HashMap::insert()", res.first != m.end());
    assert("HashMap::insert()", res.first->first == "foo");
    assert("HashMap::insert()", res.first->second == "F00");
    //find
    HashMap<String, String>::Iterator itr = m.find("bar");
    assert("HashMap::find()", itr != m.end());
    assert("HashMap::find()", itr->first == "bar");
    assert("HashMap::find()", itr->second == "B4R");
    //iterate: three advances over three elements must reach end()
    itr = m.begin();
    assert("HashMap::begin()", itr != m.end());
    ++itr;
    itr++;
    ++itr;
    assert("HashMap::Iterator", itr == m.end());
    //const iterator
    HashMap<String, String>::ConstIterator citr = m.find("baz");
    assert("cHashMap::find()", citr != m.end());
    assert("cHashMap::find()", citr->first == "baz");
    assert("cHashMap::find()", citr->second == "B4Z");
    citr = m.begin();
    assert("cHashMap::begin()", citr != m.end());
    ++citr;
    citr++;
    ++citr;
    assert("cHashMap::Iterator", citr == m.end());
    //erase
    assert("HashMap::erase()", m.erase("bar"));
    assert("HashMap::erase()", m.size() == 2);
    //copy constructor
    HashMap<String, String> copy(m);
    assert("HashMap::HashMap(HashMap)", copy.size() == 2);
    //erase by iterator
    m.erase(m.find("baz"));
    assert("HashMap::erase(Iterator)", m.size() == 1);
    //clear
    m.clear();
    assert("HashMap::clear()", m.size() == 0);
    itr = m.find("baz");
    assert("HashMap::clear()", itr == m.end());
    assert("HashMap::clear()", m.begin() == m.end());
    //assignment operator: restores the two entries saved in `copy`
    m = copy;
    assert("HashMap::operator=", m.size() == 2);
    //square bracket operator: insert via [] must not disturb existing entries
    m["norg"] = "N07G";
    assert("HashMap::operator[]", m["foo"] == "F00");
}
// Promotes anonymous stack slots that are only ever loaded/stored at offset 0
// with a single consistent type into SSA values (Phi/Upsilon form), then kills
// the slots. Returns true if any slot was promoted.
bool fixSSA(Procedure& proc)
{
    PhaseScope phaseScope(proc, "fixSSA");

    // Collect the stack "variables". If there aren't any, then we don't have anything to do.
    // That's a fairly common case. Each candidate starts with type Void,
    // meaning "no typed access seen yet".
    HashMap<StackSlotValue*, Type> stackVariable;
    for (Value* value : proc.values()) {
        if (StackSlotValue* stack = value->as<StackSlotValue>()) {
            if (stack->kind() == StackSlotKind::Anonymous)
                stackVariable.add(stack, Void);
        }
    }

    if (stackVariable.isEmpty())
        return false;

    // Make sure that we know how to optimize all of these. We only know how to handle Load and
    // Store on anonymous variables.
    for (Value* value : proc.values()) {
        // Disqualify a slot that escapes (is used as anything but a
        // zero-offset load/store address).
        auto reject = [&] (Value* value) {
            if (StackSlotValue* stack = value->as<StackSlotValue>())
                stackVariable.remove(stack);
        };

        // Record a typed access; a slot accessed at a nonzero offset or with
        // two different types is disqualified.
        auto handleAccess = [&] (Value* access, Type type) {
            StackSlotValue* stack = access->lastChild()->as<StackSlotValue>();
            if (!stack)
                return;

            if (value->as<MemoryValue>()->offset()) {
                stackVariable.remove(stack);
                return;
            }

            auto result = stackVariable.find(stack);
            if (result == stackVariable.end())
                return;
            if (result->value == Void) {
                result->value = type;
                return;
            }
            if (result->value == type)
                return;
            stackVariable.remove(result);
        };

        switch (value->opcode()) {
        case Load:
            // We're OK with loads from stack variables at an offset of zero.
            handleAccess(value, value->type());
            break;
        case Store:
            // We're OK with stores to stack variables, but not storing stack variables.
            reject(value->child(0));
            handleAccess(value, value->child(0)->type());
            break;
        default:
            for (Value* child : value->children())
                reject(child);
            break;
        }
    }

    // Slots still typed Void were never accessed at all: nop them out and
    // drop them from consideration.
    Vector<StackSlotValue*> deadValues;
    for (auto& entry : stackVariable) {
        if (entry.value == Void)
            deadValues.append(entry.key);
    }
    for (StackSlotValue* deadValue : deadValues) {
        deadValue->replaceWithNop();
        stackVariable.remove(deadValue);
    }

    if (stackVariable.isEmpty())
        return false;

    // We know that we have variables to optimize, so do that now.
    breakCriticalEdges(proc);

    SSACalculator ssa(proc);

    // Create a SSACalculator::Variable for every stack variable.
    // variableToStack and stackToVariable are inverse mappings.
    Vector<StackSlotValue*> variableToStack;
    HashMap<StackSlotValue*, SSACalculator::Variable*> stackToVariable;
    for (auto& entry : stackVariable) {
        StackSlotValue* stack = entry.key;
        SSACalculator::Variable* variable = ssa.newVariable();
        RELEASE_ASSERT(variable->index() == variableToStack.size());
        variableToStack.append(stack);
        stackToVariable.add(stack, variable);
    }

    // Create Defs for all of the stores to the stack variable.
    for (BasicBlock* block : proc) {
        for (Value* value : *block) {
            if (value->opcode() != Store)
                continue;
            StackSlotValue* stack = value->child(1)->as<StackSlotValue>();
            if (!stack)
                continue;
            if (SSACalculator::Variable* variable = stackToVariable.get(stack))
                ssa.newDef(variable, block, value->child(0));
        }
    }

    // Decide where Phis are to be inserted. This creates them but does not insert them.
    ssa.computePhis(
        [&] (SSACalculator::Variable* variable, BasicBlock* block) -> Value* {
            StackSlotValue* stack = variableToStack[variable->index()];
            // The Phi's type is the single access type recorded for the slot.
            Value* phi = proc.add<Value>(Phi, stackVariable.get(stack), stack->origin());
            if (verbose) {
                dataLog(
                    "Adding Phi for ", pointerDump(stack), " at ", *block, ": ",
                    deepDump(proc, phi), "\n");
            }
            return phi;
        });

    // Now perform the conversion.
    InsertionSet insertionSet(proc);
    HashMap<StackSlotValue*, Value*> mapping; // per-block: slot -> current SSA value
    for (BasicBlock* block : proc.blocksInPreOrder()) {
        mapping.clear();

        // Seed the mapping with the reaching definitions at block head.
        for (auto& entry : stackToVariable) {
            StackSlotValue* stack = entry.key;
            SSACalculator::Variable* variable = entry.value;
            SSACalculator::Def* def = ssa.reachingDefAtHead(block, variable);
            if (def)
                mapping.set(stack, def->value());
        }

        // Phis planned for this block land at its top and become the
        // current value of their slot.
        for (SSACalculator::Def* phiDef : ssa.phisForBlock(block)) {
            StackSlotValue* stack = variableToStack[phiDef->variable()->index()];
            insertionSet.insertValue(0, phiDef->value());
            mapping.set(stack, phiDef->value());
        }

        // Rewrite loads into identities and stores into mapping updates.
        for (unsigned valueIndex = 0; valueIndex < block->size(); ++valueIndex) {
            Value* value = block->at(valueIndex);
            value->performSubstitution();

            switch (value->opcode()) {
            case Load: {
                if (StackSlotValue* stack = value->child(0)->as<StackSlotValue>()) {
                    if (Value* replacement = mapping.get(stack))
                        value->replaceWithIdentity(replacement);
                }
                break;
            }
            case Store: {
                if (StackSlotValue* stack = value->child(1)->as<StackSlotValue>()) {
                    if (stackToVariable.contains(stack)) {
                        mapping.set(stack, value->child(0));
                        value->replaceWithNop();
                    }
                }
                break;
            }
            default:
                break;
            }
        }

        // Feed each successor Phi with this block's outgoing value via an
        // Upsilon inserted just before the terminator.
        unsigned upsilonInsertionPoint = block->size() - 1;
        Origin upsilonOrigin = block->last()->origin();
        for (BasicBlock* successorBlock : block->successorBlocks()) {
            for (SSACalculator::Def* phiDef : ssa.phisForBlock(successorBlock)) {
                Value* phi = phiDef->value();
                SSACalculator::Variable* variable = phiDef->variable();
                StackSlotValue* stack = variableToStack[variable->index()];

                Value* mappedValue = mapping.get(stack);
                if (verbose) {
                    dataLog(
                        "Mapped value for ", *stack, " with successor Phi ", *phi,
                        " at end of ", *block, ": ", pointerDump(mappedValue), "\n");
                }

                // No value on this path means the slot was never written
                // here: feed the Phi a bottom value.
                if (!mappedValue)
                    mappedValue = insertionSet.insertBottom(upsilonInsertionPoint, phi);

                insertionSet.insert<UpsilonValue>(
                    upsilonInsertionPoint, upsilonOrigin, mappedValue, phi);
            }
        }

        insertionSet.execute(block);
    }

    // Finally, kill the stack slots.
    for (StackSlotValue* stack : variableToStack)
        stack->replaceWithNop();

    if (verbose) {
        dataLog("B3 after SSA conversion:\n");
        dataLog(proc);
    }

    return true;
}
/*! \brief Get simple DOI identifier from full DOI content ID string. \param contentID input, full DOI contentID string. \param doi output, simple DOI identifier. \returns Boolean indicating success or failure. */ bool OsmsOpenIPMPMessenger::GetDOIFromContentID(const std::string& contentID, std::string& doiID) { XMLDocument pIPMP_ContentId; if (pIPMP_ContentId.decode(contentID.data(), "IPMP_ContentIdentity") == false) { return false; } HashMap* identifier = pIPMP_ContentId.getDocList("Identifier"); if (identifier == 0) { return false; } if (identifier->size() == 0) { identifier->clear(); delete identifier; return false; } XMLDocument* node = (XMLDocument*)identifier->first(); if (node == 0) { for (int j = 1; j < identifier->size(); j++) { node = (XMLDocument*)identifier->next(); if (node != 0) delete node; } identifier->clear(); delete identifier; return false; } char* identifierType = node->getString("IdentifierType"); char* identifierValue = node->getString("IdentifierValue"); if ((identifierType == 0) || (identifierValue == 0)) { delete node; for (int j = 1; j < identifier->size(); j++) { node = (XMLDocument*)identifier->next(); if (node != 0) delete node; } identifier->clear(); delete identifier; return false; } if (strcmp(identifierType, "DOI") == 0) { doiID = identifierValue; delete[] identifierType; delete[] identifierValue; delete node; for (int j = 1; j < identifier->size(); j++) { node = (XMLDocument*)identifier->next(); if (node != 0) delete node; } identifier->clear(); delete identifier; return true; } delete[] identifierType; delete[] identifierValue; delete node; for (int i = 1; i < identifier->size(); i++) { node = (XMLDocument*)identifier->next(); if (node == 0) { for (int j = (i + 1); j < identifier->size(); j++) { node = (XMLDocument*)identifier->next(); if (node != 0) delete node; } identifier->clear(); delete identifier; return false; } char* identifierType = node->getString("IdentifierType"); char* identifierValue = 
node->getString("IdentifierValue"); if ((identifierType == 0) || (identifierValue == 0)) { delete node; for (int j = (i + 1); j < identifier->size(); j++) { node = (XMLDocument*)identifier->next(); if (node != 0) delete node; } identifier->clear(); delete identifier; return false; } if (strcmp(identifierType, "DOI") == 0) { doiID = identifierValue; delete[] identifierType; delete[] identifierValue; delete node; for (int j = (i + 1); j < identifier->size(); j++) { node = (XMLDocument*)identifier->next(); if (node != 0) delete node; } identifier->clear(); delete identifier; return true; } delete[] identifierType; delete[] identifierValue; delete node; } identifier->clear(); delete identifier; return false; }
// Finds the index within [start, start+size) that best splits the (sorted by
// value) class sequence, maximizing information gain per Fayyad & Irani's MDL
// discretization. Ties are broken in favor of the most even split. Writes the
// winning gain to |informationGain| and returns the split index (-1 if the
// loop never improves on the initial sentinel).
int FayyadMdlDiscretizer::_findBestSplitPoint(int start, int size, double& informationGain)
{
    assert(start + size <= (int)_classes->size());
    assert(start >= 0);
    const std::vector<double>& values = *_values;
    const std::vector<int>& classes = *_classes;

    HashMap<int, int> frequency;
    HashMap<int, int>::const_iterator it;
    double log2 = log(2.0); // divisor to convert natural log to log base 2
    int cnt = 0;
    // Forward pass: _entropyCacheUp[i] = entropy of classes[start..i].
    // NOTE(review): entropy is recomputed over the whole frequency table at
    // every step, making this pass O(size * #classes).
    for (int i = start; i < start + size; i++) {
        int seqClasses = 1;
        it = frequency.find(classes[i]);
        if (it == frequency.end()) {
            frequency[classes[i]] = seqClasses;
        } else {
            int tmp = it->second;
            frequency[classes[i]] = tmp + seqClasses;
        }
        cnt++;

        double entropy = 0.0;
        for (it = frequency.begin(); it != frequency.end(); it++) {
            double proportion = (double)it->second / (double)cnt;
            entropy += proportion * log(proportion) / log2;
        }
        _entropyCacheUp[i] = -entropy;
    }

    // Backward pass: _entropyCacheDown[i] = entropy of classes[i..start+size-1].
    cnt = 0;
    frequency.clear();
    for (int i = start + size - 1; i >= start; i--) {
        int seqClasses = 1;
        it = frequency.find(classes[i]);
        if (it == frequency.end()) {
            frequency[classes[i]] = seqClasses;
        } else {
            int tmp = it->second;
            frequency[classes[i]] = tmp + seqClasses;
        }
        cnt++;

        double entropy = 0.0;
        for (it = frequency.begin(); it != frequency.end(); it++) {
            double proportion = (double)it->second / (double)cnt;
            entropy += proportion * log(proportion) / log2;
        }
        _entropyCacheDown[i] = -entropy;
    }

    // Entropy of the whole range, used as the baseline for gain.
    double totalEntropy = _entropyCacheUp[start + size - 1];

    int bestSplitPoint = -1;
    double bestGain = -1.0;
    int bestSmallSide = -1;
    for (int i = start; i < start + size; i++) {
        // move on up to the next possible split point.
        // (Splitting between equal values is not allowed.)
        while (i < start + size - 1 && values[i] == values[i + 1]) {
            i++;
        }
        double leftEnt = _entropyCacheUp[i];
        double rightEnt = 0.0;
        if (i < start + size - 1) {
            rightEnt = _entropyCacheDown[i + 1];
        }
        double leftSize = i - start + 1;
        double rightSize = size - leftSize;
        // Gain = total entropy minus the size-weighted entropy of the halves.
        double gain = totalEntropy - (rightEnt * rightSize / (double)size + leftEnt * leftSize / (double)size);
        int smallSide = (int)((leftSize < rightSize ? leftSize : rightSize) + .5);
        // find the split with the most information gain and the most even split
        if (gain > bestGain || (gain == bestGain && smallSide > bestSmallSide)) {
            bestGain = gain;
            bestSplitPoint = i;
            bestSmallSide = smallSide;
        }
    }

    informationGain = bestGain;
    return bestSplitPoint;
}
// Serializes all recorded heap snapshots to a single JSON document (format
// version 2). Nodes are filtered through |allowNodeCallback|; edges whose
// endpoints were filtered out are dropped. Class names, labels, and edge
// names are emitted as index tables. GCDebuggingSnapshot mode additionally
// emits per-node labels/addresses and a "roots" section. Consumes builder
// state: the index maps are cleared and m_edges is rewritten (pointers
// replaced by identifiers) and sorted in place.
String HeapSnapshotBuilder::json(Function<bool (const HeapSnapshotNode&)> allowNodeCallback)
{
    VM& vm = m_profiler.vm();
    DeferGCForAWhile deferGC(vm.heap);

    // Build a node to identifier map of allowed nodes to use when serializing edges.
    HashMap<JSCell*, NodeIdentifier> allowedNodeIdentifiers;

    // Build a list of used class names.
    HashMap<String, unsigned> classNameIndexes;
    classNameIndexes.set("<root>"_s, 0);
    unsigned nextClassNameIndex = 1;

    // Build a list of labels (this is just a string table).
    HashMap<String, unsigned> labelIndexes;
    labelIndexes.set(emptyString(), 0);
    unsigned nextLabelIndex = 1;

    // Build a list of used edge names.
    HashMap<UniquedStringImpl*, unsigned> edgeNameIndexes;
    unsigned nextEdgeNameIndex = 0;

    StringBuilder json;

    auto appendNodeJSON = [&] (const HeapSnapshotNode& node) {
        // Let the client decide if they want to allow or disallow certain nodes.
        if (!allowNodeCallback(node))
            return;

        unsigned flags = 0;

        allowedNodeIdentifiers.set(node.cell, node.identifier);

        String className = node.cell->classInfo(vm)->className;
        if (node.cell->isObject() && className == JSObject::info()->className) {
            flags |= static_cast<unsigned>(NodeFlags::ObjectSubtype);

            // Skip calculating a class name if this object has a `constructor` own property.
            // These cases are typically F.prototype objects and we want to treat these as
            // "Object" in snapshots and not get the name of the prototype's parent.
            JSObject* object = asObject(node.cell);
            if (JSGlobalObject* globalObject = object->globalObject(vm)) {
                ExecState* exec = globalObject->globalExec();
                PropertySlot slot(object, PropertySlot::InternalMethodType::VMInquiry);
                if (!object->getOwnPropertySlot(object, exec, vm.propertyNames->constructor, slot))
                    className = JSObject::calculatedClassName(object);
            }
        }

        // Intern the class name; reuse the index if it was seen before.
        auto result = classNameIndexes.add(className, nextClassNameIndex);
        if (result.isNewEntry)
            nextClassNameIndex++;
        unsigned classNameIndex = result.iterator->value;

        void* wrappedAddress = 0;
        unsigned labelIndex = 0;
        if (!node.cell->isString()) {
            Structure* structure = node.cell->structure(vm);
            if (!structure || !structure->globalObject())
                flags |= static_cast<unsigned>(NodeFlags::Internal);

            if (m_snapshotType == SnapshotType::GCDebuggingSnapshot) {
                // Prefer an explicit cell label; fall back to a function's
                // display name; append the cell description if any.
                String nodeLabel;
                auto it = m_cellLabels.find(node.cell);
                if (it != m_cellLabels.end())
                    nodeLabel = it->value;

                if (nodeLabel.isEmpty()) {
                    if (auto* object = jsDynamicCast<JSObject*>(vm, node.cell)) {
                        if (auto* function = jsDynamicCast<JSFunction*>(vm, object))
                            nodeLabel = function->calculatedDisplayName(vm);
                    }
                }

                String description = descriptionForCell(node.cell);
                if (description.length()) {
                    if (nodeLabel.length())
                        nodeLabel.append(' ');
                    nodeLabel.append(description);
                }

                if (!nodeLabel.isEmpty() && m_snapshotType == SnapshotType::GCDebuggingSnapshot) {
                    auto result = labelIndexes.add(nodeLabel, nextLabelIndex);
                    if (result.isNewEntry)
                        nextLabelIndex++;
                    labelIndex = result.iterator->value;
                }

                wrappedAddress = m_wrappedObjectPointers.get(node.cell);
            }
        }

        // <nodeId>, <sizeInBytes>, <nodeClassNameIndex>, <flags>, [<labelIndex>, <cellEddress>, <wrappedAddress>]
        json.append(',');
        json.appendNumber(node.identifier);
        json.append(',');
        json.appendNumber(node.cell->estimatedSizeInBytes(vm));
        json.append(',');
        json.appendNumber(classNameIndex);
        json.append(',');
        json.appendNumber(flags);
        if (m_snapshotType == SnapshotType::GCDebuggingSnapshot) {
            json.append(',');
            json.appendNumber(labelIndex);
            json.appendLiteral(",\"0x");
            appendUnsignedAsHex(reinterpret_cast<uintptr_t>(node.cell), json, Lowercase);
            json.appendLiteral("\",\"0x");
            appendUnsignedAsHex(reinterpret_cast<uintptr_t>(wrappedAddress), json, Lowercase);
            json.append('"');
        }
    };

    bool firstEdge = true;
    auto appendEdgeJSON = [&] (const HeapSnapshotEdge& edge) {
        if (!firstEdge)
            json.append(',');
        firstEdge = false;

        // <fromNodeId>, <toNodeId>, <edgeTypeIndex>, <edgeExtraData>
        json.appendNumber(edge.from.identifier);
        json.append(',');
        json.appendNumber(edge.to.identifier);
        json.append(',');
        json.appendNumber(edgeTypeToNumber(edge.type));
        json.append(',');
        switch (edge.type) {
        case EdgeType::Property:
        case EdgeType::Variable: {
            // Named edges carry an index into the edgeNames table.
            auto result = edgeNameIndexes.add(edge.u.name, nextEdgeNameIndex);
            if (result.isNewEntry)
                nextEdgeNameIndex++;
            unsigned edgeNameIndex = result.iterator->value;
            json.appendNumber(edgeNameIndex);
            break;
        }
        case EdgeType::Index:
            json.appendNumber(edge.u.index);
            break;
        default:
            // No data for this edge type.
            json.append('0');
            break;
        }
    };

    json.append('{');

    // version
    json.appendLiteral("\"version\":2");

    // type
    json.append(',');
    json.appendLiteral("\"type\":");
    json.appendQuotedJSONString(snapshotTypeToString(m_snapshotType));

    // nodes
    json.append(',');
    json.appendLiteral("\"nodes\":");
    json.append('[');
    // <root>
    if (m_snapshotType == SnapshotType::GCDebuggingSnapshot)
        json.appendLiteral("0,0,0,0,0,\"0x0\",\"0x0\"");
    else
        json.appendLiteral("0,0,0,0");
    for (HeapSnapshot* snapshot = m_profiler.mostRecentSnapshot(); snapshot; snapshot = snapshot->previous()) {
        for (auto& node : snapshot->m_nodes)
            appendNodeJSON(node);
    }
    json.append(']');

    // node class names
    json.append(',');
    json.appendLiteral("\"nodeClassNames\":");
    json.append('[');
    Vector<String> orderedClassNames(classNameIndexes.size());
    for (auto& entry : classNameIndexes)
        orderedClassNames[entry.value] = entry.key;
    classNameIndexes.clear();
    bool firstClassName = true;
    for (auto& className : orderedClassNames) {
        if (!firstClassName)
            json.append(',');
        firstClassName = false;
        json.appendQuotedJSONString(className);
    }
    orderedClassNames.clear();
    json.append(']');

    // Process edges.
    // Replace pointers with identifiers.
    // Remove any edges that we won't need.
    m_edges.removeAllMatching([&] (HeapSnapshotEdge& edge) {
        // If the from cell is null, this means a <root> edge.
        if (!edge.from.cell)
            edge.from.identifier = 0;
        else {
            auto fromLookup = allowedNodeIdentifiers.find(edge.from.cell);
            if (fromLookup == allowedNodeIdentifiers.end()) {
                if (m_snapshotType == SnapshotType::GCDebuggingSnapshot)
                    WTFLogAlways("Failed to find node for from-edge cell %p", edge.from.cell);
                return true;
            }
            edge.from.identifier = fromLookup->value;
        }

        if (!edge.to.cell)
            edge.to.identifier = 0;
        else {
            auto toLookup = allowedNodeIdentifiers.find(edge.to.cell);
            if (toLookup == allowedNodeIdentifiers.end()) {
                if (m_snapshotType == SnapshotType::GCDebuggingSnapshot)
                    WTFLogAlways("Failed to find node for to-edge cell %p", edge.to.cell);
                return true;
            }
            edge.to.identifier = toLookup->value;
        }

        return false;
    });
    allowedNodeIdentifiers.clear();
    m_edges.shrinkToFit();

    // Sort edges based on from identifier.
    std::sort(m_edges.begin(), m_edges.end(), [&] (const HeapSnapshotEdge& a, const HeapSnapshotEdge& b) {
        return a.from.identifier < b.from.identifier;
    });

    // edges
    json.append(',');
    json.appendLiteral("\"edges\":");
    json.append('[');
    for (auto& edge : m_edges)
        appendEdgeJSON(edge);
    json.append(']');

    // edge types
    json.append(',');
    json.appendLiteral("\"edgeTypes\":");
    json.append('[');
    json.appendQuotedJSONString(edgeTypeToString(EdgeType::Internal));
    json.append(',');
    json.appendQuotedJSONString(edgeTypeToString(EdgeType::Property));
    json.append(',');
    json.appendQuotedJSONString(edgeTypeToString(EdgeType::Index));
    json.append(',');
    json.appendQuotedJSONString(edgeTypeToString(EdgeType::Variable));
    json.append(']');

    // edge names
    json.append(',');
    json.appendLiteral("\"edgeNames\":");
    json.append('[');
    Vector<UniquedStringImpl*> orderedEdgeNames(edgeNameIndexes.size());
    for (auto& entry : edgeNameIndexes)
        orderedEdgeNames[entry.value] = entry.key;
    edgeNameIndexes.clear();
    bool firstEdgeName = true;
    for (auto& edgeName : orderedEdgeNames) {
        if (!firstEdgeName)
            json.append(',');
        firstEdgeName = false;
        json.appendQuotedJSONString(edgeName);
    }
    orderedEdgeNames.clear();
    json.append(']');

    // roots (GC debugging only): <nodeId>, <rootNameLabelIndex>, <reachabilityReasonLabelIndex>
    if (m_snapshotType == SnapshotType::GCDebuggingSnapshot) {
        json.append(',');
        json.appendLiteral("\"roots\":");
        json.append('[');

        HeapSnapshot* snapshot = m_profiler.mostRecentSnapshot();

        bool firstNode = true;
        for (auto it : m_rootData) {
            auto snapshotNode = snapshot->nodeForCell(it.key);
            if (!snapshotNode) {
                WTFLogAlways("Failed to find snapshot node for cell %p", it.key);
                continue;
            }

            if (!firstNode)
                json.append(',');
            firstNode = false;
            json.appendNumber(snapshotNode.value().identifier);

            // Maybe we should just always encode the root names.
            const char* rootName = rootTypeToString(it.value.markReason);
            auto result = labelIndexes.add(rootName, nextLabelIndex);
            if (result.isNewEntry)
                nextLabelIndex++;
            unsigned labelIndex = result.iterator->value;
            json.append(',');
            json.appendNumber(labelIndex);

            unsigned reachabilityReasonIndex = 0;
            if (it.value.reachabilityFromOpaqueRootReasons) {
                auto result = labelIndexes.add(it.value.reachabilityFromOpaqueRootReasons, nextLabelIndex);
                if (result.isNewEntry)
                    nextLabelIndex++;
                reachabilityReasonIndex = result.iterator->value;
            }
            json.append(',');
            json.appendNumber(reachabilityReasonIndex);
        }
        json.append(']');
    }

    if (m_snapshotType == SnapshotType::GCDebuggingSnapshot) {
        // internal node descriptions
        json.append(',');
        json.appendLiteral("\"labels\":");
        json.append('[');
        Vector<String> orderedLabels(labelIndexes.size());
        for (auto& entry : labelIndexes)
            orderedLabels[entry.value] = entry.key;
        labelIndexes.clear();
        bool firstLabel = true;
        for (auto& label : orderedLabels) {
            if (!firstLabel)
                json.append(',');
            firstLabel = false;
            json.appendQuotedJSONString(label);
        }
        orderedLabels.clear();
        json.append(']');
    }

    json.append('}');
    return json.toString();
}