// Handles notification-close events reported by the platform provider:
// maps each global notification ID back to its (page, page-local ID) pair,
// batches the page-local IDs per page, and then tells each page's web
// process which of its notifications were closed.
void WebNotificationManagerProxy::providerDidCloseNotifications(API::Array* globalNotificationIDs) {
    HashMap<WebPageProxy*, Vector<uint64_t>> pageNotificationIDs;
    size_t size = globalNotificationIDs->size();
    for (size_t i = 0; i < size; ++i) {
        auto it = m_globalNotificationMap.find(globalNotificationIDs->at<API::UInt64>(i)->value());
        if (it == m_globalNotificationMap.end())
            continue;
        // it->value is a (pageID, pageNotificationID) pair.
        if (WebPageProxy* webPage = WebProcessProxy::webPage(it->value.first)) {
            auto pageIt = pageNotificationIDs.find(webPage);
            if (pageIt == pageNotificationIDs.end()) {
                // Reserve for the worst case: every closed ID belongs to this page.
                Vector<uint64_t> newVector;
                newVector.reserveInitialCapacity(size);
                pageIt = pageNotificationIDs.add(webPage, WTF::move(newVector)).iterator;
            }
            uint64_t pageNotificationID = it->value.second;
            pageIt->value.append(pageNotificationID);
        }
        // Drop our bookkeeping for this notification. 'it' is consumed above
        // before removal, because remove() invalidates it.
        m_notifications.remove(it->value);
        m_globalNotificationMap.remove(it);
    }
    // One DidCloseNotifications message per affected page.
    for (auto it = pageNotificationIDs.begin(), end = pageNotificationIDs.end(); it != end; ++it)
        it->key->process().send(Messages::WebNotificationManager::DidCloseNotifications(it->value), 0);
}
double FayyadMdlDiscretizer::_calculateEntropy(int start, int size) { assert(start + size <= (int)_classes->size()); assert(start >= 0); const std::vector<int>& classes = *_classes; HashMap<int, int> frequency; HashMap<int, int>::const_iterator it; for (int i = start; i < start + size; i++) { it = frequency.find(classes[i]); if (it == frequency.end()) { frequency[classes[i]] = 1; } else { int tmp = it->second; frequency[classes[i]] = tmp + 1; } } double entropy = 0.0; for (it = frequency.begin(); it != frequency.end(); it++) { double proportion = (double)it->second / (double)size; entropy += proportion * log(proportion) / log2; } return -entropy; }
int CountMaxContinusSequence(int a[], int n) { int maxLeng = 0; for (int i = 0; i < n; ++i) { // ignore duplicated elements if (hashMap.find(a[i]) != hashMap.end()) { continue; } hashMap.insert(std::make_pair(a[i], 1)); if (hashMap.find(a[i] - 1) != hashMap.end()) { maxLeng = Max(maxLeng, Merge(a[i] - 1, a[i])); } if (hashMap.find(a[i] + 1) != hashMap.end()) { maxLeng = Max(maxLeng, Merge(a[i], a[i] + 1)); } } return maxLeng; }
// Post-hook for CryptHashData: logs the hashed data buffer when the call
// succeeded and did not originate from our own instrumentation code.
// NOTE(review): '&retval - 4' assumes a 32-bit __stdcall frame where the
// return address sits directly below the first argument — confirm for the
// target ABI.
static BOOL __stdcall CryptHashData_done(BOOL retval, HCRYPTHASH hHash, BYTE *pbData, DWORD dwDataLen, DWORD dwFlags) {
    // Preserve the hooked API's last-error value across our own calls.
    DWORD err = GetLastError();
    int ret_addr = *((DWORD *) ((DWORD) &retval - 4));
    if (retval && !called_internally(ret_addr)) {
        HashMap::iterator iter;
        LOCK();
        iter = hash_map.find(hHash);
        if (iter != hash_map.end()) {
            HashContext *ctx = iter->second;
            // Log the input buffer as a packet tied to this hash context.
            message_logger_log(_T("CryptHashData"), (char *) &retval - 4, ctx->get_id(), MESSAGE_TYPE_PACKET, MESSAGE_CTX_INFO, PACKET_DIRECTION_INVALID, NULL, NULL, (const char *) pbData, dwDataLen, _T("hHash=0x%p, Algid=%s"), hHash, ctx->get_alg_id_as_string());
        }
        UNLOCK();
    }
    SetLastError(err);
    return retval;
}
virtual void _Report_history(std::shared_ptr<library::XML> xml) { library::UniqueWriteLock uk(system_array_->getMutex()); //-------- // CONSTRUCT HISTORY //-------- std::shared_ptr<PRInvokeHistory> history(new PRInvokeHistory()); history->construct(xml); // IF THE HISTORY IS NOT EXIST IN PROGRESS, THEN TERMINATE REPORTING auto progress_it = progress_list_.find(history->getUID()); if (progress_it == progress_list_.end()) return; // ARCHIVE FIRST AND LAST INDEX history->first_ = std::dynamic_pointer_cast<PRInvokeHistory>(progress_it->second.second)->getFirst(); history->last_ = std::dynamic_pointer_cast<PRInvokeHistory>(progress_it->second.second)->getLast(); // ERASE FROM ORDINARY PROGRESS AND MIGRATE TO THE HISTORY progress_list_.erase(progress_it); history_list_.insert({ history->getUID(), history }); // NOTIFY TO THE MANAGER, SYSTEM_ARRAY ((base::ParallelSystemArrayBase*)system_array_)->_Complete_history(history); };
// Post-hook for CryptDuplicateHash: logs the duplication (source handle,
// algorithm, and the new handle) when the call succeeded and was not made
// internally by the instrumentation itself.
// NOTE(review): '&retval - 4' assumes a 32-bit __stdcall frame layout —
// confirm for the target ABI.
static BOOL __stdcall CryptDuplicateHash_done(BOOL retval, HCRYPTHASH hHash, DWORD *pdwReserved, DWORD dwFlags, HCRYPTHASH *phHash) {
    // Preserve the hooked API's last-error value across our own calls.
    DWORD err = GetLastError();
    int ret_addr = *((DWORD *) ((DWORD) &retval - 4));
    if (retval && !called_internally(ret_addr)) {
        HashMap::iterator iter;
        LOCK();
        iter = hash_map.find(hHash);
        if (iter != hash_map.end()) {
            HashContext *ctx = iter->second;
            message_logger_log(_T("CryptDuplicateHash"), (char *) &retval - 4, ctx->get_id(), MESSAGE_TYPE_MESSAGE, MESSAGE_CTX_INFO, PACKET_DIRECTION_INVALID, NULL, NULL, NULL, 0, _T("hHash=0x%p, Algid=%s => *phHash=0x%p"), hHash, ctx->get_alg_id_as_string(), *phHash);
        }
        UNLOCK();
    }
    SetLastError(err);
    return retval;
}
// Validates an <?xml-stylesheet?> processing instruction and extracts its
// href and charset pseudo-attributes. Returns false when this PI is not a
// stylesheet link that should be loaded.
// See http://www.w3.org/TR/xml-stylesheet/
bool ProcessingInstruction::checkStyleSheet(String& href, String& charset)
{
    // Only top-level xml-stylesheet PIs in a frame-backed document apply.
    if (m_target != "xml-stylesheet" || !document().frame() || parentNode() != document())
        return false;

    // ### support stylesheet included in a fragment of this (or another) document
    // ### make sure this gets called when adding from javascript
    bool attrsOk;
    const HashMap<String, String> attrs = parseAttributes(m_data, attrsOk);
    if (!attrsOk)
        return false;

    // Classify the sheet by its "type" pseudo-attribute; missing or empty
    // type defaults to CSS.
    String type;
    HashMap<String, String>::const_iterator i = attrs.find("type");
    if (i != attrs.end())
        type = i->value;

    m_isCSS = type.isEmpty() || type == "text/css";
    m_isXSL = (type == "text/xml" || type == "text/xsl" || type == "application/xml" || type == "application/xhtml+xml" || type == "application/rss+xml" || type == "application/atom+xml");
    if (!m_isCSS && !m_isXSL)
        return false;

    href = attrs.get("href");
    charset = attrs.get("charset");
    String alternate = attrs.get("alternate");
    m_alternate = alternate == "yes";
    m_title = attrs.get("title");
    m_media = attrs.get("media");

    // An alternate stylesheet is only usable when it carries a title.
    return !m_alternate || !m_title.isEmpty();
}
void EventPath::calculateTreeScopePrePostOrderNumbers() { // Precondition: // - TreeScopes in m_treeScopeEventContexts must be *connected* in the same tree of trees. // - The root tree must be included. HashMap<const TreeScope*, TreeScopeEventContext*> treeScopeEventContextMap; for (size_t i = 0; i < m_treeScopeEventContexts.size(); ++i) treeScopeEventContextMap.add(&m_treeScopeEventContexts[i]->treeScope(), m_treeScopeEventContexts[i].get()); TreeScopeEventContext* rootTree = 0; for (size_t i = 0; i < m_treeScopeEventContexts.size(); ++i) { TreeScopeEventContext* treeScopeEventContext = m_treeScopeEventContexts[i].get(); // Use olderShadowRootOrParentTreeScope here for parent-child relationships. // See the definition of trees of trees in the Shado DOM spec: http://w3c.github.io/webcomponents/spec/shadow/ TreeScope* parent = treeScopeEventContext->treeScope().olderShadowRootOrParentTreeScope(); if (!parent) { ASSERT(!rootTree); rootTree = treeScopeEventContext; continue; } ASSERT(treeScopeEventContextMap.find(parent) != treeScopeEventContextMap.end()); treeScopeEventContextMap.find(parent)->value->addChild(*treeScopeEventContext); } ASSERT(rootTree); rootTree->calculatePrePostOrderNumber(0); }
// OpenSSL certificate-verification callback used by the curl backend.
// Returns 1 to accept the peer certificate chain, 0 to reject it.
static int certVerifyCallback(int ok, X509_STORE_CTX* ctx) {
    // whether the verification of the certificate in question was passed (preverify_ok=1) or not (preverify_ok=0)
    unsigned err = X509_STORE_CTX_get_error(ctx);
    if (!err)
        return 1;

    // Recover the ResourceHandle for this connection from the SSL context's
    // application data, then record the SSL error for later reporting.
    SSL* ssl = reinterpret_cast<SSL*>(X509_STORE_CTX_get_ex_data(ctx, SSL_get_ex_data_X509_STORE_CTX_idx()));
    SSL_CTX* sslctx = SSL_get_SSL_CTX(ssl);
    ResourceHandle* job = reinterpret_cast<ResourceHandle*>(SSL_CTX_get_app_data(sslctx));
    String host = job->firstRequest().url().host();
    ResourceHandleInternal* d = job->getInternal();
    d->m_sslErrors = sslCertificateFlag(err);
#if PLATFORM(WIN)
    // On Windows, accept iff the host has an allow-list entry.
    HashMap<String, ListHashSet<String>>::iterator it = allowedHosts.find(host);
    ok = (it != allowedHosts.end());
#else
    // Elsewhere, compare the certificate chain (PEM form) against the
    // stored per-host certificate exceptions.
    ListHashSet<String> certificates;
    if (!pemData(ctx, certificates))
        return 0;
    ok = sslIgnoreHTTPSCertificate(host.lower(), certificates);
#endif
    if (ok) {
        // if the host and the certificate are stored for the current handle that means is enabled,
        // so don't need to curl verifies the authenticity of the peer's certificate
        curl_easy_setopt(d->m_handle, CURLOPT_SSL_VERIFYPEER, false);
    }
    return ok;
}
// Builds the parameter list for a Java applet from the attribute map,
// resolves the code-base URL (enforcing the local-load security check),
// and asks the frame loader client to create the applet widget. Returns 0
// when the security check fails or the client cannot create a widget.
PassRefPtr<Widget> SubframeLoader::createJavaAppletWidget(const IntSize& size, HTMLAppletElement* element, const HashMap<String, String>& args)
{
    String baseURLString;
    String codeBaseURLString;
    Vector<String> paramNames;
    Vector<String> paramValues;

    // Every attribute becomes a param; "baseurl"/"codebase" are also
    // captured separately for URL resolution below.
    for (HashMap<String, String>::const_iterator it = args.begin(), end = args.end(); it != end; ++it) {
        if (equalIgnoringCase(it->first, "baseurl"))
            baseURLString = it->second;
        else if (equalIgnoringCase(it->first, "codebase"))
            codeBaseURLString = it->second;
        paramNames.append(it->first);
        paramValues.append(it->second);
    }

    // An explicit codebase must pass the security check before use.
    if (!codeBaseURLString.isEmpty()) {
        KURL codeBaseURL = completeURL(codeBaseURLString);
        if (!SecurityOrigin::canLoad(codeBaseURL, String(), element->document())) {
            FrameLoader::reportLocalLoadFailed(m_frame, codeBaseURL.string());
            return 0;
        }
    }

    // Fall back to the document's base URL when no base was supplied.
    if (baseURLString.isEmpty())
        baseURLString = m_frame->document()->baseURL().string();
    KURL baseURL = completeURL(baseURLString);

    RefPtr<Widget> widget = m_frame->loader()->client()->createJavaAppletWidget(size, element, baseURL, paramNames, paramValues);
    if (!widget)
        return 0;

    m_containsPlugins = true;
    return widget;
}
// Inserts eleven distinct key/value pairs, then verifies that iterating
// the map visits every stored value exactly once (iteration order is
// irrelevant to the check).
void testIterator ()
{
    // The value stored under each key is its index in this table.
    const char* keys[11] = { "zero", "one", "two", "three", "four", "five", "six", "seven", "eight", "nine", "ten" };
    for (int i = 0; i < 11; i++)
        map->put ((char*) keys[i], i);

    // Mark each value as it is produced by the iterator.
    int seen[11] = { 0 };
    for (HashMap<char*, int>::Iterator it = map->begin (); it != map->end (); it++)
        seen[*it] = 1;

    // Every value 0..10 must have been visited.
    for (int i = 0; i < 11; i++)
        CPPUNIT_ASSERT_EQUAL (seen[i], 1);
}
void PannerNode::notifyAudioSourcesConnectedToNode(AudioNode* node, HashMap<AudioNode*, bool>& visitedNodes) { ASSERT(node); if (!node) return; // First check if this node is an AudioBufferSourceNode. If so, let it know about us so that doppler shift pitch can be taken into account. if (node->nodeType() == NodeTypeAudioBufferSource) { AudioBufferSourceNode* bufferSourceNode = static_cast<AudioBufferSourceNode*>(node); bufferSourceNode->setPannerNode(this); } else { // Go through all inputs to this node. for (unsigned i = 0; i < node->numberOfInputs(); ++i) { AudioNodeInput* input = node->input(i); // For each input, go through all of its connections, looking for AudioBufferSourceNodes. for (unsigned j = 0; j < input->numberOfRenderingConnections(); ++j) { AudioNodeOutput* connectedOutput = input->renderingOutput(j); AudioNode* connectedNode = connectedOutput->node(); HashMap<AudioNode*, bool>::iterator iterator = visitedNodes.find(connectedNode); // If we've seen this node already, we don't need to process it again. Otherwise, // mark it as visited and recurse through the node looking for sources. if (iterator == visitedNodes.end()) { visitedNodes.set(connectedNode, true); notifyAudioSourcesConnectedToNode(connectedNode, visitedNodes); // recurse } } } } }
// Post-hook for CryptDestroyHash: logs the destruction (unless the call
// came from our own instrumentation), then removes the tracked
// HashContext from the map and frees it. The entry is erased even for
// internal calls so tracking matches the real handle lifetime.
static BOOL __stdcall CryptDestroyHash_done(BOOL retval, HCRYPTHASH hHash) {
    // Preserve the hooked API's last-error value across our own calls.
    DWORD err = GetLastError();
    // NOTE(review): '&retval - 4' assumes a 32-bit __stdcall frame layout —
    // confirm for the target ABI.
    int ret_addr = *((DWORD *) ((DWORD) &retval - 4));
    if (retval) {
        LOCK();
        HashMap::iterator iter = hash_map.find(hHash);
        if (iter != hash_map.end()) {
            HashContext *ctx = iter->second;
            if (!called_internally(ret_addr)) {
                message_logger_log(_T("CryptDestroyHash"), (char *) &retval - 4, ctx->get_id(), MESSAGE_TYPE_MESSAGE, MESSAGE_CTX_INFO, PACKET_DIRECTION_INVALID, NULL, NULL, NULL, 0, _T("hHash=0x%p"), hHash);
            }
            hash_map.erase(iter);
            delete ctx;
        }
        UNLOCK();
    }
    SetLastError(err);
    return retval;
}
// Flushes a batch of key/value changes to ItemTable on disk. A null value
// marks its key for deletion; any other value is written via INSERT.
// When m_shouldClearItems is set, the whole table is wiped first. The
// batch is applied inside a single transaction.
void LocalStorageDatabase::updateDatabaseWithChangedItems(const HashMap<String, String>& changedItems)
{
    if (!m_database.isOpen())
        openDatabase(CreateIfNonExistent);
    if (!m_database.isOpen())
        return;

    if (m_shouldClearItems) {
        m_shouldClearItems = false;
        SQLiteStatement clearStatement(m_database, "DELETE FROM ItemTable");
        if (clearStatement.prepare() != SQLITE_OK) {
            LOG_ERROR("Failed to prepare clear statement - cannot write to local storage database");
            return;
        }
        int result = clearStatement.step();
        if (result != SQLITE_DONE) {
            LOG_ERROR("Failed to clear all items in the local storage database - %i", result);
            return;
        }
    }

    // Prepare both statements once; each is re-bound per item below.
    SQLiteStatement insertStatement(m_database, "INSERT INTO ItemTable VALUES (?, ?)");
    if (insertStatement.prepare() != SQLITE_OK) {
        LOG_ERROR("Failed to prepare insert statement - cannot write to local storage database");
        return;
    }
    SQLiteStatement deleteStatement(m_database, "DELETE FROM ItemTable WHERE key=?");
    if (deleteStatement.prepare() != SQLITE_OK) {
        LOG_ERROR("Failed to prepare delete statement - cannot write to local storage database");
        return;
    }

    // Apply the whole batch atomically.
    SQLiteTransaction transaction(m_database);
    transaction.begin();
    for (auto it = changedItems.begin(), end = changedItems.end(); it != end; ++it) {
        // A null value means that the key/value pair should be deleted.
        SQLiteStatement& statement = it->value.isNull() ? deleteStatement : insertStatement;
        statement.bindText(1, it->key);
        // If we're inserting a key/value pair, bind the value as well.
        if (!it->value.isNull())
            statement.bindBlob(2, it->value);
        int result = statement.step();
        if (result != SQLITE_DONE) {
            LOG_ERROR("Failed to update item in the local storage database - %i", result);
            break;
        }
        // Reset so the prepared statement can be re-bound next iteration.
        statement.reset();
    }
    transaction.commit();
}
// Runs on the background sync thread: opens (creating if needed) the
// per-origin local-storage database, reads every key/value pair from
// ItemTable into a local map, and hands them to the StorageArea under the
// import lock. Every failure path calls markImported() before returning.
void StorageAreaSync::performImport()
{
    ASSERT(!isMainThread());
    ASSERT(!m_database.isOpen());

    String databaseFilename = m_syncManager->fullDatabaseFilename(m_storageArea->securityOrigin());
    if (databaseFilename.isEmpty()) {
        LOG_ERROR("Filename for local storage database is empty - cannot open for persistent storage");
        markImported();
        return;
    }
    if (!m_database.open(databaseFilename)) {
        LOG_ERROR("Failed to open database file %s for local storage", databaseFilename.utf8().data());
        markImported();
        return;
    }
    // The key column replaces on conflict, so later writes behave as upserts.
    if (!m_database.executeCommand("CREATE TABLE IF NOT EXISTS ItemTable (key TEXT UNIQUE ON CONFLICT REPLACE, value TEXT NOT NULL ON CONFLICT FAIL)")) {
        LOG_ERROR("Failed to create table ItemTable for local storage");
        markImported();
        return;
    }
    SQLiteStatement query(m_database, "SELECT key, value FROM ItemTable");
    if (query.prepare() != SQLResultOk) {
        LOG_ERROR("Unable to select items from ItemTable for local storage");
        markImported();
        return;
    }
    // Read all rows into a local map before taking the import lock.
    HashMap<String, String> itemMap;
    int result = query.step();
    while (result == SQLResultRow) {
        itemMap.set(query.getColumnText(0), query.getColumnText(1));
        result = query.step();
    }
    if (result != SQLResultDone) {
        LOG_ERROR("Error reading items from ItemTable for local storage");
        markImported();
        return;
    }
    MutexLocker locker(m_importLock);
    HashMap<String, String>::iterator it = itemMap.begin();
    HashMap<String, String>::iterator end = itemMap.end();
    for (; it != end; ++it)
        m_storageArea->importItem(it->first, it->second);
    // Break the (ref count) cycle.
    m_storageArea = 0;
    m_importComplete = true;
    m_importCondition.signal();
}
// Finds candidate touch targets near a "fat" touch point for tap
// disambiguation. Hit-tests a padded rectangle around the touch point,
// scores each clickable node by overlap with the touch area, and returns
// the bounding boxes (goodTargets) and nodes (highlightNodes) of the
// candidates scoring at least half of the best score.
void findGoodTouchTargets(const IntRect& touchBox, LocalFrame* mainFrame, Vector<IntRect>& goodTargets, Vector<Node*>& highlightNodes)
{
    goodTargets.clear();

    // Padding is half the larger touch-box dimension, rounded up.
    int touchPointPadding = ceil(max(touchBox.width(), touchBox.height()) * 0.5);
    IntPoint touchPoint = touchBox.center();
    IntPoint contentsPoint = mainFrame->view()->windowToContents(touchPoint);
    HitTestResult result = mainFrame->eventHandler().hitTestResultAtPoint(contentsPoint, HitTestRequest::ReadOnly | HitTestRequest::Active | HitTestRequest::ConfusingAndOftenMisusedDisallowShadowContent, IntSize(touchPointPadding, touchPointPadding));
    const ListHashSet<RefPtr<Node> >& hitResults = result.rectBasedTestResult();

    // Blacklist nodes that are container of disambiguated nodes.
    // It is not uncommon to have a clickable <div> that contains other clickable objects.
    // This heuristic avoids excessive disambiguation in that case.
    HashSet<Node*> blackList;
    for (ListHashSet<RefPtr<Node> >::const_iterator it = hitResults.begin(); it != hitResults.end(); ++it) {
        // Ignore any Nodes that can't be clicked on.
        RenderObject* renderer = it->get()->renderer();
        if (!renderer || !it->get()->willRespondToMouseClickEvents())
            continue;

        // Blacklist all of the Node's containers.
        for (RenderBlock* container = renderer->containingBlock(); container; container = container->containingBlock()) {
            Node* containerNode = container->node();
            if (!containerNode)
                continue;
            // Stop at the first container already blacklisted: everything
            // above it was blacklisted by an earlier hit result.
            if (!blackList.add(containerNode).isNewEntry)
                break;
        }
    }

    HashMap<Node*, TouchTargetData> touchTargets;
    float bestScore = 0;
    for (ListHashSet<RefPtr<Node> >::const_iterator it = hitResults.begin(); it != hitResults.end(); ++it) {
        // Walk up from each hit node to the nearest clickable ancestor.
        for (Node* node = it->get(); node; node = node->parentNode()) {
            if (blackList.contains(node))
                continue;
            // Never treat the document/html/body as a target.
            if (node->isDocumentNode() || isHTMLHtmlElement(*node) || isHTMLBodyElement(*node))
                break;
            if (node->willRespondToMouseClickEvents()) {
                TouchTargetData& targetData = touchTargets.add(node, TouchTargetData()).storedValue->value;
                targetData.windowBoundingBox = boundingBoxForEventNodes(node);
                targetData.score = scoreTouchTarget(touchPoint, touchPointPadding, targetData.windowBoundingBox);
                bestScore = max(bestScore, targetData.score);
                break;
            }
        }
    }

    for (HashMap<Node*, TouchTargetData>::iterator it = touchTargets.begin(); it != touchTargets.end(); ++it) {
        // Currently the scoring function uses the overlap area with the fat point as the score.
        // We ignore the candidates that has less than 1/2 overlap (we consider not really ambiguous enough) than the best candidate to avoid excessive popups.
        if (it->value.score < bestScore * 0.5)
            continue;
        goodTargets.append(it->value.windowBoundingBox);
        highlightNodes.append(it->key);
    }
}
// Maps a pseudo-class/pseudo-element name to its PseudoType value,
// returning PseudoUnknown for null or unrecognized names.
CSSSelector::PseudoType CSSSelector::parsePseudoType(const AtomicString& name)
{
    if (name.isNull())
        return PseudoUnknown;

    HashMap<AtomicStringImpl*, CSSSelector::PseudoType>* nameToPseudoType = nameToPseudoTypeMap();
    HashMap<AtomicStringImpl*, CSSSelector::PseudoType>::iterator slot = nameToPseudoType->find(name.impl());
    if (slot == nameToPseudoType->end())
        return PseudoUnknown;
    return slot->second;
}
bool MeshSerializerTests::isHashMapClone(const HashMap<K, V>& a, const HashMap<K, V>& b) { // if you recreate a HashMap with same elements, then iteration order may differ! // So isContainerClone is not always working on HashMap. if (a.size() != b.size()) { return false; } typename HashMap<K, V>::const_iterator it, itFind, itEnd; it = a.begin(); itEnd = a.end(); for (; it != itEnd; it++) { itFind = b.find(it->first); if (itFind == b.end() || itFind->second != it->second) { return false; } } return true; }
// Copy constructor: replicates the bucket configuration of |rhs| (bucket
// count, size, max load factor, hash and equality functors), then
// rehashes to allocate the bucket array and re-inserts every element.
HashMap<Key, T, Hasher, EqualKey, Alloc>::HashMap(const HashMap& rhs)
    :bucket_list_(nullptr)
    ,bucket_count_(rhs.bucket_count() )
    ,size_(rhs.size() )
    ,mlf_(rhs.max_load_factor() )
    ,hash_function_pred_(rhs.hash_function(), rhs.key_equal() ){
    // rehash() allocates the bucket list; insert() copies the elements.
    rehash(rhs.bucket_count() );
    insert(rhs.begin(), rhs.end() );
}
// Serializes the recorded dictionary-member properties into a script
// object ({ name: value, ... }); returns an empty ScriptValue when no
// properties were recorded.
ScriptValue DictionaryTest::getDictionaryMemberProperties(ScriptState* scriptState)
{
    if (!m_dictionaryMemberProperties)
        return ScriptValue();
    V8ObjectBuilder builder(scriptState);
    // Bind a const reference instead of copying — the original copied the
    // whole HashMap just to iterate it.
    // NOTE(review): assumes get() returns a reference to the stored map;
    // confirm against the member's declaration.
    const HashMap<String, String>& properties = m_dictionaryMemberProperties.get();
    for (HashMap<String, String>::const_iterator it = properties.begin(); it != properties.end(); ++it)
        builder.addString(it->key, it->value);
    return builder.scriptValue();
}
// Context teardown: frees the cookie manager (if one was created) and
// drops the reference held on every tracked download job.
~_Ewk_Context()
{
    if (cookieManager)
        ewk_cookie_manager_free(cookieManager);

    HashMap<uint64_t, Ewk_Download_Job*>::iterator jobIt = downloadJobs.begin();
    HashMap<uint64_t, Ewk_Download_Job*>::iterator jobEnd = downloadJobs.end();
    while (jobIt != jobEnd) {
        ewk_download_job_unref(jobIt->second);
        ++jobIt;
    }
}
// Writes a batch of storage items to ItemTable on the background sync
// thread. When |clearItems| is set, the table is emptied first. A null
// value marks its key for deletion; non-null values are written via
// INSERT (the key column replaces on conflict, so this upserts).
void StorageAreaSync::sync(bool clearItems, const HashMap<String, String>& items)
{
    ASSERT(!isMainThread());
    if (!m_database.isOpen())
        return;

    // If the clear flag is set, then we clear all items out before we write any new ones in.
    if (clearItems) {
        SQLiteStatement clear(m_database, "DELETE FROM ItemTable");
        if (clear.prepare() != SQLResultOk) {
            LOG_ERROR("Failed to prepare clear statement - cannot write to local storage database");
            return;
        }
        int result = clear.step();
        if (result != SQLResultDone) {
            LOG_ERROR("Failed to clear all items in the local storage database - %i", result);
            return;
        }
    }

    // Prepare both statements once; each is re-bound per item below.
    SQLiteStatement insert(m_database, "INSERT INTO ItemTable VALUES (?, ?)");
    if (insert.prepare() != SQLResultOk) {
        LOG_ERROR("Failed to prepare insert statement - cannot write to local storage database");
        return;
    }
    SQLiteStatement remove(m_database, "DELETE FROM ItemTable WHERE key=?");
    if (remove.prepare() != SQLResultOk) {
        LOG_ERROR("Failed to prepare delete statement - cannot write to local storage database");
        return;
    }

    HashMap<String, String>::const_iterator end = items.end();
    for (HashMap<String, String>::const_iterator it = items.begin(); it != end; ++it) {
        // Based on the null-ness of the second argument, decide whether this is an insert or a delete.
        SQLiteStatement& query = it->second.isNull() ? remove : insert;
        query.bindText(1, it->first);
        // If the second argument is non-null, we're doing an insert, so bind it as the value.
        if (!it->second.isNull())
            query.bindText(2, it->second);
        int result = query.step();
        if (result != SQLResultDone) {
            LOG_ERROR("Failed to update item in the local storage database - %i", result);
            break;
        }
        // Reset so the prepared statement can be re-bound next iteration.
        query.reset();
    }
}
// Prunes dead entries from |map|: the contents are swapped into a scratch
// map (leaving |map| empty), and only entries whose WeakPtr still
// dereferences are re-inserted.
static void cleanWeakMap(HashMap<K, WeakPtr<M> >& map)
{
    HashMap<K, WeakPtr<M> > snapshot;
    snapshot.swap(map);

    // Re-insert only the still-alive entries.
    for (typename HashMap<K, WeakPtr<M> >::const_iterator entry = snapshot.begin(); entry != snapshot.end(); ++entry) {
        if (entry->value.get())
            map.set(entry->key, entry->value);
    }
}
// Post-hook for CryptGetHashParam: logs the retrieved parameter bytes
// (with a readable name for dwParam) when the call succeeded and did not
// originate from our own instrumentation.
// NOTE(review): '&retval - 4' assumes a 32-bit __stdcall frame layout —
// confirm for the target ABI.
static BOOL __stdcall CryptGetHashParam_done (BOOL retval, HCRYPTHASH hHash, DWORD dwParam, BYTE *pbData, DWORD *pdwDataLen, DWORD dwFlags) {
    // Preserve the hooked API's last-error value across our own calls.
    DWORD err = GetLastError();
    int ret_addr = *((DWORD *) ((DWORD) &retval - 4));
    if (retval && !called_internally(ret_addr)) {
        HashMap::iterator iter;
        LOCK();
        iter = hash_map.find(hHash);
        if (iter != hash_map.end()) {
            HashContext *ctx = iter->second;
            const TCHAR *param_str;
            // Translate the parameter id into a human-readable label.
            switch (dwParam) {
                case HP_ALGID: param_str = _T("ALGID"); break;
                case HP_HASHSIZE: param_str = _T("HASHSIZE"); break;
                case HP_HASHVAL: param_str = _T("HASHVAL"); break;
                default: param_str = _T("UNKNOWN"); break;
            }
            message_logger_log(_T("CryptGetHashParam"), (char *) &retval - 4, ctx->get_id(), MESSAGE_TYPE_PACKET, MESSAGE_CTX_INFO, PACKET_DIRECTION_INVALID, NULL, NULL, (const char *) pbData, *pdwDataLen, _T("hHash=0x%p, Algid=%s, dwParam=%s"), hHash, ctx->get_alg_id_as_string(), param_str);
        }
        UNLOCK();
    }
    SetLastError(err);
    return retval;
}
// Returns the storage directory for |pathIdentifier|, memoizing results
// in a function-local cache so storageDirectory() runs once per id.
static String cachedStorageDirectory(DWORD pathIdentifier)
{
    static HashMap<DWORD, String> directories;

    HashMap<DWORD, String>::iterator cached = directories.find(pathIdentifier);
    if (cached != directories.end())
        return cached->second;

    // First request for this id: compute and remember the directory.
    String directory = storageDirectory(pathIdentifier);
    directories.add(pathIdentifier, directory);
    return directory;
}
static bool isAxisName(const String& name, Step::Axis& type) { static HashMap<String, Step::Axis> axisNames; if (axisNames.isEmpty()) setUpAxisNamesMap(axisNames); HashMap<String, Step::Axis>::iterator it = axisNames.find(name); if (it == axisNames.end()) return false; type = it->second; return true; }
// Configures curl with the client certificate (PKCS#12 blob + password)
// registered for the request's host, if one exists; otherwise a no-op.
void setSSLClientCertificate(ResourceHandle* handle)
{
    String host = handle->firstRequest().url().host();
    HashMap<String, clientCertificate>::iterator found = allowedClientHosts.find(host.lower());
    if (found == allowedClientHosts.end())
        return;

    ResourceHandleInternal* d = handle->getInternal();
    clientCertificate clientInfo = found->value;

    // Tuple element 0 holds the certificate path/data, element 1 the password.
    curl_easy_setopt(d->m_handle, CURLOPT_SSLCERT, std::get<0>(clientInfo).utf8().data());
    curl_easy_setopt(d->m_handle, CURLOPT_SSLCERTTYPE, "P12");
    curl_easy_setopt(d->m_handle, CURLOPT_SSLCERTPASSWD, std::get<1>(clientInfo).utf8().data());
}
// Sorts each track's node-id deque in place using SortFunctor, which
// carries the map it needs for its comparisons.
void PointsToTracksOp::_sortTracks(shared_ptr<OsmMap>& map, HashMap<QString, deque<long> >& tracks)
{
    SortFunctor sorter;
    sorter.map = map;

    for (HashMap<QString, deque<long> >::iterator trackIt = tracks.begin(); trackIt != tracks.end(); ++trackIt) {
        deque<long>& nodeIds = trackIt->second;
        sort(nodeIds.begin(), nodeIds.end(), sorter);
    }
}
// Page teardown: marks every tracked extension as Removed, releases our
// reference to it, clears the tracking map, and reports completion.
void DOMWindowExtensionBasic::willDestroyPage(WKBundleRef, WKBundlePageRef)
{
    HashMap<WKBundleDOMWindowExtensionRef, int>::iterator cursor = m_extensionToRecordMap.begin();
    for (HashMap<WKBundleDOMWindowExtensionRef, int>::iterator last = m_extensionToRecordMap.end(); cursor != last; ++cursor) {
        updateExtensionStateRecord(cursor->key, Removed);
        WKRelease(cursor->key);
    }
    m_extensionToRecordMap.clear();

    sendExtensionStateMessage();
    sendBundleMessage("TestComplete");
}
bool canMatch(const char* head, const char * end, HashMap & flags, int l){ if (head == end+1) return true; string s=""; for (int i=0; i<l; i++) s+= *(head+i); HashMap::iterator hi = flags.find(s); if (hi==flags.end() || hi->second<=0) return false; flags[s]--; bool rlt = canMatch(head+l,end,flags,l); flags[s]++; return rlt; }