// Drops heap availability entries whose base node is not (transitively)
// reachable from a node that is available in a local.
void AvailabilityMap::prune()
{
    if (m_heap.isEmpty())
        return;
    
    // Seed the reachable set with every node directly available in a local.
    HashSet<Node*> possibleNodes;
    
    for (unsigned i = m_locals.size(); i--;) {
        if (m_locals[i].hasNode())
            possibleNodes.add(m_locals[i].node());
    }

    // Transitively close over the heap: a heap value's node is reachable if
    // its base is. Iterate to a fixed point; `unsigned` matches the unsigned
    // return type of size() (the original `int` mixed signedness).
    unsigned oldPossibleNodesSize;
    do {
        oldPossibleNodesSize = possibleNodes.size();
        // Iterate by const reference to avoid copying each key/value pair.
        for (const auto& pair : m_heap) {
            if (pair.value.hasNode() && possibleNodes.contains(pair.key.base()))
                possibleNodes.add(pair.value.node());
        }
    } while (oldPossibleNodesSize != possibleNodes.size());
    
    // Rebuild the heap map with only the entries whose base survived.
    HashMap<PromotedHeapLocation, Availability> newHeap;
    for (const auto& pair : m_heap) {
        if (possibleNodes.contains(pair.key.base()))
            newHeap.add(pair.key, pair.value);
    }
    m_heap = newHeap;
}
示例#2
0
static void testU32Set()
{
	HashSet<U32> set;

	enum { maxI = 1024 * 1024 };

	for(Uptr i = 0;i < maxI;++i)
	{
		errorUnless(!set.contains(U32(i)));
	}

	errorUnless(set.size() == 0);
	for(Uptr i = 0;i < maxI;++i)
	{
		errorUnless(!set.contains(U32(i)));
		errorUnless(!set.get(U32(i)));
		errorUnless(set.add(U32(i)));
		errorUnless(set.contains(U32(i)));
		errorUnless(set.get(U32(i)));
		errorUnless(set.size() == i + 1);
	}

	for(Uptr i = 0;i < maxI;++i)
	{
		errorUnless(set.contains(U32(i)));
		errorUnless(set.remove(U32(i)));
		errorUnless(!set.contains(U32(i)));
		errorUnless(set.size() == maxI - i - 1);
	}

	for(Uptr i = 0;i < maxI;++i)
	{
		errorUnless(!set.contains(U32(i)));
	}
}
    void expectSiblingDescendantInvalidation(unsigned maxDirectAdjacentSelectors, const AtomicString& siblingName, const AtomicString& descendantName, InvalidationSetVector& invalidationSets)
    {
        EXPECT_EQ(1u, invalidationSets.size());
        const SiblingInvalidationSet& siblingInvalidationSet = toSiblingInvalidationSet(*invalidationSets[0]);
        HashSet<AtomicString> classes = classSet(siblingInvalidationSet);
        EXPECT_EQ(1u, classes.size());
        EXPECT_TRUE(classes.contains(siblingName));
        EXPECT_EQ(maxDirectAdjacentSelectors, siblingInvalidationSet.maxDirectAdjacentSelectors());

        HashSet<AtomicString> descendantClasses = classSet(siblingInvalidationSet.descendants());
        EXPECT_EQ(1u, descendantClasses.size());
        EXPECT_TRUE(descendantClasses.contains(descendantName));
    }
 void expectClassInvalidation(const AtomicString& className, InvalidationSetVector& invalidationSets)
 {
     EXPECT_EQ(1u, invalidationSets.size());
     HashSet<AtomicString> classes = classSet(*invalidationSets[0]);
     EXPECT_EQ(1u, classes.size());
     EXPECT_TRUE(classes.contains(className));
 }
 void expectIdInvalidation(const AtomicString& id, InvalidationSetVector& invalidationSets)
 {
     EXPECT_EQ(1u, invalidationSets.size());
     HashSet<AtomicString> ids = idSet(*invalidationSets[0]);
     EXPECT_EQ(1u, ids.size());
     EXPECT_TRUE(ids.contains(id));
 }
// Collects every security origin with application cache data and replies to
// the UI process with serializable SecurityOriginData identifiers.
void WebApplicationCacheManager::getApplicationCacheOrigins(uint64_t callbackID)
{
    // Keep the web process alive until the reply message is sent.
    WebProcess::LocalTerminationDisabler terminationDisabler(WebProcess::shared());

    HashSet<RefPtr<SecurityOrigin>, SecurityOriginHash> origins;
    cacheStorage().getOriginsWithCache(origins);

    Vector<SecurityOriginData> identifiers;
    identifiers.reserveCapacity(origins.size());

    // Range-for by const reference replaces the explicit iterator loop and
    // avoids the per-element RefPtr copy (refcount churn) of the original.
    for (const auto& origin : origins) {
        SecurityOriginData originData;
        originData.protocol = origin->protocol();
        originData.host = origin->host();
        originData.port = origin->port();

        identifiers.uncheckedAppend(originData);
    }

    WebProcess::shared().connection()->send(Messages::WebApplicationCacheManagerProxy::DidGetApplicationCacheOrigins(identifiers, callbackID), 0);
}
 void expectTagNameInvalidation(const AtomicString& tagName, InvalidationSetVector& invalidationSets)
 {
     EXPECT_EQ(1u, invalidationSets.size());
     HashSet<AtomicString> tagNames = tagNameSet(*invalidationSets[0]);
     EXPECT_EQ(1u, tagNames.size());
     EXPECT_TRUE(tagNames.contains(tagName));
 }
 void expectAttributeInvalidation(const AtomicString& attribute, InvalidationSetVector& invalidationSets)
 {
     EXPECT_EQ(1u, invalidationSets.size());
     HashSet<AtomicString> attributes = attributeSet(*invalidationSets[0]);
     EXPECT_EQ(1u, attributes.size());
     EXPECT_TRUE(attributes.contains(attribute));
 }
示例#9
0
void printSimpleLineLayoutBlockList()
{
    HashSet<const RenderBlockFlow*> leafRenderers;
    collectNonEmptyLeafRenderBlockFlowsForCurrentPage(leafRenderers);
    if (!leafRenderers.size()) {
        WTFLogAlways("No text found in this document\n");
        return;
    }
    TextStream stream;
    stream << "---------------------------------------------------\n";
    for (const auto* flow : leafRenderers) {
        auto reason = canUseForWithReason(*flow, FallThrough::Yes);
        if (reason == NoReason)
            continue;
        unsigned printedLength = 30;
        stream << "\"";
        printTextForSubtree(*flow, printedLength, stream);
        for (;printedLength > 0; --printedLength)
            stream << " ";
        stream << "\"(" << textLengthForSubtree(*flow) << "):";
        printReasons(reason, stream);
        stream << "\n";
    }
    stream << "---------------------------------------------------\n";
    WTFLogAlways("%s", stream.release().utf8().data());
}
示例#10
0
// Verifies MediaListDirective::getIntersect against a table of policies:
// exact expected intersections, commutativity, and the empty-directive case.
TEST_F(MediaListDirectiveTest, GetIntersect) {
  MediaListDirective directiveA(
      "plugin-types",
      "application/x-shockwave-flash application/pdf text/plain", csp.get());
  MediaListDirective emptyDirective("plugin-types", "", csp.get());

  struct TestCase {
    const char* policyB;
    const std::vector<const char*> expected;
  } cases[] = {
      {"", std::vector<const char*>()},
      {"text/", std::vector<const char*>()},
      {"text/*", std::vector<const char*>()},
      {"*/plain", std::vector<const char*>()},
      {"text/plain */plain", {"text/plain"}},
      {"text/plain application/*", {"text/plain"}},
      {"text/plain", {"text/plain"}},
      {"application/pdf", {"application/pdf"}},
      {"application/x-shockwave-flash", {"application/x-shockwave-flash"}},
      {"application/x-shockwave-flash text/plain",
       {"application/x-shockwave-flash", "text/plain"}},
      {"application/pdf text/plain", {"text/plain", "application/pdf"}},
      {"application/x-shockwave-flash application/pdf text/plain",
       {"application/x-shockwave-flash", "application/pdf", "text/plain"}},
  };

  for (const auto& testCase : cases) {
    MediaListDirective directiveB("plugin-types", testCase.policyB, csp.get());

    // The intersection must contain exactly the expected plugin types.
    HashSet<String> intersection =
        directiveA.getIntersect(directiveB.m_pluginTypes);
    EXPECT_EQ(intersection.size(), testCase.expected.size());
    for (const auto& type : testCase.expected)
      EXPECT_TRUE(intersection.contains(type));

    // Intersection is commutative: swapping the operands changes nothing.
    intersection = directiveB.getIntersect(directiveA.m_pluginTypes);
    EXPECT_EQ(intersection.size(), testCase.expected.size());
    for (const auto& type : testCase.expected)
      EXPECT_TRUE(intersection.contains(type));

    // Intersecting with an empty directive always yields an empty set.
    intersection = emptyDirective.getIntersect(directiveB.m_pluginTypes);
    EXPECT_FALSE(intersection.size());
  }
}
示例#11
0
// Rebuilds the list of blocks to sweep from the current object space and
// kicks off incremental sweeping via the timer.
void IncrementalSweeper::startSweeping(const HashSet<MarkedBlock*>& blockSnapshot)
{
    // Size the sweep list to the snapshot, then copy the heap's blocks into it.
    m_blocksToSweep.resize(blockSnapshot.size());
    CopyFunctor functor(m_blocksToSweep);
    m_globalData->heap.objectSpace().forEachBlock(functor);

    // Reset the sweep cursor and the structure-sweeping flag, then schedule.
    m_structuresCanBeSwept = false;
    m_currentBlockToSweepIndex = 0;
    scheduleTimer();
}
// Forgets a detached document; once no documents remain, the proxy closes.
void SharedWorkerProxy::documentDetached(Document* document)
{
    if (isClosing())
        return;

    // Remove the document under the lock; closing when the set drains empty.
    MutexLocker lock(m_workerDocumentsLock);
    m_workerDocuments.remove(document);
    if (m_workerDocuments.isEmpty())
        close();
}
示例#13
0
// Recursively adds `effect` and all of its transitive inputs to `allEffects`,
// returning the total number of distinct effects collected so far.
static unsigned collectEffects(const FilterEffect* effect, HashSet<const FilterEffect*>& allEffects)
{
    // If the effect was already collected, its inputs were too: bail out.
    // This prevents exponential re-traversal of shared subgraphs (filter
    // inputs form a DAG) and non-termination should a cycle ever appear.
    if (!allEffects.add(effect).isNewEntry)
        return allEffects.size();

    unsigned inputCount = effect->numberOfEffectInputs();
    for (unsigned i = 0; i < inputCount; ++i)
        collectEffects(effect->inputEffect(i), allEffects);
    return allEffects.size();
}
// Returns the group settings of the page of an arbitrary active document, or
// 0 when the proxy is closing or the chosen document has no page.
GroupSettings* SharedWorkerProxy::groupSettings() const
{
    if (isClosing())
        return 0;

    ASSERT(m_workerDocuments.size());

    // Any active document will do; use its page's group settings.
    Document* anyDocument = *(m_workerDocuments.begin());
    if (!anyDocument->page())
        return 0;
    return &anyDocument->page()->group().groupSettings();
}
示例#15
0
// Debug helper: logs (via WTFLogAlways) how much of the current page's text
// can use the simple line layout path, with a per-avoidance-reason breakdown.
void printSimpleLineLayoutCoverage()
{
    HashSet<const RenderBlockFlow*> leafRenderers;
    collectNonEmptyLeafRenderBlockFlowsForCurrentPage(leafRenderers);
    if (!leafRenderers.size()) {
        WTFLogAlways("No text found in this document\n");
        return;
    }
    TextStream stream;
    // Maps each avoidance reason to the total text length it affects.
    HashMap<AvoidanceReason, unsigned> flowStatistics;
    unsigned textLength = 0;
    unsigned unsupportedTextLength = 0;
    unsigned numberOfUnsupportedLeafBlocks = 0;
    for (const auto* flow : leafRenderers) {
        auto flowLength = textLengthForSubtree(*flow);
        textLength += flowLength;
        auto reasons = canUseForWithReason(*flow, FallThrough::Yes);
        // NoReason means simple line layout applies; nothing to record.
        if (reasons == NoReason)
            continue;
        ++numberOfUnsupportedLeafBlocks;
        unsupportedTextLength += flowLength;
        // Walk each bit of the reason mask from the highest flag down,
        // attributing this flow's text length to every reason that is set.
        for (auto reasonItem = EndOfReasons >> 1; reasonItem != NoReason; reasonItem >>= 1) {
            if (!(reasons & reasonItem))
                continue;
            // add() returns the existing entry when the key is present; in
            // that case accumulate instead of insert.
            auto result = flowStatistics.add(reasonItem, flowLength);
            if (!result.isNewEntry)
                result.iterator->value += flowLength;
        }
    }
    // NOTE(review): if every collected block has zero text length, the float
    // divisions below divide by zero — presumably acceptable for this
    // debug-only output, but confirm.
    stream << "---------------------------------------------------\n";
    stream << "Number of text blocks: total(" <<  leafRenderers.size() << ") non-simple(" << numberOfUnsupportedLeafBlocks << ")\nText length: total(" <<
        textLength << ") non-simple(" << unsupportedTextLength << ")\n";
    for (const auto reasonEntry : flowStatistics) {
        printReason(reasonEntry.key, stream);
        stream << ": " << (float)reasonEntry.value / (float)textLength * 100 << "%\n";
    }
    stream << "simple line layout coverage: " << (float)(textLength - unsupportedTextLength) / (float)textLength * 100 << "%\n";
    stream << "---------------------------------------------------\n";
    WTFLogAlways("%s", stream.release().utf8().data());
}
TIMED_TEST(GridTests, hashCodeTest_Grid, TEST_TIMEOUT_DEFAULT) {
    // hashCode must be deterministic for an unchanged Grid.
    Grid<int> original(2, 3);
    original.fill(42);
    assertEqualsInt("hashcode of self Grid", hashCode(original), hashCode(original));

    // An equal copy must produce an equal hash code.
    Grid<int> duplicate = original;
    assertEqualsInt("hashcode of copy Grid", hashCode(original), hashCode(duplicate));

    // Equal grids collapse into one HashSet entry, so only two values remain:
    // the filled grid and the empty one.
    Grid<int> blank;   // empty
    HashSet<Grid<int> > gridSet {original, duplicate, blank, blank};
    assertEqualsInt("hashset of Grid size", 2, gridSet.size());
}
TIMED_TEST(StackTests, hashCodeTest_Stack, TEST_TIMEOUT_DEFAULT) {
    // hashCode must be deterministic for an unchanged Stack.
    Stack<int> original;
    original.add(69);
    original.add(42);
    assertEqualsInt("hashcode of self Stack", hashCode(original), hashCode(original));

    // An equal copy must produce an equal hash code.
    Stack<int> duplicate = original;
    assertEqualsInt("hashcode of copy Stack", hashCode(original), hashCode(duplicate));

    // Equal stacks collapse into one HashSet entry, leaving two distinct values.
    Stack<int> blank;
    HashSet<Stack<int> > stackSet {original, duplicate, blank, blank};
    assertEqualsInt("hashset of Stack size", 2, stackSet.size());
}
示例#18
0
// Returns (following the Copy rule: caller owns the result) an array of
// database identifiers for every origin with application cache data.
WKArrayRef WKBundlePageCopyOriginsWithApplicationCache(WKBundlePageRef page)
{
    // Query the page's application cache storage for all cached origins.
    HashSet<RefPtr<WebCore::SecurityOrigin>> origins;
    toImpl(page)->corePage()->applicationCacheStorage().getOriginsWithCache(origins);

    // Wrap each origin's database identifier as an API string.
    Vector<RefPtr<API::Object>> identifiers;
    identifiers.reserveInitialCapacity(origins.size());
    for (const auto& origin : origins)
        identifiers.uncheckedAppend(API::String::create(origin->databaseIdentifier()));

    // leakRef() transfers ownership to the caller, per the Copy-rule name.
    return toAPI(&API::Array::create(WTFMove(identifiers)).leakRef());
}
TIMED_TEST(MapTests, hashCodeTest_Map, TEST_TIMEOUT_DEFAULT) {
    // hashCode must be deterministic for an unchanged Map.
    Map<int, int> original;
    original.add(69, 96);
    original.add(42, 24);
    assertEqualsInt("hashcode of self Map", hashCode(original), hashCode(original));

    // An equal copy must produce an equal hash code.
    Map<int, int> duplicate = original;
    assertEqualsInt("hashcode of copy Map", hashCode(original), hashCode(duplicate));

    // Equal maps collapse into one HashSet entry, leaving two distinct values.
    Map<int, int> blank;
    HashSet<Map<int, int> > mapSet {original, duplicate, blank, blank};
    assertEqualsInt("hashset of Map size", 2, mapSet.size());
}
// Returns true if the GL driver advertises the named extension. The extension
// list is queried and parsed once, then cached for the process lifetime.
bool GLPlatformContext::supportsGLExtension(const String& name)
{
    static HashSet<String> supportedExtensions;

    // NOTE(review): if the driver reports an empty extension string, this
    // re-queries on every call; also the lazy init is not thread-safe —
    // presumably this only runs on one thread, confirm.
    if (!supportedExtensions.size()) {
        String rawExtensions = reinterpret_cast<const char*>(::glGetString(GL_EXTENSIONS));
        supportedExtensions = parseExtensions(rawExtensions);
    }

    // Directly return the membership test instead of if/return true/false.
    return supportedExtensions.contains(name);
}
// Forwards a loader task to an arbitrary active document of this worker.
void SharedWorkerProxy::postTaskToLoader(PassRefPtr<ScriptExecutionContext::Task> task)
{
    MutexLocker lock(m_workerDocumentsLock);

    if (isClosing())
        return;

    // A proxy that is not closing always tracks at least one document.
    ASSERT(m_workerDocuments.size());

    // Hand the load request to whichever document the set yields first.
    // FIXME: Do we need to deal with the case where the user closes the document mid-load, via a shadow document or some other solution?
    Document* loaderDocument = *(m_workerDocuments.begin());
    loaderDocument->postTask(task);
}
TIMED_TEST(LexiconTests, hashCodeTest_Lexicon, TEST_TIMEOUT_DEFAULT) {
    // hashCode must be deterministic for an unchanged Lexicon.
    Lexicon original;
    original.add("a");
    original.add("bc");
    assertEqualsInt("hashcode of self lexicon", hashCode(original), hashCode(original));

    // An equal copy must produce an equal hash code.
    Lexicon duplicate = original;
    assertEqualsInt("hashcode of copy lexicon", hashCode(original), hashCode(duplicate));

    // Equal lexicons collapse into one HashSet entry; with the empty one the
    // set holds exactly two distinct values.
    Lexicon blank;   // empty
    HashSet<Lexicon> lexiconSet {original, duplicate, blank};
    assertEqualsInt("hashset of lexicon size", 2, lexiconSet.size());
}
TIMED_TEST(DawgLexiconTests, hashCodeTest_DawgLexicon, TEST_TIMEOUT_DEFAULT) {
    // hashCode must be deterministic for an unchanged DawgLexicon.
    DawgLexicon original;
    original.add("a");
    original.add("abc");
    assertEqualsInt("hashcode of self dawglexicon", hashCode(original), hashCode(original));

    // An equal copy must produce an equal hash code.
    DawgLexicon duplicate = original;
    assertEqualsInt("hashcode of copy dawglexicon", hashCode(original), hashCode(duplicate));

    // Equal lexicons collapse into one HashSet entry; with the empty one the
    // set holds exactly two distinct values.
    DawgLexicon blank;   // empty
    HashSet<DawgLexicon> dawgSet {original, duplicate, blank};
    assertEqualsInt("hashset of dawglexicon size", 2, dawgSet.size());
}
示例#24
0
// Builds the spec-facing map of connected MIDI outputs. If two connected
// ports share an id (a spec violation), an empty map is returned instead.
MIDIOutputMap* MIDIAccess::outputs() const {
  HeapVector<Member<MIDIOutput>> outputs;
  HashSet<String> ids;
  // Range-for over the port list (matching the style used elsewhere in this
  // file) instead of the index loop; collect connected ports and their ids.
  for (const auto& port : m_outputs) {
    MIDIOutput* output = port;
    if (output->getState() != PortState::DISCONNECTED) {
      outputs.append(output);
      ids.add(output->id());
    }
  }
  if (outputs.size() != ids.size()) {
    // Fewer unique ids than ports means id duplication, violating the spec.
    outputs.clear();
  }
  return new MIDIOutputMap(outputs);
}
示例#25
0
void SearchFile::insertHashes( const HashSet& vHashes )
{
	Q_ASSERT( !vHashes.empty() );

	// TODO: hash collision detection
	const Hash* const * const pHashes = &vHashes[0];
	for ( quint8 i = 0, nSize = vHashes.size(); i < nSize; ++i )
	{
		if ( pHashes[i] && !manages( *pHashes[i] ) )
		{
			// Note: We cannot use the insert(CHash*) here as that takes control of the hash
			// (e.g. deletes it later)
			m_vHashes.insert( *pHashes[i] );
			( ( TreeRoot* )m_pParentItem )->registerHash( *pHashes[i], this );
		}
	}
}
示例#26
0
// Builds the spec-facing map of connected MIDI inputs. If two connected
// ports share an id (a spec violation), an empty map is returned instead.
MIDIInputMap* MIDIAccess::inputs() const
{
    HeapVector<Member<MIDIInput>> inputs;
    HashSet<String> ids;
    // Range-for over the port list (matching the style used elsewhere in this
    // file) instead of the index loop; collect connected ports and their ids.
    for (const auto& port : m_inputs) {
        MIDIInput* input = port;
        if (input->getState() != PortState::MIDIPortStateDisconnected) {
            inputs.append(input);
            ids.add(input->id());
        }
    }
    if (inputs.size() != ids.size()) {
        // Fewer unique ids than ports means id duplication, violating the spec.
        inputs.clear();
    }
    return new MIDIInputMap(inputs);
}
// Getter for Clipboard.types: returns JS null when the clipboard reports no
// types, otherwise a JS array of the type strings.
v8::Handle<v8::Value> V8Clipboard::typesAccessorGetter(v8::Local<v8::String> name, const v8::AccessorInfo& info)
{
    INC_STATS("DOM.Clipboard.types()");
    // Unwrap the native Clipboard behind the holder object.
    Clipboard* clipboard = V8Clipboard::toNative(info.Holder());

    HashSet<String> types = clipboard->types();
    if (types.isEmpty())
        return v8::Null();

    // Copy each type string into a new JS array, indexed in set-iteration
    // order (the HashSet imposes no particular ordering).
    v8::Local<v8::Array> result = v8::Array::New(types.size());
    HashSet<String>::const_iterator end = types.end();
    int index = 0;
    for (HashSet<String>::const_iterator it = types.begin(); it != end; ++it, ++index)
        result->Set(v8::Integer::New(index), v8String(*it));

    return result;
}
// Returns true if the EGL display advertises the named extension. The
// extension list is queried and parsed once, then cached for the process.
bool GLPlatformContext::supportsEGLExtension(EGLDisplay display, const String& name)
{
    static HashSet<String> supportedExtensions;

    // NOTE(review): if the display reports an empty extension string, this
    // re-queries on every call; also the lazy init is not thread-safe —
    // presumably this only runs on one thread, confirm.
    if (!supportedExtensions.size()) {
        if (display == EGL_NO_DISPLAY)
            return false;

        String rawExtensions = reinterpret_cast<const char*>(eglQueryString(display, EGL_EXTENSIONS));
        supportedExtensions = parseExtensions(rawExtensions);
    }

    // Directly return the membership test instead of if/return true/false.
    return supportedExtensions.contains(name);
}
// Returns true if the GLX server advertises the named extension. The
// extension list is queried and parsed once, then cached for the process.
bool GLPlatformContext::supportsGLXExtension(Display* display, const String& name)
{
    static HashSet<String> supportedExtensions;

    // NOTE(review): if the server reports an empty extension string, this
    // re-queries on every call; also the lazy init is not thread-safe —
    // presumably this only runs on one thread, confirm.
    if (!supportedExtensions.size()) {
        if (!display)
            return false;

        String rawExtensions = glXQueryExtensionsString(display, DefaultScreen(display));
        supportedExtensions = parseExtensions(rawExtensions);
    }

    // Directly return the membership test instead of if/return true/false.
    return supportedExtensions.contains(name);
}
TIMED_TEST(LinkedHashSetTests, hashcodeTest_LinkedHashSet, TEST_TIMEOUT_DEFAULT) {
    // hashCode must be deterministic for an unchanged LinkedHashSet.
    LinkedHashSet<int> original;
    original.add(69);
    original.add(42);
    assertEqualsInt("hashcode of self LinkedHashSet", hashCode(original), hashCode(original));

    // Hashing must not depend on insertion order: same elements, other order.
    LinkedHashSet<int> reordered;
    reordered.add(42);
    reordered.add(69);
    assertEqualsInt("hashcode of LinkedHashSet other order", hashCode(original), hashCode(reordered));

    // An equal copy must produce an equal hash code.
    LinkedHashSet<int> duplicate = original;
    assertEqualsInt("hashcode of copy LinkedHashSet", hashCode(original), hashCode(duplicate));

    // original, duplicate and reordered are all equal, so with the empty set
    // the outer HashSet holds exactly two distinct values.
    LinkedHashSet<int> blank;
    HashSet<LinkedHashSet<int> > setOfSets {original, duplicate, reordered, blank, blank};
    assertEqualsInt("hashset of LinkedHashset size", 2, setOfSets.size());
}