Example #1
0
SharedContextRateLimiter::SharedContextRateLimiter(unsigned maxPendingTicks)
    : m_maxPendingTicks(maxPendingTicks), m_canUseSyncQueries(false) {
  m_contextProvider = wrapUnique(
      Platform::current()->createSharedOffscreenGraphicsContext3DProvider());
  if (!m_contextProvider)
    return;

  gpu::gles2::GLES2Interface* gl = m_contextProvider->contextGL();
  if (gl && gl->GetGraphicsResetStatusKHR() == GL_NO_ERROR) {
    std::unique_ptr<Extensions3DUtil> extensionsUtil =
        Extensions3DUtil::create(gl);
    // TODO(junov): when the GLES 3.0 command buffer is ready, we could use
    // fenceSync instead.
    m_canUseSyncQueries =
        extensionsUtil->supportsExtension("GL_CHROMIUM_sync_query");
  }
}
Example #2
0
std::unique_ptr<WebMediaPlayer> FrameLoaderClientImpl::createWebMediaPlayer(
    HTMLMediaElement& htmlMediaElement,
    const WebMediaPlayerSource& source,
    WebMediaPlayerClient* client) {
  WebLocalFrameImpl* webFrame =
      WebLocalFrameImpl::fromFrame(htmlMediaElement.document().frame());

  if (!webFrame || !webFrame->client())
    return nullptr;

  HTMLMediaElementEncryptedMedia& encryptedMedia =
      HTMLMediaElementEncryptedMedia::from(htmlMediaElement);
  WebString sinkId(HTMLMediaElementAudioOutputDevice::sinkId(htmlMediaElement));
  return wrapUnique(webFrame->client()->createMediaPlayer(
      source, client, &encryptedMedia, encryptedMedia.contentDecryptionModule(),
      sinkId));
}
Example #3
0
void SelectorFilter::pushParent(Element& parent) {
  ASSERT(parent.document().inStyleRecalc());
  ASSERT(parent.inActiveDocument());
  if (m_parentStack.isEmpty()) {
    ASSERT(parent == parent.document().documentElement());
    ASSERT(!m_ancestorIdentifierFilter);
    m_ancestorIdentifierFilter = wrapUnique(new IdentifierFilter);
    pushParentStackFrame(parent);
    return;
  }
  ASSERT(m_ancestorIdentifierFilter);
  // We may get invoked for some random elements in some wacky cases during
  // style resolve. Pause maintaining the stack in this case.
  if (m_parentStack.last().element != parent.parentOrShadowHostElement())
    return;
  pushParentStackFrame(parent);
}
Example #4
0
static bool tryGetMessageFrom(
    WebMessagePortChannel& webChannel,
    RefPtr<SerializedScriptValue>& message,
    std::unique_ptr<MessagePortChannelArray>& channels) {
  WebString messageString;
  WebMessagePortChannelArray webChannels;
  if (!webChannel.tryGetMessage(&messageString, webChannels))
    return false;

  if (webChannels.size()) {
    channels = wrapUnique(new MessagePortChannelArray(webChannels.size()));
    for (size_t i = 0; i < webChannels.size(); ++i)
      (*channels)[i] = WebMessagePortChannelUniquePtr(webChannels[i]);
  }
  message = SerializedScriptValue::create(messageString);
  return true;
}
void HTMLTextFormControlElementTest::SetUp() {
  Page::PageClients pageClients;
  fillWithEmptyClients(pageClients);
  m_spellCheckerClient = wrapUnique(new DummySpellCheckerClient);
  pageClients.spellCheckerClient = m_spellCheckerClient.get();
  m_dummyPageHolder = DummyPageHolder::create(IntSize(800, 600), &pageClients);

  m_document = &m_dummyPageHolder->document();
  m_document->documentElement()->setInnerHTML(
      "<body><textarea id=textarea></textarea><input id=input /></body>",
      ASSERT_NO_EXCEPTION);
  m_document->view()->updateAllLifecyclePhases();
  m_textControl =
      toHTMLTextFormControlElement(m_document->getElementById("textarea"));
  m_textControl->focus();
  m_input = toHTMLInputElement(m_document->getElementById("input"));
}
Example #6
0
std::unique_ptr<PatternData> LayoutSVGResourcePattern::buildPatternData(
    const LayoutObject& object) {
  // If we couldn't determine the pattern content element root, stop here.
  const PatternAttributes& attributes = this->attributes();
  if (!attributes.patternContentElement())
    return nullptr;

  // An empty viewBox disables layout.
  if (attributes.hasViewBox() && attributes.viewBox().isEmpty())
    return nullptr;

  ASSERT(element());
  // Compute tile metrics.
  FloatRect clientBoundingBox = object.objectBoundingBox();
  FloatRect tileBounds = SVGLengthContext::resolveRectangle(
      element(), attributes.patternUnits(), clientBoundingBox, *attributes.x(),
      *attributes.y(), *attributes.width(), *attributes.height());
  if (tileBounds.isEmpty())
    return nullptr;

  AffineTransform tileTransform;
  if (attributes.hasViewBox()) {
    if (attributes.viewBox().isEmpty())
      return nullptr;
    tileTransform = SVGFitToViewBox::viewBoxToViewTransform(
        attributes.viewBox(), attributes.preserveAspectRatio(),
        tileBounds.width(), tileBounds.height());
  } else {
    // A viewbox overrides patternContentUnits, per spec.
    if (attributes.patternContentUnits() ==
        SVGUnitTypes::kSvgUnitTypeObjectboundingbox)
      tileTransform.scale(clientBoundingBox.width(),
                          clientBoundingBox.height());
  }

  std::unique_ptr<PatternData> patternData = wrapUnique(new PatternData);
  patternData->pattern =
      Pattern::createPicturePattern(asPicture(tileBounds, tileTransform));

  // Compute pattern space transformation.
  patternData->transform.translate(tileBounds.x(), tileBounds.y());
  patternData->transform.preMultiply(attributes.patternTransform());

  return patternData;
}
Example #7
0
MediaRecorder::MediaRecorder(ExecutionContext* context,
                             MediaStream* stream,
                             const MediaRecorderOptions& options,
                             ExceptionState& exceptionState)
    : ActiveScriptWrappable(this),
      ActiveDOMObject(context),
      m_stream(stream),
      m_streamAmountOfTracks(stream->getTracks().size()),
      m_mimeType(options.hasMimeType() ? options.mimeType() : kDefaultMimeType),
      m_stopped(true),
      m_ignoreMutedMedia(true),
      m_audioBitsPerSecond(0),
      m_videoBitsPerSecond(0),
      m_state(State::Inactive),
      m_dispatchScheduledEventRunner(AsyncMethodRunner<MediaRecorder>::create(
          this,
          &MediaRecorder::dispatchScheduledEvent)) {
  DCHECK(m_stream->getTracks().size());

  m_recorderHandler =
      wrapUnique(Platform::current()->createMediaRecorderHandler());
  DCHECK(m_recorderHandler);

  if (!m_recorderHandler) {
    exceptionState.throwDOMException(
        NotSupportedError, "No MediaRecorder handler can be created.");
    return;
  }

  AllocateVideoAndAudioBitrates(exceptionState, context, options, stream,
                                &m_audioBitsPerSecond, &m_videoBitsPerSecond);

  const ContentType contentType(m_mimeType);
  if (!m_recorderHandler->initialize(
          this, stream->descriptor(), contentType.type(),
          contentType.parameter("codecs"), m_audioBitsPerSecond,
          m_videoBitsPerSecond)) {
    exceptionState.throwDOMException(
        NotSupportedError,
        "Failed to initialize native MediaRecorder the type provided (" +
            m_mimeType + ") is not supported.");
    return;
  }
  m_stopped = false;
}
Example #8
0
void PresentationReceiver::onReceiverConnectionAvailable(
    WebPresentationConnectionClient* connectionClient) {
  DCHECK(connectionClient);
  // take() will call PresentationReceiver::registerConnection()
  // and register the connection.
  auto connection =
      PresentationConnection::take(this, wrapUnique(connectionClient));

  // receiver.connectionList property not accessed
  if (!m_connectionListProperty)
    return;

  if (m_connectionListProperty->getState() ==
      ScriptPromisePropertyBase::Pending)
    m_connectionListProperty->resolve(m_connectionList);
  else if (m_connectionListProperty->getState() ==
           ScriptPromisePropertyBase::Resolved)
    m_connectionList->dispatchConnectionAvailableEvent(connection);
}
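Example #9
0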
void PaintPropertyTreeBuilder::updateLocalBorderBoxContext(
    const LayoutObject& object,
    PaintPropertyTreeBuilderContext& context) {
  // Avoid adding an ObjectPaintProperties for non-boxes to save memory, since
  // we don't need them at the moment.
  if (!object.isBox() && !object.hasLayer())
    return;

  std::unique_ptr<ObjectPaintProperties::PropertyTreeStateWithOffset>
      borderBoxContext =
          wrapUnique(new ObjectPaintProperties::PropertyTreeStateWithOffset(
              context.current.paintOffset,
              PropertyTreeState(context.current.transform, context.current.clip,
                                context.currentEffect,
                                context.current.scroll)));
  object.getMutableForPainting()
      .ensurePaintProperties()
      .setLocalBorderBoxProperties(std::move(borderBoxContext));
}
Example #10
0
TEST(ImageDecoderTest, clearCacheExceptFramePreverveClearExceptFrame)
{
    const size_t numFrames = 10;
    std::unique_ptr<TestImageDecoder> decoder(wrapUnique(new TestImageDecoder()));
    decoder->initFrames(numFrames);
    Vector<ImageFrame, 1>& frameBuffers = decoder->frameBufferCache();
    for (size_t i = 0; i < numFrames; ++i)
        frameBuffers[i].setStatus(ImageFrame::FrameComplete);

    decoder->resetRequiredPreviousFrames();
    decoder->clearCacheExceptFrame(5);
    for (size_t i = 0; i < numFrames; ++i) {
        SCOPED_TRACE(testing::Message() << i);
        if (i == 5)
            EXPECT_EQ(ImageFrame::FrameComplete, frameBuffers[i].getStatus());
        else
            EXPECT_EQ(ImageFrame::FrameEmpty, frameBuffers[i].getStatus());
    }
}
Example #11
0
void WebSharedWorkerImpl::didFinishDocumentLoad(WebLocalFrame* frame) {
  DCHECK(!m_loadingDocument);
  DCHECK(!m_mainScriptLoader);
  m_networkProvider = wrapUnique(
      m_client->createServiceWorkerNetworkProvider(frame->dataSource()));
  m_mainScriptLoader = WorkerScriptLoader::create();
  m_mainScriptLoader->setRequestContext(
      WebURLRequest::RequestContextSharedWorker);
  m_loadingDocument = toWebLocalFrameImpl(frame)->frame()->document();
  m_mainScriptLoader->loadAsynchronously(
      *m_loadingDocument.get(), m_url, DenyCrossOriginRequests,
      m_creationAddressSpace,
      bind(&WebSharedWorkerImpl::didReceiveScriptLoaderResponse,
           WTF::unretained(this)),
      bind(&WebSharedWorkerImpl::onScriptLoaderFinished,
           WTF::unretained(this)));
  // Do nothing here since onScriptLoaderFinished() might have been already
  // invoked and |this| might have been deleted at this point.
}
Example #12
0
inline SearchBuffer::SearchBuffer(const String& target, FindOptions options)
    : m_options(options),
      m_prefixLength(0),
      m_numberOfCharactersJustAppended(0),
      m_atBreak(true),
      m_needsMoreContext(options & AtWordStarts),
      m_targetRequiresKanaWorkaround(containsKanaLetters(target)) {
  DCHECK(!target.isEmpty()) << target;
  target.appendTo(m_target);

  // FIXME: We'd like to tailor the searcher to fold quote marks for us instead
  // of doing it in a separate replacement pass here, but ICU doesn't offer a
  // way to add tailoring on top of the locale-specific tailoring as of this
  // writing.
  foldQuoteMarksAndSoftHyphens(m_target.data(), m_target.size());

  size_t targetLength = m_target.size();
  m_buffer.reserveInitialCapacity(
      std::max(targetLength * 8, kMinimumSearchBufferSize));
  m_overlap = m_buffer.capacity() / 4;

  if ((m_options & AtWordStarts) && targetLength) {
    const UChar32 targetFirstCharacter =
        getCodePointAt(m_target.data(), 0, targetLength);
    // Characters in the separator category never really occur at the beginning
    // of a word, so if the target begins with such a character, we just ignore
    // the AtWordStart option.
    if (isSeparator(targetFirstCharacter)) {
      m_options &= ~AtWordStarts;
      m_needsMoreContext = false;
    }
  }

  m_textSearcher = wrapUnique(new TextSearcherICU());
  m_textSearcher->setPattern(StringView(m_target.data(), m_target.size()),
                             !(m_options & CaseInsensitive));

  // The kana workaround requires a normalized copy of the target string.
  if (m_targetRequiresKanaWorkaround)
    normalizeCharactersIntoNFCForm(m_target.data(), m_target.size(),
                                   m_normalizedTarget);
}
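Example #13
0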
bool SerializedScriptValueReaderForModules::readRTCCertificate(
    v8::Local<v8::Value>* value) {
  String pemPrivateKey;
  if (!readWebCoreString(&pemPrivateKey))
    return false;
  String pemCertificate;
  if (!readWebCoreString(&pemCertificate))
    return false;

  std::unique_ptr<WebRTCCertificateGenerator> certificateGenerator =
      wrapUnique(Platform::current()->createRTCCertificateGenerator());

  std::unique_ptr<WebRTCCertificate> certificate(
      certificateGenerator->fromPEM(pemPrivateKey, pemCertificate));
  RTCCertificate* jsCertificate = new RTCCertificate(std::move(certificate));

  *value =
      toV8(jsCertificate, getScriptState()->context()->Global(), isolate());
  return !value->IsEmpty();
}
Example #14
0
RTCDTMFSender* RTCDTMFSender::create(
    ExecutionContext* context,
    WebRTCPeerConnectionHandler* peerConnectionHandler,
    MediaStreamTrack* track,
    ExceptionState& exceptionState) {
  std::unique_ptr<WebRTCDTMFSenderHandler> handler =
      wrapUnique(peerConnectionHandler->createDTMFSender(track->component()));
  if (!handler) {
    exceptionState.throwDOMException(NotSupportedError,
                                     "The MediaStreamTrack provided is not an "
                                     "element of a MediaStream that's "
                                     "currently in the local streams set.");
    return nullptr;
  }

  RTCDTMFSender* dtmfSender =
      new RTCDTMFSender(context, track, std::move(handler));
  dtmfSender->suspendIfNeeded();
  return dtmfSender;
}
Example #15
0
void PaintLayerStackingNode::collectLayers(
    std::unique_ptr<Vector<PaintLayerStackingNode*>>& posBuffer,
    std::unique_ptr<Vector<PaintLayerStackingNode*>>& negBuffer) {
  if (layer()->isInTopLayer())
    return;

  if (isStacked()) {
    std::unique_ptr<Vector<PaintLayerStackingNode*>>& buffer =
        (zIndex() >= 0) ? posBuffer : negBuffer;
    if (!buffer)
      buffer = wrapUnique(new Vector<PaintLayerStackingNode*>);
    buffer->append(this);
  }

  if (!isStackingContext()) {
    for (PaintLayer* child = layer()->firstChild(); child;
         child = child->nextSibling())
      child->stackingNode()->collectLayers(posBuffer, negBuffer);
  }
}
Example #16
0
TEST(ImageLayerChromiumTest, opaqueImages) {
  FakeGraphicsLayerClient client;
  std::unique_ptr<FakeGraphicsLayer> graphicsLayer =
      wrapUnique(new FakeGraphicsLayer(&client));
  ASSERT_TRUE(graphicsLayer.get());

  bool opaque = true;
  RefPtr<Image> opaqueImage = TestImage::create(IntSize(100, 100), opaque);
  ASSERT_TRUE(opaqueImage.get());
  RefPtr<Image> nonOpaqueImage = TestImage::create(IntSize(100, 100), !opaque);
  ASSERT_TRUE(nonOpaqueImage.get());

  ASSERT_FALSE(graphicsLayer->contentsLayer());

  graphicsLayer->setContentsToImage(opaqueImage.get());
  ASSERT_TRUE(graphicsLayer->contentsLayer()->opaque());

  graphicsLayer->setContentsToImage(nonOpaqueImage.get());
  ASSERT_FALSE(graphicsLayer->contentsLayer()->opaque());
}
Example #17
0
V8PerContextData::V8PerContextData(v8::Local<v8::Context> context)
    : m_isolate(context->GetIsolate())
    , m_wrapperBoilerplates(m_isolate)
    , m_constructorMap(m_isolate)
    , m_contextHolder(wrapUnique(new gin::ContextHolder(m_isolate)))
    , m_context(m_isolate, context)
    , m_activityLogger(0)
    , m_compiledPrivateScript(m_isolate)
{
    m_contextHolder->SetContext(context);

    v8::Context::Scope contextScope(context);
    ASSERT(m_errorPrototype.isEmpty());
    v8::Local<v8::Value> objectValue = context->Global()->Get(context, v8AtomicString(m_isolate, "Error")).ToLocalChecked();
    v8::Local<v8::Value> prototypeValue = objectValue.As<v8::Object>()->Get(context, v8AtomicString(m_isolate, "prototype")).ToLocalChecked();
    m_errorPrototype.set(m_isolate, prototypeValue);

    if (isMainThread())
        InstanceCounters::incrementCounter(InstanceCounters::V8PerContextDataCounter);
}
Example #18
0
// Tests that filling a glyph buffer for a specific range returns the same
// results when shaping word by word as when shaping the full run in one go.
TEST_F(CachingWordShaperTest, CommonAccentLeftToRightFillGlyphBuffer) {
    // "/. ." with an accent mark over the first dot.
    const UChar str[] = {0x2F, 0x301, 0x2E, 0x20, 0x2E, 0x0};
    TextRun textRun(str, 5);

    CachingWordShaper shaper(cache.get());
    GlyphBuffer glyphBuffer;
    shaper.fillGlyphBuffer(&font, textRun, fallbackFonts, &glyphBuffer, 0, 3);

    std::unique_ptr<ShapeCache> referenceCache = wrapUnique(new ShapeCache());
    CachingWordShaper referenceShaper(referenceCache.get());
    GlyphBuffer referenceGlyphBuffer;
    font.setCanShapeWordByWordForTesting(false);
    referenceShaper.fillGlyphBuffer(&font, textRun, fallbackFonts,
                                    &referenceGlyphBuffer, 0, 3);

    ASSERT_EQ(referenceGlyphBuffer.glyphAt(0), glyphBuffer.glyphAt(0));
    ASSERT_EQ(referenceGlyphBuffer.glyphAt(1), glyphBuffer.glyphAt(1));
    ASSERT_EQ(referenceGlyphBuffer.glyphAt(2), glyphBuffer.glyphAt(2));
}
Example #19
0
bool RealtimeAnalyser::setFftSize(size_t size) {
  DCHECK(isMainThread());

  // Only allow powers of two.
  unsigned log2size = static_cast<unsigned>(log2(size));
  bool isPOT(1UL << log2size == size);

  if (!isPOT || size > MaxFFTSize || size < MinFFTSize)
    return false;

  if (m_fftSize != size) {
    m_analysisFrame = wrapUnique(new FFTFrame(size));
    // m_magnitudeBuffer has size = fftSize / 2 because it contains floats
    // reduced from complex values in m_analysisFrame.
    m_magnitudeBuffer.allocate(size / 2);
    m_fftSize = size;
  }

  return true;
}
Example #20
0
void V8Inspector::connectFrontend(protocol::FrontendChannel* channel)
{
    DCHECK(!m_frontend);
    m_frontend = wrapUnique(new protocol::Frontend(channel));
    m_dispatcher = protocol::Dispatcher::create(channel);

    m_dispatcher->registerAgent((protocol::Backend::Debugger*)m_session->debuggerAgent());
    m_dispatcher->registerAgent(m_session->heapProfilerAgent());
    m_dispatcher->registerAgent(m_session->profilerAgent());
    m_dispatcher->registerAgent(m_session->runtimeAgent());

    m_session->debuggerAgent()->setFrontend(
        protocol::Frontend::Debugger::from(m_frontend.get()));
    m_session->heapProfilerAgent()->setFrontend(
        protocol::Frontend::HeapProfiler::from(m_frontend.get()));
    m_session->profilerAgent()->setFrontend(
        protocol::Frontend::Profiler::from(m_frontend.get()));
    m_session->runtimeAgent()->setFrontend(
        protocol::Frontend::Runtime::from(m_frontend.get()));
}
Example #21
0
std::unique_ptr<FFTFrame> FFTFrame::createInterpolatedFrame(
    const FFTFrame& frame1,
    const FFTFrame& frame2,
    double x) {
  std::unique_ptr<FFTFrame> newFrame =
      wrapUnique(new FFTFrame(frame1.fftSize()));

  newFrame->interpolateFrequencyComponents(frame1, frame2, x);

  // In the time-domain, the 2nd half of the response must be zero, to avoid
  // circular convolution aliasing...
  int fftSize = newFrame->fftSize();
  AudioFloatArray buffer(fftSize);
  newFrame->doInverseFFT(buffer.data());
  buffer.zeroRange(fftSize / 2, fftSize);

  // Put back into frequency domain.
  newFrame->doFFT(buffer.data());

  return newFrame;
}
Example #22
0
TEST(ImageLayerChromiumTest, imageLayerContentReset) {
  FakeGraphicsLayerClient client;
  std::unique_ptr<FakeGraphicsLayer> graphicsLayer =
      wrapUnique(new FakeGraphicsLayer(&client));
  ASSERT_TRUE(graphicsLayer.get());

  ASSERT_FALSE(graphicsLayer->hasContentsLayer());
  ASSERT_FALSE(graphicsLayer->contentsLayer());

  bool opaque = false;
  RefPtr<Image> image = TestImage::create(IntSize(100, 100), opaque);
  ASSERT_TRUE(image.get());

  graphicsLayer->setContentsToImage(image.get());
  ASSERT_TRUE(graphicsLayer->hasContentsLayer());
  ASSERT_TRUE(graphicsLayer->contentsLayer());

  graphicsLayer->setContentsToImage(0);
  ASSERT_FALSE(graphicsLayer->hasContentsLayer());
  ASSERT_FALSE(graphicsLayer->contentsLayer());
}
Example #23
0
void V8HeapProfilerAgentImpl::takeHeapSnapshot(ErrorString* errorString, const protocol::Maybe<bool>& reportProgress)
{
    v8::HeapProfiler* profiler = m_isolate->GetHeapProfiler();
    if (!profiler) {
        *errorString = "Cannot access v8 heap profiler";
        return;
    }
    std::unique_ptr<HeapSnapshotProgress> progress;
    if (reportProgress.fromMaybe(false))
        progress = wrapUnique(new HeapSnapshotProgress(&m_frontend));

    GlobalObjectNameResolver resolver(m_session);
    const v8::HeapSnapshot* snapshot = profiler->TakeHeapSnapshot(progress.get(), &resolver);
    if (!snapshot) {
        *errorString = "Failed to take heap snapshot";
        return;
    }
    HeapSnapshotOutputStream stream(&m_frontend);
    snapshot->Serialize(&stream);
    const_cast<v8::HeapSnapshot*>(snapshot)->Delete();
}
Example #24
0
void DataConsumerHandleTestUtil::HandleReader::didGetReadable() {
  WebDataConsumerHandle::Result r = WebDataConsumerHandle::UnexpectedError;
  char buffer[3];
  while (true) {
    size_t size;
    r = m_reader->read(buffer, sizeof(buffer), WebDataConsumerHandle::FlagNone,
                       &size);
    if (r == WebDataConsumerHandle::ShouldWait)
      return;
    if (r != WebDataConsumerHandle::Ok)
      break;
    m_data.append(buffer, size);
  }
  std::unique_ptr<HandleReadResult> result =
      wrapUnique(new HandleReadResult(r, m_data));
  m_data.clear();
  Platform::current()->currentThread()->getWebTaskRunner()->postTask(
      BLINK_FROM_HERE,
      WTF::bind(&HandleReader::runOnFinishedReading, WTF::unretained(this),
                passed(std::move(result))));
  m_reader = nullptr;
}
Example #25
0
void DatabaseTracker::addOpenDatabase(Database* database)
{
    MutexLocker openDatabaseMapLock(m_openDatabaseMapGuard);
    if (!m_openDatabaseMap)
        m_openDatabaseMap = wrapUnique(new DatabaseOriginMap);

    String originString = database->getSecurityOrigin()->toRawString();
    DatabaseNameMap* nameMap = m_openDatabaseMap->get(originString);
    if (!nameMap) {
        nameMap = new DatabaseNameMap();
        m_openDatabaseMap->set(originString, nameMap);
    }

    String name(database->stringIdentifier());
    DatabaseSet* databaseSet = nameMap->get(name);
    if (!databaseSet) {
        databaseSet = new DatabaseSet();
        nameMap->set(name, databaseSet);
    }

    databaseSet->add(database);
}
Example #26
0
std::unique_ptr<MessagePortChannelArray> MessagePort::disentanglePorts(
    ExecutionContext* context,
    const MessagePortArray& ports,
    ExceptionState& exceptionState) {
  if (!ports.size())
    return nullptr;

  HeapHashSet<Member<MessagePort>> visited;

  // Walk the incoming array - if there are any duplicate ports, or null ports
  // or cloned ports, throw an error (per section 8.3.3 of the HTML5 spec).
  for (unsigned i = 0; i < ports.size(); ++i) {
    MessagePort* port = ports[i];
    if (!port || port->isNeutered() || visited.contains(port)) {
      String type;
      if (!port)
        type = "null";
      else if (port->isNeutered())
        type = "already neutered";
      else
        type = "a duplicate";
      exceptionState.throwDOMException(
          DataCloneError,
          "Port at index " + String::number(i) + " is " + type + ".");
      return nullptr;
    }
    visited.add(port);
  }

  UseCounter::count(context, UseCounter::MessagePortsTransferred);

  // Passed-in ports passed validity checks, so we can disentangle them.
  std::unique_ptr<MessagePortChannelArray> portArray =
      wrapUnique(new MessagePortChannelArray(ports.size()));
  for (unsigned i = 0; i < ports.size(); ++i)
    (*portArray)[i] = ports[i]->disentangle();
  return portArray;
}
Example #27
0
TEST(ImageDecoderTest, requiredPreviousFrameIndexKnownOpaque)
{
    std::unique_ptr<TestImageDecoder> decoder(wrapUnique(new TestImageDecoder()));
    decoder->initFrames(3);
    Vector<ImageFrame, 1>& frameBuffers = decoder->frameBufferCache();

    frameBuffers[1].setOriginalFrameRect(IntRect(25, 25, 50, 50));

    // A full frame that is known to be opaque doesn't depend on any prior frames.
    for (int disposeMethod = ImageFrame::DisposeNotSpecified; disposeMethod <= ImageFrame::DisposeOverwritePrevious; ++disposeMethod) {
        frameBuffers[1].setDisposalMethod(static_cast<ImageFrame::DisposalMethod>(disposeMethod));
        decoder->resetRequiredPreviousFrames(true);
        EXPECT_EQ(kNotFound, frameBuffers[2].requiredPreviousFrameIndex());
    }

    // A non-full frame that is known to be opaque does depend on a prior frame.
    frameBuffers[2].setOriginalFrameRect(IntRect(50, 50, 50, 50));
    for (int disposeMethod = ImageFrame::DisposeNotSpecified; disposeMethod <= ImageFrame::DisposeOverwritePrevious; ++disposeMethod) {
        frameBuffers[1].setDisposalMethod(static_cast<ImageFrame::DisposalMethod>(disposeMethod));
        decoder->resetRequiredPreviousFrames(true);
        EXPECT_NE(kNotFound, frameBuffers[2].requiredPreviousFrameIndex());
    }
}
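Example #28
0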
InProcessWorkerMessagingProxy*
DedicatedWorkerMessagingProxyProviderImpl::createWorkerMessagingProxy(
    Worker* worker) {
  if (worker->getExecutionContext()->isDocument()) {
    Document* document = toDocument(worker->getExecutionContext());
    WebLocalFrameImpl* webFrame =
        WebLocalFrameImpl::fromFrame(document->frame());
    WorkerClients* workerClients = WorkerClients::create();
    provideIndexedDBClientToWorker(workerClients,
                                   IndexedDBClientImpl::create());
    provideLocalFileSystemToWorker(workerClients,
                                   LocalFileSystemClient::create());
    provideContentSettingsClientToWorker(
        workerClients,
        wrapUnique(
            webFrame->client()->createWorkerContentSettingsClientProxy()));
    // FIXME: call provideServiceWorkerContainerClientToWorker here when we
    // support ServiceWorker in dedicated workers (http://crbug.com/371690)
    return new DedicatedWorkerMessagingProxy(worker, workerClients);
  }
  NOTREACHED();
  return 0;
}
Example #29
0
void V8HeapProfilerAgentImpl::addInspectedHeapObject(ErrorString* errorString, const String16& inspectedHeapObjectId)
{
    bool ok;
    int id = inspectedHeapObjectId.toInt(&ok);
    if (!ok) {
        *errorString = "Invalid heap snapshot object id";
        return;
    }

    v8::HandleScope handles(m_isolate);
    v8::Local<v8::Object> heapObject = objectByHeapObjectId(m_isolate, id);
    if (heapObject.IsEmpty()) {
        *errorString = "Object is not available";
        return;
    }

    if (!m_session->inspector()->client()->isInspectableHeapObject(heapObject)) {
        *errorString = "Object is not available";
        return;
    }

    m_session->addInspectedObject(wrapUnique(new InspectableHeapObject(id)));
}
Example #30
0
std::unique_ptr<JPEGImageEncoderState> JPEGImageEncoderState::create(const IntSize& imageSize, const double& quality, Vector<unsigned char>* output)
{
    if (imageSize.width() <= 0 || imageSize.height() <= 0)
        return nullptr;

    std::unique_ptr<JPEGImageEncoderStateImpl> encoderState = wrapUnique(new JPEGImageEncoderStateImpl());

    jpeg_compress_struct* cinfo = encoderState->cinfo();
    jpeg_error_mgr* error = encoderState->error();
    cinfo->err = jpeg_std_error(error);
    error->error_exit = handleError;

    SET_JUMP_BUFFER(cinfo, nullptr);

    JPEGOutputBuffer* destination = encoderState->outputBuffer();
    destination->output = output;

    jpeg_create_compress(cinfo);
    cinfo->dest = destination;
    cinfo->dest->init_destination = prepareOutput;
    cinfo->dest->empty_output_buffer = writeOutput;
    cinfo->dest->term_destination = finishOutput;

    cinfo->image_height = imageSize.height();
    cinfo->image_width = imageSize.width();
    cinfo->in_color_space = JCS_RGB;
    cinfo->input_components = 3;

    jpeg_set_defaults(cinfo);
    int compressionQuality = JPEGImageEncoder::computeCompressionQuality(quality);
    jpeg_set_quality(cinfo, compressionQuality, TRUE);
    disableSubsamplingForHighQuality(cinfo, compressionQuality);
    jpeg_start_compress(cinfo, TRUE);

    cinfo->client_data = 0;
    return std::move(encoderState);
}