Example #1
IntSize RenderThemeGtk::sliderTickSize() const
{
    // FIXME: We need to set this to the size of one tick mark.
    return IntSize(0, 0);
}
 explicit PrintContextTest(PassOwnPtrWillBeRawPtr<FrameLoaderClient> frameLoaderClient = nullptr)
     : m_pageHolder(DummyPageHolder::create(IntSize(kPageWidth, kPageHeight), nullptr, frameLoaderClient))
     , m_printContext(adoptPtrWillBeNoop(new MockPrintContext(document().frame()))) { }
void
MediaEngineTabVideoSource::Draw() {
  if (!mWindow) {
    return;
  }

  if (mScrollWithPage || mViewportWidth == INT32_MAX) {
    mWindow->GetInnerWidth(&mViewportWidth);
  }
  if (mScrollWithPage || mViewportHeight == INT32_MAX) {
    mWindow->GetInnerHeight(&mViewportHeight);
  }
  if (!mViewportWidth || !mViewportHeight) {
    return;
  }

  IntSize size;
  {
    float pixelRatio;
    mWindow->GetDevicePixelRatio(&pixelRatio);
    const int32_t deviceWidth = (int32_t)(pixelRatio * mViewportWidth);
    const int32_t deviceHeight = (int32_t)(pixelRatio * mViewportHeight);

    if ((deviceWidth <= mBufWidthMax) && (deviceHeight <= mBufHeightMax)) {
      size = IntSize(deviceWidth, deviceHeight);
    } else {
      const float scaleWidth = (float)mBufWidthMax / (float)deviceWidth;
      const float scaleHeight = (float)mBufHeightMax / (float)deviceHeight;
      const float scale = scaleWidth < scaleHeight ? scaleWidth : scaleHeight;

      size = IntSize((int)(scale * deviceWidth), (int)(scale * deviceHeight));
    }
  }

  gfxImageFormat format = SurfaceFormat::X8R8G8B8_UINT32;
  uint32_t stride = gfxASurface::FormatStrideForWidth(format, size.width);

  if (mDataSize < static_cast<size_t>(stride * size.height)) {
    mDataSize = stride * size.height;
    mData = static_cast<unsigned char*>(malloc(mDataSize));
  }
  if (!mData) {
    return;
  }

  nsCOMPtr<nsIPresShell> presShell;
  {
    RefPtr<nsPresContext> presContext;
    nsIDocShell* docshell = mWindow->GetDocShell();
    if (docshell) {
      docshell->GetPresContext(getter_AddRefs(presContext));
    }
    if (!presContext) {
      return;
    }
    presShell = presContext->PresShell();
  }

  nscolor bgColor = NS_RGB(255, 255, 255);
  uint32_t renderDocFlags = mScrollWithPage? 0 :
      (nsIPresShell::RENDER_IGNORE_VIEWPORT_SCROLLING |
       nsIPresShell::RENDER_DOCUMENT_RELATIVE);
  nsRect r(nsPresContext::CSSPixelsToAppUnits((float)mViewportOffsetX),
           nsPresContext::CSSPixelsToAppUnits((float)mViewportOffsetY),
           nsPresContext::CSSPixelsToAppUnits((float)mViewportWidth),
           nsPresContext::CSSPixelsToAppUnits((float)mViewportHeight));

  RefPtr<layers::ImageContainer> container = layers::LayerManager::CreateImageContainer();
  RefPtr<DrawTarget> dt =
    Factory::CreateDrawTargetForData(BackendType::CAIRO,
                                     mData.rwget(),
                                     size,
                                     stride,
                                     SurfaceFormat::B8G8R8X8);
  if (!dt) {
    return;
  }
  RefPtr<gfxContext> context = new gfxContext(dt);
  context->SetMatrix(context->CurrentMatrix().Scale((((float) size.width)/mViewportWidth),
                                                    (((float) size.height)/mViewportHeight)));

  NS_ENSURE_SUCCESS_VOID(presShell->RenderDocument(r, renderDocFlags, bgColor, context));

  RefPtr<SourceSurface> surface = dt->Snapshot();
  if (!surface) {
    return;
  }

  RefPtr<layers::SourceSurfaceImage> image = new layers::SourceSurfaceImage(size, surface);

  MonitorAutoLock mon(mMonitor);
  mImage = image;
}
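The block that computes `size` above picks a single uniform scale factor so the captured frame never exceeds the buffer limits while preserving the viewport's aspect ratio. Below is a minimal standalone sketch of just that arithmetic, using plain ints and a hypothetical clampToBuffer helper rather than the Mozilla gfx types:

#include <algorithm>
#include <cstdio>

// Shrink (deviceWidth, deviceHeight) to fit within (maxWidth, maxHeight) while
// preserving the aspect ratio; sizes that already fit are returned unchanged.
static void clampToBuffer(int deviceWidth, int deviceHeight,
                          int maxWidth, int maxHeight,
                          int* outWidth, int* outHeight)
{
    if (deviceWidth <= maxWidth && deviceHeight <= maxHeight) {
        *outWidth = deviceWidth;
        *outHeight = deviceHeight;
        return;
    }
    const float scaleWidth = static_cast<float>(maxWidth) / deviceWidth;
    const float scaleHeight = static_cast<float>(maxHeight) / deviceHeight;
    const float scale = std::min(scaleWidth, scaleHeight); // the more restrictive axis wins
    *outWidth = static_cast<int>(scale * deviceWidth);
    *outHeight = static_cast<int>(scale * deviceHeight);
}

int main()
{
    int w = 0, h = 0;
    clampToBuffer(3840, 2160, 1920, 1200, &w, &h); // a 4K viewport into a 1920x1200 buffer
    std::printf("%dx%d\n", w, h);                  // prints 1920x1080
    return 0;
}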
 WGC3Duint createGpuMemoryBufferImageCHROMIUM(WGC3Dsizei width, WGC3Dsizei height, WGC3Denum internalformat, WGC3Denum usage) override
 {
     m_imageSizes.set(m_currentImageId, IntSize(width, height));
     return m_currentImageId++;
 }
TEST_F(DrawingBufferImageChromiumTest, verifyResizingReallocatesImages)
{
    WebExternalTextureMailbox mailbox;

    IntSize initialSize(initialWidth, initialHeight);
    IntSize alternateSize(initialWidth, alternateHeight);

    WGC3Duint m_imageId1 = webContext()->nextImageIdToBeCreated();
    EXPECT_CALL(*webContext(), bindTexImage2DMock(m_imageId1)).Times(1);
    // Produce one mailbox at size 100x100.
    m_drawingBuffer->markContentsChanged();
    EXPECT_TRUE(m_drawingBuffer->prepareMailbox(&mailbox, 0));
    EXPECT_EQ(initialSize, webContext()->mostRecentlyProducedSize());
    EXPECT_TRUE(mailbox.allowOverlay);
    testing::Mock::VerifyAndClearExpectations(webContext());

    WGC3Duint m_imageId2 = webContext()->nextImageIdToBeCreated();
    EXPECT_CALL(*webContext(), bindTexImage2DMock(m_imageId2)).Times(1);
    EXPECT_CALL(*webContext(), destroyImageMock(m_imageId0)).Times(1);
    EXPECT_CALL(*webContext(), releaseTexImage2DMock(m_imageId0)).Times(1);
    // Resize to 100x50.
    m_drawingBuffer->reset(IntSize(initialWidth, alternateHeight));
    m_drawingBuffer->mailboxReleased(mailbox, false);
    testing::Mock::VerifyAndClearExpectations(webContext());

    WGC3Duint m_imageId3 = webContext()->nextImageIdToBeCreated();
    EXPECT_CALL(*webContext(), bindTexImage2DMock(m_imageId3)).Times(1);
    EXPECT_CALL(*webContext(), destroyImageMock(m_imageId1)).Times(1);
    EXPECT_CALL(*webContext(), releaseTexImage2DMock(m_imageId1)).Times(1);
    // Produce a mailbox at this size.
    m_drawingBuffer->markContentsChanged();
    EXPECT_TRUE(m_drawingBuffer->prepareMailbox(&mailbox, 0));
    EXPECT_EQ(alternateSize, webContext()->mostRecentlyProducedSize());
    EXPECT_TRUE(mailbox.allowOverlay);
    testing::Mock::VerifyAndClearExpectations(webContext());

    WGC3Duint m_imageId4 = webContext()->nextImageIdToBeCreated();
    EXPECT_CALL(*webContext(), bindTexImage2DMock(m_imageId4)).Times(1);
    EXPECT_CALL(*webContext(), destroyImageMock(m_imageId2)).Times(1);
    EXPECT_CALL(*webContext(), releaseTexImage2DMock(m_imageId2)).Times(1);
    // Reset to initial size.
    m_drawingBuffer->reset(IntSize(initialWidth, initialHeight));
    m_drawingBuffer->mailboxReleased(mailbox, false);
    testing::Mock::VerifyAndClearExpectations(webContext());

    WGC3Duint m_imageId5 = webContext()->nextImageIdToBeCreated();
    EXPECT_CALL(*webContext(), bindTexImage2DMock(m_imageId5)).Times(1);
    EXPECT_CALL(*webContext(), destroyImageMock(m_imageId3)).Times(1);
    EXPECT_CALL(*webContext(), releaseTexImage2DMock(m_imageId3)).Times(1);
    // Prepare another mailbox and verify that it's the correct size.
    m_drawingBuffer->markContentsChanged();
    EXPECT_TRUE(m_drawingBuffer->prepareMailbox(&mailbox, 0));
    EXPECT_EQ(initialSize, webContext()->mostRecentlyProducedSize());
    EXPECT_TRUE(mailbox.allowOverlay);
    testing::Mock::VerifyAndClearExpectations(webContext());

    // Prepare one final mailbox and verify that it's the correct size.
    m_drawingBuffer->mailboxReleased(mailbox, false);
    m_drawingBuffer->markContentsChanged();
    EXPECT_TRUE(m_drawingBuffer->prepareMailbox(&mailbox, 0));
    EXPECT_EQ(initialSize, webContext()->mostRecentlyProducedSize());
    EXPECT_TRUE(mailbox.allowOverlay);
    m_drawingBuffer->mailboxReleased(mailbox, false);

    EXPECT_CALL(*webContext(), destroyImageMock(m_imageId5)).Times(1);
    EXPECT_CALL(*webContext(), releaseTexImage2DMock(m_imageId5)).Times(1);
    EXPECT_CALL(*webContext(), destroyImageMock(m_imageId4)).Times(1);
    EXPECT_CALL(*webContext(), releaseTexImage2DMock(m_imageId4)).Times(1);
    m_drawingBuffer->beginDestruction();
    testing::Mock::VerifyAndClearExpectations(webContext());
}
Example #6
bool GraphicsContext3D::ImageExtractor::extractImage(bool premultiplyAlpha, bool ignoreGammaAndColorProfile)
{
    if (!m_image)
        return false;
    // We need this to stay in scope because the native image is just a shallow copy of the data.
    m_decoder = new ImageSource(premultiplyAlpha ? ImageSource::AlphaPremultiplied : ImageSource::AlphaNotPremultiplied, ignoreGammaAndColorProfile ? ImageSource::GammaAndColorProfileIgnored : ImageSource::GammaAndColorProfileApplied);
    if (!m_decoder)
        return false;
    ImageSource& decoder = *m_decoder;

    m_alphaOp = AlphaDoNothing;
    if (m_image->data()) {
        decoder.setData(m_image->data(), true);
        if (!decoder.frameCount() || !decoder.frameIsCompleteAtIndex(0))
            return false;
        m_imageSurface = decoder.createFrameAtIndex(0);
    } else {
        m_imageSurface = m_image->nativeImageForCurrentFrame();
        // 1. For texImage2D with HTMLVideoElement input, assume premultiplied alpha has not been applied and the alpha value is 0xFF for each pixel;
        // this is true at present, but if it changes in the future this code will need to be adjusted accordingly.
        // 2. For texImage2D with HTMLCanvasElement input, where alpha is already premultiplied in this port,
        // do AlphaDoUnmultiply if UNPACK_PREMULTIPLY_ALPHA_WEBGL is set to false.
        if (!premultiplyAlpha && m_imageHtmlDomSource != HtmlDomVideo)
            m_alphaOp = AlphaDoUnmultiply;

        // if m_imageSurface is not an image, extract a copy of the surface
        if (m_imageSurface && cairo_surface_get_type(m_imageSurface.get()) != CAIRO_SURFACE_TYPE_IMAGE) {
            RefPtr<cairo_surface_t> tmpSurface = adoptRef(cairo_image_surface_create(CAIRO_FORMAT_ARGB32, m_imageWidth, m_imageHeight));
            copyRectFromOneSurfaceToAnother(m_imageSurface.get(), tmpSurface.get(), IntSize(), IntRect(0, 0, m_imageWidth, m_imageHeight), IntSize(), CAIRO_OPERATOR_SOURCE);
            m_imageSurface = tmpSurface.release();
        }
    }

    if (!m_imageSurface)
        return false;

    ASSERT(cairo_surface_get_type(m_imageSurface.get()) == CAIRO_SURFACE_TYPE_IMAGE);

    IntSize imageSize = cairoSurfaceSize(m_imageSurface.get());
    m_imageWidth = imageSize.width();
    m_imageHeight = imageSize.height();
    if (!m_imageWidth || !m_imageHeight)
        return false;

    if (cairo_image_surface_get_format(m_imageSurface.get()) != CAIRO_FORMAT_ARGB32)
        return false;

    unsigned int srcUnpackAlignment = 1;
    size_t bytesPerRow = cairo_image_surface_get_stride(m_imageSurface.get());
    size_t bitsPerPixel = 32;
    unsigned padding = bytesPerRow - bitsPerPixel / 8 * m_imageWidth;
    if (padding) {
        srcUnpackAlignment = padding + 1;
        while (bytesPerRow % srcUnpackAlignment)
            ++srcUnpackAlignment;
    }

    m_imagePixelData = cairo_image_surface_get_data(m_imageSurface.get());
    m_imageSourceFormat = DataFormatBGRA8;
    m_imageSourceUnpackAlignment = srcUnpackAlignment;
    return true;
}
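The padding loop near the end of extractImage() derives an unpack alignment from the Cairo stride: when a row carries trailing padding, it searches upward from padding + 1 for a value that divides the stride evenly. A self-contained sketch of that search under the same assumption of 4 bytes per CAIRO_FORMAT_ARGB32 pixel; the function name is illustrative, not part of WebKit:

#include <cstddef>
#include <cstdio>

// Given a row stride in bytes and a width in 32-bit pixels, find an unpack
// alignment such that bytesPerRow % alignment == 0 (1 when rows are tightly packed).
static size_t unpackAlignmentForStride(size_t bytesPerRow, size_t widthInPixels)
{
    const size_t bytesPerPixel = 4; // CAIRO_FORMAT_ARGB32
    size_t alignment = 1;
    const size_t padding = bytesPerRow - bytesPerPixel * widthInPixels;
    if (padding) {
        alignment = padding + 1;
        while (bytesPerRow % alignment)
            ++alignment;
    }
    return alignment;
}

int main()
{
    // A 30-pixel-wide ARGB32 row padded to a 128-byte stride has 8 bytes of padding.
    std::printf("%zu\n", unpackAlignmentForStride(128, 30)); // prints 16
    return 0;
}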
Example #7
WebCore::IntSize PageClientImpl::viewSize()
{
    auto* drawingArea = static_cast<DrawingAreaProxyImpl*>(webkitWebViewBaseGetPage(WEBKIT_WEB_VIEW_BASE(m_viewWidget))->drawingArea());
    return drawingArea ? drawingArea->size() : IntSize();
}
IntRect TileController::boundsAtLastRevalidateWithoutMargin() const
{
    IntRect boundsWithoutMargin = IntRect(IntPoint(), m_boundsAtLastRevalidate.size());
    boundsWithoutMargin.contract(IntSize(leftMarginWidth() + rightMarginWidth(), topMarginHeight() + bottomMarginHeight()));
    return boundsWithoutMargin;
}
void SurroundingTextTest::SetUp()
{
    m_dummyPageHolder = DummyPageHolder::create(IntSize(800, 600));
}
Example #10
/* static */
nsresult
ImageEncoder::ExtractDataInternal(const nsAString& aType,
                                  const nsAString& aOptions,
                                  uint8_t* aImageBuffer,
                                  int32_t aFormat,
                                  const nsIntSize aSize,
                                  layers::Image* aImage,
                                  nsICanvasRenderingContextInternal* aContext,
                                  layers::AsyncCanvasRenderer* aRenderer,
                                  nsIInputStream** aStream,
                                  imgIEncoder* aEncoder)
{
  if (aSize.IsEmpty()) {
    return NS_ERROR_INVALID_ARG;
  }

  nsCOMPtr<nsIInputStream> imgStream;

  // get image bytes
  nsresult rv;
  if (aImageBuffer) {
    rv = ImageEncoder::GetInputStream(
      aSize.width,
      aSize.height,
      aImageBuffer,
      aFormat,
      aEncoder,
      nsPromiseFlatString(aOptions).get(),
      getter_AddRefs(imgStream));
  } else if (aContext) {
    NS_ConvertUTF16toUTF8 encoderType(aType);
    rv = aContext->GetInputStream(encoderType.get(),
                                  nsPromiseFlatString(aOptions).get(),
                                  getter_AddRefs(imgStream));
  } else if (aRenderer) {
    NS_ConvertUTF16toUTF8 encoderType(aType);
    rv = aRenderer->GetInputStream(encoderType.get(),
                                   nsPromiseFlatString(aOptions).get(),
                                   getter_AddRefs(imgStream));
  } else if (aImage) {
    // It is safe to convert the PlanarYCbCr format from YUV to RGB off the main thread.
    // Other image formats may not convert safely off the main thread, so the helper
    // GetBRGADataSourceSurfaceSync() is used to do the conversion on the main thread.
    if (aImage->GetFormat() == ImageFormat::PLANAR_YCBCR) {
      nsTArray<uint8_t> data;
      layers::PlanarYCbCrImage* ycbcrImage = static_cast<layers::PlanarYCbCrImage*> (aImage);
      gfxImageFormat format = SurfaceFormat::A8R8G8B8_UINT32;
      uint32_t stride = GetAlignedStride<16>(aSize.width * 4);
      size_t length = BufferSizeFromStrideAndHeight(stride, aSize.height);
      data.SetCapacity(length);

      gfxUtils::ConvertYCbCrToRGB(*ycbcrImage->GetData(),
                                  format,
                                  aSize,
                                  data.Elements(),
                                  stride);

      rv = aEncoder->InitFromData(data.Elements(),
                                  aSize.width * aSize.height * 4,
                                  aSize.width,
                                  aSize.height,
                                  aSize.width * 4,
                                  imgIEncoder::INPUT_FORMAT_HOSTARGB,
                                  aOptions);
    } else {
      RefPtr<gfx::DataSourceSurface> dataSurface;
      RefPtr<layers::Image> image(aImage);
      dataSurface = GetBRGADataSourceSurfaceSync(image.forget());

      DataSourceSurface::MappedSurface map;
      if (!dataSurface->Map(gfx::DataSourceSurface::MapType::READ, &map)) {
        return NS_ERROR_INVALID_ARG;
      }
      rv = aEncoder->InitFromData(map.mData,
                                  aSize.width * aSize.height * 4,
                                  aSize.width,
                                  aSize.height,
                                  aSize.width * 4,
                                  imgIEncoder::INPUT_FORMAT_HOSTARGB,
                                  aOptions);
      dataSurface->Unmap();
    }

    if (NS_SUCCEEDED(rv)) {
      imgStream = do_QueryInterface(aEncoder);
    }
  } else {
    // no context, so we have to encode an empty image
    // note that if we didn't have a current context, the spec says we're
    // supposed to just return transparent black pixels of the canvas
    // dimensions.
    RefPtr<DataSourceSurface> emptyCanvas =
      Factory::CreateDataSourceSurfaceWithStride(IntSize(aSize.width, aSize.height),
                                                 SurfaceFormat::B8G8R8A8,
                                                 4 * aSize.width, true);
    if (NS_WARN_IF(!emptyCanvas)) {
      return NS_ERROR_INVALID_ARG;
    }

    DataSourceSurface::MappedSurface map;
    if (!emptyCanvas->Map(DataSourceSurface::MapType::WRITE, &map)) {
      return NS_ERROR_INVALID_ARG;
    }
    rv = aEncoder->InitFromData(map.mData,
                                aSize.width * aSize.height * 4,
                                aSize.width,
                                aSize.height,
                                aSize.width * 4,
                                imgIEncoder::INPUT_FORMAT_HOSTARGB,
                                aOptions);
    emptyCanvas->Unmap();
    if (NS_SUCCEEDED(rv)) {
      imgStream = do_QueryInterface(aEncoder);
    }
  }
  NS_ENSURE_SUCCESS(rv, rv);

  imgStream.forget(aStream);
  return rv;
}
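In the PLANAR_YCBCR branch above, GetAlignedStride<16>(aSize.width * 4) rounds the row size up to a 16-byte boundary before the YUV-to-RGB conversion. A hedged standalone sketch of that rounding and the resulting buffer size, with illustrative names (the real helper also guards against integer overflow):

#include <cstdint>
#include <cstdio>

// Round a row of `width` 4-byte pixels up to the next multiple of `alignment`
// bytes; `alignment` must be a power of two (16 in the code above).
static uint32_t alignedStride(uint32_t width, uint32_t alignment)
{
    const uint32_t rowBytes = width * 4;
    return (rowBytes + alignment - 1) & ~(alignment - 1);
}

int main()
{
    const uint32_t width = 301, height = 200;
    const uint32_t stride = alignedStride(width, 16);  // 301 * 4 = 1204, rounded up to 1216
    const size_t bufferSize = static_cast<size_t>(stride) * height;
    std::printf("stride=%u buffer=%zu\n", stride, bufferSize); // stride=1216 buffer=243200
    return 0;
}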
Example #11
IntSize ColorChooserPopupUIController::contentSize()
{
    return IntSize(0, 0);
}
Example #12
void CoordinatedGraphicsLayer::setContentsToCanvas(PlatformLayer* platformLayer)
{
#if USE(GRAPHICS_SURFACE)
    if (m_canvasPlatformLayer) {
        ASSERT(m_canvasToken.isValid());
        if (!platformLayer) {
            m_pendingCanvasOperation |= DestroyCanvas;
            m_pendingCanvasOperation &= ~CreateCanvas;
        }  else if ((m_canvasSize != platformLayer->platformLayerSize()) || (m_canvasToken != platformLayer->graphicsSurfaceToken())) {
            // m_canvasToken can be different to platformLayer->graphicsSurfaceToken(), even if m_canvasPlatformLayer equals platformLayer.
            m_pendingCanvasOperation |= RecreateCanvas;
        }
    } else {
        if (platformLayer)
            m_pendingCanvasOperation |= CreateAndSyncCanvas;
    }

    m_canvasPlatformLayer = platformLayer;
    // m_canvasToken is updated only here. In detail, when GraphicsContext3D is changed or reshaped, m_canvasToken is changed and setContentsToCanvas() is always called.
    m_canvasSize = m_canvasPlatformLayer ? m_canvasPlatformLayer->platformLayerSize() : IntSize();
    m_canvasToken = m_canvasPlatformLayer ? m_canvasPlatformLayer->graphicsSurfaceToken() : GraphicsSurfaceToken();
    ASSERT(!(!m_canvasToken.isValid() && m_canvasPlatformLayer));

    if (client())
        client()->notifyFlushRequired(this);
#else
    UNUSED_PARAM(platformLayer);
#endif
}
Example #13
void HTMLPlugInImageElement::subframeLoaderWillCreatePlugIn(const KURL& url)
{
    LOG(Plugins, "%p Plug-in URL: %s", this, m_url.utf8().data());
    LOG(Plugins, "   Loaded URL: %s", url.string().utf8().data());

    m_loadedUrl = url;
    m_plugInWasCreated = false;
    m_deferredPromotionToPrimaryPlugIn = false;

    if (!document()->page() || !document()->page()->settings()->plugInSnapshottingEnabled()) {
        m_snapshotDecision = NeverSnapshot;
        return;
    }

    if (displayState() == Restarting) {
        LOG(Plugins, "%p Plug-in is explicitly restarting", this);
        m_snapshotDecision = NeverSnapshot;
        setDisplayState(Playing);
        return;
    }

    if (displayState() == RestartingWithPendingMouseClick) {
        LOG(Plugins, "%p Plug-in is explicitly restarting but also waiting for a click", this);
        m_snapshotDecision = NeverSnapshot;
        return;
    }

    if (m_snapshotDecision == NeverSnapshot) {
        LOG(Plugins, "%p Plug-in is blessed, allow it to start", this);
        return;
    }

    bool inMainFrame = document()->frame() == document()->page()->mainFrame();

    if (document()->isPluginDocument() && inMainFrame) {
        LOG(Plugins, "%p Plug-in document in main frame", this);
        m_snapshotDecision = NeverSnapshot;
        return;
    }

    if (ScriptController::processingUserGesture()) {
        LOG(Plugins, "%p Script is currently processing user gesture, set to play", this);
        m_snapshotDecision = NeverSnapshot;
        return;
    }

    if (m_createdDuringUserGesture) {
        LOG(Plugins, "%p Plug-in was created when processing user gesture, set to play", this);
        m_snapshotDecision = NeverSnapshot;
        return;
    }

    if (documentHadRecentUserGesture(document())) {
        LOG(Plugins, "%p Plug-in was created shortly after a user gesture, set to play", this);
        m_snapshotDecision = NeverSnapshot;
        return;
    }

    if (document()->page()->settings()->snapshotAllPlugIns()) {
        LOG(Plugins, "%p Plug-in forced to snapshot by user preference", this);
        m_snapshotDecision = Snapshotted;
        setDisplayState(WaitingForSnapshot);
        return;
    }

    if (document()->page()->settings()->autostartOriginPlugInSnapshottingEnabled() && document()->page()->plugInClient() && document()->page()->plugInClient()->shouldAutoStartFromOrigin(document()->page()->mainFrame()->document()->baseURL().host(), url.host(), loadedMimeType())) {
        LOG(Plugins, "%p Plug-in from (%s, %s) is marked to auto-start, set to play", this, document()->page()->mainFrame()->document()->baseURL().host().utf8().data(), url.host().utf8().data());
        m_snapshotDecision = NeverSnapshot;
        return;
    }

    RenderBox* renderEmbeddedObject = toRenderBox(renderer());
    Length styleWidth = renderEmbeddedObject->style()->width();
    Length styleHeight = renderEmbeddedObject->style()->height();
    LayoutRect contentBoxRect = renderEmbeddedObject->contentBoxRect();
    int contentWidth = contentBoxRect.width();
    int contentHeight = contentBoxRect.height();
    int contentArea = contentWidth * contentHeight;
    IntSize visibleViewSize = document()->frame()->view()->visibleSize();
    int visibleArea = visibleViewSize.width() * visibleViewSize.height();

    if (inMainFrame && styleWidth.isPercent() && (styleWidth.percent() == 100)
        && styleHeight.isPercent() && (styleHeight.percent() == 100)
        && (static_cast<float>(contentArea) / visibleArea > sizingFullPageAreaRatioThreshold)) {
        LOG(Plugins, "%p Plug-in is top level full page, set to play", this);
        m_snapshotDecision = NeverSnapshot;
        return;
    }

    if (contentWidth <= sizingTinyDimensionThreshold || contentHeight <= sizingTinyDimensionThreshold) {
        LOG(Plugins, "%p Plug-in is very small %dx%d, set to play", this, contentWidth, contentHeight);
        m_sizeWhenSnapshotted = IntSize(contentBoxRect.width().toInt(), contentBoxRect.height().toInt());
        m_snapshotDecision = MaySnapshotWhenResized;
        return;
    }

    if (!document()->page()->plugInClient()) {
        LOG(Plugins, "%p There is no plug-in client. Set to wait for snapshot", this);
        m_snapshotDecision = NeverSnapshot;
        setDisplayState(WaitingForSnapshot);
        return;
    }

    LOG(Plugins, "%p Plug-in from (%s, %s) is not auto-start, sized at %dx%d, set to wait for snapshot", this, document()->page()->mainFrame()->document()->baseURL().host().utf8().data(), url.host().utf8().data(), contentWidth, contentHeight);
    m_snapshotDecision = Snapshotted;
    setDisplayState(WaitingForSnapshot);
}
Example #14
	void MultiList2::setSize(int _width, int _height)
	{
		setSize(IntSize(_width, _height));
	}
Example #15
void IconController::continueLoadWithDecision(IconLoadDecision iconLoadDecision)
{
    ASSERT(iconLoadDecision != IconLoadUnknown);

    if (iconLoadDecision == IconLoadNo) {
        KURL iconURL(url());
        String urlString(iconURL.string());
        if (urlString.isEmpty())
            return;

        LOG(IconDatabase, "IconController::startLoader() - Told not to load this icon, committing iconURL %s to database for pageURL mapping", urlString.ascii().data());
        commitToDatabase(iconURL);

        if (iconDatabase().supportsAsynchronousMode()) {
            m_frame->loader()->documentLoader()->getIconDataForIconURL(urlString);
            return;
        }

        // We were told not to load this icon - that means this icon is already known by the database
        // If the icon data hasn't been read in from disk yet, kick off the read of the icon from the database to make sure someone
        // has done it. This is after registering for the notification so the WebView can call the appropriate delegate method.
        // Otherwise if the icon data *is* available, notify the delegate
        if (!iconDatabase().synchronousIconDataKnownForIconURL(urlString)) {
            LOG(IconDatabase, "Told not to load icon %s but icon data is not yet available - registering for notification and requesting load from disk", urlString.ascii().data());
            m_frame->loader()->client()->registerForIconNotification();
            iconDatabase().synchronousIconForPageURL(m_frame->document()->url().string(), IntSize(0, 0));
            iconDatabase().synchronousIconForPageURL(m_frame->loader()->initialRequest().url().string(), IntSize(0, 0));
        } else
            m_frame->loader()->client()->dispatchDidReceiveIcon();

        return;
    } 

    if (!m_iconLoader)
        m_iconLoader = IconLoader::create(m_frame);

    m_iconLoader->startLoading();
}
Example #16
IntSize RenderSVGRoot::parentOriginToBorderBox() const
{
    return IntSize(x(), y());
}
Example #17
IntSize TextureMapperImageBuffer::maxTextureSize() const
{
    return IntSize(s_maximumAllowedImageBufferDimension, s_maximumAllowedImageBufferDimension);
}
Example #18
IntSize RenderSVGRoot::borderOriginToContentBox() const
{
    return IntSize(borderLeft() + paddingLeft(), borderTop() + paddingTop());
}
Example #19
 virtual IntSize size() const { return IntSize(m_data->m_bitmap->width(), m_data->m_bitmap->height()); }
Example #20
FloatRect screenRect(Widget* widget)
{
    return FloatRect(FloatPoint(), FloatSize(IntSize(BlackBerry::Platform::Graphics::Screen::primaryScreen()->size())));
}
Example #21
void Image::drawPattern(GraphicsContext* context, const FloatRect& tileRect, const TransformationMatrix& patternTransform,
                        const FloatPoint& phase, CompositeOperator op, const FloatRect& destRect)
{
    if (destRect.isEmpty())
        return;

    SDL_Surface* image = nativeImageForCurrentFrame();
    if (!image) // If it's too early we won't have an image yet.
        return;

    SDL_Surface* cr = context->platformContext();
    context->save();
    context->setCompositeOperation(op);

    // Check and see if a single draw of the image can cover the entire area we are supposed to tile.
    // save context info
    context->clip(IntRect(destRect)); // don't draw outside this


    IntRect dest(IntPoint(), IntSize(image->w, image->h));
    IntRect src(static_cast<int>(phase.x()), static_cast<int>(phase.y()), static_cast<int>(tileRect.size().width()), static_cast<int>(tileRect.size().height()));

    int xMax = static_cast<int>(destRect.x() + destRect.width());
    int yMax = static_cast<int>(destRect.y() + destRect.height());


    SDL_Rect srcRect, dstRect;

    srcRect.x = 0;
    srcRect.y = 0;
    if (0 == src.width())
        srcRect.w = image->w;
    else
        srcRect.w = static_cast<Uint16>(src.width());
    if (0 == src.height())
        srcRect.h = image->h;
    else
        srcRect.h = static_cast<Uint16>(src.height());

    dstRect.x = static_cast<Sint16>(dest.x());
    dstRect.y = static_cast<Sint16>(dest.y());
    dstRect.w = static_cast<Sint16>(dest.width());
    dstRect.h = static_cast<Sint16>(dest.height());

    //compute ratio of the zoomed part:
    double  ratioW = (((double)dest.width() / (double)srcRect.w));
    double  ratioH = ((double)dest.height() / ((double)srcRect.h));

    SDL_Surface *surface = NULL;
    if ((ratioW != 1.0)||(ratioH != 1.0)) {
        surface = zoomSurface(image,
                              ratioW,
                              ratioH,
                              SMOOTHING_OFF);
        //adjust offset to the new referentiel (zoomed)
        srcRect.x = static_cast<Sint16>(src.x() * ratioW);
        srcRect.y = static_cast<Sint16>(src.y() * ratioH);
    }

    for (int x = static_cast<int>(phase.x()); x < xMax; x += image->w) {
        for (int y = static_cast<int>(phase.y()); y < yMax; y += image->h) {
            dest.setLocation(IntPoint(x, y) + IntSize(context->origin().x(), context->origin().y()));
            dstRect.x = static_cast<Sint16>(dest.x());
            dstRect.y = static_cast<Sint16>(dest.y());
            dstRect.w = static_cast<Sint16>(dest.width());
            dstRect.h = static_cast<Sint16>(dest.height());

            if (surface) {
                if (context->transparencyLayer() == 1.0)
                    SDL_BlitSurface(surface, &srcRect, cr, &dstRect);
                else {
                    SDL_Surface *surfaceWithAlpha = applyTransparency(surface, static_cast<int> (context->transparencyLayer() * 255));
                    SDL_BlitSurface(surfaceWithAlpha, &srcRect, cr, &dstRect);
                    SDL_FreeSurface(surfaceWithAlpha);
                }
            }
            else {
                if (context->transparencyLayer() == 1.0)
                    SDL_BlitSurface(image, &srcRect, cr, &dstRect);
                else {
                    SDL_Surface *surfaceWithAlpha = applyTransparency(image, static_cast<int> (context->transparencyLayer() * 255));
                    SDL_BlitSurface(surfaceWithAlpha, &srcRect, cr, &dstRect);
                    SDL_FreeSurface(surfaceWithAlpha);
                }
            }
        }
    }
    if(surface)
        SDL_FreeSurface(surface);

    context->restore();

    if (imageObserver())
        imageObserver()->didDraw(this);
}
Example #22
IntSize dragImageSize(DragImageRef)
{
    notImplemented();
    return IntSize(0, 0);
}
TEST_F(DrawingBufferTest, testPaintRenderingResultsToCanvas)
{
    OwnPtr<ImageBufferSurface> imageBufferSurface = adoptPtr(new UnacceleratedImageBufferSurface(IntSize(initialWidth, initialHeight)));
    EXPECT_FALSE(!imageBufferSurface);
    EXPECT_TRUE(imageBufferSurface->isValid());
    OwnPtr<ImageBuffer> imageBuffer = ImageBuffer::create(imageBufferSurface.release());
    EXPECT_FALSE(!imageBuffer);
    EXPECT_FALSE(imageBuffer->isAccelerated());
    EXPECT_FALSE(!imageBuffer->newImageSnapshot());
    m_drawingBuffer->paintRenderingResultsToCanvas(imageBuffer.get());
    EXPECT_FALSE(imageBuffer->isAccelerated());
    EXPECT_FALSE(!imageBuffer->newImageSnapshot());
    m_drawingBuffer->beginDestruction();
}
Example #24
static inline IntSize pointToSize(const IntPoint& point)
{
    return IntSize(point.x(), point.y());
}
 void texImage2D(WGC3Denum target, WGC3Dint level, WGC3Denum internalformat, WGC3Dsizei width, WGC3Dsizei height, WGC3Dint border, WGC3Denum format, WGC3Denum type, const void* pixels) override
 {
     if (target == GL_TEXTURE_2D && !level) {
         m_textureSizes.set(m_boundTexture, IntSize(width, height));
     }
 }
Example #26
/* static */
nsresult
ImageEncoder::ExtractDataInternal(const nsAString& aType,
                                  const nsAString& aOptions,
                                  uint8_t* aImageBuffer,
                                  int32_t aFormat,
                                  const nsIntSize aSize,
                                  nsICanvasRenderingContextInternal* aContext,
                                  nsIInputStream** aStream,
                                  imgIEncoder* aEncoder)
{
  if (aSize.IsEmpty()) {
    return NS_ERROR_INVALID_ARG;
  }

  nsCOMPtr<nsIInputStream> imgStream;

  // get image bytes
  nsresult rv;
  if (aImageBuffer) {
    rv = ImageEncoder::GetInputStream(
      aSize.width,
      aSize.height,
      aImageBuffer,
      aFormat,
      aEncoder,
      nsPromiseFlatString(aOptions).get(),
      getter_AddRefs(imgStream));
  } else if (aContext) {
    NS_ConvertUTF16toUTF8 encoderType(aType);
    rv = aContext->GetInputStream(encoderType.get(),
                                  nsPromiseFlatString(aOptions).get(),
                                  getter_AddRefs(imgStream));
  } else {
    // no context, so we have to encode an empty image
    // note that if we didn't have a current context, the spec says we're
    // supposed to just return transparent black pixels of the canvas
    // dimensions.
    RefPtr<DataSourceSurface> emptyCanvas =
      Factory::CreateDataSourceSurfaceWithStride(IntSize(aSize.width, aSize.height),
                                                 SurfaceFormat::B8G8R8A8,
                                                 4 * aSize.width, true);
    if (NS_WARN_IF(!emptyCanvas)) {
      return NS_ERROR_INVALID_ARG;
    }

    DataSourceSurface::MappedSurface map;
    if (!emptyCanvas->Map(DataSourceSurface::MapType::WRITE, &map)) {
      return NS_ERROR_INVALID_ARG;
    }
    rv = aEncoder->InitFromData(map.mData,
                                aSize.width * aSize.height * 4,
                                aSize.width,
                                aSize.height,
                                aSize.width * 4,
                                imgIEncoder::INPUT_FORMAT_HOSTARGB,
                                aOptions);
    emptyCanvas->Unmap();
    if (NS_SUCCEEDED(rv)) {
      imgStream = do_QueryInterface(aEncoder);
    }
  }
  NS_ENSURE_SUCCESS(rv, rv);

  imgStream.forget(aStream);
  return rv;
}
// Returns the size of the video
IntSize MediaPlayerPrivateGStreamerBase::naturalSize() const
{
    if (!hasVideo())
        return IntSize();

    if (!m_videoSize.isEmpty())
        return m_videoSize;

#ifdef GST_API_VERSION_1
    /* FIXME this has a race with the pad setting caps as the buffer (m_buffer)
     * and the caps won't match and might cause a crash. (In case a
     * renegotiation happens)
     */
    GRefPtr<GstCaps> caps = webkitGstGetPadCaps(m_videoSinkPad.get());
#else
    g_mutex_lock(m_bufferMutex);
    GRefPtr<GstCaps> caps = m_buffer ? GST_BUFFER_CAPS(m_buffer) : 0;
    g_mutex_unlock(m_bufferMutex);
#endif
    if (!caps)
        return IntSize();


    // TODO: handle possible clean aperture data. See
    // https://bugzilla.gnome.org/show_bug.cgi?id=596571
    // TODO: handle possible transformation matrix. See
    // https://bugzilla.gnome.org/show_bug.cgi?id=596326

    // Get the video PAR and original size, if this fails the
    // video-sink has likely not yet negotiated its caps.
    int pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride;
    IntSize originalSize;
    GstVideoFormat format;
    if (!getVideoSizeAndFormatFromCaps(caps.get(), originalSize, format, pixelAspectRatioNumerator, pixelAspectRatioDenominator, stride))
        return IntSize();

    LOG_MEDIA_MESSAGE("Original video size: %dx%d", originalSize.width(), originalSize.height());
    LOG_MEDIA_MESSAGE("Pixel aspect ratio: %d/%d", pixelAspectRatioNumerator, pixelAspectRatioDenominator);

    // Calculate DAR based on PAR and video size.
    int displayWidth = originalSize.width() * pixelAspectRatioNumerator;
    int displayHeight = originalSize.height() * pixelAspectRatioDenominator;

    // Divide display width and height by their GCD to avoid possible overflows.
    int displayAspectRatioGCD = greatestCommonDivisor(displayWidth, displayHeight);
    displayWidth /= displayAspectRatioGCD;
    displayHeight /= displayAspectRatioGCD;

    // Apply DAR to original video size. This is the same behavior as in xvimagesink's setcaps function.
    guint64 width = 0, height = 0;
    if (!(originalSize.height() % displayHeight)) {
        LOG_MEDIA_MESSAGE("Keeping video original height");
        width = gst_util_uint64_scale_int(originalSize.height(), displayWidth, displayHeight);
        height = static_cast<guint64>(originalSize.height());
    } else if (!(originalSize.width() % displayWidth)) {
        LOG_MEDIA_MESSAGE("Keeping video original width");
        height = gst_util_uint64_scale_int(originalSize.width(), displayHeight, displayWidth);
        width = static_cast<guint64>(originalSize.width());
    } else {
        LOG_MEDIA_MESSAGE("Approximating while keeping original video height");
        width = gst_util_uint64_scale_int(originalSize.height(), displayWidth, displayHeight);
        height = static_cast<guint64>(originalSize.height());
    }

    LOG_MEDIA_MESSAGE("Natural size: %" G_GUINT64_FORMAT "x%" G_GUINT64_FORMAT, width, height);
    m_videoSize = IntSize(static_cast<int>(width), static_cast<int>(height));
    return m_videoSize;
}
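naturalSize() turns the pixel aspect ratio reported in the caps into a display aspect ratio, reduces it by the greatest common divisor, and then stretches one dimension of the stored frame size. A minimal sketch of the same arithmetic with std::gcd, keeping the original height as the common case above does (plain ints, not the GStreamer/WebCore types):

#include <cstdio>
#include <numeric> // std::gcd (C++17)

// Compute the displayed size of a frame from its stored size and pixel aspect ratio,
// keeping the stored height and stretching the width by the display aspect ratio.
static void displaySize(long long width, long long height,
                        long long parNumerator, long long parDenominator,
                        long long* outWidth, long long* outHeight)
{
    long long darWidth = width * parNumerator;
    long long darHeight = height * parDenominator;
    const long long divisor = std::gcd(darWidth, darHeight); // reduce to avoid overflow later
    darWidth /= divisor;
    darHeight /= divisor;
    *outWidth = height * darWidth / darHeight;
    *outHeight = height;
}

int main()
{
    long long w = 0, h = 0;
    displaySize(720, 576, 16, 15, &w, &h); // anamorphic PAL: 720x576 with a 16:15 PAR
    std::printf("%lldx%lld\n", w, h);      // prints 768x576
    return 0;
}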
Example #28
void DrawingAreaImpl::display(UpdateInfo& updateInfo)
{
    ASSERT(!m_isPaintingSuspended);
    ASSERT(!m_layerTreeHost);
    ASSERT(!m_webPage->size().isEmpty());

    // FIXME: It would be better if we could avoid painting altogether when there is a custom representation.
    if (m_webPage->mainFrameHasCustomRepresentation()) {
        // ASSUMPTION: the custom representation will be painting the dirty region for us.
        m_dirtyRegion = Region();
        return;
    }

    m_webPage->layoutIfNeeded();

    // The layout may have put the page into accelerated compositing mode. If the LayerTreeHost is
    // in charge of displaying, we have nothing more to do.
    if (m_layerTreeHost)
        return;

    updateInfo.viewSize = m_webPage->size();
    updateInfo.deviceScaleFactor = m_webPage->corePage()->deviceScaleFactor();

    IntRect bounds = m_dirtyRegion.bounds();
    ASSERT(m_webPage->bounds().contains(bounds));

    IntSize bitmapSize = bounds.size();
    float deviceScaleFactor = m_webPage->corePage()->deviceScaleFactor();
    bitmapSize.scale(deviceScaleFactor);
    RefPtr<ShareableBitmap> bitmap = ShareableBitmap::createShareable(bitmapSize, ShareableBitmap::SupportsAlpha);
    if (!bitmap)
        return;

    if (!bitmap->createHandle(updateInfo.bitmapHandle))
        return;

    Vector<IntRect> rects = m_dirtyRegion.rects();

    if (shouldPaintBoundsRect(bounds, rects)) {
        rects.clear();
        rects.append(bounds);
    }

    updateInfo.scrollRect = m_scrollRect;
    updateInfo.scrollOffset = m_scrollOffset;

    m_dirtyRegion = Region();
    m_scrollRect = IntRect();
    m_scrollOffset = IntSize();

    OwnPtr<GraphicsContext> graphicsContext = createGraphicsContext(bitmap.get());
    graphicsContext->applyDeviceScaleFactor(deviceScaleFactor);
    
    updateInfo.updateRectBounds = bounds;

    graphicsContext->translate(-bounds.x(), -bounds.y());

    for (size_t i = 0; i < rects.size(); ++i) {
        m_webPage->drawRect(*graphicsContext, rects[i]);
        if (m_webPage->hasPageOverlay())
            m_webPage->drawPageOverlay(*graphicsContext, rects[i]);
        updateInfo.updateRects.append(rects[i]);
    }

    // Layout can trigger more calls to setNeedsDisplay and we don't want to process them
    // until the UI process has painted the update, so we stop the timer here.
    m_displayTimer.stop();
}
Example #29
void GraphicsContext::drawWindowsBitmap(WindowsBitmap* bitmap, const IntPoint& point)
{
    drawBitmapToContext(m_data, platformContext()->cr(), bitmap->windowsDIB(), IntSize(point.x(), bitmap->size().height() + point.y()));
}
Example #30
void GraphicsLayer::dumpProperties(TextStream& ts, int indent, LayerTreeAsTextBehavior behavior) const
{
    if (m_position != FloatPoint()) {
        writeIndent(ts, indent + 1);
        ts << "(position " << m_position.x() << " " << m_position.y() << ")\n";
    }

    if (m_boundsOrigin != FloatPoint()) {
        writeIndent(ts, indent + 1);
        ts << "(bounds origin " << m_boundsOrigin.x() << " " << m_boundsOrigin.y() << ")\n";
    }

    if (m_anchorPoint != FloatPoint3D(0.5f, 0.5f, 0)) {
        writeIndent(ts, indent + 1);
        ts << "(anchor " << m_anchorPoint.x() << " " << m_anchorPoint.y() << ")\n";
    }

    if (m_size != IntSize()) {
        writeIndent(ts, indent + 1);
        ts << "(bounds " << m_size.width() << " " << m_size.height() << ")\n";
    }

    if (m_opacity != 1) {
        writeIndent(ts, indent + 1);
        ts << "(opacity " << m_opacity << ")\n";
    }

#if ENABLE(CSS_COMPOSITING)
    if (m_blendMode != BlendModeNormal) {
        writeIndent(ts, indent + 1);
        ts << "(blendMode " << compositeOperatorName(CompositeSourceOver, m_blendMode) << ")\n";
    }
#endif

    if (m_usingTiledBacking) {
        writeIndent(ts, indent + 1);
        ts << "(usingTiledLayer " << m_usingTiledBacking << ")\n";
    }

    if (m_contentsOpaque) {
        writeIndent(ts, indent + 1);
        ts << "(contentsOpaque " << m_contentsOpaque << ")\n";
    }

    if (m_preserves3D) {
        writeIndent(ts, indent + 1);
        ts << "(preserves3D " << m_preserves3D << ")\n";
    }

    if (m_drawsContent && m_client.shouldDumpPropertyForLayer(this, "drawsContent")) {
        writeIndent(ts, indent + 1);
        ts << "(drawsContent " << m_drawsContent << ")\n";
    }

    if (!m_contentsVisible) {
        writeIndent(ts, indent + 1);
        ts << "(contentsVisible " << m_contentsVisible << ")\n";
    }

    if (!m_backfaceVisibility) {
        writeIndent(ts, indent + 1);
        ts << "(backfaceVisibility " << (m_backfaceVisibility ? "visible" : "hidden") << ")\n";
    }

    if (behavior & LayerTreeAsTextDebug) {
        writeIndent(ts, indent + 1);
        ts << "(primary-layer-id " << primaryLayerID() << ")\n";
        writeIndent(ts, indent + 1);
        ts << "(client " << static_cast<void*>(&m_client) << ")\n";
    }

    if (m_backgroundColor.isValid() && m_client.shouldDumpPropertyForLayer(this, "backgroundColor")) {
        writeIndent(ts, indent + 1);
        ts << "(backgroundColor " << m_backgroundColor.nameForRenderTreeAsText() << ")\n";
    }

    if (!m_transform.isIdentity()) {
        writeIndent(ts, indent + 1);
        ts << "(transform ";
        ts << "[" << m_transform.m11() << " " << m_transform.m12() << " " << m_transform.m13() << " " << m_transform.m14() << "] ";
        ts << "[" << m_transform.m21() << " " << m_transform.m22() << " " << m_transform.m23() << " " << m_transform.m24() << "] ";
        ts << "[" << m_transform.m31() << " " << m_transform.m32() << " " << m_transform.m33() << " " << m_transform.m34() << "] ";
        ts << "[" << m_transform.m41() << " " << m_transform.m42() << " " << m_transform.m43() << " " << m_transform.m44() << "])\n";
    }

    // Avoid dumping the sublayer transform on the root layer, because it's used for geometry flipping, whose behavior
    // differs between platforms.
    if (parent() && !m_childrenTransform.isIdentity()) {
        writeIndent(ts, indent + 1);
        ts << "(childrenTransform ";
        ts << "[" << m_childrenTransform.m11() << " " << m_childrenTransform.m12() << " " << m_childrenTransform.m13() << " " << m_childrenTransform.m14() << "] ";
        ts << "[" << m_childrenTransform.m21() << " " << m_childrenTransform.m22() << " " << m_childrenTransform.m23() << " " << m_childrenTransform.m24() << "] ";
        ts << "[" << m_childrenTransform.m31() << " " << m_childrenTransform.m32() << " " << m_childrenTransform.m33() << " " << m_childrenTransform.m34() << "] ";
        ts << "[" << m_childrenTransform.m41() << " " << m_childrenTransform.m42() << " " << m_childrenTransform.m43() << " " << m_childrenTransform.m44() << "])\n";
    }

    if (m_replicaLayer) {
        writeIndent(ts, indent + 1);
        ts << "(replica layer";
        if (behavior & LayerTreeAsTextDebug)
            ts << " " << m_replicaLayer;
        ts << ")\n";
        m_replicaLayer->dumpLayer(ts, indent + 2, behavior);
    }

    if (m_replicatedLayer) {
        writeIndent(ts, indent + 1);
        ts << "(replicated layer";
        if (behavior & LayerTreeAsTextDebug)
            ts << " " << m_replicatedLayer;
        ts << ")\n";
    }

    if (behavior & LayerTreeAsTextIncludeRepaintRects && repaintRectMap().contains(this) && !repaintRectMap().get(this).isEmpty() && m_client.shouldDumpPropertyForLayer(this, "repaintRects")) {
        writeIndent(ts, indent + 1);
        ts << "(repaint rects\n";
        for (size_t i = 0; i < repaintRectMap().get(this).size(); ++i) {
            if (repaintRectMap().get(this)[i].isEmpty())
                continue;
            writeIndent(ts, indent + 2);
            ts << "(rect ";
            ts << repaintRectMap().get(this)[i].x() << " ";
            ts << repaintRectMap().get(this)[i].y() << " ";
            ts << repaintRectMap().get(this)[i].width() << " ";
            ts << repaintRectMap().get(this)[i].height();
            ts << ")\n";
        }
        writeIndent(ts, indent + 1);
        ts << ")\n";
    }

    if (behavior & LayerTreeAsTextIncludePaintingPhases && paintingPhase()) {
        writeIndent(ts, indent + 1);
        ts << "(paintingPhases\n";
        if (paintingPhase() & GraphicsLayerPaintBackground) {
            writeIndent(ts, indent + 2);
            ts << "GraphicsLayerPaintBackground\n";
        }
        if (paintingPhase() & GraphicsLayerPaintForeground) {
            writeIndent(ts, indent + 2);
            ts << "GraphicsLayerPaintForeground\n";
        }
        if (paintingPhase() & GraphicsLayerPaintMask) {
            writeIndent(ts, indent + 2);
            ts << "GraphicsLayerPaintMask\n";
        }
        if (paintingPhase() & GraphicsLayerPaintOverflowContents) {
            writeIndent(ts, indent + 2);
            ts << "GraphicsLayerPaintOverflowContents\n";
        }
        if (paintingPhase() & GraphicsLayerPaintCompositedScroll) {
            writeIndent(ts, indent + 2);
            ts << "GraphicsLayerPaintCompositedScroll\n";
        }
        writeIndent(ts, indent + 1);
        ts << ")\n";
    }

    dumpAdditionalProperties(ts, indent, behavior);
    
    if (m_children.size()) {
        TextStream childrenStream;

        unsigned totalChildCount = 0;
        dumpChildren(childrenStream, m_children, totalChildCount, indent, behavior);

        writeIndent(childrenStream, indent + 1);
        childrenStream << ")\n";

        if (totalChildCount) {
            writeIndent(ts, indent + 1);
            ts << "(children " << totalChildCount << "\n";
            ts << childrenStream.release();
        }
    }
}