Example #1
void FEBlend::apply(Filter* filter)
{
    FilterEffect* in = inputEffect(0);
    FilterEffect* in2 = inputEffect(1);
    in->apply(filter);
    in2->apply(filter);
    if (!in->resultImage() || !in2->resultImage())
        return;

    if (m_mode == FEBLEND_MODE_UNKNOWN)
        return;

    if (!effectContext(filter))
        return;

    IntRect effectADrawingRect = requestedRegionOfInputImageData(in->absolutePaintRect());
    RefPtr<CanvasPixelArray> srcPixelArrayA(in->resultImage()->getPremultipliedImageData(effectADrawingRect)->data());

    IntRect effectBDrawingRect = requestedRegionOfInputImageData(in2->absolutePaintRect());
    RefPtr<CanvasPixelArray> srcPixelArrayB(in2->resultImage()->getPremultipliedImageData(effectBDrawingRect)->data());

    IntRect imageRect(IntPoint(), resultImage()->size());
    RefPtr<ImageData> imageData = ImageData::create(imageRect.width(), imageRect.height());

    // Keep synchronized with BlendModeType
    static const BlendType callEffect[] = {unknown, normal, multiply, screen, darken, lighten};

    ASSERT(srcPixelArrayA->length() == srcPixelArrayB->length());
    for (unsigned pixelOffset = 0; pixelOffset < srcPixelArrayA->length(); pixelOffset += 4) {
        unsigned char alphaA = srcPixelArrayA->get(pixelOffset + 3);
        unsigned char alphaB = srcPixelArrayB->get(pixelOffset + 3);
        for (unsigned channel = 0; channel < 3; ++channel) {
            unsigned char colorA = srcPixelArrayA->get(pixelOffset + channel);
            unsigned char colorB = srcPixelArrayB->get(pixelOffset + channel);

            unsigned char result = (*callEffect[m_mode])(colorA, colorB, alphaA, alphaB);
            imageData->data()->set(pixelOffset + channel, result);
        }
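        // Source-over alpha compositing in 0..255 fixed point: alphaR = alphaA + alphaB - alphaA * alphaB / 255.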
        unsigned char alphaR = 255 - ((255 - alphaA) * (255 - alphaB)) / 255;
        imageData->data()->set(pixelOffset + 3, alphaR);
    }

    resultImage()->putPremultipliedImageData(imageData.get(), imageRect, IntPoint());
}
Example #2
void FEComposite::platformApplySoftware()
{
    FilterEffect* in = inputEffect(0);
    FilterEffect* in2 = inputEffect(1);

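    // The arithmetic operator computes k1*i1*i2 + k2*i1 + k3*i2 + k4 per channel, so it needs
    // direct pixel access rather than a compositing draw.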
    if (m_type == FECOMPOSITE_OPERATOR_ARITHMETIC) {
        ByteArray* dstPixelArray = createPremultipliedImageResult();
        if (!dstPixelArray)
            return;

        IntRect effectADrawingRect = requestedRegionOfInputImageData(in->absolutePaintRect());
        RefPtr<ByteArray> srcPixelArray = in->asPremultipliedImage(effectADrawingRect);

        IntRect effectBDrawingRect = requestedRegionOfInputImageData(in2->absolutePaintRect());
        in2->copyPremultipliedImage(dstPixelArray, effectBDrawingRect);

        arithmetic(srcPixelArray.get(), dstPixelArray, m_k1, m_k2, m_k3, m_k4);
        return;
    }

    ImageBuffer* resultImage = createImageBufferResult();
    if (!resultImage)
        return;
    GraphicsContext* filterContext = resultImage->context();

    FloatRect srcRect = FloatRect(0, 0, -1, -1);
    switch (m_type) {
    case FECOMPOSITE_OPERATOR_OVER:
        filterContext->drawImageBuffer(in2->asImageBuffer(), ColorSpaceDeviceRGB, drawingRegionOfInputImage(in2->absolutePaintRect()));
        filterContext->drawImageBuffer(in->asImageBuffer(), ColorSpaceDeviceRGB, drawingRegionOfInputImage(in->absolutePaintRect()));
        break;
    case FECOMPOSITE_OPERATOR_IN: {
        GraphicsContextStateSaver stateSaver(*filterContext);
        filterContext->clipToImageBuffer(in2->asImageBuffer(), drawingRegionOfInputImage(in2->absolutePaintRect()));
        filterContext->drawImageBuffer(in->asImageBuffer(), ColorSpaceDeviceRGB, drawingRegionOfInputImage(in->absolutePaintRect()));
        break;
    }
    case FECOMPOSITE_OPERATOR_OUT:
        filterContext->drawImageBuffer(in->asImageBuffer(), ColorSpaceDeviceRGB, drawingRegionOfInputImage(in->absolutePaintRect()));
        filterContext->drawImageBuffer(in2->asImageBuffer(), ColorSpaceDeviceRGB, drawingRegionOfInputImage(in2->absolutePaintRect()), srcRect, CompositeDestinationOut);
        break;
    case FECOMPOSITE_OPERATOR_ATOP:
        filterContext->drawImageBuffer(in2->asImageBuffer(), ColorSpaceDeviceRGB, drawingRegionOfInputImage(in2->absolutePaintRect()));
        filterContext->drawImageBuffer(in->asImageBuffer(), ColorSpaceDeviceRGB, drawingRegionOfInputImage(in->absolutePaintRect()), srcRect, CompositeSourceAtop);
        break;
    case FECOMPOSITE_OPERATOR_XOR:
        filterContext->drawImageBuffer(in2->asImageBuffer(), ColorSpaceDeviceRGB, drawingRegionOfInputImage(in2->absolutePaintRect()));
        filterContext->drawImageBuffer(in->asImageBuffer(), ColorSpaceDeviceRGB, drawingRegionOfInputImage(in->absolutePaintRect()), srcRect, CompositeXOR);
        break;
    default:
        break;
    }
}
Example #3
void FEBlend::apply()
{
    if (hasResult())
        return;
    FilterEffect* in = inputEffect(0);
    FilterEffect* in2 = inputEffect(1);
    in->apply();
    in2->apply();
    if (!in->hasResult() || !in2->hasResult())
        return;

    ASSERT(m_mode > FEBLEND_MODE_UNKNOWN);
    ASSERT(m_mode <= FEBLEND_MODE_LIGHTEN);

    ByteArray* dstPixelArray = createPremultipliedImageResult();
    if (!dstPixelArray)
        return;

    IntRect effectADrawingRect = requestedRegionOfInputImageData(in->absolutePaintRect());
    RefPtr<ByteArray> srcPixelArrayA = in->asPremultipliedImage(effectADrawingRect);

    IntRect effectBDrawingRect = requestedRegionOfInputImageData(in2->absolutePaintRect());
    RefPtr<ByteArray> srcPixelArrayB = in2->asPremultipliedImage(effectBDrawingRect);

    // Keep synchronized with BlendModeType
    static const BlendType callEffect[] = {unknown, normal, multiply, screen, darken, lighten};

    unsigned pixelArrayLength = srcPixelArrayA->length();
    ASSERT(pixelArrayLength == srcPixelArrayB->length());
    for (unsigned pixelOffset = 0; pixelOffset < pixelArrayLength; pixelOffset += 4) {
        unsigned char alphaA = srcPixelArrayA->get(pixelOffset + 3);
        unsigned char alphaB = srcPixelArrayB->get(pixelOffset + 3);
        for (unsigned channel = 0; channel < 3; ++channel) {
            unsigned char colorA = srcPixelArrayA->get(pixelOffset + channel);
            unsigned char colorB = srcPixelArrayB->get(pixelOffset + channel);

            unsigned char result = (*callEffect[m_mode])(colorA, colorB, alphaA, alphaB);
            dstPixelArray->set(pixelOffset + channel, result);
        }
        unsigned char alphaR = 255 - ((255 - alphaA) * (255 - alphaB)) / 255;
        dstPixelArray->set(pixelOffset + 3, alphaR);
    }
}
Example #4
FloatRect FEGaussianBlur::determineAbsolutePaintRect(const FloatRect& originalRequestedRect)
{
    FloatRect requestedRect = originalRequestedRect;
    if (clipsToBounds())
        requestedRect.intersect(maxEffectRect());

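    // Map the requested rect back into input space (enlarged by the blur extent), resolve the
    // input's paint rect, then map that forward to get the region this effect actually produces.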
    FilterEffect* input = inputEffect(0);
    FloatRect inputRect = input->determineAbsolutePaintRect(mapRect(requestedRect, false));
    FloatRect outputRect = mapRect(inputRect, true);
    outputRect.intersect(requestedRect);
    addAbsolutePaintRect(outputRect);

    // Blur needs space for both input and output pixels in the paint area.
    // Input is also clipped to subregion.
    if (clipsToBounds())
        inputRect.intersect(maxEffectRect());
    addAbsolutePaintRect(inputRect);
    return outputRect;
}
Example #5
void FEDropShadow::platformApplySoftware()
{
    FilterEffect* in = inputEffect(0);

    ImageBuffer* resultImage = createImageBufferResult();
    if (!resultImage)
        return;

    Filter& filter = this->filter();
    FloatSize blurRadius(2 * filter.applyHorizontalScale(m_stdX), 2 * filter.applyVerticalScale(m_stdY));
    blurRadius.scale(filter.filterScale());
    FloatSize offset(filter.applyHorizontalScale(m_dx), filter.applyVerticalScale(m_dy));

    FloatRect drawingRegion = drawingRegionOfInputImage(in->absolutePaintRect());
    FloatRect drawingRegionWithOffset(drawingRegion);
    drawingRegionWithOffset.move(offset);

    ImageBuffer* sourceImage = in->asImageBuffer();
    if (!sourceImage)
        return;

    GraphicsContext& resultContext = resultImage->context();
    resultContext.setAlpha(m_shadowOpacity);
    resultContext.drawImageBuffer(*sourceImage, drawingRegionWithOffset);
    resultContext.setAlpha(1);

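    // Blur the offset copy in place, then use CompositeSourceIn to replace its color with the
    // shadow color before compositing the original source image.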
    ShadowBlur contextShadow(blurRadius, offset, m_shadowColor);

    // TODO: Direct pixel access to ImageBuffer would avoid copying the ImageData.
    IntRect shadowArea(IntPoint(), resultImage->internalSize());
    RefPtr<Uint8ClampedArray> srcPixelArray = resultImage->getPremultipliedImageData(shadowArea, ImageBuffer::BackingStoreCoordinateSystem);

    contextShadow.blurLayerImage(srcPixelArray->data(), shadowArea.size(), 4 * shadowArea.size().width());

    resultImage->putByteArray(Premultiplied, srcPixelArray.get(), shadowArea.size(), shadowArea, IntPoint(), ImageBuffer::BackingStoreCoordinateSystem);

    resultContext.setCompositeOperation(CompositeSourceIn);
    resultContext.fillRect(FloatRect(FloatPoint(), absolutePaintRect().size()), m_shadowColor);
    resultContext.setCompositeOperation(CompositeDestinationOver);

    resultImage->context().drawImageBuffer(*sourceImage, drawingRegion);
}
Example #6
void FilterEffect::apply()
{
    if (hasResult())
        return;
    unsigned size = m_inputEffects.size();
    for (unsigned i = 0; i < size; ++i) {
        FilterEffect* in = m_inputEffects.at(i).get();
        in->apply();
        if (!in->hasResult())
            return;
    }
    determineAbsolutePaintRect();
    
    // Add platform-specific apply functions here and return early.
#if USE(SKIA)
    if (platformApplySkia())
        return;
#endif
    platformApplySoftware();
}
static void paintFilteredContent(const LayoutObject& object, GraphicsContext& context, FilterData* filterData)
{
    ASSERT(filterData->m_state == FilterData::ReadyToPaint);
    ASSERT(filterData->filter->sourceGraphic());

    filterData->m_state = FilterData::PaintingFilter;

    SkiaImageFilterBuilder builder;
    RefPtr<SkImageFilter> imageFilter = builder.build(filterData->filter->lastEffect(), ColorSpaceDeviceRGB);
    FloatRect boundaries = filterData->filter->filterRegion();
    context.save();

    // Clip drawing of filtered image to the minimum required paint rect.
    FilterEffect* lastEffect = filterData->filter->lastEffect();
    context.clipRect(lastEffect->determineAbsolutePaintRect(lastEffect->maxEffectRect()));

#ifdef CHECK_CTM_FOR_TRANSFORMED_IMAGEFILTER
    // TODO: Remove this workaround once skew/rotation support is added in Skia
    // (https://code.google.com/p/skia/issues/detail?id=3288, crbug.com/446935).
    // If the CTM contains rotation or shearing, apply the filter to
    // the unsheared/unrotated matrix, and do the shearing/rotation
    // as a final pass.
    AffineTransform ctm = SVGLayoutSupport::deprecatedCalculateTransformToLayer(&object);
    if (ctm.b() || ctm.c()) {
        AffineTransform scaleAndTranslate;
        scaleAndTranslate.translate(ctm.e(), ctm.f());
        scaleAndTranslate.scale(ctm.xScale(), ctm.yScale());
        ASSERT(scaleAndTranslate.isInvertible());
        AffineTransform shearAndRotate = scaleAndTranslate.inverse();
        shearAndRotate.multiply(ctm);
        context.concatCTM(shearAndRotate.inverse());
        imageFilter = builder.buildTransform(shearAndRotate, imageFilter.get());
    }
#endif

    context.beginLayer(1, SkXfermode::kSrcOver_Mode, &boundaries, ColorFilterNone, imageFilter.get());
    context.endLayer();
    context.restore();

    filterData->m_state = FilterData::ReadyToPaint;
}
bool FEDisplacementMap::applySkia()
{
    // For now, only use the skia implementation for accelerated rendering.
    if (!filter()->isAccelerated())
        return false;

    FilterEffect* in = inputEffect(0);
    FilterEffect* in2 = inputEffect(1);

    if (!in || !in2)
        return false;

    ImageBuffer* resultImage = createImageBufferResult();
    if (!resultImage)
        return false;

    RefPtr<Image> color = in->asImageBuffer()->copyImage(DontCopyBackingStore);
    RefPtr<Image> displ = in2->asImageBuffer()->copyImage(DontCopyBackingStore);

    RefPtr<NativeImageSkia> colorNativeImage = color->nativeImageForCurrentFrame();
    RefPtr<NativeImageSkia> displNativeImage = displ->nativeImageForCurrentFrame();

    if (!colorNativeImage || !displNativeImage)
        return false;

    SkBitmap colorBitmap = colorNativeImage->bitmap();
    SkBitmap displBitmap = displNativeImage->bitmap();

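    // Wrap both inputs as SkBitmapSource filters and let Skia's SkDisplacementMapEffect perform the displacement.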
    SkAutoTUnref<SkImageFilter> colorSource(new SkBitmapSource(colorBitmap));
    SkAutoTUnref<SkImageFilter> displSource(new SkBitmapSource(displBitmap));
    SkDisplacementMapEffect::ChannelSelectorType typeX = toSkiaMode(m_xChannelSelector);
    SkDisplacementMapEffect::ChannelSelectorType typeY = toSkiaMode(m_yChannelSelector);
    // FIXME: Only applyHorizontalScale is used and applyVerticalScale is ignored.
    // This can be fixed by adding a second scale parameter to SkDisplacementMapEffect.
    SkAutoTUnref<SkImageFilter> displEffect(new SkDisplacementMapEffect(
        typeX, typeY, SkFloatToScalar(filter()->applyHorizontalScale(m_scale)), displSource, colorSource));
    SkPaint paint;
    paint.setImageFilter(displEffect);
    resultImage->context()->drawBitmap(colorBitmap, 0, 0, &paint);
    return true;
}
Example #9
void FELighting::applySoftware()
{
    FilterEffect* in = inputEffect(0);

    Uint8ClampedArray* srcPixelArray = createPremultipliedImageResult();
    if (!srcPixelArray)
        return;

    setIsAlphaImage(false);

    IntRect effectDrawingRect = requestedRegionOfInputImageData(in->absolutePaintRect());
    in->copyPremultipliedImage(srcPixelArray, effectDrawingRect);

    // FIXME: Support kernelUnitLengths other than (1,1). The issue is that the W3C
    // standard has no test case for them, and other browsers (like Firefox) have strange
    // output for various kernelUnitLengths, so I am not sure they are reliable.
    // Anyway, feConvolveMatrix should also use this implementation.

    IntSize absolutePaintSize = absolutePaintRect().size();
    drawLighting(srcPixelArray, absolutePaintSize.width(), absolutePaintSize.height());
}
Example #10
// This function will be made pure virtual once all filters have landed.
bool FilterEffect::platformApplyOpenCL()
{
    if (!FilterContextOpenCL::context())
        return false;

    unsigned size = m_inputEffects.size();
    for (unsigned i = 0; i < size; ++i) {
        FilterEffect* in = m_inputEffects.at(i).get();
        // The software code path expects that at least one of the following fields is valid.
        if (!in->m_imageBufferResult && !in->m_unmultipliedImageResult && !in->m_premultipliedImageResult)
            in->asImageBuffer();
    }

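    // Fall back to the software path, then upload its result into an OpenCL image.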
    platformApplySoftware();
    ImageBuffer* sourceImage = asImageBuffer();
    if (sourceImage) {
        RefPtr<Uint8ClampedArray> sourceImageData = sourceImage->getUnmultipliedImageData(IntRect(IntPoint(), sourceImage->internalSize()));
        createOpenCLImageResult(sourceImageData->data());
    }
    return true;
}
Example #11
bool FEBlend::applySoftwareNEON()
{
    if (m_mode != WebBlendModeNormal
        && m_mode != WebBlendModeMultiply
        && m_mode != WebBlendModeScreen
        && m_mode != WebBlendModeDarken
        && m_mode != WebBlendModeLighten)
        return false;

    Uint8ClampedArray* dstPixelArray = createPremultipliedImageResult();
    if (!dstPixelArray)
        return true;

    FilterEffect* in = inputEffect(0);
    FilterEffect* in2 = inputEffect(1);

    IntRect effectADrawingRect = requestedRegionOfInputImageData(in->absolutePaintRect());
    RefPtr<Uint8ClampedArray> srcPixelArrayA = in->asPremultipliedImage(effectADrawingRect);

    IntRect effectBDrawingRect = requestedRegionOfInputImageData(in2->absolutePaintRect());
    RefPtr<Uint8ClampedArray> srcPixelArrayB = in2->asPremultipliedImage(effectBDrawingRect);

    unsigned pixelArrayLength = srcPixelArrayA->length();
    ASSERT(pixelArrayLength == srcPixelArrayB->length());

    if (pixelArrayLength >= 8) {
        platformApplyNEON(srcPixelArrayA->data(), srcPixelArrayB->data(), dstPixelArray->data(), pixelArrayLength);
    } else {
        // If there is just one pixel we expand it to two.
        ASSERT(pixelArrayLength > 0);
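        // platformApplyNEON processes 8-byte (two-pixel) chunks, so pad the single pixel out to two.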
        uint32_t sourceA[2] = {0, 0};
        uint32_t sourceBAndDest[2] = {0, 0};

        sourceA[0] = reinterpret_cast<uint32_t*>(srcPixelArrayA->data())[0];
        sourceBAndDest[0] = reinterpret_cast<uint32_t*>(srcPixelArrayB->data())[0];
        platformApplyNEON(reinterpret_cast<uint8_t*>(sourceA), reinterpret_cast<uint8_t*>(sourceBAndDest), reinterpret_cast<uint8_t*>(sourceBAndDest), 8);
        reinterpret_cast<uint32_t*>(dstPixelArray->data())[0] = sourceBAndDest[0];
    }
    return true;
}
void FECustomFilter::platformApplySoftware()
{
    ByteArray* dstPixelArray = createPremultipliedImageResult();
    if (!dstPixelArray)
        return;

    FilterEffect* in = inputEffect(0);
    IntRect effectDrawingRect = requestedRegionOfInputImageData(in->absolutePaintRect());
    RefPtr<ByteArray> srcPixelArray = in->asPremultipliedImage(effectDrawingRect);
    
    IntSize newContextSize(effectDrawingRect.size());
    bool hadContext = m_context;
    if (!m_context)
        initializeContext(newContextSize);
    
    if (!hadContext || m_contextSize != newContextSize)
        resizeContext(newContextSize);
    
    // Do not draw the filter if the input image cannot fit inside a single GPU texture.
    if (m_inputTexture->tiles().numTilesX() != 1 || m_inputTexture->tiles().numTilesY() != 1)
        return;
    
    // The shader had compiler errors. We cannot draw anything.
    if (!m_shader->isInitialized())
        return;
    
    m_context->clearColor(0, 0, 0, 0);
    m_context->clear(GraphicsContext3D::COLOR_BUFFER_BIT | GraphicsContext3D::DEPTH_BUFFER_BIT);
    
    bindProgramAndBuffers(srcPixelArray.get());
    
    m_context->drawElements(GraphicsContext3D::TRIANGLES, m_mesh->indicesCount(), GraphicsContext3D::UNSIGNED_SHORT, 0);
    
    m_drawingBuffer->commit();

    RefPtr<ImageData> imageData = m_context->paintRenderingResultsToImageData(m_drawingBuffer.get());
    ByteArray* gpuResult = imageData->data()->data();
    ASSERT(gpuResult->length() == dstPixelArray->length());
    memcpy(dstPixelArray->data(), gpuResult->data(), gpuResult->length());
}
Example #13
void FEDropShadow::platformApplySoftware()
{
    FilterEffect* in = inputEffect(0);

    ImageBuffer* resultImage = createImageBufferResult();
    if (!resultImage)
        return;

    Filter* filter = this->filter();
    FloatSize blurRadius(filter->applyHorizontalScale(m_stdX), filter->applyVerticalScale(m_stdY));
    FloatSize offset(filter->applyHorizontalScale(m_dx), filter->applyVerticalScale(m_dy));

    FloatRect drawingRegion = drawingRegionOfInputImage(in->absolutePaintRect());
    FloatRect drawingRegionWithOffset(drawingRegion);
    drawingRegionWithOffset.move(offset);

    ImageBuffer* sourceImage = in->asImageBuffer();
    ASSERT(sourceImage);
    GraphicsContext* resultContext = resultImage->context();
    ASSERT(resultContext);
    resultContext->setAlpha(m_shadowOpacity);
    resultContext->drawImageBuffer(sourceImage, ColorSpaceDeviceRGB, drawingRegionWithOffset);
    resultContext->setAlpha(1);

    ShadowBlur contextShadow(blurRadius, offset, m_shadowColor, ColorSpaceDeviceRGB);

    // TODO: Direct pixel access to ImageBuffer would avoid copying the ImageData.
    IntRect shadowArea(IntPoint(), resultImage->size());
    RefPtr<ByteArray> srcPixelArray = resultImage->getPremultipliedImageData(shadowArea);

    contextShadow.blurLayerImage(srcPixelArray->data(), shadowArea.size(), 4 * shadowArea.size().width());

    resultImage->putPremultipliedImageData(srcPixelArray.get(), shadowArea.size(), shadowArea, IntPoint());

    resultContext->setCompositeOperation(CompositeSourceIn);
    resultContext->fillRect(FloatRect(FloatPoint(), absolutePaintRect().size()), m_shadowColor, ColorSpaceDeviceRGB);
    resultContext->setCompositeOperation(CompositeDestinationOver);

    resultImage->context()->drawImageBuffer(sourceImage, ColorSpaceDeviceRGB, drawingRegion);
}
Example #14
void FEMorphology::apply()
{
    if (hasResult())
        return;
    FilterEffect* in = inputEffect(0);
    in->apply();
    if (!in->hasResult())
        return;

    ByteArray* dstPixelArray = createPremultipliedImageResult();
    if (!dstPixelArray)
        return;

    setIsAlphaImage(in->isAlphaImage());
    if (m_radiusX <= 0 || m_radiusY <= 0)
        return;

    Filter* filter = this->filter();
    int radiusX = static_cast<int>(floorf(filter->applyHorizontalScale(m_radiusX)));
    int radiusY = static_cast<int>(floorf(filter->applyVerticalScale(m_radiusY)));

    IntRect effectDrawingRect = requestedRegionOfInputImageData(in->absolutePaintRect());
    RefPtr<ByteArray> srcPixelArray = in->asPremultipliedImage(effectDrawingRect);

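    // Clamp the radii so the morphology kernel never reaches past the input image bounds.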
    PaintingData paintingData;
    paintingData.srcPixelArray = srcPixelArray.get();
    paintingData.dstPixelArray = dstPixelArray;
    paintingData.width = effectDrawingRect.width();
    paintingData.height = effectDrawingRect.height();
    paintingData.radiusX = min(effectDrawingRect.width() - 1, radiusX);
    paintingData.radiusY = min(effectDrawingRect.height() - 1, radiusY);

    platformApply(&paintingData);
}
Example #15
void FEGaussianBlur::apply()
{
    if (hasResult())
        return;
    FilterEffect* in = inputEffect(0);
    in->apply();
    if (!in->hasResult())
        return;

    ByteArray* srcPixelArray = createPremultipliedImageResult();
    if (!srcPixelArray)
        return;

    setIsAlphaImage(in->isAlphaImage());

    IntRect effectDrawingRect = requestedRegionOfInputImageData(in->absolutePaintRect());
    in->copyPremultipliedImage(srcPixelArray, effectDrawingRect);

    if (!m_stdX && !m_stdY)
        return;

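    // Convert the standard deviations into box-blur kernel sizes; the Gaussian is approximated
    // with successive box blurs as described in the SVG specification.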
    unsigned kernelSizeX = 0;
    unsigned kernelSizeY = 0;
    calculateKernelSize(filter(), kernelSizeX, kernelSizeY, m_stdX, m_stdY);

    IntSize paintSize = absolutePaintRect().size();
    RefPtr<ByteArray> tmpImageData = ByteArray::create(paintSize.width() * paintSize.height() * 4);
    ByteArray* tmpPixelArray = tmpImageData.get();

    platformApply(srcPixelArray, tmpPixelArray, kernelSizeX, kernelSizeY, paintSize);
}
Example #16
void FECustomFilter::platformApplySoftware()
{
    Uint8ClampedArray* dstPixelArray = createPremultipliedImageResult();
    if (!dstPixelArray)
        return;

    FilterEffect* in = inputEffect(0);
    IntRect effectDrawingRect = requestedRegionOfInputImageData(in->absolutePaintRect());
    RefPtr<Uint8ClampedArray> srcPixelArray = in->asPremultipliedImage(effectDrawingRect);
    
    IntSize newContextSize(effectDrawingRect.size());
    bool hadContext = m_context;
    if (!m_context)
        initializeContext();
    
    if (!hadContext || m_contextSize != newContextSize)
        resizeContext(newContextSize);

    // Do not draw the filter if the input image cannot fit inside a single GPU texture.
    if (m_inputTexture->tiles().numTilesX() != 1 || m_inputTexture->tiles().numTilesY() != 1)
        return;
    
    // The shader had compiler errors. We cannot draw anything.
    if (!m_shader->isInitialized())
        return;

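    // Render the custom shader into the framebuffer and read the pixels back into the premultiplied result.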
    m_context->bindFramebuffer(GraphicsContext3D::FRAMEBUFFER, m_frameBuffer);
    m_context->viewport(0, 0, newContextSize.width(), newContextSize.height());
    
    m_context->clearColor(0, 0, 0, 0);
    m_context->clear(GraphicsContext3D::COLOR_BUFFER_BIT | GraphicsContext3D::DEPTH_BUFFER_BIT);
    
    bindProgramAndBuffers(srcPixelArray.get());
    
    m_context->drawElements(GraphicsContext3D::TRIANGLES, m_mesh->indicesCount(), GraphicsContext3D::UNSIGNED_SHORT, 0);
    
    ASSERT(static_cast<size_t>(newContextSize.width() * newContextSize.height() * 4) == dstPixelArray->length());
    m_context->readPixels(0, 0, newContextSize.width(), newContextSize.height(), GraphicsContext3D::RGBA, GraphicsContext3D::UNSIGNED_BYTE, dstPixelArray->data());
}
Example #17
void FEColorMatrix::apply()
{
    if (hasResult())
        return;
    FilterEffect* in = inputEffect(0);
    in->apply();
    if (!in->hasResult())
        return;

    ImageBuffer* resultImage = createImageBufferResult();
    if (!resultImage)
        return;

    resultImage->context()->drawImageBuffer(in->asImageBuffer(), ColorSpaceDeviceRGB, drawingRegionOfInputImage(in->absolutePaintRect()));

    IntRect imageRect(IntPoint(), absolutePaintRect().size());
    RefPtr<ImageData> imageData = resultImage->getUnmultipliedImageData(imageRect);
    ByteArray* pixelArray = imageData->data()->data();

    switch (m_type) {
        case FECOLORMATRIX_TYPE_UNKNOWN:
            break;
        case FECOLORMATRIX_TYPE_MATRIX:
            effectType<FECOLORMATRIX_TYPE_MATRIX>(pixelArray, m_values);
            break;
        case FECOLORMATRIX_TYPE_SATURATE: 
            effectType<FECOLORMATRIX_TYPE_SATURATE>(pixelArray, m_values);
            break;
        case FECOLORMATRIX_TYPE_HUEROTATE:
            effectType<FECOLORMATRIX_TYPE_HUEROTATE>(pixelArray, m_values);
            break;
        case FECOLORMATRIX_TYPE_LUMINANCETOALPHA:
            effectType<FECOLORMATRIX_TYPE_LUMINANCETOALPHA>(pixelArray, m_values);
            setIsAlphaImage(true);
            break;
    }

    resultImage->putUnmultipliedImageData(imageData.get(), imageRect, IntPoint());
}
bool FEComponentTransfer::platformApplySkia()
{
    FilterEffect* in = inputEffect(0);
    ImageBuffer* resultImage = createImageBufferResult();
    if (!resultImage)
        return false;

    RefPtr<Image> image = in->asImageBuffer()->copyImage(DontCopyBackingStore);
    NativeImageSkia* nativeImage = image->nativeImageForCurrentFrame();
    if (!nativeImage)
        return false;

    unsigned char rValues[256], gValues[256], bValues[256], aValues[256];
    getValues(rValues, gValues, bValues, aValues);

    SkPaint paint;
    paint.setColorFilter(SkTableColorFilter::CreateARGB(aValues, rValues, gValues, bValues))->unref();
    paint.setXfermodeMode(SkXfermode::kSrc_Mode);
    resultImage->context()->platformContext()->drawBitmap(nativeImage->bitmap(), 0, 0, &paint);

    return true;
}
void FEBlend::platformApplySoftware()
{
    FilterEffect* in = inputEffect(0);
    FilterEffect* in2 = inputEffect(1);

    ASSERT(m_mode > FEBLEND_MODE_UNKNOWN);
    ASSERT(m_mode <= FEBLEND_MODE_LIGHTEN);

    Uint8ClampedArray* dstPixelArray = createPremultipliedImageResult();
    if (!dstPixelArray)
        return;

    IntRect effectADrawingRect = requestedRegionOfInputImageData(in->absolutePaintRect());
    RefPtr<Uint8ClampedArray> srcPixelArrayA = in->asPremultipliedImage(effectADrawingRect);

    IntRect effectBDrawingRect = requestedRegionOfInputImageData(in2->absolutePaintRect());
    RefPtr<Uint8ClampedArray> srcPixelArrayB = in2->asPremultipliedImage(effectBDrawingRect);

    unsigned pixelArrayLength = srcPixelArrayA->length();
    ASSERT(pixelArrayLength == srcPixelArrayB->length());

#if HAVE(ARM_NEON_INTRINSICS)
    if (pixelArrayLength >= 8)
        platformApplyNEON(srcPixelArrayA->data(), srcPixelArrayB->data(), dstPixelArray->data(), pixelArrayLength);
    else { // If there is just one pixel we expand it to two.
        ASSERT(pixelArrayLength > 0);
        uint32_t sourceA[2] = {0, 0};
        uint32_t sourceBAndDest[2] = {0, 0};

        sourceA[0] = reinterpret_cast<uint32_t*>(srcPixelArrayA->data())[0];
        sourceBAndDest[0] = reinterpret_cast<uint32_t*>(srcPixelArrayB->data())[0];
        platformApplyNEON(reinterpret_cast<uint8_t*>(sourceA), reinterpret_cast<uint8_t*>(sourceBAndDest), reinterpret_cast<uint8_t*>(sourceBAndDest), 8);
        reinterpret_cast<uint32_t*>(dstPixelArray->data())[0] = sourceBAndDest[0];
    }
#else
    platformApplyGeneric(srcPixelArrayA->data(), srcPixelArrayB->data(), dstPixelArray->data(), pixelArrayLength);
#endif
}
Example #20
void FEComponentTransfer::platformApplySoftware()
{
    FilterEffect* in = inputEffect(0);

    Uint8ClampedArray* pixelArray = createUnmultipliedImageResult();
    if (!pixelArray)
        return;

    unsigned char rValues[256], gValues[256], bValues[256], aValues[256];
    getValues(rValues, gValues, bValues, aValues);
    unsigned char* tables[] = { rValues, gValues, bValues, aValues };

    IntRect drawingRect = requestedRegionOfInputImageData(in->absolutePaintRect());
    in->copyUnmultipliedImage(pixelArray, drawingRect);

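    // Remap each channel of every pixel through its 256-entry transfer-function lookup table.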
    unsigned pixelArrayLength = pixelArray->length();
    for (unsigned pixelOffset = 0; pixelOffset < pixelArrayLength; pixelOffset += 4) {
        for (unsigned channel = 0; channel < 4; ++channel) {
            unsigned char c = pixelArray->item(pixelOffset + channel);
            pixelArray->set(pixelOffset + channel, tables[channel][c]);
        }
    }
}
bool FEColorMatrix::applySkia()
{
    ImageBuffer* resultImage = createImageBufferResult();
    if (!resultImage)
        return false;

    FilterEffect* in = inputEffect(0);

    SkRect drawingRegion = drawingRegionOfInputImage(in->absolutePaintRect());

    SkAutoTUnref<SkColorFilter> filter(createColorFilter(m_type, m_values.data()));

    RefPtr<Image> image = in->asImageBuffer()->copyImage(DontCopyBackingStore);
    RefPtr<NativeImageSkia> nativeImage = image->nativeImageForCurrentFrame();
    if (!nativeImage)
        return false;

    SkPaint paint;
    paint.setColorFilter(filter);
    paint.setXfermodeMode(SkXfermode::kSrc_Mode);
    resultImage->context()->drawBitmap(nativeImage->bitmap(), drawingRegion.fLeft, drawingRegion.fTop, &paint);
    return true;
}
Example #22
void FEBlend::applySoftware()
{
#if HAVE(ARM_NEON_INTRINSICS)
    if (applySoftwareNEON())
        return;
#endif

    FilterEffect* in = inputEffect(0);
    FilterEffect* in2 = inputEffect(1);

    ImageBuffer* resultImage = createImageBufferResult();
    if (!resultImage)
        return;
    GraphicsContext* filterContext = resultImage->context();

    ImageBuffer* imageBuffer = in->asImageBuffer();
    ImageBuffer* imageBuffer2 = in2->asImageBuffer();
    ASSERT(imageBuffer);
    ASSERT(imageBuffer2);

    filterContext->drawImageBuffer(imageBuffer2, drawingRegionOfInputImage(in2->absolutePaintRect()));
    filterContext->drawImageBuffer(imageBuffer, drawingRegionOfInputImage(in->absolutePaintRect()), 0, CompositeSourceOver, m_mode);
}
Example #23
void FEMerge::apply()
{
    if (hasResult())
        return;
    unsigned size = numberOfEffectInputs();
    ASSERT(size > 0);
    for (unsigned i = 0; i < size; ++i) {
        FilterEffect* in = inputEffect(i);
        in->apply();
        if (!in->hasResult())
            return;
    }

    ImageBuffer* resultImage = createImageBufferResult();
    if (!resultImage)
        return;

    GraphicsContext* filterContext = resultImage->context();
    for (unsigned i = 0; i < size; ++i) {
        FilterEffect* in = inputEffect(i);
        filterContext->drawImageBuffer(in->asImageBuffer(), ColorSpaceDeviceRGB, drawingRegionOfInputImage(in->absolutePaintRect()));
    }
}
Example #24
void FETile::apply()
{
// FIXME: See bug 47315. This is a hack to work around a compile failure, but is incorrect behavior otherwise.
#if ENABLE(SVG)
    FilterEffect* in = inputEffect(0);
    in->apply();
    if (!in->resultImage())
        return;

    GraphicsContext* filterContext = effectContext();
    if (!filterContext)
        return;

    setIsAlphaImage(in->isAlphaImage());

    // The source input needs special handling: it has the size of the filterRegion but reports
    // the size of the clipped sourceImage. This is part of the specification and an optimization.
    FloatRect tileRect = in->maxEffectRect();
    FloatPoint inMaxEffectLocation = tileRect.location();
    FloatPoint maxEffectLocation = maxEffectRect().location();
    if (in->filterEffectType() == FilterEffectTypeSourceInput) {
        Filter* filter = this->filter();
        tileRect = filter->filterRegion();
        tileRect.scale(filter->filterResolution().width(), filter->filterResolution().height());
    }

    OwnPtr<ImageBuffer> tileImage;
    if (!SVGImageBufferTools::createImageBuffer(tileRect, tileRect, tileImage, ColorSpaceDeviceRGB))
        return;

    GraphicsContext* tileImageContext = tileImage->context();
    tileImageContext->translate(-inMaxEffectLocation.x(), -inMaxEffectLocation.y());
    tileImageContext->drawImageBuffer(in->resultImage(), ColorSpaceDeviceRGB, in->absolutePaintRect().location());

    RefPtr<Pattern> pattern = Pattern::create(tileImage->copyImage(), true, true);

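    // Position the first tile at the input's max-effect location, expressed relative to this
    // effect's max-effect rect.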
    AffineTransform patternTransform;
    patternTransform.translate(inMaxEffectLocation.x() - maxEffectLocation.x(), inMaxEffectLocation.y() - maxEffectLocation.y());
    pattern->setPatternSpaceTransform(patternTransform);
    filterContext->setFillPattern(pattern);
    filterContext->fillRect(FloatRect(FloatPoint(), absolutePaintRect().size()));
#endif
}
Example #25
void RenderSVGResourceFilter::postApplyResource(RenderObject* object, GraphicsContext*& context, unsigned short resourceMode)
{
    ASSERT(object);
    ASSERT(context);
#ifndef NDEBUG
    ASSERT(resourceMode == ApplyToDefaultMode);
#else
    UNUSED_PARAM(resourceMode);
#endif

    if (!m_filter.contains(object))
        return;

    FilterData* filterData = m_filter.get(object);
    if (!filterData->builded) {
        if (!filterData->savedContext) {
            removeClientFromCache(object);
            return;
        }

        context = filterData->savedContext;
        filterData->savedContext = 0;
#if !PLATFORM(CG)
        filterData->sourceGraphicBuffer->transformColorSpace(DeviceRGB, LinearRGB);
#endif
    }

    FilterEffect* lastEffect = filterData->builder->lastEffect();
    
    if (lastEffect && !filterData->boundaries.isEmpty() && !lastEffect->subRegion().isEmpty()) {
        // This is where the object is actually filtered; it only needs to happen during the
        // initial filtering pass. On subsequent draws we reuse the stored filter result.
        if (!filterData->builded) {
            filterData->filter->setSourceImage(filterData->sourceGraphicBuffer.release());
            lastEffect->apply(filterData->filter.get());
#if !PLATFORM(CG)
            ImageBuffer* resultImage = lastEffect->resultImage();
            if (resultImage)
                resultImage->transformColorSpace(LinearRGB, DeviceRGB);
#endif
            filterData->builded = true;
        }

        ImageBuffer* resultImage = lastEffect->resultImage();
        if (resultImage)
            context->drawImageBuffer(resultImage, object->style()->colorSpace(), lastEffect->subRegion());
    }

    filterData->sourceGraphicBuffer.clear();
}
Example #26
void FEOffset::apply()
{
    FilterEffect* in = inputEffect(0);
    in->apply();
    if (!in->resultImage())
        return;

    GraphicsContext* filterContext = effectContext();
    if (!filterContext)
        return;

    setIsAlphaImage(in->isAlphaImage());

    FloatRect drawingRegion = drawingRegionOfInputImage(in->absolutePaintRect());
    Filter* filter = this->filter();
    drawingRegion.move(filter->applyHorizontalScale(m_dx), filter->applyVerticalScale(m_dy));
    filterContext->drawImageBuffer(in->resultImage(), ColorSpaceDeviceRGB, drawingRegion);
}
Example #27
void FETile::platformApplySoftware()
{
// FIXME: See bug 47315. This is a hack to work around a compile failure, but is incorrect behavior otherwise.
    FilterEffect* in = inputEffect(0);

    ImageBuffer* resultImage = createImageBufferResult();
    ImageBuffer* inBuffer = in->asImageBuffer();
    if (!resultImage || !inBuffer)
        return;

    setIsAlphaImage(in->isAlphaImage());

    // The source input needs special handling: it has the size of the filterRegion but reports
    // the size of the clipped sourceImage. This is part of the specification and an optimization.
    FloatRect tileRect = in->maxEffectRect();
    FloatPoint inMaxEffectLocation = tileRect.location();
    FloatPoint maxEffectLocation = maxEffectRect().location();
    if (in->filterEffectType() == FilterEffectTypeSourceInput) {
        Filter& filter = this->filter();
        tileRect = filter.filterRegion();
        tileRect.scale(filter.filterResolution().width(), filter.filterResolution().height());
    }

    auto tileImage = SVGRenderingContext::createImageBuffer(tileRect, tileRect, ColorSpaceSRGB, filter().renderingMode());
    if (!tileImage)
        return;

    GraphicsContext& tileImageContext = tileImage->context();
    tileImageContext.translate(-inMaxEffectLocation.x(), -inMaxEffectLocation.y());
    tileImageContext.drawImageBuffer(*inBuffer, in->absolutePaintRect().location());

    auto tileImageCopy = ImageBuffer::sinkIntoImage(WTFMove(tileImage));
    if (!tileImageCopy)
        return;

    auto pattern = Pattern::create(WTFMove(tileImageCopy), true, true);

    AffineTransform patternTransform;
    patternTransform.translate(inMaxEffectLocation.x() - maxEffectLocation.x(), inMaxEffectLocation.y() - maxEffectLocation.y());
    pattern.get().setPatternSpaceTransform(patternTransform);
    GraphicsContext& filterContext = resultImage->context();
    filterContext.setFillPattern(WTFMove(pattern));
    filterContext.fillRect(FloatRect(FloatPoint(), absolutePaintRect().size()));
}
void FEOffset::apply()
{
    if (hasResult())
        return;
    FilterEffect* in = inputEffect(0);
    in->apply();
    if (!in->hasResult())
        return;

    ImageBuffer* resultImage = createImageBufferResult();
    if (!resultImage)
        return;

    setIsAlphaImage(in->isAlphaImage());

    FloatRect drawingRegion = drawingRegionOfInputImage(in->absolutePaintRect());
    Filter* filter = this->filter();
    drawingRegion.move(filter->applyHorizontalScale(m_dx), filter->applyVerticalScale(m_dy));
    resultImage->context()->drawImageBuffer(in->asImageBuffer(), ColorSpaceDeviceRGB, drawingRegion);
}
Example #29
void FETile::applySoftware()
{
    FilterEffect* in = inputEffect(0);

    ImageBuffer* resultImage = createImageBufferResult();
    if (!resultImage)
        return;

    setIsAlphaImage(in->isAlphaImage());

    // The source input needs special handling: it has the size of the filterRegion but reports
    // the size of the clipped sourceImage. This is part of the specification and an optimization.
    FloatRect tileRect = in->maxEffectRect();
    FloatPoint inMaxEffectLocation = tileRect.location();
    FloatPoint maxEffectLocation = maxEffectRect().location();
    if (in->filterEffectType() == FilterEffectTypeSourceInput) {
        Filter* filter = this->filter();
        tileRect = filter->absoluteFilterRegion();
    }

    OwnPtr<ImageBufferSurface> surface;
    IntSize intTileSize = roundedIntSize(tileRect.size());
    surface = adoptPtr(new UnacceleratedImageBufferSurface(intTileSize));
    OwnPtr<ImageBuffer> tileImage = ImageBuffer::create(surface.release());
    if (!tileImage)
        return;

    GraphicsContext* tileImageContext = tileImage->context();
    tileImageContext->scale(FloatSize(intTileSize.width() / tileRect.width(), intTileSize.height() / tileRect.height()));
    tileImageContext->translate(-inMaxEffectLocation.x(), -inMaxEffectLocation.y());
    tileImageContext->drawImageBuffer(in->asImageBuffer(), in->absolutePaintRect().location());

    RefPtr<Pattern> pattern = Pattern::create(tileImage->copyImage(CopyBackingStore), true, true);

    AffineTransform patternTransform;
    patternTransform.translate(inMaxEffectLocation.x() - maxEffectLocation.x(), inMaxEffectLocation.y() - maxEffectLocation.y());
    pattern->setPatternSpaceTransform(patternTransform);
    GraphicsContext* filterContext = resultImage->context();
    filterContext->setFillPattern(pattern);
    filterContext->fillRect(FloatRect(FloatPoint(), absolutePaintRect().size()));
}
Example #30
void FETile::applySoftware()
{
    FilterEffect* in = inputEffect(0);

    ImageBuffer* resultImage = createImageBufferResult();
    if (!resultImage)
        return;

    setIsAlphaImage(in->isAlphaImage());

    // The source input needs special handling: it has the size of the filterRegion but reports
    // the size of the clipped sourceImage. This is part of the specification and an optimization.
    FloatRect tileRect = in->maxEffectRect();
    FloatPoint inMaxEffectLocation = tileRect.location();
    FloatPoint maxEffectLocation = maxEffectRect().location();
    if (in->filterEffectType() == FilterEffectTypeSourceInput) {
        Filter* filter = this->filter();
        tileRect = filter->absoluteFilterRegion();
        tileRect.scale(filter->filterResolution().width(), filter->filterResolution().height());
    }

    OwnPtr<ImageBuffer> tileImage;
    if (!SVGRenderingContext::createImageBufferForPattern(tileRect, tileRect, tileImage, ColorSpaceDeviceRGB, filter()->renderingMode()))
        return;

    GraphicsContext* tileImageContext = tileImage->context();
    tileImageContext->translate(-inMaxEffectLocation.x(), -inMaxEffectLocation.y());
    tileImageContext->drawImageBuffer(in->asImageBuffer(), ColorSpaceDeviceRGB, in->absolutePaintRect().location());

    RefPtr<Pattern> pattern = Pattern::create(tileImage->copyImage(CopyBackingStore), true, true);

    AffineTransform patternTransform;
    patternTransform.translate(inMaxEffectLocation.x() - maxEffectLocation.x(), inMaxEffectLocation.y() - maxEffectLocation.y());
    pattern->setPatternSpaceTransform(patternTransform);
    GraphicsContext* filterContext = resultImage->context();
    filterContext->setFillPattern(pattern);
    filterContext->fillRect(FloatRect(FloatPoint(), absolutePaintRect().size()));
}