Code example #1
ImageBuffer* FilterEffect::createImageBufferResult()
{
    // Only one result type is allowed.
    ASSERT(!hasResult());
    ASSERT(isFilterSizeValid(m_absolutePaintRect));

    OwnPtr<ImageBufferSurface> surface;
    surface = adoptPtr(new UnacceleratedImageBufferSurface(m_absolutePaintRect.size()));
    m_imageBufferResult = ImageBuffer::create(surface.release());
    return m_imageBufferResult.get();
}
Code example #2
File: FilterEffect.cpp Project: ZeusbaseWeb/webkit
void FilterEffect::clearResultsRecursive()
{
    // Clear all results, regardless of whether the current effect has
    // a result. Can be used if an effect is in an erroneous state.
    if (hasResult())
        clearResult();

    unsigned size = m_inputEffects.size();
    for (unsigned i = 0; i < size; ++i)
        m_inputEffects.at(i).get()->clearResultsRecursive();
}
Code example #3
File: FilterEffect.cpp Project: windyuuy/opera
Uint8ClampedArray* FilterEffect::createPremultipliedImageResult()
{
    // Only one result type is allowed.
    ASSERT(!hasResult());
    ASSERT(isFilterSizeValid(m_absolutePaintRect));

    if (m_absolutePaintRect.isEmpty())
        return 0;
    m_premultipliedImageResult = Uint8ClampedArray::createUninitialized(m_absolutePaintRect.width() * m_absolutePaintRect.height() * 4);
    return m_premultipliedImageResult.get();
}
Code example #4
File: FEFlood.cpp Project: dslab-epfl/warr
void FEFlood::apply()
{
    if (hasResult())
        return;
    ImageBuffer* resultImage = createImageBufferResult();
    if (!resultImage)
        return;

    Color color = colorWithOverrideAlpha(floodColor().rgb(), floodOpacity());
    resultImage->context()->fillRect(FloatRect(FloatPoint(), absolutePaintRect().size()), color, ColorSpaceDeviceRGB);
}
Code example #5
File: FilterEffect.cpp Project: Xertz/EAWebKit
ByteArray* FilterEffect::createUnmultipliedImageResult()
{
    // Only one result type is allowed.
    ASSERT(!hasResult());
    ASSERT(isFilterSizeValid(m_absolutePaintRect));

    determineAbsolutePaintRect();
    if (m_absolutePaintRect.isEmpty())
        return 0;
    m_unmultipliedImageResult = ByteArray::create(m_absolutePaintRect.width() * m_absolutePaintRect.height() * 4);
    return m_unmultipliedImageResult.get();
}
Code example #6
File: FilterEffect.cpp Project: 1833183060/wke
ImageBuffer* FilterEffect::createImageBufferResult()
{
    // Only one result type is allowed.
    ASSERT(!hasResult());
    if (m_absolutePaintRect.isEmpty())
        return 0;
    m_imageBufferResult = ImageBuffer::create(m_absolutePaintRect.size(), ColorSpaceLinearRGB);
    if (!m_imageBufferResult)
        return 0;
    ASSERT(m_imageBufferResult->context());
    return m_imageBufferResult.get();
}
Code example #7
File: FilterEffect.cpp Project: sinoory/webv8
ImageBuffer* FilterEffect::createImageBufferResult()
{
    // Only one result type is allowed.
    ASSERT(!hasResult());
    if (m_absolutePaintRect.isEmpty())
        return 0;
    m_imageBufferResult = ImageBuffer::create(m_absolutePaintRect.size(), m_filter->filterScale(), m_resultColorSpace, m_filter->renderingMode());
    if (!m_imageBufferResult)
        return 0;
    ASSERT(m_imageBufferResult->context());
    return m_imageBufferResult.get();
}
Code example #8
File: FilterEffect.cpp Project: sinoory/webv8
Uint8ClampedArray* FilterEffect::createUnmultipliedImageResult()
{
    // Only one result type is allowed.
    ASSERT(!hasResult());
    ASSERT(isFilterSizeValid(m_absolutePaintRect));

    if (m_absolutePaintRect.isEmpty())
        return 0;
    IntSize resultSize(m_absolutePaintRect.size());
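    // The result is stored at the filter's scale factor, so size the backing array accordingly.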
    resultSize.scale(m_filter->filterScale());
    m_unmultipliedImageResult = Uint8ClampedArray::createUninitialized(resultSize.width() * resultSize.height() * 4);
    return m_unmultipliedImageResult.get();
}
Code example #9
File: FilterEffect.cpp Project: xiaoyanzheng/webkit
Uint8ClampedArray* FilterEffect::createPremultipliedImageResult()
{
    // Only one result type is allowed.
    ASSERT(!hasResult());
    if (m_absolutePaintRect.isEmpty())
        return nullptr;

    IntSize resultSize(m_absolutePaintRect.size());
    ASSERT(!ImageBuffer::sizeNeedsClamping(resultSize));
    resultSize.scale(m_filter.filterScale());
    m_premultipliedImageResult = Uint8ClampedArray::createUninitialized(resultSize.width() * resultSize.height() * 4);
    return m_premultipliedImageResult.get();
}
Code example #10
File: FETurbulence.cpp Project: Xertz/EAWebKit
void FETurbulence::apply()
{
    if (hasResult())
        return;
    ByteArray* pixelArray = createUnmultipliedImageResult();
    if (!pixelArray)
        return;

    if (absolutePaintRect().isEmpty())
        return;

    PaintingData paintingData(m_seed, roundedIntSize(filterPrimitiveSubregion().size()));
    initPaint(paintingData);

#if ENABLE(PARALLEL_JOBS)
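    // When parallel jobs are enabled, split the paint rect into horizontal bands and fill them on worker threads.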

    int optimalThreadNumber = (absolutePaintRect().width() * absolutePaintRect().height()) / s_minimalRectDimension;
    if (optimalThreadNumber > 1) {
        // Initialize parallel jobs
        ParallelJobs<FillRegionParameters> parallelJobs(&WebCore::FETurbulence::fillRegionWorker, optimalThreadNumber);

        // Fill the parameter array
        int i = parallelJobs.numberOfJobs();
        if (i > 1) {
            int startY = 0;
            int stepY = absolutePaintRect().height() / i;
            for (; i > 0; --i) {
                FillRegionParameters& params = parallelJobs.parameter(i-1);
                params.filter = this;
                params.pixelArray = pixelArray;
                params.paintingData = &paintingData;
                params.startY = startY;
                if (i != 1) {
                    params.endY = startY + stepY;
                    startY = startY + stepY;
                } else
                    params.endY = absolutePaintRect().height();
            }

            // Execute parallel jobs
            parallelJobs.execute();

            return;
        }
    }
    // Fall back to sequential mode if there is no room for a new thread or the paint area is too small

#endif // ENABLE(PARALLEL_JOBS)

    fillRegion(pixelArray, paintingData, 0, absolutePaintRect().height());
}
Code example #11
File: FilterEffect.cpp Project: 1833183060/wke
ImageBuffer* FilterEffect::asImageBuffer()
{
    if (!hasResult())
        return 0;
    if (m_imageBufferResult)
        return m_imageBufferResult.get();
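    // No cached ImageBuffer yet: create one and fill it from whichever pixel-array result is present.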
    m_imageBufferResult = ImageBuffer::create(m_absolutePaintRect.size(), ColorSpaceLinearRGB);
    IntRect destinationRect(IntPoint(), m_absolutePaintRect.size());
    if (m_premultipliedImageResult)
        m_imageBufferResult->putPremultipliedImageData(m_premultipliedImageResult.get(), destinationRect.size(), destinationRect, IntPoint());
    else
        m_imageBufferResult->putUnmultipliedImageData(m_unmultipliedImageResult.get(), destinationRect.size(), destinationRect, IntPoint());
    return m_imageBufferResult.get();
}
Code example #12
File: FilterEffect.cpp Project: xiaoyanzheng/webkit
ImageBuffer* FilterEffect::createImageBufferResult()
{
    // Only one result type is allowed.
    ASSERT(!hasResult());
    if (m_absolutePaintRect.isEmpty())
        return nullptr;

    FloatSize clampedSize = ImageBuffer::clampedSize(m_absolutePaintRect.size());
    m_imageBufferResult = ImageBuffer::create(clampedSize, m_filter.renderingMode(), m_filter.filterScale(), m_resultColorSpace);
    if (!m_imageBufferResult)
        return nullptr;

    return m_imageBufferResult.get();
}
Code example #13
File: FilterEffect.cpp Project: windyuuy/opera
ImageBuffer* FilterEffect::asImageBuffer()
{
    if (!hasResult())
        return 0;
    if (m_imageBufferResult)
        return m_imageBufferResult.get();
    m_imageBufferResult = ImageBuffer::create(m_absolutePaintRect.size(), 1, m_filter->renderingMode());
    IntRect destinationRect(IntPoint(), m_absolutePaintRect.size());
    if (m_premultipliedImageResult)
        m_imageBufferResult->putByteArray(Premultiplied, m_premultipliedImageResult.get(), destinationRect.size(), destinationRect, IntPoint());
    else
        m_imageBufferResult->putByteArray(Unmultiplied, m_unmultipliedImageResult.get(), destinationRect.size(), destinationRect, IntPoint());
    return m_imageBufferResult.get();
}
Code example #14
File: FEDisplacementMap.cpp Project: dslab-epfl/warr
void FEDisplacementMap::apply()
{
    if (hasResult())
        return;
    FilterEffect* in = inputEffect(0);
    FilterEffect* in2 = inputEffect(1);
    in->apply();
    in2->apply();
    if (!in->hasResult() || !in2->hasResult())
        return;

    if (m_xChannelSelector == CHANNEL_UNKNOWN || m_yChannelSelector == CHANNEL_UNKNOWN)
        return;

    ByteArray* dstPixelArray = createPremultipliedImageResult();
    if (!dstPixelArray)
        return;

    IntRect effectADrawingRect = requestedRegionOfInputImageData(in->absolutePaintRect());
    RefPtr<ByteArray> srcPixelArrayA = in->asPremultipliedImage(effectADrawingRect);

    IntRect effectBDrawingRect = requestedRegionOfInputImageData(in2->absolutePaintRect());
    RefPtr<ByteArray> srcPixelArrayB = in2->asUnmultipliedImage(effectBDrawingRect);

    ASSERT(srcPixelArrayA->length() == srcPixelArrayB->length());

    Filter* filter = this->filter();
    IntSize paintSize = absolutePaintRect().size();
    float scaleX = filter->applyHorizontalScale(m_scale / 255);
    float scaleY = filter->applyVerticalScale(m_scale / 255);
    float scaleAdjustmentX = filter->applyHorizontalScale(0.5f - 0.5f * m_scale);
    float scaleAdjustmentY = filter->applyVerticalScale(0.5f - 0.5f * m_scale);
    int stride = paintSize.width() * 4;
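    // The selected channels of the displacement map (srcPixelArrayB) offset the position at which
    // the first input (srcPixelArrayA) is sampled for each destination pixel.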
    for (int y = 0; y < paintSize.height(); ++y) {
        int line = y * stride;
        for (int x = 0; x < paintSize.width(); ++x) {
            int dstIndex = line + x * 4;
            int srcX = x + static_cast<int>(scaleX * srcPixelArrayB->get(dstIndex + m_xChannelSelector - 1) + scaleAdjustmentX);
            int srcY = y + static_cast<int>(scaleY * srcPixelArrayB->get(dstIndex + m_yChannelSelector - 1) + scaleAdjustmentY);
            for (unsigned channel = 0; channel < 4; ++channel) {
                if (srcX < 0 || srcX >= paintSize.width() || srcY < 0 || srcY >= paintSize.height())
                    dstPixelArray->set(dstIndex + channel, static_cast<unsigned char>(0));
                else {
                    unsigned char pixelValue = srcPixelArrayA->get(srcY * stride + srcX * 4 + channel);
                    dstPixelArray->set(dstIndex + channel, pixelValue);
                }
            }
        }
    }
}
Code example #15
File: FilterEffect.cpp Project: sinoory/webv8
void FilterEffect::applyAll()
{
    if (hasResult())
        return;
    FilterContextOpenCL* context = FilterContextOpenCL::context();
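    // Try the OpenCL path first; if it reports an error, discard all partial results and fall back to software.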
    if (context) {
        apply();
        if (!context->inError())
            return;
        clearResultsRecursive();
        context->destroyContext();
    }
    // Software code path.
    apply();
}
Code example #16
File: FilterEffect.cpp Project: 1833183060/wke
void FilterEffect::apply()
{
    if (hasResult())
        return;
    unsigned size = m_inputEffects.size();
    for (unsigned i = 0; i < size; ++i) {
        FilterEffect* in = m_inputEffects.at(i).get();
        in->apply();
        if (!in->hasResult())
            return;
    }
    determineAbsolutePaintRect();
    
    // Add platform specific apply functions here and return earlier.
    platformApplySoftware();
}
Code example #17
File: FilterEffect.cpp Project: Miaque/mojo
void FilterEffect::transformResultColorSpace(ColorSpace dstColorSpace)
{
    if (!hasResult() || dstColorSpace == m_resultColorSpace)
        return;

    // FIXME: We can avoid this potentially unnecessary ImageBuffer conversion by adding
    // color space transform support for the {pre,un}multiplied arrays.
    asImageBuffer()->transformColorSpace(m_resultColorSpace, dstColorSpace);

    m_resultColorSpace = dstColorSpace;

    if (m_unmultipliedImageResult)
        m_unmultipliedImageResult.clear();
    if (m_premultipliedImageResult)
        m_premultipliedImageResult.clear();
}
Code example #18
File: main.cpp Project: KubaO/stackoverflown
 void scan() {
     running = true;
     sockaddr_in addr;
     addr.sin_family = AF_INET;
     addr.sin_addr.s_addr = inet_addr("127.0.0.1");
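     // Probe every TCP port on 127.0.0.1 sequentially and report each outcome through the hasResult signal.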
     for (int i = 1; i < 65536 && !stop; ++i) {
         auto s = socket(AF_INET, SOCK_STREAM, 0);
         addr.sin_port = htons(i);
         auto con = ::connect(s, reinterpret_cast<sockaddr*>(&addr), sizeof(sockaddr));
         emit hasResult(i, con == 0);
         con == 0 ? ++open : ++closed;
         ++total;
         ::close(s);
     }
     emit done();
     running = false;
 }
Code example #19
void FEImage::apply()
{
    if (!m_image.get() || hasResult())
        return;

    ImageBuffer* resultImage = createImageBufferResult();
    if (!resultImage)
        return;

    FloatRect srcRect(FloatPoint(), m_image->size());
    FloatRect destRect(m_absoluteSubregion);
    m_preserveAspectRatio.transformRect(destRect, srcRect);

    IntPoint paintLocation = absolutePaintRect().location();
    destRect.move(-paintLocation.x(), -paintLocation.y());

    resultImage->context()->drawImage(m_image.get(), ColorSpaceDeviceRGB, destRect, srcRect);
}
Code example #20
File: FilterEffect.cpp Project: sinoory/webv8
ImageBuffer* FilterEffect::asImageBuffer()
{
    if (!hasResult())
        return 0;
    if (m_imageBufferResult)
        return m_imageBufferResult.get();
#if ENABLE(OPENCL)
    if (m_openCLImageResult)
        return openCLImageToImageBuffer();
#endif
    m_imageBufferResult = ImageBuffer::create(m_absolutePaintRect.size(), m_filter->filterScale(), m_resultColorSpace, m_filter->renderingMode());
    IntRect destinationRect(IntPoint(), m_absolutePaintRect.size());
    if (m_premultipliedImageResult)
        m_imageBufferResult->putByteArray(Premultiplied, m_premultipliedImageResult.get(), destinationRect.size(), destinationRect, IntPoint());
    else
        m_imageBufferResult->putByteArray(Unmultiplied, m_unmultipliedImageResult.get(), destinationRect.size(), destinationRect, IntPoint());
    return m_imageBufferResult.get();
}
Code example #21
File: FEBlend.cpp Project: Xertz/EAWebKit
void FEBlend::apply()
{
    if (hasResult())
        return;
    FilterEffect* in = inputEffect(0);
    FilterEffect* in2 = inputEffect(1);
    in->apply();
    in2->apply();
    if (!in->hasResult() || !in2->hasResult())
        return;

    ASSERT(m_mode > FEBLEND_MODE_UNKNOWN);
    ASSERT(m_mode <= FEBLEND_MODE_LIGHTEN);

    ByteArray* dstPixelArray = createPremultipliedImageResult();
    if (!dstPixelArray)
        return;

    IntRect effectADrawingRect = requestedRegionOfInputImageData(in->absolutePaintRect());
    RefPtr<ByteArray> srcPixelArrayA = in->asPremultipliedImage(effectADrawingRect);

    IntRect effectBDrawingRect = requestedRegionOfInputImageData(in2->absolutePaintRect());
    RefPtr<ByteArray> srcPixelArrayB = in2->asPremultipliedImage(effectBDrawingRect);

    // Keep synchronized with BlendModeType
    static const BlendType callEffect[] = {unknown, normal, multiply, screen, darken, lighten};

    unsigned pixelArrayLength = srcPixelArrayA->length();
    ASSERT(pixelArrayLength == srcPixelArrayB->length());
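    // Blend each RGB channel with the selected blend function; the result alpha is the
    // standard source-over combination of the two input alphas.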
    for (unsigned pixelOffset = 0; pixelOffset < pixelArrayLength; pixelOffset += 4) {
        unsigned char alphaA = srcPixelArrayA->get(pixelOffset + 3);
        unsigned char alphaB = srcPixelArrayB->get(pixelOffset + 3);
        for (unsigned channel = 0; channel < 3; ++channel) {
            unsigned char colorA = srcPixelArrayA->get(pixelOffset + channel);
            unsigned char colorB = srcPixelArrayB->get(pixelOffset + channel);

            unsigned char result = (*callEffect[m_mode])(colorA, colorB, alphaA, alphaB);
            dstPixelArray->set(pixelOffset + channel, result);
        }
        unsigned char alphaR = 255 - ((255 - alphaA) * (255 - alphaB)) / 255;
        dstPixelArray->set(pixelOffset + 3, alphaR);
    }
}
Code example #22
File: FilterEffect.cpp Project: Miaque/mojo
ImageBuffer* FilterEffect::asImageBuffer()
{
    if (!hasResult())
        return 0;
    if (m_imageBufferResult)
        return m_imageBufferResult.get();
    OwnPtr<ImageBufferSurface> surface;
    surface = adoptPtr(new UnacceleratedImageBufferSurface(m_absolutePaintRect.size()));
    m_imageBufferResult = ImageBuffer::create(surface.release());
    if (!m_imageBufferResult)
        return 0;

    IntRect destinationRect(IntPoint(), m_absolutePaintRect.size());
    if (m_premultipliedImageResult)
        m_imageBufferResult->putByteArray(Premultiplied, m_premultipliedImageResult.get(), destinationRect.size(), destinationRect, IntPoint());
    else
        m_imageBufferResult->putByteArray(Unmultiplied, m_unmultipliedImageResult.get(), destinationRect.size(), destinationRect, IntPoint());
    return m_imageBufferResult.get();
}
Code example #23
void FEOffset::apply()
{
    if (hasResult())
        return;
    FilterEffect* in = inputEffect(0);
    in->apply();
    if (!in->hasResult())
        return;

    ImageBuffer* resultImage = createImageBufferResult();
    if (!resultImage)
        return;

    setIsAlphaImage(in->isAlphaImage());

    FloatRect drawingRegion = drawingRegionOfInputImage(in->absolutePaintRect());
    Filter* filter = this->filter();
    drawingRegion.move(filter->applyHorizontalScale(m_dx), filter->applyVerticalScale(m_dy));
    resultImage->context()->drawImageBuffer(in->asImageBuffer(), ColorSpaceDeviceRGB, drawingRegion);
}
Code example #24
File: FilterEffect.cpp Project: xiaoyanzheng/webkit
void FilterEffect::transformResultColorSpace(ColorSpace dstColorSpace)
{
#if USE(CG)
    // CG handles color space adjustments internally.
    UNUSED_PARAM(dstColorSpace);
#else
    if (!hasResult() || dstColorSpace == m_resultColorSpace)
        return;

    // FIXME: We can avoid this potentially unnecessary ImageBuffer conversion by adding
    // color space transform support for the {pre,un}multiplied arrays.
    asImageBuffer()->transformColorSpace(m_resultColorSpace, dstColorSpace);

    m_resultColorSpace = dstColorSpace;

    if (m_unmultipliedImageResult)
        m_unmultipliedImageResult = nullptr;
    if (m_premultipliedImageResult)
        m_premultipliedImageResult = nullptr;
#endif
}
Code example #25
void FEColorMatrix::apply()
{
    if (hasResult())
        return;
    FilterEffect* in = inputEffect(0);
    in->apply();
    if (!in->hasResult())
        return;

    ImageBuffer* resultImage = createImageBufferResult();
    if (!resultImage)
        return;

    resultImage->context()->drawImageBuffer(in->asImageBuffer(), ColorSpaceDeviceRGB, drawingRegionOfInputImage(in->absolutePaintRect()));

    IntRect imageRect(IntPoint(), absolutePaintRect().size());
    RefPtr<ImageData> imageData = resultImage->getUnmultipliedImageData(imageRect);
    ByteArray* pixelArray = imageData->data()->data();

    switch (m_type) {
        case FECOLORMATRIX_TYPE_UNKNOWN:
            break;
        case FECOLORMATRIX_TYPE_MATRIX:
            effectType<FECOLORMATRIX_TYPE_MATRIX>(pixelArray, m_values);
            break;
        case FECOLORMATRIX_TYPE_SATURATE: 
            effectType<FECOLORMATRIX_TYPE_SATURATE>(pixelArray, m_values);
            break;
        case FECOLORMATRIX_TYPE_HUEROTATE:
            effectType<FECOLORMATRIX_TYPE_HUEROTATE>(pixelArray, m_values);
            break;
        case FECOLORMATRIX_TYPE_LUMINANCETOALPHA:
            effectType<FECOLORMATRIX_TYPE_LUMINANCETOALPHA>(pixelArray, m_values);
            setIsAlphaImage(true);
            break;
    }

    resultImage->putUnmultipliedImageData(imageData.get(), imageRect, IntPoint());
}
Code example #26
void FilterEffect::copyUnmultipliedImage(Uint8ClampedArray* destination, const IntRect& rect)
{
    ASSERT(hasResult());

    if (!m_unmultipliedImageResult) {
        // We prefer a conversion from the image buffer.
        if (m_imageBufferResult)
            m_unmultipliedImageResult = m_imageBufferResult->getUnmultipliedImageData(IntRect(IntPoint(), m_absolutePaintRect.size()));
        else {
            IntSize inputSize(m_absolutePaintRect.size());
            ASSERT(!ImageBuffer::sizeNeedsClamping(inputSize));
            inputSize.scale(m_filter.filterScale());
            m_unmultipliedImageResult = Uint8ClampedArray::createUninitialized(inputSize.width() * inputSize.height() * 4);
            if (!m_unmultipliedImageResult) {
                WTFLogAlways("FilterEffect::copyUnmultipliedImage Unable to create buffer. Requested size was %d x %d\n", inputSize.width(), inputSize.height());
                return;
            }
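            // No image buffer to convert from: unpremultiply the premultiplied result channel by channel.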
            unsigned char* sourceComponent = m_premultipliedImageResult->data();
            unsigned char* destinationComponent = m_unmultipliedImageResult->data();
            unsigned char* end = sourceComponent + (inputSize.width() * inputSize.height() * 4);
            while (sourceComponent < end) {
                int alpha = sourceComponent[3];
                if (alpha) {
                    destinationComponent[0] = static_cast<int>(sourceComponent[0]) * 255 / alpha;
                    destinationComponent[1] = static_cast<int>(sourceComponent[1]) * 255 / alpha;
                    destinationComponent[2] = static_cast<int>(sourceComponent[2]) * 255 / alpha;
                } else {
                    destinationComponent[0] = 0;
                    destinationComponent[1] = 0;
                    destinationComponent[2] = 0;
                }
                destinationComponent[3] = alpha;
                sourceComponent += 4;
                destinationComponent += 4;
            }
        }
    }
    copyImageBytes(m_unmultipliedImageResult.get(), destination, rect);
}
Code example #27
void FEMerge::apply()
{
    if (hasResult())
        return;
    unsigned size = numberOfEffectInputs();
    ASSERT(size > 0);
    for (unsigned i = 0; i < size; ++i) {
        FilterEffect* in = inputEffect(i);
        in->apply();
        if (!in->hasResult())
            return;
    }

    ImageBuffer* resultImage = createImageBufferResult();
    if (!resultImage)
        return;

    GraphicsContext* filterContext = resultImage->context();
    for (unsigned i = 0; i < size; ++i) {
        FilterEffect* in = inputEffect(i);
        filterContext->drawImageBuffer(in->asImageBuffer(), ColorSpaceDeviceRGB, drawingRegionOfInputImage(in->absolutePaintRect()));
    }
}
Code example #28
void FEConvolveMatrix::apply()
{
    if (hasResult())
        return;
    FilterEffect* in = inputEffect(0);
    in->apply();
    if (!in->hasResult())
        return;

    ByteArray* resultImage;
    if (m_preserveAlpha)
        resultImage = createUnmultipliedImageResult();
    else
        resultImage = createPremultipliedImageResult();
    if (!resultImage)
        return;

    IntRect effectDrawingRect = requestedRegionOfInputImageData(in->absolutePaintRect());

    RefPtr<ByteArray> srcPixelArray;
    if (m_preserveAlpha)
        srcPixelArray = in->asUnmultipliedImage(effectDrawingRect);
    else
        srcPixelArray = in->asPremultipliedImage(effectDrawingRect);

    IntSize paintSize = absolutePaintRect().size();
    PaintingData paintingData;
    paintingData.srcPixelArray = srcPixelArray.get();
    paintingData.dstPixelArray = resultImage;
    paintingData.width = paintSize.width();
    paintingData.height = paintSize.height();
    paintingData.bias = m_bias * 255;

    // Drawing fully covered pixels
    int clipRight = paintSize.width() - m_kernelSize.width();
    int clipBottom = paintSize.height() - m_kernelSize.height();
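    // clipRight/clipBottom bound the interior region where the kernel fits entirely inside the paint rect;
    // pixels outside it are handled separately by setOuterPixels below.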

    if (clipRight >= 0 && clipBottom >= 0) {

#if ENABLE(PARALLEL_JOBS)
        int optimalThreadNumber = (absolutePaintRect().width() * absolutePaintRect().height()) / s_minimalRectDimension;
        if (optimalThreadNumber > 1) {
            ParallelJobs<InteriorPixelParameters> parallelJobs(&WebCore::FEConvolveMatrix::setInteriorPixelsWorker, optimalThreadNumber);
            const int numOfThreads = parallelJobs.numberOfJobs();
            const int heightPerThread = clipBottom / numOfThreads;
            int startY = 0;

            for (int job = 0; job < numOfThreads; ++job) {
                InteriorPixelParameters& param = parallelJobs.parameter(job);
                param.filter = this;
                param.paintingData = &paintingData;
                param.clipRight = clipRight;
                param.clipBottom = clipBottom;
                param.yStart = startY;
                if (job < numOfThreads - 1) {
                    startY += heightPerThread;
                    param.yEnd = startY - 1;
                } else
                    param.yEnd = clipBottom;
            }

            parallelJobs.execute();
        } else
            // Fall back to the default setInteriorPixels call.
#endif
        setInteriorPixels(paintingData, clipRight, clipBottom, 0, clipBottom);

        clipRight += m_targetOffset.x() + 1;
        clipBottom += m_targetOffset.y() + 1;
        if (m_targetOffset.y() > 0)
            setOuterPixels(paintingData, 0, 0, paintSize.width(), m_targetOffset.y());
        if (clipBottom < paintSize.height())
            setOuterPixels(paintingData, 0, clipBottom, paintSize.width(), paintSize.height());
        if (m_targetOffset.x() > 0)
            setOuterPixels(paintingData, 0, m_targetOffset.y(), m_targetOffset.x(), clipBottom);
        if (clipRight < paintSize.width())
            setOuterPixels(paintingData, clipRight, m_targetOffset.y(), paintSize.width(), clipBottom);
    } else {
        // Rare situation, not optimized for speed
        setOuterPixels(paintingData, 0, 0, paintSize.width(), paintSize.height());
    }
}
Code example #29
File: main.cpp Project: KubaO/stackoverflown
 IfLookup(QObject * parent = 0) : QObject(parent) {
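    // On timer expiry, abort the lookup and report a TimedOut result through the hasResult signal.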
    connect(&m_timer, &QTimer::timeout, this, [this]{
       abort();
       emit hasResult(TimedOut);
    });
 }