void CanvasRenderingContext2D::putImageData(ImageData* data, float dx, float dy, float dirtyX, float dirtyY, 
                                            float dirtyWidth, float dirtyHeight, ExceptionCode& ec)
{
    if (!data) {
        ec = TYPE_MISMATCH_ERR;
        return;
    }
    if (!isfinite(dx) || !isfinite(dy) || !isfinite(dirtyX) || 
        !isfinite(dirtyY) || !isfinite(dirtyWidth) || !isfinite(dirtyHeight)) {
        ec = INDEX_SIZE_ERR;
        return;
    }

    ImageBuffer* buffer = m_canvas ? m_canvas->buffer() : 0;
    if (!buffer)
        return;

    if (dirtyWidth < 0) {
        dirtyX += dirtyWidth;
        dirtyWidth = -dirtyWidth;
    }

    if (dirtyHeight < 0) {
        dirtyY += dirtyHeight;
        dirtyHeight = -dirtyHeight;
    }

    FloatRect clipRect(dirtyX, dirtyY, dirtyWidth, dirtyHeight);
    clipRect.intersect(IntRect(0, 0, data->width(), data->height()));
    IntSize destOffset(static_cast<int>(dx), static_cast<int>(dy));
    IntRect sourceRect = enclosingIntRect(clipRect);
    sourceRect.move(destOffset);
    sourceRect.intersect(IntRect(IntPoint(), buffer->size()));
    if (sourceRect.isEmpty())
        return;
    willDraw(sourceRect);
    sourceRect.move(-destOffset);
    IntPoint destPoint(destOffset.width(), destOffset.height());
    
    buffer->putImageData(data, sourceRect, destPoint);
}
Example 2
void FETile::applySoftware()
{
    FilterEffect* in = inputEffect(0);

    ImageBuffer* resultImage = createImageBufferResult();
    if (!resultImage)
        return;

    setIsAlphaImage(in->isAlphaImage());

    // Source input needs more attention. It has the size of the filterRegion but reports the
    // size of the cut-out sourceImage. This is part of the specification and an optimization.
    FloatRect tileRect = in->maxEffectRect();
    FloatPoint inMaxEffectLocation = tileRect.location();
    FloatPoint maxEffectLocation = maxEffectRect().location();
    if (in->filterEffectType() == FilterEffectTypeSourceInput) {
        Filter* filter = this->filter();
        tileRect = filter->absoluteFilterRegion();
    }

    OwnPtr<ImageBufferSurface> surface;
    IntSize intTileSize = roundedIntSize(tileRect.size());
    surface = adoptPtr(new UnacceleratedImageBufferSurface(intTileSize));
    OwnPtr<ImageBuffer> tileImage = ImageBuffer::create(surface.release());
    if (!tileImage)
        return;

    GraphicsContext* tileImageContext = tileImage->context();
    tileImageContext->scale(FloatSize(intTileSize.width() / tileRect.width(), intTileSize.height() / tileRect.height()));
    tileImageContext->translate(-inMaxEffectLocation.x(), -inMaxEffectLocation.y());
    tileImageContext->drawImageBuffer(in->asImageBuffer(), in->absolutePaintRect().location());

    RefPtr<Pattern> pattern = Pattern::create(tileImage->copyImage(CopyBackingStore), true, true);

    AffineTransform patternTransform;
    patternTransform.translate(inMaxEffectLocation.x() - maxEffectLocation.x(), inMaxEffectLocation.y() - maxEffectLocation.y());
    pattern->setPatternSpaceTransform(patternTransform);
    GraphicsContext* filterContext = resultImage->context();
    filterContext->setFillPattern(pattern);
    filterContext->fillRect(FloatRect(FloatPoint(), absolutePaintRect().size()));
}
Example 3
Sprite* SpriteManager::getSprite(string path) {
    // Reuse an already-loaded image buffer if this path has been seen before.
    ImageBuffer* buf = nullptr;
    for(ImageBuffer* img_buffer : image_buffers_) {
        if(img_buffer->getPath() == path) {
            buf = img_buffer;
        }
    }
    // Cache miss: load the PPM file and keep the buffer for later requests.
    if(buf == nullptr) {
        buf = new ImageBuffer();
        if(!(buf->loadFromPPM(path))) {
            cerr << "Error while loading file: " << path << endl;
            delete buf;  // don't leak the buffer that failed to load
            return nullptr;
        }
        image_buffers_.push_back(buf);
    }
    // Each call hands out a new Sprite backed by the (possibly shared) buffer.
    Sprite* sprite = new Sprite(buf);
    sprites_.push_back(sprite);

    return sprite;
}
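A hypothetical call site for the example above; the path and the surrounding function are illustrative, and SpriteManager's construction and ownership rules are not shown here.

Sprite* loadLogo(SpriteManager& manager) {
    // Returns a Sprite backed by a cached buffer, or nullptr if the PPM could not be loaded.
    Sprite* logo = manager.getSprite("assets/logo.ppm");
    if(logo == nullptr) {
        // getSprite already reported the failure on cerr.
    }
    return logo;
}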
Example 4
 ImageBuffer operator()(const ImageBuffer& i1,
                        const ImageBuffer& i2,
                        f32 degree,
                        bool adapt,
                        f32 scale,
                        ImageBuffer o) {
   i32 n = 2;
   cl::Kernel& k = caches_[n - kMinLLArgs].get();
   
   i32 i = 0;
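   // Bind the kernel arguments in order: the two input images, the scalar parameters, then the output image.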
   k.setArg(i++, i1.mem());
   k.setArg(i++, i2.mem());
   k.setArg(i++, degree);
   k.setArg(i++, adapt);
   k.setArg(i++, scale);
   k.setArg(i++, o.mem());
   
   Enqueue(k, o);
   
   return o;
 }
Example 5
// This function will be changed to abstract virtual when all filters are landed.
bool FilterEffect::platformApplyOpenCL()
{
    if (!FilterContextOpenCL::context())
        return false;

    unsigned size = m_inputEffects.size();
    for (unsigned i = 0; i < size; ++i) {
        FilterEffect* in = m_inputEffects.at(i).get();
        // Software code path expects that at least one of the following fields is valid.
        if (!in->m_imageBufferResult && !in->m_unmultipliedImageResult && !in->m_premultipliedImageResult)
            in->asImageBuffer();
    }

    platformApplySoftware();
    ImageBuffer* sourceImage = asImageBuffer();
    if (sourceImage) {
        RefPtr<Uint8ClampedArray> sourceImageData = sourceImage->getUnmultipliedImageData(IntRect(IntPoint(), sourceImage->internalSize()));
        createOpenCLImageResult(sourceImageData->data());
    }
    return true;
}
Example 6
void AVSystem::copyFrameToImage(AVFrame* frameRGB, ImageBuffer<uint8_t>& image)
{
	if (image.width() != frameRGB->width or image.height() != frameRGB->height)
	{
		image.init(frameRGB->width, frameRGB->height);
	}

	parallel_for(0, frameRGB->height, [&](int y)
	{
		for (int x = 0; x < frameRGB->width; ++x)
		{
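			// 3 bytes per pixel (packed RGB); linesize[0] is the byte stride of one row.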
			int p = x * 3 + y * frameRGB->linesize[0];
			uint8_t r = frameRGB->data[0][p];
			uint8_t g = frameRGB->data[0][p+1];
			uint8_t b = frameRGB->data[0][p+2];
			image.setPixel(x, y, {r, g, b, 255});
		}
	});

	image.requestUpdate();
}
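Example 7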
void HTMLCanvasElement::paint(GraphicsContext* context, const IntRect& r)
{
    // Clear the dirty rect
    m_dirtyRect = FloatRect();

    if (context->paintingDisabled())
        return;

    if (m_context) {
        if (!m_context->paintsIntoCanvasBuffer())
            return;
        m_context->paintRenderingResultsToCanvas();
    }

    if (hasCreatedImageBuffer()) {
        ImageBuffer* imageBuffer = buffer();
        if (imageBuffer) {

            if(imageBuffer->drawsUsingRecording())
                return;

            if (m_presentedImage)
                context->drawImage(m_presentedImage.get(), ColorSpaceDeviceRGB, r);
            else if (imageBuffer->drawsUsingCopy())
                context->drawImage(copiedImage(), ColorSpaceDeviceRGB, r);
            else
                context->drawImageBuffer(imageBuffer, ColorSpaceDeviceRGB, r);
        }
    }

#if ENABLE(WEBGL)
    if (is3D())
        static_cast<WebGLRenderingContext*>(m_context.get())->markLayerComposited();
#endif
#if ENABLE(DASHBOARD_SUPPORT)
    Settings* settings = document()->settings();
    if (settings && settings->usesDashboardBackwardCompatibilityMode())
        setIeForbidsInsertHTML();
#endif
}
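Example 8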
//[-------------------------------------------------------]
//[ Private virtual TextureCreator functions              ]
//[-------------------------------------------------------]
Texture *TextureCreatorTurbulence3D::Create(TextureManager &cTextureManager, Texture *pTexture) const
{
	// Create the texture
	pTexture = CreateTexture(cTextureManager, pTexture);

	// Get scale
	const Vector3 &vScale = Scale.Get();

	// Create the image
	Image cImage = Image::CreateImage(DataByte, ColorGrayscale, Vector3i(XSize, YSize, ZSize));
	ImageBuffer *pImageBuffer = cImage.GetBuffer();

	// Create the buffer
	uint8 *pTurbBuffer = pImageBuffer->GetData();
	uint8 *pDest = pTurbBuffer;
	uint32 nMin = 255, nMax = 0;
	for (uint32 nZ=0; nZ<ZSize; nZ++) {
		for (uint32 nY=0; nY<YSize; nY++) {
			for (uint32 nX=0; nX<XSize; nX++) {
				const uint8 nT = static_cast<uint8>(127.5f*(1 + PerlinNoiseTurbulence::TileableTurbulence3(vScale.x*nX, vScale.y*nY, vScale.z*nZ, XSize*vScale.x, YSize*vScale.y, ZSize*vScale.z, 16)));
				if (nT > nMax)
					nMax = nT;
				if (nT < nMin)
					nMin = nT;
				*pDest++ = nT;
			}
		}
	}
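	// Stretch the stored values so they span the full 0..255 range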
	const uint32 nNumOfPixels = XSize*YSize*ZSize;
	pDest = pTurbBuffer;
	for (uint32 i=0; i<nNumOfPixels; i++, pDest++)
		*pDest = static_cast<uint8>((255*(*pDest - nMin))/(nMax - nMin));

	// Create the 3D texture buffer
	pTexture->SetTextureBuffer(reinterpret_cast<TextureBuffer*>(cTextureManager.GetRendererContext().GetRenderer().CreateTextureBuffer3D(cImage)));

	// Return the created texture
	return pTexture;
}
Example 9
void FEColorMatrix::apply()
{
    if (hasResult())
        return;
    FilterEffect* in = inputEffect(0);
    in->apply();
    if (!in->hasResult())
        return;

    ImageBuffer* resultImage = createImageBufferResult();
    if (!resultImage)
        return;

    resultImage->context()->drawImageBuffer(in->asImageBuffer(), ColorSpaceDeviceRGB, drawingRegionOfInputImage(in->absolutePaintRect()));

    IntRect imageRect(IntPoint(), absolutePaintRect().size());
    RefPtr<ImageData> imageData = resultImage->getUnmultipliedImageData(imageRect);
    ByteArray* pixelArray = imageData->data()->data();

    switch (m_type) {
        case FECOLORMATRIX_TYPE_UNKNOWN:
            break;
        case FECOLORMATRIX_TYPE_MATRIX:
            effectType<FECOLORMATRIX_TYPE_MATRIX>(pixelArray, m_values);
            break;
        case FECOLORMATRIX_TYPE_SATURATE: 
            effectType<FECOLORMATRIX_TYPE_SATURATE>(pixelArray, m_values);
            break;
        case FECOLORMATRIX_TYPE_HUEROTATE:
            effectType<FECOLORMATRIX_TYPE_HUEROTATE>(pixelArray, m_values);
            break;
        case FECOLORMATRIX_TYPE_LUMINANCETOALPHA:
            effectType<FECOLORMATRIX_TYPE_LUMINANCETOALPHA>(pixelArray, m_values);
            setIsAlphaImage(true);
            break;
    }

    resultImage->putUnmultipliedImageData(imageData.get(), imageRect, IntPoint());
}
Example 10
bool FEDisplacementMap::applySkia()
{
    // For now, only use the skia implementation for accelerated rendering.
    if (filter()->renderingMode() != Accelerated)
        return false;

    FilterEffect* in = inputEffect(0);
    FilterEffect* in2 = inputEffect(1);

    if (!in || !in2)
        return false;

    ImageBuffer* resultImage = createImageBufferResult();
    if (!resultImage)
        return false;

    RefPtr<Image> color = in->asImageBuffer()->copyImage(DontCopyBackingStore);
    RefPtr<Image> displ = in2->asImageBuffer()->copyImage(DontCopyBackingStore);

    RefPtr<NativeImageSkia> colorNativeImage = color->nativeImageForCurrentFrame();
    RefPtr<NativeImageSkia> displNativeImage = displ->nativeImageForCurrentFrame();

    if (!colorNativeImage || !displNativeImage)
        return false;

    SkBitmap colorBitmap = colorNativeImage->bitmap();
    SkBitmap displBitmap = displNativeImage->bitmap();

    SkAutoTUnref<SkImageFilter> colorSource(new SkBitmapSource(colorBitmap));
    SkAutoTUnref<SkImageFilter> displSource(new SkBitmapSource(displBitmap));
    SkDisplacementMapEffect::ChannelSelectorType typeX = toSkiaMode(m_xChannelSelector);
    SkDisplacementMapEffect::ChannelSelectorType typeY = toSkiaMode(m_yChannelSelector);
    SkAutoTUnref<SkImageFilter> displEffect(new SkDisplacementMapEffect(
        typeX, typeY, SkFloatToScalar(m_scale), displSource, colorSource));
    SkPaint paint;
    paint.setImageFilter(displEffect);
    resultImage->context()->drawBitmap(colorBitmap, 0, 0, &paint);
    return true;
}
Example 11
void ImageBuffer::warpPerlin(Perlin &p)
{
	// Make a copy of the current image buffer. This is used to preserve the original texture while the current texture is warped.
	ImageBuffer *newBuf = new ImageBuffer(_width, _height);
	memcpy(newBuf->getBuffer(), buffer, _width*_height*3*sizeof(float));

	for(int row=0; row<_height; row++) for(int col=0; col<_width; col++)
	{
		// Make some noise at current pixel
		float x = (float)col/_width;
		float y = (float)row/_height;

		//float noise = p.noise(x,y,0.0f)*0.13f;
		float noise = 0.0f;
		vector<float> *octavesNoise = p.noise(x,y,0.0f);
		vector<float>::iterator i = octavesNoise->begin();
		while(i != octavesNoise->end())
		{
			noise += *i;
			i++;
		}
		delete octavesNoise;

		noise *= 0.13f;

		// Find the noisy pixel position. If the noise pushes the pixel position beyond the image dimensions, wrap around.
		int noiseCol = static_cast<int>(col + _width*noise);
		if(noiseCol > _width-1) noiseCol -= _width;
		else if(noiseCol < 0) noiseCol += _width;
		int noiseRow = static_cast<int>(row + _height*noise);
		if(noiseRow > _height-1) noiseRow -= _height;
		else if(noiseRow < 0) noiseRow += _height;

		// Set noisy pixel position from copied buffer to current buffer
		setPixel( row, col, newBuf->getPixel(_height-1-noiseRow, noiseCol) );
	}

	delete newBuf;
}
Example 12
bool FEComponentTransfer::platformApplySkia()
{
    FilterEffect* in = inputEffect(0);
    ImageBuffer* resultImage = createImageBufferResult();
    if (!resultImage)
        return false;

    RefPtr<Image> image = in->asImageBuffer()->copyImage(DontCopyBackingStore);
    NativeImageSkia* nativeImage = image->nativeImageForCurrentFrame();
    if (!nativeImage)
        return false;

    unsigned char rValues[256], gValues[256], bValues[256], aValues[256];
    getValues(rValues, gValues, bValues, aValues);

    SkPaint paint;
    paint.setColorFilter(SkTableColorFilter::CreateARGB(aValues, rValues, gValues, bValues))->unref();
    paint.setXfermodeMode(SkXfermode::kSrc_Mode);
    resultImage->context()->platformContext()->drawBitmap(nativeImage->bitmap(), 0, 0, &paint);

    return true;
}
Example 13
void FEBlend::applySoftware()
{
#if HAVE(ARM_NEON_INTRINSICS)
    if (applySoftwareNEON())
        return;
#endif

    FilterEffect* in = inputEffect(0);
    FilterEffect* in2 = inputEffect(1);

    ImageBuffer* resultImage = createImageBufferResult();
    if (!resultImage)
        return;
    GraphicsContext* filterContext = resultImage->context();

    ImageBuffer* imageBuffer = in->asImageBuffer();
    ImageBuffer* imageBuffer2 = in2->asImageBuffer();
    ASSERT(imageBuffer);
    ASSERT(imageBuffer2);

    filterContext->drawImageBuffer(imageBuffer2, drawingRegionOfInputImage(in2->absolutePaintRect()));
    filterContext->drawImageBuffer(imageBuffer, drawingRegionOfInputImage(in->absolutePaintRect()), 0, CompositeSourceOver, m_mode);
}
Example 14
bool FEColorMatrix::applySkia()
{
    ImageBuffer* resultImage = createImageBufferResult();
    if (!resultImage)
        return false;

    FilterEffect* in = inputEffect(0);

    SkRect drawingRegion = drawingRegionOfInputImage(in->absolutePaintRect());

    SkAutoTUnref<SkColorFilter> filter(createColorFilter(m_type, m_values.data()));

    RefPtr<Image> image = in->asImageBuffer()->copyImage(DontCopyBackingStore);
    RefPtr<NativeImageSkia> nativeImage = image->nativeImageForCurrentFrame();
    if (!nativeImage)
        return false;

    SkPaint paint;
    paint.setColorFilter(filter);
    paint.setXfermodeMode(SkXfermode::kSrc_Mode);
    resultImage->context()->drawBitmap(nativeImage->bitmap(), drawingRegion.fLeft, drawingRegion.fTop, &paint);
    return true;
}
Example 15
void FEMerge::apply()
{
    if (hasResult())
        return;
    unsigned size = numberOfEffectInputs();
    ASSERT(size > 0);
    for (unsigned i = 0; i < size; ++i) {
        FilterEffect* in = inputEffect(i);
        in->apply();
        if (!in->hasResult())
            return;
    }

    ImageBuffer* resultImage = createImageBufferResult();
    if (!resultImage)
        return;

    GraphicsContext* filterContext = resultImage->context();
    for (unsigned i = 0; i < size; ++i) {
        FilterEffect* in = inputEffect(i);
        filterContext->drawImageBuffer(in->asImageBuffer(), ColorSpaceDeviceRGB, drawingRegionOfInputImage(in->absolutePaintRect()));
    }
}
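Example 16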
void CanvasRenderingContext2D::drawImage(HTMLCanvasElement* sourceCanvas, const FloatRect& srcRect,
    const FloatRect& dstRect, ExceptionCode& ec)
{
    ASSERT(sourceCanvas);

    ec = 0;

    FloatRect srcCanvasRect = FloatRect(FloatPoint(), sourceCanvas->size());
    if (!srcCanvasRect.contains(normalizeRect(srcRect)) || srcRect.width() == 0 || srcRect.height() == 0) {
        ec = INDEX_SIZE_ERR;
        return;
    }

    if (!dstRect.width() || !dstRect.height())
        return;

    GraphicsContext* c = drawingContext();
    if (!c)
        return;
    if (!state().m_invertibleCTM)
        return;
        
    FloatRect sourceRect = c->roundToDevicePixels(srcRect);
    FloatRect destRect = c->roundToDevicePixels(dstRect);
        
    // FIXME: Do this through platform-independent GraphicsContext API.
    ImageBuffer* buffer = sourceCanvas->buffer();
    if (!buffer)
        return;

    if (!sourceCanvas->originClean())
        canvas()->setOriginTainted();

    c->drawImage(buffer->image(), DeviceColorSpace, destRect, sourceRect, state().m_globalComposite);
    willDraw(destRect); // This call comes after drawImage, since the buffer we draw into may be our own, and we need to make sure it is dirty.
                        // FIXME: Arguably willDraw should become didDraw and occur after drawing calls and not before them to avoid problems like this.
}
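Example 17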
void CanvasRenderingContext2D::drawImage(HTMLCanvasElement* canvas, const FloatRect& srcRect,
    const FloatRect& dstRect, ExceptionCode& ec)
{
    ASSERT(canvas);

    ec = 0;

    FloatRect srcCanvasRect = FloatRect(FloatPoint(), canvas->size());
    if (!(srcCanvasRect.contains(srcRect) && srcRect.width() >= 0 && srcRect.height() >= 0 
            && dstRect.width() >= 0 && dstRect.height() >= 0)) {
        ec = INDEX_SIZE_ERR;
        return;
    }

    if (srcRect.isEmpty() || dstRect.isEmpty())
        return;

    GraphicsContext* c = drawingContext();
    if (!c)
        return;
        
    FloatRect sourceRect = c->roundToDevicePixels(srcRect);
    FloatRect destRect = c->roundToDevicePixels(dstRect);
        
    // FIXME: Do this through platform-independent GraphicsContext API.
    ImageBuffer* buffer = canvas->buffer();
    if (!buffer)
        return;

    if (!canvas->originClean())
        m_canvas->setOriginTainted();

    c->drawImage(buffer->image(), destRect, sourceRect);
    willDraw(destRect); // This call comes after drawImage, since the buffer we draw into may be our own, and we need to make sure it is dirty.
                        // FIXME: Arguably willDraw should become didDraw and occur after drawing calls and not before them to avoid problems like this.
}
Example 18
void FiltersVector::GaussianBlur(ImageBuffer& Source, ImageBuffer& Dest, float Sigma)
{
   CheckCompatibility(Source, Dest);

   // Prepare mask
   int MaskSize = int(ceil(3 * Sigma));

   if (Sigma <= 0 || MaskSize > 31)
      throw cl::Error(CL_INVALID_ARG_VALUE, "Invalid sigma used with GaussianBlur - allowed : 0.01-10");

   uint NbElements = (MaskSize * 2 + 1 ) * (MaskSize * 2 + 1 );

   std::vector<float> Mask(NbElements);

   GenerateBlurMask(Mask, Sigma, MaskSize);
   // NOTE: Maybe we should generate the mask on the device to avoid having to send this buffer


   // Send mask to device
   ReadBuffer MaskBuffer(*m_CL, Mask.data(), NbElements);

   // Execute kernel
   Kernel(gaussian_blur, In(Source), Out(Dest), Source.Step(), Dest.Step(), Source.Height(), MaskBuffer, MaskSize);
}
Example 19
	value lime_image_load (value data) {
		
		ImageBuffer buffer;
		Resource resource;
		Bytes bytes;
		
		if (val_is_string (data)) {
			
			resource = Resource (val_string (data));
			
		} else {
			
			bytes.Set (data);
			resource = Resource (&bytes);
			
		}
		
		#ifdef LIME_PNG
		if (PNG::Decode (&resource, &buffer)) {
			
			return buffer.Value ();
			
		}
		#endif
		
		#ifdef LIME_JPEG
		if (JPEG::Decode (&resource, &buffer)) {
			
			return buffer.Value ();
			
		}
		#endif
		
		return alloc_null ();
		
	}
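Example 20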
void HTMLCanvasElement::paint(GraphicsContext* context, const IntRect& r)
{
    // Clear the dirty rect
    m_dirtyRect = FloatRect();

    if (context->paintingDisabled())
        return;
    
    if (m_context) {
        if (!m_context->paintsIntoCanvasBuffer())
            return;
        m_context->paintRenderingResultsToCanvas();
    }

    if (hasCreatedImageBuffer()) {
        ImageBuffer* imageBuffer = buffer();
        if (imageBuffer) {
            if (imageBuffer->drawsUsingCopy())
                context->drawImage(copiedImage(), ColorSpaceDeviceRGB, r);
            else
                context->drawImageBuffer(imageBuffer, ColorSpaceDeviceRGB, r);
        }
    }
}
Example 21
bool VolumeLoaderDAT::Save(const Volume &cVolume, File &cFile)
{
	// Get the image holding the volumetric data
	const Image &cImage = cVolume.GetVolumeImage();

	// Get the image buffer; we only support the data types "byte" and "word"
	ImageBuffer *pImageBuffer = cImage.GetBuffer();
	if (pImageBuffer && (pImageBuffer->GetDataFormat() == DataByte || pImageBuffer->GetDataFormat() == DataWord)) {
		const Vector3i &vSize = pImageBuffer->GetSize();

		// Construct the object filename
		const String sObjectFilename = cFile.GetUrl().GetTitle() + ".raw";

		// A "dat"-file has simple ASCII content
		cFile.PutS("ObjectFileName: " + sObjectFilename + '\n');	// Example:	"ObjectFileName: Teddybear.raw"
		cFile.PutS("TaggedFileName: ---\n");						// Example:	"TaggedFileName: ---"
		cFile.PutS("Resolution:     " + vSize.ToString() + '\n');	// Example:	"Resolution:     128 128 62"
		cFile.PutS("SliceThickness: 1 1 1\n");						// Example:	"SliceThickness: 2.8 2.8 5"
		if (pImageBuffer->GetDataFormat() == DataByte)				// Example:	"Format:         UCHAR"
			cFile.PutS("Format:         UCHAR\n");
		else
			cFile.PutS("Format:         USHORT\n");
		cFile.PutS("NbrTags:        0\n");							// Example:	"NbrTags:        0"
		cFile.PutS("ObjectType:     TEXTURE_VOLUME_OBJECT\n");		// Example:	"ObjectType:     TEXTURE_VOLUME_OBJECT"
		cFile.PutS("ObjectModel:    RGBA\n");						// Example:	"ObjectModel:    RGBA"
		cFile.PutS("GridType:       EQUIDISTANT\n");				// Example:	"GridType:       EQUIDISTANT"

		{ // Raw data...
			// Get the absolute filename of the raw file
			const String sFilename = cFile.GetUrl().CutFilename() + sObjectFilename;

			// Open the raw file
			File cRawFile(sFilename);
			if (cRawFile.Open(File::FileCreate | File::FileWrite)) {
				// Save the data
				cRawFile.Write(pImageBuffer->GetData(), 1, pImageBuffer->GetDataSize());

				// Done
				return true;
			}
		}
	}

	// Error!
	return false;
}
Example 22
void FEImage::applySoftware()
{
    RenderObject* renderer = referencedRenderer();
    if (!m_image && !renderer)
        return;

    ImageBuffer* resultImage = createImageBufferResult();
    if (!resultImage)
        return;
    IntPoint paintLocation = absolutePaintRect().location();
    resultImage->context()->translate(-paintLocation.x(), -paintLocation.y());

    // FEImage results are always in ColorSpaceDeviceRGB
    setResultColorSpace(ColorSpaceDeviceRGB);

    FloatRect destRect = filter()->mapLocalRectToAbsoluteRect(filterPrimitiveSubregion());
    FloatRect srcRect;

    if (!renderer) {
        srcRect = FloatRect(FloatPoint(), m_image->size());
        m_preserveAspectRatio->transformRect(destRect, srcRect);

        resultImage->context()->drawImage(m_image.get(), destRect, srcRect);
        return;
    }

    SVGElement* contextNode = toSVGElement(renderer->node());
    if (contextNode->hasRelativeLengths()) {
        // FIXME: This fixes relative lengths but breaks non-relative ones (see crbug/260709).
        SVGLengthContext lengthContext(contextNode);
        FloatSize viewportSize;

        // If we're referencing an element with percentage units, e.g. <rect width="30%">, those values were resolved against the viewport.
        // Build up a transformation that maps from the viewport space to the filter primitive subregion.
        if (lengthContext.determineViewport(viewportSize))
            resultImage->context()->concatCTM(makeMapBetweenRects(FloatRect(FloatPoint(), viewportSize), destRect));
    } else {
        resultImage->context()->translate(destRect.x(), destRect.y());
        resultImage->context()->concatCTM(filter()->absoluteTransform());
    }

    AffineTransform contentTransformation;
    SVGRenderingContext::renderSubtree(resultImage->context(), renderer, contentTransformation);
}
Example 23
void FiltersVector::Gauss(ImageBuffer& Source, ImageBuffer& Dest, int Width)
{
   CheckCompatibility(Source, Dest);

   if (Width == 3)
   {
      Kernel(gaussian3, Source, Dest, Source.Step(), Dest.Step(), Source.Height());
      return;
   }

   if (Width == 5)
   {
      Kernel(gaussian5, Source, Dest, Source.Step(), Dest.Step(), Source.Height());
      return;
   }

   throw cl::Error(CL_INVALID_ARG_VALUE, "Invalid width used in Gauss - allowed : 3, 5");
}
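A hypothetical call, assuming a FiltersVector instance and two compatible ImageBuffers have already been set up elsewhere (their construction is not shown in these examples).

void Blur5x5(FiltersVector& Filters, ImageBuffer& Source, ImageBuffer& Dest)
{
   // Only widths 3 and 5 are accepted; anything else throws cl::Error(CL_INVALID_ARG_VALUE).
   Filters.Gauss(Source, Dest, 5);
}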
Example 24
void FiltersVector::Median(ImageBuffer& Source, ImageBuffer& Dest, int Width)
{
   CheckCompatibility(Source, Dest);

   Source.SendIfNeeded();

   if (Width != 3 && Width != 5)
      throw cl::Error(CL_INVALID_ARG_VALUE, "Invalid width used in Median - allowed : 3 or 5");

   if (Width == 3)
   {
      /*if (RangeFit(Source, 16, 16))  // The cached version is slower on my GTX 680
      {
         Kernel_(*m_CL, SelectProgram(Source), median3_cached, cl::NDRange(16, 16, 1), Source, Dest, Source.Step(), Dest.Step(), Source.Height());
         return;
      }*/

      Kernel(median3, Source, Dest, Source.Step(), Dest.Step(), Source.Height());
      return;
   }

   Kernel(median5, Source, Dest, Source.Step(), Dest.Step(), Source.Height());
}
Example 25
void FEDropShadow::platformApplySoftware()
{
    FilterEffect* in = inputEffect(0);

    ImageBuffer* resultImage = createImageBufferResult();
    if (!resultImage)
        return;

    Filter& filter = this->filter();
    FloatSize blurRadius(2 * filter.applyHorizontalScale(m_stdX), 2 * filter.applyVerticalScale(m_stdY));
    blurRadius.scale(filter.filterScale());
    FloatSize offset(filter.applyHorizontalScale(m_dx), filter.applyVerticalScale(m_dy));

    FloatRect drawingRegion = drawingRegionOfInputImage(in->absolutePaintRect());
    FloatRect drawingRegionWithOffset(drawingRegion);
    drawingRegionWithOffset.move(offset);

    ImageBuffer* sourceImage = in->asImageBuffer();
    if (!sourceImage)
        return;

    GraphicsContext& resultContext = resultImage->context();
    resultContext.setAlpha(m_shadowOpacity);
    resultContext.drawImageBuffer(*sourceImage, drawingRegionWithOffset);
    resultContext.setAlpha(1);

    ShadowBlur contextShadow(blurRadius, offset, m_shadowColor);

    // TODO: Direct pixel access to ImageBuffer would avoid copying the ImageData.
    IntRect shadowArea(IntPoint(), resultImage->internalSize());
    RefPtr<Uint8ClampedArray> srcPixelArray = resultImage->getPremultipliedImageData(shadowArea, ImageBuffer::BackingStoreCoordinateSystem);

    contextShadow.blurLayerImage(srcPixelArray->data(), shadowArea.size(), 4 * shadowArea.size().width());

    resultImage->putByteArray(Premultiplied, srcPixelArray.get(), shadowArea.size(), shadowArea, IntPoint(), ImageBuffer::BackingStoreCoordinateSystem);

    resultContext.setCompositeOperation(CompositeSourceIn);
    resultContext.fillRect(FloatRect(FloatPoint(), absolutePaintRect().size()), m_shadowColor);
    resultContext.setCompositeOperation(CompositeDestinationOver);

    resultImage->context().drawImageBuffer(*sourceImage, drawingRegion);
}
Example 26
void FEDropShadow::platformApplySoftware()
{
    FilterEffect* in = inputEffect(0);

    ImageBuffer* resultImage = createImageBufferResult();
    if (!resultImage)
        return;

    Filter* filter = this->filter();
    FloatSize blurRadius(filter->applyHorizontalScale(m_stdX), filter->applyVerticalScale(m_stdY));
    FloatSize offset(filter->applyHorizontalScale(m_dx), filter->applyVerticalScale(m_dy));

    FloatRect drawingRegion = drawingRegionOfInputImage(in->absolutePaintRect());
    FloatRect drawingRegionWithOffset(drawingRegion);
    drawingRegionWithOffset.move(offset);

    ImageBuffer* sourceImage = in->asImageBuffer();
    ASSERT(sourceImage);
    GraphicsContext* resultContext = resultImage->context();
    ASSERT(resultContext);
    resultContext->setAlpha(m_shadowOpacity);
    resultContext->drawImageBuffer(sourceImage, ColorSpaceDeviceRGB, drawingRegionWithOffset);
    resultContext->setAlpha(1);

    ShadowBlur contextShadow(blurRadius, offset, m_shadowColor, ColorSpaceDeviceRGB);

    // TODO: Direct pixel access to ImageBuffer would avoid copying the ImageData.
    IntRect shadowArea(IntPoint(), resultImage->size());
    RefPtr<ByteArray> srcPixelArray = resultImage->getPremultipliedImageData(shadowArea);

    contextShadow.blurLayerImage(srcPixelArray->data(), shadowArea.size(), 4 * shadowArea.size().width());

    resultImage->putPremultipliedImageData(srcPixelArray.get(), shadowArea.size(), shadowArea, IntPoint());

    resultContext->setCompositeOperation(CompositeSourceIn);
    resultContext->fillRect(FloatRect(FloatPoint(), absolutePaintRect().size()), m_shadowColor, ColorSpaceDeviceRGB);
    resultContext->setCompositeOperation(CompositeDestinationOver);

    resultImage->context()->drawImageBuffer(sourceImage, ColorSpaceDeviceRGB, drawingRegion);
}
Example 27
bool GraphicsContext::isCompatibleWithBuffer(ImageBuffer& buffer) const
{
    GraphicsContext& bufferContext = buffer.context();

    return scalesMatch(getCTM(), bufferContext.getCTM()) && isAcceleratedContext() == bufferContext.isAcceleratedContext();
}
Example 28
void GraphicsContext::clipToImageBuffer(ImageBuffer& buffer, const FloatRect& rect)
{
    if (paintingDisabled())
        return;
    buffer.clip(*this, rect);
}
Example 29
void GraphicsContext::drawImageBuffer(ImageBuffer& image, const FloatRect& destination, const ImagePaintingOptions& imagePaintingOptions)
{
    drawImageBuffer(image, destination, FloatRect(FloatPoint(), FloatSize(image.logicalSize())), imagePaintingOptions);
}
Example 30
void FETile::platformApplySoftware()
{
// FIXME: See bug 47315. This is a hack to work around a compile failure, but is incorrect behavior otherwise.
#if ENABLE(SVG)
    FilterEffect* in = inputEffect(0);

    ImageBuffer* resultImage = createImageBufferResult();
    if (!resultImage)
        return;

    setIsAlphaImage(in->isAlphaImage());

    // Source input needs more attention. It has the size of the filterRegion but reports the
    // size of the cut-out sourceImage. This is part of the specification and an optimization.
    FloatRect tileRect = in->maxEffectRect();
    FloatPoint inMaxEffectLocation = tileRect.location();
    FloatPoint maxEffectLocation = maxEffectRect().location();
    if (in->filterEffectType() == FilterEffectTypeSourceInput) {
        Filter* filter = this->filter();
        tileRect = filter->filterRegion();
        tileRect.scale(filter->filterResolution().width(), filter->filterResolution().height());
    }

    OwnPtr<ImageBuffer> tileImage;
    if (!SVGImageBufferTools::createImageBufferForPattern(tileRect, tileRect, tileImage, ColorSpaceDeviceRGB, filter()->renderingMode()))
        return;

    GraphicsContext* tileImageContext = tileImage->context();
    tileImageContext->translate(-inMaxEffectLocation.x(), -inMaxEffectLocation.y());
    tileImageContext->drawImageBuffer(in->asImageBuffer(), ColorSpaceDeviceRGB, in->absolutePaintRect().location());

    RefPtr<Pattern> pattern = Pattern::create(tileImage->copyImage(CopyBackingStore), true, true);

    AffineTransform patternTransform;
    patternTransform.translate(inMaxEffectLocation.x() - maxEffectLocation.x(), inMaxEffectLocation.y() - maxEffectLocation.y());
    pattern->setPatternSpaceTransform(patternTransform);
    GraphicsContext* filterContext = resultImage->context();
    filterContext->setFillPattern(pattern);
    filterContext->fillRect(FloatRect(FloatPoint(), absolutePaintRect().size()));
#endif
}