void ImageFrame::zeroFillFrameRect(const IntRect& rect) { if (rect.isEmpty()) return; m_bitmap.eraseArea(rect, SkColorSetARGB(0, 0, 0, 0)); setHasAlpha(true); }
// Deep-copy assignment: clones the other frame's bitmap (so the two frames do
// not share pixel storage) and mirrors its metadata. Setter ordering below is
// significant — see the inline comments.
ImageFrame& ImageFrame::operator=(const ImageFrame& other)
{
    if (this == &other)
        return *this;

    m_bitmap = other.m_bitmap->clone();
    // Keep the pixels locked since we will be writing directly into the
    // bitmap throughout this object's lifetime.
    m_bitmap->bitmap().lockPixels();
    setMemoryAllocator(other.allocator());
    setOriginalFrameRect(other.originalFrameRect());
    setStatus(other.status());
    setDuration(other.duration());
    setDisposalMethod(other.disposalMethod());
    setPremultiplyAlpha(other.premultiplyAlpha());
    // Be sure that this is called after we've called setStatus(), since we
    // look at our status to know what to do with the alpha value.
    setHasAlpha(other.hasAlpha());
    // Copy raw fields to avoid ASSERT failure in requiredPreviousFrameIndex().
    m_requiredPreviousFrameIndex = other.m_requiredPreviousFrameIndex;
#if !ASSERT_DISABLED
    m_requiredPreviousFrameIndexValid = other.m_requiredPreviousFrameIndexValid;
#endif
    return *this;
}
// Decodes frame |index| of the image into |bitmap|. If |*decoder| is null, a
// new decoder is created (and returned to the caller through |decoder| so it
// can be cached for later partial decodes). Returns true when the decode is
// considered complete: either the frame itself reached FrameComplete, or all
// encoded data has been received (the image may still be partially decoded in
// the latter case).
bool ImageFrameGenerator::decode(size_t index, ImageDecoder** decoder, SkBitmap* bitmap)
{
    TRACE_EVENT2("blink", "ImageFrameGenerator::decode", "width", m_fullSize.width(), "height", m_fullSize.height());

    ASSERT(decoder);
    SharedBuffer* data = 0;
    bool allDataReceived = false;
    bool newDecoder = false;
    m_data.data(&data, &allDataReceived);

    // Try to create an ImageDecoder if we are not given one.
    if (!*decoder) {
        newDecoder = true;
        // Prefer the injected factory (used by tests); fall back to sniffing
        // the encoded data for a format-specific decoder.
        if (m_imageDecoderFactory)
            *decoder = m_imageDecoderFactory->create().leakPtr();
        if (!*decoder)
            *decoder = ImageDecoder::create(*data, ImageSource::AlphaPremultiplied, ImageSource::GammaAndColorProfileApplied).leakPtr();
        if (!*decoder)
            return false;
    }

    if (!m_isMultiFrame && newDecoder && allDataReceived) {
        // If we're using an external memory allocator that means we're decoding
        // directly into the output memory and we can save one memcpy.
        ASSERT(m_externalAllocator.get());
        (*decoder)->setMemoryAllocator(m_externalAllocator.get());
    }
    (*decoder)->setData(data, allDataReceived);

    ImageFrame* frame = (*decoder)->frameBufferAtIndex(index);
    // For multi-frame image decoders, we need to know how many frames are
    // in that image in order to release the decoder when all frames are
    // decoded. frameCount() is reliable only if all data is received and set in
    // decoder, particularly with GIF.
    if (allDataReceived)
        m_frameCount = (*decoder)->frameCount();

    (*decoder)->setData(0, false); // Unref SharedBuffer from ImageDecoder.
    // Drop cached frames other than the one just decoded, and detach the
    // external allocator before the decoder is possibly reused.
    (*decoder)->clearCacheExceptFrame(index);
    (*decoder)->setMemoryAllocator(0);

    if (!frame || frame->status() == ImageFrame::FrameEmpty)
        return false;

    // A cache object is considered complete if we can decode a complete frame.
    // Or we have received all data. The image might not be fully decoded in
    // the latter case.
    const bool isDecodeComplete = frame->status() == ImageFrame::FrameComplete || allDataReceived;
    SkBitmap fullSizeBitmap = frame->getSkBitmap();
    if (!fullSizeBitmap.isNull()) {
        ASSERT(fullSizeBitmap.width() == m_fullSize.width() && fullSizeBitmap.height() == m_fullSize.height());
        setHasAlpha(index, !fullSizeBitmap.isOpaque());
    }
    *bitmap = fullSizeBitmap;
    return isDecodeComplete;
}
// Copies another buffer's pixel bytes, dimensions, and alpha flag into this
// buffer.
void RGBA32Buffer::copyBitmapData(const RGBA32Buffer& other)
{
    // Copying a buffer onto itself requires no work.
    if (&other == this)
        return;

    m_size = other.m_size;
    m_bytes = other.m_bytes;
    setHasAlpha(other.m_hasAlpha);
}
// Copies another frame's pixel data into this frame's own backing store.
// Always returns true.
bool ImageFrame::copyBitmapData(const ImageFrame& other)
{
    // Copying a frame onto itself requires no work.
    if (&other == this)
        return true;

    m_size = other.m_size;
    m_backingStore = other.m_backingStore;
    // Point m_bytes at our own copy of the pixels, not at other's storage.
    m_bytes = m_backingStore.data();
    setHasAlpha(other.m_hasAlpha);
    return true;
}
void ImageFrame::zeroFillFrameRect(const IntRect& rect) { ASSERT(IntRect(IntPoint(), m_size).contains(rect)); if (rect.isEmpty()) return; size_t rectWidthInBytes = rect.width() * sizeof(PixelData); PixelData* start = m_bytes + (rect.y() * width()) + rect.x(); for (int i = 0; i < rect.height(); ++i) { memset(start, 0, rectWidthInBytes); start += width(); } setHasAlpha(true); }
// Assignment: takes the other frame's bitmap and mirrors its metadata.
// Setter ordering below is significant — see the inline comments.
ImageFrame& ImageFrame::operator=(const ImageFrame& other)
{
    if (this == &other)
        return *this;

    m_bitmap = other.m_bitmap;
    // Keep the pixels locked since we will be writing directly into the
    // bitmap throughout this object's lifetime.
    m_bitmap.bitmap().lockPixels();
    setOriginalFrameRect(other.originalFrameRect());
    setStatus(other.status());
    setDuration(other.duration());
    setDisposalMethod(other.disposalMethod());
    setPremultiplyAlpha(other.premultiplyAlpha());
    // Be sure that this is called after we've called setStatus(), since we
    // look at our status to know what to do with the alpha value.
    setHasAlpha(other.hasAlpha());
    return *this;
}
// Converts this bitmap's pixel data from 32-bit to 16-bit in place.
// Returns false when the bitmap is locked (callers may hold raw pixel
// pointers) or when allocation fails; returns true immediately when the
// bitmap is already 16-bit.
bool SharedBitmap::to16bit()
{
    if (m_locked)
        return false;
    if (is16bit())
        return true;

    BitmapInfo newBmpInfo = BitmapInfo::create(m_bmpInfo.size(), BitmapInfo::BitCount16);

    int width = newBmpInfo.width();
    int paddedWidth = newBmpInfo.paddedWidth();

    // bufferSize counts 16-bit entries; the buffer is allocated as unsigned
    // (32-bit) elements, hence the division by 2.
    int bufferSize = paddedWidth * newBmpInfo.height();
    OwnArrayPtr<unsigned> newPixelData = adoptArrayPtr(new unsigned[bufferSize / 2]);
    void* newPixels = newPixelData.get();
    if (!newPixels)
        return false;

    unsigned short* p16 = static_cast<unsigned short*>(newPixels);
    const unsigned* p32 = static_cast<const unsigned*>(m_pixels);
    // When rows are padded to an even stride, one extra 16-bit entry is
    // written per row.
    bool skips = paddedWidth != width;
    const unsigned short* p16end = p16 + bufferSize;
    while (p16 < p16end) {
        for (unsigned short* p16lineEnd = p16 + width; p16 < p16lineEnd; )
            *p16++ = convert32To16(*p32++);
        if (skips)
            *p16++ = 0;
    }

    // NOTE(review): in the m_hbitmap branch, ownership of the freshly
    // converted buffer is never transferred — newPixelData frees it when this
    // function returns, which would leave m_pixels dangling. Verify whether
    // this path is supposed to create a replacement HBITMAP instead.
    if (m_hbitmap)
        m_hbitmap = nullptr;
    else
        m_pixelData = newPixelData.release();

    m_pixels = newPixels;
    m_bmpInfo = newBmpInfo;
    // 16-bit pixels carry no alpha channel.
    setHasAlpha(false);
    return true;
}
// Decodes frame |index| directly into the caller-provided YUV planes,
// reporting the actual component sizes through |componentSizes|.
// Requires all encoded data to be present (no progressive YUV decoding).
// Returns true on success; a failed decode sets m_yuvDecodingFailed.
bool ImageFrameGenerator::decodeToYUV(size_t index, SkISize componentSizes[3], void* planes[3], size_t rowBytes[3])
{
    // Prevent concurrent decode or scale operations on the same image data.
    MutexLocker lock(m_decodeMutex);

    if (m_decodeFailed)
        return false;

    TRACE_EVENT1("blink", "ImageFrameGenerator::decodeToYUV", "frame index", static_cast<int>(index));

    // All three plane pointers and row strides must be supplied.
    if (!planes || !planes[0] || !planes[1] || !planes[2] || !rowBytes || !rowBytes[0] || !rowBytes[1] || !rowBytes[2]) {
        return false;
    }

    SharedBuffer* data = 0;
    bool allDataReceived = false;
    m_data->data(&data, &allDataReceived);

    // FIXME: YUV decoding does not currently support progressive decoding.
    ASSERT(allDataReceived);

    OwnPtr<ImageDecoder> decoder = ImageDecoder::create(*data, ImageDecoder::AlphaPremultiplied, ImageDecoder::GammaAndColorProfileApplied);
    if (!decoder)
        return false;

    decoder->setData(data, allDataReceived);

    // Hand the destination planes to the decoder so it writes output in place.
    OwnPtr<ImagePlanes> imagePlanes = adoptPtr(new ImagePlanes(planes, rowBytes));
    decoder->setImagePlanes(imagePlanes.release());

    bool sizeUpdated = updateYUVComponentSizes(decoder.get(), componentSizes, ImageDecoder::ActualSize);
    RELEASE_ASSERT(sizeUpdated);

    if (decoder->decodeToYUV()) {
        setHasAlpha(0, false); // YUV is always opaque
        return true;
    }

    ASSERT(decoder->failed());
    m_yuvDecodingFailed = true;
    return false;
}
// Copies the pixel data of |other| into this buffer's raw byte storage.
// If the other buffer has already been converted to a native
// TiledImageOpenVG, the pixels are read back tile by tile with
// vgGetImageSubData; otherwise the raw byte vector is copied directly.
// Always returns true.
bool RGBA32Buffer::copyBitmapData(const RGBA32Buffer& other)
{
    if (this == &other)
        return true;

    m_size = other.m_size;
    m_image.clear();

    // This will usually be called from a completed frame. Depending on whether
    // or not we have already replaced the original buffer (m_bytes) with the
    // native image pointer (m_image), we either copy m_bytes directly or
    // need to extract the pixel data from the image tiles.
    if (TiledImageOpenVG* image = other.m_image.get()) {
        m_bytes.resize(m_size.width() * m_size.height());

        static const VGImageFormat bufferFormat = VG_sARGB_8888_PRE;

        const int numColumns = image->numColumns();
        const int numRows = image->numRows();

        image->makeCompatibleContextCurrent();

        for (int yIndex = 0; yIndex < numRows; ++yIndex) {
            for (int xIndex = 0; xIndex < numColumns; ++xIndex) {
                IntRect tileRect = image->tileRect(xIndex, yIndex);
                VGImage tile = image->tile(xIndex, yIndex);

                // Destination for this tile's top-left pixel within our
                // linear buffer.
                PixelData* pixelData = m_bytes.data();
                pixelData += (tileRect.y() * width()) + tileRect.x();

                vgGetImageSubData(tile, reinterpret_cast<unsigned char*>(pixelData), tileRect.width() * sizeof(PixelData), bufferFormat, 0, 0, tileRect.width(), tileRect.height());
                ASSERT_VG_NO_ERROR();
            }
        }
    } else
        m_bytes = other.m_bytes;

    setHasAlpha(other.m_hasAlpha);
    return true;
}
// Decodes the image directly into the caller-provided YUV planes, reporting
// the actual component sizes through |componentSizes|. Requires all encoded
// data to be present (no progressive YUV decoding). Marks the image opaque
// on success.
bool ImageFrameGenerator::decodeToYUV(SkISize componentSizes[3], void* planes[3], size_t rowBytes[3])
{
    // This method is called to populate a discardable memory owned by Skia.

    // Prevents concurrent decode or scale operations on the same image data.
    MutexLocker lock(m_decodeMutex);

    if (m_decodeFailedAndEmpty)
        return false;

    TRACE_EVENT2("blink", "ImageFrameGenerator::decodeToYUV", "generator", this, "decodeCount", static_cast<int>(m_decodeCount));

    // All three plane pointers and row strides must be supplied.
    if (!planes || !planes[0] || !planes[1] || !planes[2] || !rowBytes || !rowBytes[0] || !rowBytes[1] || !rowBytes[2]) {
        return false;
    }

    SharedBuffer* data = 0;
    bool allDataReceived = false;
    m_data.data(&data, &allDataReceived);

    // FIXME: YUV decoding does not currently support progressive decoding.
    ASSERT(allDataReceived);

    OwnPtr<ImageDecoder> decoder = ImageDecoder::create(*data, ImageSource::AlphaPremultiplied, ImageSource::GammaAndColorProfileApplied);
    if (!decoder)
        return false;

    decoder->setData(data, allDataReceived);

    // Hand the destination planes to the decoder so it writes output in place.
    OwnPtr<ImagePlanes> imagePlanes = adoptPtr(new ImagePlanes(planes, rowBytes));
    decoder->setImagePlanes(imagePlanes.release());

    bool sizeUpdated = updateYUVComponentSizes(decoder.get(), componentSizes, ImageDecoder::ActualSize);
    RELEASE_ASSERT(sizeUpdated);

    bool yuvDecoded = decoder->decodeToYUV();
    if (yuvDecoded)
        setHasAlpha(0, false); // YUV is always opaque
    return yuvDecoded;
}
// Assignment: takes the other frame's bitmap and mirrors all of its metadata.
// The ordering of the assignments below is significant — see the inline
// comments about m_pixelsChanged, setStatus(), and setHasAlpha().
ImageFrame& ImageFrame::operator=(const ImageFrame& other)
{
    if (this == &other)
        return *this;

    m_bitmap = other.m_bitmap;
    // Keep the pixels locked since we will be writing directly into the
    // bitmap throughout this object's lifetime.
    m_bitmap.lockPixels();
    // Be sure to assign this before calling setStatus(), since setStatus() may
    // call notifyBitmapIfPixelsChanged().
    m_pixelsChanged = other.m_pixelsChanged;
    setMemoryAllocator(other.allocator());
    setOriginalFrameRect(other.originalFrameRect());
    setStatus(other.getStatus());
    setDuration(other.duration());
    setDisposalMethod(other.getDisposalMethod());
    setAlphaBlendSource(other.getAlphaBlendSource());
    setPremultiplyAlpha(other.premultiplyAlpha());
    // Be sure that this is called after we've called setStatus(), since we
    // look at our status to know what to do with the alpha value.
    setHasAlpha(other.hasAlpha());
    setRequiredPreviousFrameIndex(other.requiredPreviousFrameIndex());
    return *this;
}