void ImageBufferDataPrivateAccelerated::platformTransformColorSpace(const Vector<int>& lookUpTable) { QPainter* painter = paintDevice()->paintEngine()->painter(); QImage image = toQImage().convertToFormat(QImage::Format_ARGB32); ASSERT(!image.isNull()); uchar* bits = image.bits(); const int bytesPerLine = image.bytesPerLine(); for (int y = 0; y < image.height(); ++y) { quint32* scanLine = reinterpret_cast_ptr<quint32*>(bits + y * bytesPerLine); for (int x = 0; x < image.width(); ++x) { QRgb& pixel = scanLine[x]; pixel = qRgba(lookUpTable[qRed(pixel)], lookUpTable[qGreen(pixel)], lookUpTable[qBlue(pixel)], qAlpha(pixel)); } } painter->save(); painter->resetTransform(); painter->setOpacity(1.0); painter->setClipping(false); painter->setCompositionMode(QPainter::CompositionMode_Source); // Should coordinates be flipped? painter->drawImage(QPoint(0,0), image); painter->restore(); }
void ImageBufferDataPrivateUnaccelerated::platformTransformColorSpace(const Vector<int>& lookUpTable) { QPainter* painter = paintDevice()->paintEngine()->painter(); bool isPainting = painter->isActive(); if (isPainting) painter->end(); QImage image = toQImage().convertToFormat(QImage::Format_ARGB32); ASSERT(!image.isNull()); uchar* bits = image.bits(); const int bytesPerLine = image.bytesPerLine(); for (int y = 0; y < m_pixmap.height(); ++y) { quint32* scanLine = reinterpret_cast_ptr<quint32*>(bits + y * bytesPerLine); for (int x = 0; x < m_pixmap.width(); ++x) { QRgb& pixel = scanLine[x]; pixel = qRgba(lookUpTable[qRed(pixel)], lookUpTable[qGreen(pixel)], lookUpTable[qBlue(pixel)], qAlpha(pixel)); } } m_pixmap = QPixmap::fromImage(image); if (isPainting) painter->begin(&m_pixmap); }
void ImageBufferDataPrivateAccelerated::drawPattern(GraphicsContext* destContext, const FloatRect& srcRect, const AffineTransform& patternTransform, const FloatPoint& phase, ColorSpace styleColorSpace, CompositeOperator op, const FloatRect& destRect, bool /*ownContext*/)
{
    // Snapshot the accelerated contents into a pixmap-backed image and let the
    // image implementation handle the tiling.
    RefPtr<Image> snapshot = StillImage::create(QPixmap::fromImage(toQImage()));
    snapshot->drawPattern(destContext, srcRect, patternTransform, phase, styleColorSpace, op, destRect);
}
// Paints this buffer's contents through the given TextureMapper.
// If the mapper runs in OpenGL mode AND its GL context shares objects with
// this buffer's context, the buffer's texture is drawn directly; otherwise we
// fall back to reading the contents back into a QImage and blitting it with
// the mapper's QPainter under the requested transform.
void ImageBufferDataPrivateAccelerated::paintToTextureMapper(TextureMapper* textureMapper, const FloatRect& targetRect, const TransformationMatrix& matrix, float opacity)
{
    bool canRenderDirectly = false;
    if (textureMapper->accelerationMode() == TextureMapper::OpenGLMode) {
        // Reusing the texture is only valid when both contexts share GL objects.
        if (QOpenGLContext::areSharing(m_context->context(), static_cast<TextureMapperGL*>(textureMapper)->graphicsContext3D()->platformGraphicsContext3D()))
            canRenderDirectly = true;
    }
    if (!canRenderDirectly) {
        // Software fallback: draw a QImage copy, temporarily applying `matrix`
        // on top of the mapper's current 3D transform.
        QImage image = toQImage();
        TransformationMatrix oldTransform = textureMapper->graphicsContext()->get3DTransform();
        textureMapper->graphicsContext()->concat3DTransform(matrix);
        textureMapper->graphicsContext()->platformContext()->drawImage(targetRect, image);
        textureMapper->graphicsContext()->set3DTransform(oldTransform);
        return;
    }
    invalidateState();
#if QT_VERSION >= QT_VERSION_CHECK(5, 4, 0)
    static_cast<TextureMapperGL*>(textureMapper)->drawTexture(m_paintDevice->texture(), TextureMapperGL::ShouldBlend, m_paintDevice->size(), targetRect, matrix, opacity);
#else
    // Pre-5.4 Qt: the texture needs an extra vertical flip to match the
    // mapper's orientation convention (hence ShouldFlipTexture below).
    static_cast<TextureMapperGL*>(textureMapper)->drawTexture(m_paintDevice->texture(), TextureMapperGL::ShouldBlend | TextureMapperGL::ShouldFlipTexture, m_paintDevice->size(), targetRect, matrix, opacity);
#endif
}
void ImageBufferDataPrivateAccelerated::drawPattern(GraphicsContext* destContext, const FloatRect& srcRect, const AffineTransform& patternTransform, const FloatPoint& phase, ColorSpace styleColorSpace, CompositeOperator op, const FloatRect& destRect, bool ownContext) { RefPtr<Image> image = StillImage::create(QPixmap::fromImage(toQImage())); if (destContext->isAcceleratedContext()) { // this causes the QOpenGLPaintDevice of the destContext to be bound so we can draw destContext->platformContext()->beginNativePainting(); destContext->platformContext()->endNativePainting(); } image->drawPattern(destContext, srcRect, patternTransform, phase, styleColorSpace, op, destRect); }
/// Renders the given points onto a copy of this image as thick red dots.
/// @param points  points to mark on the image.
/// @return a QImage copy with the markers painted in.
/// NOTE(review): drawPoint() is fed (getY(), getX()) — this treats the stored
/// coordinates as (row, column), i.e. Y becomes the horizontal axis. Kept
/// as-is since it may be the intended cv-style convention; confirm.
QImage Image::toQImageWithPoints(const std::vector<ImagePoint>& points) const
{
    auto image = toQImage();
    QPainter painter(&image);
    QPen pen(Qt::red);
    pen.setWidth(3);
    painter.setPen(pen);
    // Range-for replaces the original index loop, which compared a signed
    // `int` against the unsigned `points.size()`.
    for (const auto& point : points)
        painter.drawPoint(point.getY(), point.getX());
    return image;
}
void br::GalleryToolBar::_checkWebcam() { static QSharedPointer<cv::VideoCapture> videoCapture; if (videoCapture.isNull()) { videoCapture = QSharedPointer<cv::VideoCapture>(new cv::VideoCapture(0)); cv::Mat m; while (!m.data) videoCapture->read(m); // First frames can be empty } if (galleryLock.tryLock()) { cv::Mat m; videoCapture->read(m); galleryLock.unlock(); enroll(toQImage(m)); } }
// Draws this accelerated buffer into destContext.
// Fast path: when the destination is GL-accelerated and driven by a
// QOpenGL2PaintEngineEx, the buffer's texture is drawn directly — via a
// temporary intermediate FBO when source and destination are the same paint
// device, since sampling a texture while rendering into it is undefined in
// OpenGL. Otherwise the contents are read back into a pixmap and drawn as an
// ordinary image.
void ImageBufferDataPrivateAccelerated::draw(GraphicsContext* destContext, ColorSpace styleColorSpace, const FloatRect& destRect, const FloatRect& srcRect, CompositeOperator op, BlendMode blendMode, bool useLowQualityScale, bool /*ownContext*/)
{
    if (destContext->isAcceleratedContext()) {
        invalidateState();
        // If accelerated compositing is disabled, this may be the painter of the QGLWidget, which is a QGL2PaintEngineEx.
        QOpenGL2PaintEngineEx* acceleratedPaintEngine = dynamic_cast<QOpenGL2PaintEngineEx*>(destContext->platformContext()->paintEngine());
        if (acceleratedPaintEngine) {
            QPaintDevice* targetPaintDevice = acceleratedPaintEngine->paintDevice();
            QRect rect(QPoint(), m_paintDevice->size());
            // drawTexture's rendering is flipped relative to QtWebKit's convention, so we need to compensate
            FloatRect srcRectFlipped = m_paintDevice->paintFlipped() ?
                FloatRect(srcRect.x(), srcRect.maxY(), srcRect.width(), -srcRect.height()) :
                FloatRect(srcRect.x(), rect.height() - srcRect.maxY(), srcRect.width(), srcRect.height());
            // Using the same texture as source and target of a rendering operation is undefined in OpenGL,
            // so if that's the case we need to use a temporary intermediate buffer.
            if (m_paintDevice == targetPaintDevice) {
                m_context->makeCurrentIfNeeded();
                // Blit our texture into a throwaway FBO first, then draw from
                // that copy into the destination.
                QFramebufferPaintDevice device(rect.size(), QOpenGLFramebufferObject::NoAttachment, false);
                // We disable flipping in order to do a pure blit into the intermediate buffer
                device.setPaintFlipped(false);
                QPainter painter(&device);
                QOpenGL2PaintEngineEx* pe = static_cast<QOpenGL2PaintEngineEx*>(painter.paintEngine());
                pe->drawTexture(rect, m_paintDevice->texture(), rect.size(), rect);
                painter.end();
                acceleratedPaintEngine->drawTexture(destRect, device.texture(), rect.size(), srcRectFlipped);
            } else {
                acceleratedPaintEngine->drawTexture(destRect, m_paintDevice->texture(), rect.size(), srcRectFlipped);
            }
            return;
        }
    }
    // Software fallback: read the contents back and draw them as a still image.
    RefPtr<Image> image = StillImage::create(QPixmap::fromImage(toQImage()));
    destContext->drawImage(image.get(), styleColorSpace, destRect, srcRect, op, blendMode, DoNotRespectImageOrientation, useLowQualityScale);
}
QImage ImgProc::compare_test(std::vector<std::vector<cv::Point> > &contours, const QString &path, const float& shape_thress) { cv::Mat img = cv::imread(path.toStdString(), cv::IMREAD_UNCHANGED); int max = contours.size(); cv::Scalar default_color = cv::Scalar(255,255,255, 255); std::vector<cv::Scalar> colors; colors.resize(max, default_color); cv::RNG rng(12345); for(int i=0; i<max; i++) { cv::Scalar col = cv::Scalar(rng.uniform(0,255), rng.uniform(0, 255), rng.uniform(0, 255), 255); for(int j=i+1; j<max; j++) { double match = cv::matchShapes(contours[i], contours[j], CV_CONTOURS_MATCH_I1, 0); if(match < shape_thress) { if(colors[j] == default_color) { colors[j] = col; colors[i] = col; } } else continue; } } for(unsigned i = 0; i< contours.size(); i++ ) { cv::drawContours( img, contours,i,colors[i],5); } return toQImage(img); }
// Draws this buffer's contents into destContext.
// Accelerated destination: flush pending drawing on the shared GL context,
// then draw the FBO texture directly through the destination's GL paint
// engine, flipping the source rect vertically to compensate for the FBO's
// row order. Otherwise: read back into a pixmap and draw via GraphicsContext.
void ImageBufferDataPrivateAccelerated::draw(GraphicsContext* destContext, ColorSpace styleColorSpace, const FloatRect& destRect, const FloatRect& srcRect, CompositeOperator op, BlendMode blendMode, bool useLowQualityScale, bool ownContext)
{
    if (destContext->isAcceleratedContext()) {
        // NOTE(review): previousContext is dereferenced unconditionally, so
        // this assumes some GL context is always current when the destination
        // is accelerated — confirm against callers.
        QOpenGLContext *previousContext = QOpenGLContext::currentContext();
        GLSharedContext::makeCurrent();
        commitChanges();
        previousContext->makeCurrent(previousContext->surface());
        QOpenGL2PaintEngineEx* acceleratedPaintEngine = static_cast<QOpenGL2PaintEngineEx*>(destContext->platformContext()->paintEngine());
        // Mirror the source rect vertically within the FBO's height.
        FloatRect flippedSrc = srcRect;
        flippedSrc.setY(m_fbo->size().height() - flippedSrc.height() - flippedSrc.y());
        acceleratedPaintEngine->drawTexture(destRect, m_fbo->texture(), m_fbo->size(), flippedSrc);
    } else {
        // Software path: snapshot into a pixmap-backed image and draw that.
        RefPtr<Image> image = StillImage::create(QPixmap::fromImage(toQImage()));
        destContext->drawImage(image.get(), styleColorSpace, destRect, srcRect, op, blendMode, DoNotRespectImageOrientation, useLowQualityScale);
    }
}
// Converts an SkBitmap into a QImage.
// Maps Skia's (colorType, alphaType) pair onto the closest matching
// QImage::Format and delegates the pixel conversion to the
// toQImage(bitmap, format) overload. Combinations with no Qt equivalent
// (unknown color/alpha types) fall through and return a null QImage.
QImage toQImage(const SkBitmap &bitmap)
{
    QImage image;
    switch (bitmap.colorType()) {
    case kUnknown_SkColorType:
        break;
    case kAlpha_8_SkColorType:
        image = toQImage(bitmap, QImage::Format_Alpha8);
        break;
    case kRGB_565_SkColorType:
        image = toQImage(bitmap, QImage::Format_RGB16);
        break;
    case kARGB_4444_SkColorType:
        switch (bitmap.alphaType()) {
        case kUnknown_SkAlphaType:
            break;
        case kUnpremul_SkAlphaType:
            // not supported - treat as opaque
        case kOpaque_SkAlphaType:
            image = toQImage(bitmap, QImage::Format_RGB444);
            break;
        case kPremul_SkAlphaType:
            image = toQImage(bitmap, QImage::Format_ARGB4444_Premultiplied);
            break;
        }
        break;
    case kRGBA_8888_SkColorType:
        switch (bitmap.alphaType()) {
        case kUnknown_SkAlphaType:
            break;
        case kOpaque_SkAlphaType:
            image = toQImage(bitmap, QImage::Format_RGBX8888);
            break;
        case kPremul_SkAlphaType:
            image = toQImage(bitmap, QImage::Format_RGBA8888_Premultiplied);
            break;
        case kUnpremul_SkAlphaType:
            image = toQImage(bitmap, QImage::Format_RGBA8888);
            break;
        }
        break;
    case kBGRA_8888_SkColorType:
        // we are assuming little-endian arch here.
        switch (bitmap.alphaType()) {
        case kUnknown_SkAlphaType:
            break;
        case kOpaque_SkAlphaType:
            image = toQImage(bitmap, QImage::Format_RGB32);
            break;
        case kPremul_SkAlphaType:
            image = toQImage(bitmap, QImage::Format_ARGB32_Premultiplied);
            break;
        case kUnpremul_SkAlphaType:
            image = toQImage(bitmap, QImage::Format_ARGB32);
            break;
        }
        break;
    case kIndex_8_SkColorType: {
        image = toQImage(bitmap, QImage::Format_Indexed8);
        // Indexed images additionally need Skia's color table copied over.
        SkColorTable *skTable = bitmap.getColorTable();
        if (skTable) {
            QVector<QRgb> qTable(skTable->count());
            for (int i = 0; i < skTable->count(); ++i)
                qTable[i] = (*skTable)[i];
            image.setColorTable(qTable);
        }
        break;
    }
    case kGray_8_SkColorType:
        image = toQImage(bitmap, QImage::Format_Grayscale8);
        break;
    }
    return image;
}
void ImageBufferDataPrivateAccelerated::clip(GraphicsContext* context, const FloatRect& floatRect) const
{
    // Use the buffer's contents as the alpha mask of a transparency layer
    // covering the enclosing integer rect.
    QPixmap maskPixmap = QPixmap::fromImage(toQImage());
    const IntRect layerRect = enclosingIntRect(floatRect);
    context->pushTransparencyLayerInternal(layerRect, 1.0, maskPixmap);
}
/// Returns a copy of the buffer contents as a still image.
/// The accelerated backing store must always be read back into a QImage, so a
/// deep copy is produced regardless of the requested copy behavior; the
/// parameter name is omitted to make the (intentional) non-use explicit and
/// avoid an unused-parameter warning.
PassRefPtr<Image> ImageBufferDataPrivateAccelerated::copyImage(BackingStoreCopy /*copyBehavior*/) const
{
    return StillImage::create(QPixmap::fromImage(toQImage()));
}
PassRefPtr<Image> ImageBufferDataPrivateAccelerated::copyImage() const
{
    // Read the accelerated contents back and wrap them in a still image.
    QImage snapshot = toQImage();
    return StillImage::create(QPixmap::fromImage(snapshot));
}
void Image::saveAsImage(QString filename) const
{
    // Serialize the current pixels to disk; Qt infers the format from the
    // file suffix.
    const QImage snapshot = toQImage();
    snapshot.save(filename);
}
void CVImage::save(const QString fileName) { toQImage().save(fileName); }