Example #1
    void writePNG(const QImage& image)
    {
#ifndef QT_LSB
        // LSB disallows accessing info_ptr directly. LSB's png_set_IHDR sets
        // the channels anyway, so this line is compiled out for LSB builds.
        info_ptr->channels = 4;
#endif
        png_set_sig_bytes(png_ptr, 8); // Pretend we already wrote the sig
        png_set_IHDR(png_ptr, info_ptr, image.width(), image.height(),
                     8, image.hasAlphaChannel()
                     ? PNG_COLOR_TYPE_RGB_ALPHA : PNG_COLOR_TYPE_RGB,
                     0, 0, 0);
        png_write_info(png_ptr, info_ptr);
        if (!image.hasAlphaChannel())
            png_set_filler(png_ptr, 0,
                           QSysInfo::ByteOrder == QSysInfo::BigEndian ?
                           PNG_FILLER_BEFORE : PNG_FILLER_AFTER);
        //if ( QImage::systemByteOrder() == QImage::BigEndian ) {
        //png_set_swap_alpha(png_ptr);
        //}
        if (QSysInfo::ByteOrder == QSysInfo::LittleEndian) {
            png_set_bgr(png_ptr);
        }

        int height = image.height();
        png_bytep *row_pointers = new png_bytep[height];
        for (int i = 0; i < height; ++i)
            row_pointers[i] = (png_bytep)image.scanLine(i);
        png_write_image(png_ptr, row_pointers);
        delete [] row_pointers;
        png_write_end(png_ptr, info_ptr);
        end_png();
        begin_png();
    }
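The helpers begin_png()/end_png() and the png_ptr/info_ptr handles used above are not shown. A minimal sketch of what they might look like, using only standard libpng calls; the static handles and the omitted write callback are assumptions, not the original implementation:

    #include <png.h>

    static png_structp png_ptr;
    static png_infop info_ptr;

    void begin_png()
    {
        // Create a fresh write struct and info struct for the next image.
        png_ptr = png_create_write_struct(PNG_LIBPNG_VER_STRING, 0, 0, 0);
        info_ptr = png_create_info_struct(png_ptr);
        // A real implementation would also install an output callback here,
        // e.g. png_set_write_fn(png_ptr, ioDevice, writeCallback, flushCallback);
    }

    void end_png()
    {
        // Release both structs once the image has been written.
        png_destroy_write_struct(&png_ptr, &info_ptr);
    }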
Example #2
gpu::Texture* TextureUsage::createNormalTextureFromNormalImage(const QImage& srcImage, const std::string& srcImageName) {
    QImage image = srcImage;

    if (!image.hasAlphaChannel()) {
        if (image.format() != QImage::Format_RGB888) {
            image = image.convertToFormat(QImage::Format_RGB888);
        }
    } else {
        if (image.format() != QImage::Format_ARGB32) {
            image = image.convertToFormat(QImage::Format_ARGB32);
        }
    }

    gpu::Texture* theTexture = nullptr;
    if ((image.width() > 0) && (image.height() > 0)) {

        bool isLinearRGB = true;

        gpu::Element formatGPU = gpu::Element(gpu::VEC3, gpu::UINT8, (isLinearRGB ? gpu::RGB : gpu::SRGB));
        gpu::Element formatMip = gpu::Element(gpu::VEC3, gpu::UINT8, (isLinearRGB ? gpu::RGB : gpu::SRGB));
        if (image.hasAlphaChannel()) {
            formatGPU = gpu::Element(gpu::VEC4, gpu::UINT8, (isLinearRGB ? gpu::RGBA : gpu::SRGBA));
            formatMip = gpu::Element(gpu::VEC4, gpu::UINT8, (isLinearRGB ? gpu::BGRA : gpu::SBGRA));
        }

        theTexture = (gpu::Texture::create2D(formatGPU, image.width(), image.height(), gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_MIP_LINEAR)));
        theTexture->assignStoredMip(0, formatMip, image.byteCount(), image.constBits());
        theTexture->autoGenerateMips(-1);
    }

    return theTexture;
}
Example #3
bool QDirectFBPixmapData::hasAlphaChannel(const QImage &img, Qt::ImageConversionFlags flags)
{
    if (img.depth() == 1)
        return true;
#ifdef QT_DIRECTFB_OPAQUE_DETECTION
    return ((flags & Qt::NoOpaqueDetection) ? img.hasAlphaChannel() : checkForAlphaPixels(img));
#else
    Q_UNUSED(flags);
    return img.hasAlphaChannel();
#endif
}
Example #4
shared_ptr<Image> CameraImageInput::createImageFromImageFormat(Img::ImageData& srcImage)
{
    auto image = std::make_shared<Image>();
    auto& data = srcImage.raw_data;
    QImage qImage = QImage::fromData(data.get_buffer(), data.length());
    const int w = qImage.width();
    const int h = qImage.height();
    const bool hasAlpha = qImage.hasAlphaChannel();
    if(qImage.isGrayscale()){
        image->setSize(w, h, 1);
        unsigned char* pixels = image->pixels();
        for(int y=0; y < h; ++y){
            for(int x=0; x < w; ++x){
                *pixels++ = qGray(qImage.pixel(x, y));
            }
        }
    } else {
        image->setSize(w, h, hasAlpha ? 4 : 3);
        unsigned char* pixels = image->pixels();
        for(int y=0; y < h; ++y){
            for(int x=0; x < w; ++x){
                QRgb rgb = qImage.pixel(x, y);
                *pixels++ = qRed(rgb);
                *pixels++ = qGreen(rgb);
                *pixels++ = qBlue(rgb);
                if(hasAlpha){
                    *pixels++ = qAlpha(rgb);
                }
            }
        }
    }
    return image;
}
Example #5
CursorWindow::CursorWindow(const QImage &img, QPoint hot, QWidget* sk)
	:QWidget(0),
	m_view(0), skin(sk),
	hotspot(hot)
{
    setWindowFlags( Qt::FramelessWindowHint );
    mouseRecipient = 0;
    setMouseTracking(true);
#ifndef QT_NO_CURSOR
    setCursor(Qt::BlankCursor);
#endif
    QPixmap p;
    p = QPixmap::fromImage(img);
    if (!p.mask()) {
	if ( img.hasAlphaChannel() ) {
	    QBitmap bm;
	    bm = QPixmap::fromImage(img.createAlphaMask());
	    p.setMask( bm );
	} else {
	    QBitmap bm;
	    bm = QPixmap::fromImage(img.createHeuristicMask());
	    p.setMask( bm );
	}
    }
    QPalette palette;
    palette.setBrush(backgroundRole(), QBrush(p));
    setPalette(palette);
    setFixedSize( p.size() );
    if ( !p.mask().isNull() )
	setMask( p.mask() );
}
Example #6
void QEglGLPixmapData::fromImage(const QImage &image, Qt::ImageConversionFlags flags)
{
    TRACE();
    resize(image.width(), image.height());

    if (pixelType() == BitmapType) {
        m_source = image.convertToFormat(QImage::Format_MonoLSB);

    } else {
        QImage::Format format = QImage::Format_RGB32;
        if (qApp->desktop()->depth() == 16)
            format = QImage::Format_RGB16;

        if (image.hasAlphaChannel() && const_cast<QImage &>(image).data_ptr()->checkForAlphaPixels())
            format = QImage::Format_ARGB32_Premultiplied;

        m_source = image.convertToFormat(format);
    }

    m_dirty = true;
    m_hasFillColor = false;

    m_hasAlpha = m_source.hasAlphaChannel();
    w = image.width();
    h = image.height();
    is_null = (w <= 0 || h <= 0);
    d = m_source.depth();

    if (m_texture.id) {
        QGLShareContextScope ctx(qt_gl_share_widget()->context());
        glDeleteTextures(1, &m_texture.id);
        m_texture.id = 0;
    }    
}
Example #7
void QDirectFBPixmapData::fromImage(const QImage &i,
                                    Qt::ImageConversionFlags flags)
{
#ifdef QT_NO_DIRECTFB_OPAQUE_DETECTION
    Q_UNUSED(flags);
#endif
    const QImage img = (i.depth() == 1 ? i.convertToFormat(screen->alphaPixmapFormat()) : i);
    if (img.hasAlphaChannel()
#ifndef QT_NO_DIRECTFB_OPAQUE_DETECTION
        && (flags & Qt::NoOpaqueDetection || QDirectFBPixmapData::hasAlphaChannel(img))
#endif
        ) {
        alpha = true;
        format = screen->alphaPixmapFormat();
    } else {
        alpha = false;
        format = screen->pixelFormat();
    }
    dfbSurface = screen->copyToDFBSurface(img, format,
                                          QDirectFBScreen::TrackSurface);
    if (!dfbSurface) {
        qWarning("QDirectFBPixmapData::fromImage()");
        invalidate();
        return;
    }
    setSerialNumber(++global_ser_no);
}
Example #8
void LayerTreeHostProxy::createImage(int64_t imageID, const QImage& image)
{
    TiledImage tiledImage;
    static const int TileDimension = 1024;
    bool imageHasAlpha = image.hasAlphaChannel();
    IntRect imageRect(0, 0, image.width(), image.height());
    for (int y = 0; y < image.height(); y += TileDimension) {
        for (int x = 0; x < image.width(); x += TileDimension) {
            QImage subImage;
            IntRect rect(x, y, TileDimension, TileDimension);
            rect.intersect(imageRect);
            if (QSize(rect.size()) == image.size())
                subImage = image;
            else
                subImage = image.copy(rect);
            RefPtr<BitmapTexture> texture = m_textureMapper->createTexture();
            texture->reset(rect.size(), !imageHasAlpha);
            texture->updateContents(imageHasAlpha ? BitmapTexture::BGRAFormat : BitmapTexture::BGRFormat, IntRect(IntPoint::zero(), rect.size()), subImage.bits());
            tiledImage.add(rect.location(), texture);
        }
    }

    m_directlyCompositedImages.remove(imageID);
    m_directlyCompositedImages.add(imageID, tiledImage);
}
Example #9
QSharedPointer<Texture> DilatableNetworkTexture::getDilatedTexture(float dilation) {
    QSharedPointer<Texture> texture = _dilatedTextures.value(dilation);
    if (texture.isNull()) {
        texture = QSharedPointer<Texture>(new Texture());
        
        if (!_image.isNull()) {
            QImage dilatedImage = _image;
            QPainter painter;
            painter.begin(&dilatedImage);
            QPainterPath path;
            qreal radius = glm::mix((float) _innerRadius, (float) _outerRadius, dilation);
            path.addEllipse(QPointF(_image.width() / 2.0, _image.height() / 2.0), radius, radius);
            painter.fillPath(path, Qt::black);
            painter.end();
            
            glBindTexture(GL_TEXTURE_2D, texture->getID());
            if (dilatedImage.hasAlphaChannel()) {
                glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, dilatedImage.width(), dilatedImage.height(), 1,
                    GL_BGRA, GL_UNSIGNED_BYTE, dilatedImage.constBits());
            } else {
                glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, dilatedImage.width(), dilatedImage.height(), 1,
                    GL_RGB, GL_UNSIGNED_BYTE, dilatedImage.constBits());
            }
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
            glBindTexture(GL_TEXTURE_2D, 0);
        }
        
        _dilatedTextures.insert(dilation, texture);
    }
    return texture;
}
Example #10
gpu::Texture* TextureUsage::createMetallicTextureFromImage(const QImage& srcImage, const std::string& srcImageName) {
    QImage image = srcImage;
    if (!image.hasAlphaChannel()) {
        if (image.format() != QImage::Format_RGB888) {
            image = image.convertToFormat(QImage::Format_RGB888);
        }
    } else {
        if (image.format() != QImage::Format_ARGB32) {
            image = image.convertToFormat(QImage::Format_ARGB32);
        }
    }

    image = image.convertToFormat(QImage::Format_Grayscale8);

    gpu::Texture* theTexture = nullptr;
    if ((image.width() > 0) && (image.height() > 0)) {

#ifdef COMPRESS_TEXTURES
        gpu::Element formatGPU = gpu::Element(gpu::SCALAR, gpu::NUINT8, gpu::COMPRESSED_R);
#else
        gpu::Element formatGPU = gpu::Element(gpu::SCALAR, gpu::NUINT8, gpu::RGB);
#endif
        gpu::Element formatMip = gpu::Element(gpu::SCALAR, gpu::NUINT8, gpu::RGB);

        theTexture = (gpu::Texture::create2D(formatGPU, image.width(), image.height(), gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_MIP_LINEAR)));
        theTexture->assignStoredMip(0, formatMip, image.byteCount(), image.constBits());
        generateMips(theTexture, image, formatMip);

        // FIXME queue for transfer to GPU and block on completion
    }

    return theTexture;
}
Example #11
bool QDirectFBPixmapData::hasAlphaChannel(const QImage &img)
{
#ifndef QT_NO_DIRECTFB_OPAQUE_DETECTION
    return ::checkForAlphaPixels(img);
#else
    return img.hasAlphaChannel();
#endif
}
Example #12
/**
 * Automatic marshaling of a QImage for org.freedesktop.Notifications.Notify
 *
 * This function is from the Clementine project (see
 * http://www.clementine-player.org) and licensed under the GNU General Public
 * License, version 3 or later.
 *
 * Copyright 2010, David Sansome <*****@*****.**>
 */
QDBusArgument &operator<<( QDBusArgument &arg, const QImage &image )
{
  if ( image.isNull() )
  {
    arg.beginStructure();
    arg << 0 << 0 << 0 << false << 0 << 0 << QByteArray();
    arg.endStructure();
    return arg;
  }

  QImage scaled = image.scaledToHeight( 100, Qt::SmoothTransformation );
  scaled = scaled.convertToFormat( QImage::Format_ARGB32 );

#if Q_BYTE_ORDER == Q_LITTLE_ENDIAN
  // ABGR -> ARGB
  QImage i = scaled.rgbSwapped();
#else
  // ABGR -> GBAR
  QImage i( scaled.size(), scaled.format() );
  for ( int y = 0; y < i.height(); ++y )
  {
    QRgb *p = ( QRgb * ) scaled.scanLine( y );
    QRgb *q = ( QRgb * ) i.scanLine( y );
    QRgb *end = p + scaled.width();
    while ( p < end )
    {
      *q = qRgba( qGreen( *p ), qBlue( *p ), qAlpha( *p ), qRed( *p ) );
      p++;
      q++;
    }
  }
#endif

  arg.beginStructure();
  arg << i.width();
  arg << i.height();
  arg << i.bytesPerLine();
  arg << i.hasAlphaChannel();
  int channels = i.isGrayscale() ? 1 : ( i.hasAlphaChannel() ? 4 : 3 );
  arg << i.depth() / channels;
  arg << channels;
  arg << QByteArray( reinterpret_cast<const char *>( i.bits() ), i.numBytes() );
  arg.endStructure();
  return arg;
}
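For QtDBus to use this marshaller, the type also needs a matching extraction operator and a metatype registration. A minimal companion sketch, assuming images are only ever sent and never received over this interface (the stub simply consumes the structure):

  const QDBusArgument &operator>>( const QDBusArgument &arg, QImage &image )
  {
    // Images are never received back, so just skip over the incoming structure.
    arg.beginStructure();
    arg.endStructure();
    image = QImage();
    return arg;
  }

  // Registered once during startup, before the first Notify call:
  // qDBusRegisterMetaType<QImage>();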
Example #13
bool ImageCreator::create(const QString &path, int, int, QImage &img)
{
    // create image preview
    if (!img.load( path ))
        return false;
    if (img.depth() != 32)
        img = img.convertToFormat(img.hasAlphaChannel() ? QImage::Format_ARGB32 : QImage::Format_RGB32);
    return true;
}
Example #14
template< int operation( int, int ) > static QImage changeImage( const QImage& image, int value )
{
    QImage im = image;
    im.detach();
    if( im.colorCount() == 0 ) /* truecolor */
    {
        if( im.format() != QImage::Format_RGB32 ) /* just in case */
            im = im.convertToFormat( QImage::Format_RGB32 );
        int table[ 256 ];
        for( int i = 0;
                i < 256;
                ++i )
            table[ i ] = operation( i, value );
        if( im.hasAlphaChannel() )
        {
            for( int y = 0;
                    y < im.height();
                    ++y )
            {
                QRgb* line = reinterpret_cast< QRgb* >( im.scanLine( y ));
                for( int x = 0;
                        x < im.width();
                        ++x )
                    line[ x ] = qRgba( changeUsingTable( qRed( line[ x ] ), table ),
                            changeUsingTable( qGreen( line[ x ] ), table ),
                            changeUsingTable( qBlue( line[ x ] ), table ),
                            changeUsingTable( qAlpha( line[ x ] ), table ));
            }
        }
        else
        {
            for( int y = 0;
                    y < im.height();
                    ++y )
            {
                QRgb* line = reinterpret_cast< QRgb* >( im.scanLine( y ));
                for( int x = 0;
                        x < im.width();
                        ++x )
                    line[ x ] = qRgb( changeUsingTable( qRed( line[ x ] ), table ),
                            changeUsingTable( qGreen( line[ x ] ), table ),
                            changeUsingTable( qBlue( line[ x ] ), table ));
            }
        }
    }
    else
    {
        QVector<QRgb> colors = im.colorTable();
        for( int i = 0;
                i < im.colorCount();
                ++i )
            colors[ i ] = qRgb( operation( qRed( colors[ i ] ), value ),
                    operation( qGreen( colors[ i ] ), value ),
                    operation( qBlue( colors[ i ] ), value ));
    }
    return im;
}
Example #15
//----------------------------------------------------------------------------------------------------------------------
// Qt Image loading routines
//----------------------------------------------------------------------------------------------------------------------
bool Image::load( const std::string &_fName  ) noexcept
{
#ifdef IMAGE_DEBUG_ON
  std::cerr<<"loading with QImage"<<std::endl;
#endif
  QImage image;
  bool loaded=image.load(_fName.c_str());
  if(loaded ==false)
  {
    std::cerr<<"error loading image "<<_fName.c_str()<<"\n";
  }
  if(loaded == true)
  {
    image=image.mirrored();
    m_width=static_cast<GLuint> (image.width());
    m_height=static_cast<GLuint> (image.height());
    m_hasAlpha=image.hasAlphaChannel();
    if(m_hasAlpha == true)
    {
      m_channels=4;
      m_format = GL_RGBA;
    }
    else
    {
      m_channels=3;
      m_format = GL_RGB;
    }

    m_data.reset(new unsigned char[ m_width*m_height*m_channels]);
    unsigned int index=0;
    QRgb colour;
    for(unsigned int y=0; y<m_height; ++y)
    {
      for(unsigned int x=0; x<m_width; ++x)
      {
        colour=image.pixel(x,y);

        m_data[index++]=static_cast<unsigned char> (qRed(colour));
        m_data[index++]=static_cast<unsigned char> (qGreen(colour));
        m_data[index++]=static_cast<unsigned char> (qBlue(colour));
        if(m_hasAlpha)
        {
          m_data[index++]=static_cast<unsigned char> (qAlpha(colour));
        }
      }
    }
#ifdef IMAGE_DEBUG_ON
  std::cerr<<"size "<<m_width<<" "<<m_height<<std::endl;
  std::cerr<<"channels "<<m_channels<<std::endl;
#endif

    return true;
  }
  else return false;
}
Example #16
void NemoThumbnailProvider::writeCacheFile(const QByteArray &hashKey, const QImage &img)
{
    QFile fi(cacheFileName(hashKey, true));
    if (!fi.open(QIODevice::WriteOnly)) {
        qWarning() << "Couldn't cache to " << fi.fileName();
        return;
    }
    img.save(&fi, img.hasAlphaChannel() ? "PNG" : "JPG");
    fi.flush();
    fi.close();
}
Example #17
/*!
    Sets the image that is associated with this texture to \a pixmap.

    This is a convenience that calls setImage() after converting
    \a pixmap into a QImage.  It may be more efficient on some
    platforms than the application calling QPixmap::toImage().

    \sa setImage()
*/
void QGLTexture2D::setPixmap(const QPixmap& pixmap)
{
    QImage image = pixmap.toImage();
    if (pixmap.depth() == 16 && !image.hasAlphaChannel()) {
        // If the system depth is 16 and the pixmap doesn't have an alpha channel
        // then we convert it to RGB16 in the hope that it gets uploaded as a 16
        // bit texture which is much faster to access than a 32-bit one.
        image = image.convertToFormat(QImage::Format_RGB16);
    }
    setImage(image);
}
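A hypothetical call site, just to illustrate the usage described in the comment above (the texture object and the resource path are made up):

    QGLTexture2D texture;
    texture.setPixmap(QPixmap(":/textures/brick.png")); // converted to a QImage internally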
Example #18
//----------------------------------------------------------------------------------------------------------------------
bool Texture::loadImage( const std::string &_fName  )
{
 // std::cout<<"NGL loading texture\n";
  QImage *image = new QImage();
  bool loaded=image->load(_fName.c_str());
  if(loaded ==false)
  {
    std::cerr<<"error loading image "<<_fName.c_str()<<"\n";
  }
  if(loaded == true)
  {
    m_width=image->width();
    m_height=image->height();
    m_hasAlpha=image->hasAlphaChannel();
    if(m_hasAlpha == true)
    {
      m_bpp=4;
      m_format = GL_RGBA;
    }
    else
    {
      m_bpp=3;
      m_format = GL_RGB;
    }

    m_data.reset(new unsigned char[ m_width*m_height*m_bpp]);
    unsigned int index=0;
    QRgb colour;
    for(unsigned int y=m_height; y-- > 0; ) // walk rows bottom-up, including row 0
    {
      for(unsigned int x=0; x<m_width; ++x)
      {
        colour=image->pixel(x,y);

        m_data[index++]=qRed(colour);
        m_data[index++]=qGreen(colour);
        m_data[index++]=qBlue(colour);
        if(m_hasAlpha)
        {
          m_data[index++]=qAlpha(colour);
        }
      }
    }

   // std::cout<<"Image size ="<<m_width<<"x"<<m_height<<std::endl;
   // std::cout<<"has alpha = "<<m_hasAlpha<<std::endl;
   // std::cout<<"bpp = "<<m_bpp<<std::endl;
    return true;
  }
  else return false;
}
Example #19
void KoResourceItemDelegate::paint( QPainter * painter, const QStyleOptionViewItem & option, const QModelIndex & index ) const
{
    if( ! index.isValid() )
        return;

    KoResource * resource = static_cast<KoResource*>( index.internalPointer() );
    if (!resource)
        return;

    painter->save();

    if (option.state & QStyle::State_Selected)
        painter->fillRect( option.rect, option.palette.highlight() );

    QRect innerRect = option.rect.adjusted( 2, 1, -2, -1 );

    KoAbstractGradient * gradient = dynamic_cast<KoAbstractGradient*>( resource );
    if (gradient) {
        QGradient * g = gradient->toQGradient();

        QLinearGradient paintGradient;
        paintGradient.setStops( g->stops() );
        paintGradient.setStart( innerRect.topLeft() );
        paintGradient.setFinalStop( innerRect.topRight() );

        m_checkerPainter.paint( *painter, innerRect );
        painter->fillRect( innerRect, QBrush( paintGradient ) );

        delete g;
    }
    else {
        QImage thumbnail = index.data( Qt::DecorationRole ).value<QImage>();

        QSize imageSize = thumbnail.size();

        if(imageSize.height() > innerRect.height() || imageSize.width() > innerRect.width()) {
            qreal scaleW = static_cast<qreal>( innerRect.width() ) / static_cast<qreal>( imageSize.width() );
            qreal scaleH = static_cast<qreal>( innerRect.height() ) / static_cast<qreal>( imageSize.height() );

            qreal scale = qMin( scaleW, scaleH );

            int thumbW = static_cast<int>( imageSize.width() * scale );
            int thumbH = static_cast<int>( imageSize.height() * scale );
            thumbnail = thumbnail.scaled( thumbW, thumbH, Qt::IgnoreAspectRatio, Qt::SmoothTransformation );
        }
        painter->setRenderHint(QPainter::SmoothPixmapTransform, true);
        if (thumbnail.hasAlphaChannel()) {
            painter->fillRect(innerRect, Qt::white); // no checkers, they are confusing with patterns.
        }
        painter->fillRect( innerRect, QBrush(thumbnail) );
    }
    painter->restore();
}
Example #20
//-------------------------------------------------------------------//
bool Texture::load(const std::string &_file)
{
    // Based on Jon Macey's ngl::Texture //

    m_filepath = _file;

    std::cout<<"loading texture: "<<_file<<std::endl;
    QImage *image = new QImage();
    bool loaded=image->load(_file.c_str());
    if(loaded == true)
    {
      m_width=image->width();
      m_height=image->height();
      m_hasAlpha=image->hasAlphaChannel();
      if(m_hasAlpha == true)
      {
        m_bpp=4;
        m_format = GL_RGBA;
      }
      else
      {
        m_bpp=3;
        m_format = GL_RGB;
      }
      QRgb colour;
      m_data.resize(m_width*m_height*m_bpp);
      unsigned int index =0;
      for(int y=m_height-1; y>-1; y--)
      {
        for(unsigned int x=0; x<m_width; ++x)
        {
          colour=image->pixel(x,y);

          m_data[index++] = qRed(colour);
          m_data[index++] = qGreen(colour);
          m_data[index++] = qBlue(colour);
          if(m_hasAlpha == true)
          {
            m_data[index++] = qAlpha(colour);
          }
        }
      }

      std::cout<<"Image size ="<<m_width<<"x"<<m_height<<std::endl;

      loadData(m_width, m_height, m_bpp, &m_data[0]);

      return true;

    }
    else return false;
}
Example #21
QImage::Format QGLPixmapData::idealFormat(QImage &image, Qt::ImageConversionFlags flags)
{
    QImage::Format format = QImage::Format_RGB32;
    if (qApp->desktop()->depth() == 16)
        format = QImage::Format_RGB16;

    if (image.hasAlphaChannel()
        && ((flags & Qt::NoOpaqueDetection)
            || const_cast<QImage &>(image).data_ptr()->checkForAlphaPixels()))
        format = QImage::Format_ARGB32_Premultiplied;

    return format;
}
Example #22
void ImageReader::run() {
    QSharedPointer<Resource> texture = _texture.toStrongRef();
    if (texture.isNull()) {
        _reply->deleteLater();
        return;
    }
    QUrl url = _reply->url();
    QImage image = QImage::fromData(_reply->readAll());
    _reply->deleteLater();
    
    // enforce a fixed maximum
    const int MAXIMUM_SIZE = 1024;
    if (image.width() > MAXIMUM_SIZE || image.height() > MAXIMUM_SIZE) {
        qDebug() << "Image greater than maximum size:" << url << image.width() << image.height();
        image = image.scaled(MAXIMUM_SIZE, MAXIMUM_SIZE, Qt::KeepAspectRatio);
    }
    
    if (!image.hasAlphaChannel()) {
        if (image.format() != QImage::Format_RGB888) {
            image = image.convertToFormat(QImage::Format_RGB888);
        }
        QMetaObject::invokeMethod(texture.data(), "setImage", Q_ARG(const QImage&, image), Q_ARG(bool, false));
        return;
    }
    if (image.format() != QImage::Format_ARGB32) {
        image = image.convertToFormat(QImage::Format_ARGB32);
    }
    
    // check for translucency/false transparency
    int opaquePixels = 0;
    int translucentPixels = 0;
    const int EIGHT_BIT_MAXIMUM = 255;
    const int RGB_BITS = 24;
    for (int y = 0; y < image.height(); y++) {
        for (int x = 0; x < image.width(); x++) {
            int alpha = image.pixel(x, y) >> RGB_BITS;
            if (alpha == EIGHT_BIT_MAXIMUM) {
                opaquePixels++;
            } else if (alpha != 0) {
                translucentPixels++;
            }
        }
    }
    int imageArea = image.width() * image.height();
    if (opaquePixels == imageArea) {
        qDebug() << "Image with alpha channel is completely opaque:" << url;
        image = image.convertToFormat(QImage::Format_RGB888);
    }
    QMetaObject::invokeMethod(texture.data(), "setImage", Q_ARG(const QImage&, image),
        Q_ARG(bool, translucentPixels >= imageArea / 2));
}
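The per-pixel QImage::pixel() scan above is easy to read but slow on large images. A sketch of the same opacity count using scanLine(), which avoids the per-call overhead; the helper name is mine, and it assumes Format_ARGB32 as in the code above:

static void countAlpha(const QImage& image, int& opaquePixels, int& translucentPixels) {
    const int EIGHT_BIT_MAXIMUM = 255;
    opaquePixels = translucentPixels = 0;
    for (int y = 0; y < image.height(); y++) {
        // Read a whole row of ARGB32 pixels at once.
        const QRgb* line = reinterpret_cast<const QRgb*>(image.scanLine(y));
        for (int x = 0; x < image.width(); x++) {
            int alpha = qAlpha(line[x]);
            if (alpha == EIGHT_BIT_MAXIMUM) {
                opaquePixels++;
            } else if (alpha != 0) {
                translucentPixels++;
            }
        }
    }
}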
Example #23
void TextureUsage::defineColorTexelFormats(gpu::Element& formatGPU, gpu::Element& formatMip, 
const QImage& image, bool isLinear, bool doCompress) {

#ifdef COMPRESS_TEXTURES
#else
    doCompress = false;
#endif

    if (image.hasAlphaChannel()) {
        gpu::Semantic gpuSemantic;
        gpu::Semantic mipSemantic;
        if (isLinear) {
            mipSemantic = gpu::BGRA;
            if (doCompress) {
                gpuSemantic = gpu::COMPRESSED_RGBA;
            } else {
                gpuSemantic = gpu::RGBA;
            }
        } else {
            mipSemantic = gpu::SBGRA;
            if (doCompress) {
                gpuSemantic = gpu::COMPRESSED_SRGBA;
            } else {
                gpuSemantic = gpu::SRGBA;
            }
        }
        formatGPU = gpu::Element(gpu::VEC4, gpu::NUINT8, gpuSemantic);
        formatMip = gpu::Element(gpu::VEC4, gpu::NUINT8, mipSemantic);
    } else {
        gpu::Semantic gpuSemantic;
        gpu::Semantic mipSemantic;
        if (isLinear) {
            mipSemantic = gpu::RGB;
            if (doCompress) {
                gpuSemantic = gpu::COMPRESSED_RGB;
            } else {
                gpuSemantic = gpu::RGB;
            }
        } else {
            mipSemantic = gpu::SRGB;
            if (doCompress) {
                gpuSemantic = gpu::COMPRESSED_SRGB;
            } else {
                gpuSemantic = gpu::SRGB;
            }
        }
        formatGPU = gpu::Element(gpu::VEC3, gpu::NUINT8, gpuSemantic);
        formatMip = gpu::Element(gpu::VEC3, gpu::NUINT8, mipSemantic);
    }
}
Example #24
/// Returns a texture version of an image file
gpu::TexturePointer TextureCache::getImageTexture(const QString& path) {
    QImage image = QImage(path).mirrored(false, true);
    gpu::Element formatGPU = gpu::Element(gpu::VEC3, gpu::NUINT8, gpu::RGB);
    gpu::Element formatMip = gpu::Element(gpu::VEC3, gpu::NUINT8, gpu::RGB);
    if (image.hasAlphaChannel()) {
        formatGPU = gpu::Element(gpu::VEC4, gpu::NUINT8, gpu::RGBA);
        formatMip = gpu::Element(gpu::VEC4, gpu::NUINT8, gpu::BGRA);
    }
    gpu::TexturePointer texture = gpu::TexturePointer(
        gpu::Texture::create2D(formatGPU, image.width(), image.height(), 
            gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_MIP_LINEAR)));
    texture->assignStoredMip(0, formatMip, image.byteCount(), image.constBits());
    texture->autoGenerateMips(-1);
    return texture;
}
Example #25
void Slideshow::setTexture (QImage ts[], uint length)
{
  glTexParameteri (GL_TEXTURE_RECTANGLE, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
  glTexParameteri (GL_TEXTURE_RECTANGLE, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
  glTexParameteri (GL_TEXTURE_RECTANGLE, GL_TEXTURE_WRAP_S, GL_CLAMP);
  glTexParameteri (GL_TEXTURE_RECTANGLE, GL_TEXTURE_WRAP_T, GL_CLAMP);
  logGLError("in glTexParameteri");
  for (uint i=0; i<length; i++) {
    QImage t = ts[i];
    int w = t.width ();
    int h = t.height ();
    glTexImage2D (GL_TEXTURE_RECTANGLE, i, 3 + (t.hasAlphaChannel() ? 1 : 0), w, h, 0, GL_RGBA, GL_UNSIGNED_INT_8_8_8_8_REV, t.bits());
    logGLError("in glTexImage2D");
  }
}
Example #26
QImage ImageTrim::createImage(const QImage& sourceImage, bool maxAlphaValue, QRect& cropRect) {
    if (sourceImage.width() < 2 || sourceImage.height() < 2 || !sourceImage.hasAlphaChannel()) {
        return sourceImage;
    }

    cropRect = getBoundingBox(sourceImage, maxAlphaValue);
    if (cropRect.x() > 0 ||
        cropRect.y() > 0 ||
        cropRect.width() < sourceImage.width() ||
        cropRect.height() < sourceImage.height())
    {
        return sourceImage.copy(cropRect);
    }
    return sourceImage;
}
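getBoundingBox() is not shown above. A possible implementation, assumed rather than taken from the original source, scans the alpha channel for the smallest rectangle containing all non-transparent pixels (how maxAlphaValue influences the threshold is not visible here, so it is omitted):

QRect getBoundingBoxSketch(const QImage& image) {
    int left = image.width(), right = -1, top = image.height(), bottom = -1;
    for (int y = 0; y < image.height(); ++y) {
        for (int x = 0; x < image.width(); ++x) {
            if (qAlpha(image.pixel(x, y)) != 0) {
                left = qMin(left, x);
                right = qMax(right, x);
                top = qMin(top, y);
                bottom = qMax(bottom, y);
            }
        }
    }
    if (right < left) {
        // Fully transparent image: nothing to crop.
        return QRect(0, 0, image.width(), image.height());
    }
    return QRect(QPoint(left, top), QPoint(right, bottom));
}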
Example #27
QDBusArgument& operator<< (QDBusArgument& arg, const DBusNotifyImageData &data) {
	if (data.image.isNull()) {
		// Sometimes this gets called with a null QImage for no obvious reason.
		// - There is one reason: Qt calls this method at first time to research it's structure
		arg.beginStructure();
		arg << 0 << 0 << 0 << false << 0 << 0 << QByteArray();
		arg.endStructure();
		return arg;
	}
	QImage scaled = data.image.scaledToHeight(qMin(100, qMin(data.image.height(), data.image.width())),
											  Qt::SmoothTransformation).toImage();
	QImage i = scaled.convertToFormat(QImage::Format_ARGB32).rgbSwapped();
	arg.beginStructure();
	arg << i.width();
	arg << i.height();
	arg << i.bytesPerLine();
	arg << i.hasAlphaChannel();
	int channels = i.isGrayscale() ? 1 : (i.hasAlphaChannel() ? 4 : 3);
	arg << i.depth() / channels;
	arg << channels;
	arg << QByteArray(reinterpret_cast<const char*>(i.bits()), i.numBytes());
	arg.endStructure();
	return arg;
}
Example #28
void NetworkTexture::setImage(const QImage& image, bool translucent) {
    _translucent = translucent;
    
    finishedLoading(true);
    imageLoaded(image);
    glBindTexture(GL_TEXTURE_2D, getID());
    if (image.hasAlphaChannel()) {
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, image.width(), image.height(), 1,
            GL_BGRA, GL_UNSIGNED_BYTE, image.constBits());
    } else {
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, image.width(), image.height(), 1,
            GL_RGB, GL_UNSIGNED_BYTE, image.constBits());
    }
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glBindTexture(GL_TEXTURE_2D, 0);
}
Example #29
void QDirectFBPixmapData::fromImage(const QImage &img, Qt::ImageConversionFlags flags)
{
    alpha = QDirectFBPixmapData::hasAlphaChannel(img, flags);
    imageFormat = alpha ? screen->alphaPixmapFormat() : screen->pixelFormat();
    QImage image;
    if ((flags & ~Qt::NoOpaqueDetection) != Qt::AutoColor) {
        image = img.convertToFormat(imageFormat, flags);
        flags = Qt::AutoColor;
    } else if (img.format() == QImage::Format_RGB32 || img.depth() == 1) {
        image = img.convertToFormat(imageFormat, flags);
    } else {
        image = img;
    }

    IDirectFBSurface *imageSurface = screen->createDFBSurface(image, image.format(), QDirectFBScreen::DontTrackSurface);
    if (!imageSurface) {
        qWarning("QDirectFBPixmapData::fromImage()");
        invalidate();
        return;
    }

    dfbSurface = screen->createDFBSurface(image.size(), imageFormat, QDirectFBScreen::TrackSurface);
    if (!dfbSurface) {
        qWarning("QDirectFBPixmapData::fromImage()");
        invalidate();
        return;
    }

    if (image.hasAlphaChannel()) {
        dfbSurface->Clear(dfbSurface, 0, 0, 0, 0);
        dfbSurface->SetBlittingFlags(dfbSurface, DSBLIT_BLEND_ALPHACHANNEL);
    } else {
        dfbSurface->SetBlittingFlags(dfbSurface, DSBLIT_NOFX);
    }

    dfbSurface->Blit(dfbSurface, imageSurface, 0, 0, 0);
    imageSurface->Release(imageSurface);

    w = image.width();
    h = image.height();
    is_null = (w <= 0 || h <= 0);
    d = QDirectFBScreen::depth(imageFormat);
    setSerialNumber(++global_ser_no);
#ifdef QT_NO_DIRECTFB_OPAQUE_DETECTION
    Q_UNUSED(flags);
#endif
}
Example #30
/*!
    out-of-place conversion (inPlace == false) will always detach()
 */
void QGLPixmapData::createPixmapForImage(QImage &image, Qt::ImageConversionFlags flags, bool inPlace)
{
    if (image.size() == QSize(w, h))
        setSerialNumber(++qt_gl_pixmap_serial);

    resize(image.width(), image.height());

    if (pixelType() == BitmapType) {
        m_source = image.convertToFormat(QImage::Format_MonoLSB);

    } else {
        QImage::Format format = QImage::Format_RGB32;
        if (qApp->desktop()->depth() == 16)
            format = QImage::Format_RGB16;

        if (image.hasAlphaChannel()
            && ((flags & Qt::NoOpaqueDetection)
                || const_cast<QImage &>(image).data_ptr()->checkForAlphaPixels()))
            format = QImage::Format_ARGB32_Premultiplied;

        if (inPlace && image.data_ptr()->convertInPlace(format, flags)) {
            m_source = image;
        } else {
            m_source = image.convertToFormat(format);

            // convertToFormat won't detach the image if format stays the same.
            if (image.format() == format)
                m_source.detach();
        }
    }

    m_dirty = true;
    m_hasFillColor = false;

    m_hasAlpha = m_source.hasAlphaChannel();
    w = image.width();
    h = image.height();
    is_null = (w <= 0 || h <= 0);
    d = m_source.depth();

    if (m_texture.id) {
        QGLShareContextScope ctx(qt_gl_share_context());
        glDeleteTextures(1, &m_texture.id);
        m_texture.id = 0;
    }
}