static void saveAsQuadraticPng(const QPixmap &pixmap, const QString &fileName) { QImage icon = pixmap.toImage(); icon = icon.convertToFormat(QImage::Format_ARGB32); const int iconExtent = qMax(icon.width(), icon.height()); icon = icon.copy((icon.width() - iconExtent) / 2, (icon.height() - iconExtent) / 2, iconExtent, iconExtent); icon.save(fileName, "PNG"); }
// Loads `image` and builds `levels` progressively half-sized samples in
// image_samples (a mipmap-like chain). Emits changed() on every exit path
// and mirrors the returned status in _status.
ImageIO::Status ImageIO::load( const QImage& image, int levels )
{
// NOTE(review): this macro is defined but never used below, and it is not
// #undef'd, so it leaks into the rest of the translation unit — confirm
// nothing later relies on it before removing.
#define SUPPORTED_DEPTH QImage::Format_ARGB32
    // Remove previously loaded image samples
    delete[] image_samples;
    // Create image levels (also records the level count in image_levels)
    image_samples = new QImage[ image_levels = levels ];
    if( image.isNull() )
    {
        // Notify of change to image
        emit changed();
        _status = LOAD_ERROR;
        return LOAD_ERROR;
    }
    // Load the original image; the two 32-bit formats are stored as-is
    switch( image.format() )
    {
    case QImage::Format_RGB32:
    case QImage::Format_ARGB32:
        image_samples[ 0 ] = image;
        break;
    default:
    {
        // Try to convert image to supported depth
        image_samples[ 0 ] = image.convertToFormat( QImage::Format_ARGB32 );
        if( image_samples[ 0 ].isNull() )
        {
            // Notify of change to image
            emit changed();
            _status = DEPTH_ERROR;
            return DEPTH_ERROR;
        }
    }
    }
    // For each level after the first
    for( int i = 1; i < levels; ++i )
    {
        // Load image half the size of the previous image
        // (extents clamped to at least 1 so scaled() never gets zero)
        QImage *prev_sample = image_samples + i - 1;
        int width = prev_sample->width() / 2;
        int height = prev_sample->height() / 2;
        image_samples[ i ] = prev_sample->scaled( width ? width : 1, height ? height : 1 );
    }
    // Notify of change to image
    emit changed();
    _status = NORMAL;
    return NORMAL;
}
// Returns `image` unchanged when it is already one of the 32-bit formats
// the caller handles directly; otherwise returns an ARGB32 conversion.
static QImage convertImage(const QImage &image)
{
    const QImage::Format fmt = image.format();
    if (fmt == QImage::Format_ARGB32
            || fmt == QImage::Format_ARGB32_Premultiplied
            || fmt == QImage::Format_RGB32) {
        return image;
    }
    return image.convertToFormat(QImage::Format_ARGB32);
}
// Applies the punch effect to `image` (optionally restricted to clipRect)
// and returns the result converted back to the input's pixel format.
QImage PunchFilter::apply(const QImage &image, const QRect& clipRect /*= QRect()*/ ) const
{
    const QImage::Format originalFormat = image.format();
    QImage result;
    Punch(image, &result, clipRect);
    return result.format() == originalFormat
            ? result
            : result.convertToFormat(originalFormat);
}
// Renders the glyph into an image and guarantees a 32-bit result,
// converting to RGB32 only when the rendered mask is not already 32bpp.
QImage QFontEngineDirectWrite::alphaRGBMapForGlyph(glyph_t t,
                                                   QFixed subPixelPosition,
                                                   int margin,
                                                   const QTransform &xform)
{
    const QImage mask = imageForGlyph(t, subPixelPosition, margin, xform);
    if (mask.depth() == 32)
        return mask;
    return mask.convertToFormat(QImage::Format_RGB32);
}
// Normalizes the two other 32-bit variants (RGB32 and premultiplied
// ARGB32) to plain ARGB32; every other format passes through untouched.
static QImage simplifyFormats(const QImage& img)
{
    const QImage::Format fmt = img.format();
    const bool needsConversion =
            (fmt == QImage::Format_RGB32) || (fmt == QImage::Format_ARGB32_Premultiplied);
    return needsConversion ? img.convertToFormat(QImage::Format_ARGB32) : img;
}
void QVGPixmapData::fromImage (const QImage &image, Qt::ImageConversionFlags flags) { if (image.size() == QSize(w, h)) setSerialNumber(++qt_vg_pixmap_serial); else resize(image.width(), image.height()); source = image.convertToFormat(sourceFormat(), flags); recreate = true; }
void ImageContents::setImage(const QImage &image, int index, int level) { if (d->imageFormat == QImage::Format_Invalid) { d->imageFormat = image.format(); } QImage copy(image); if (image.format() != d->imageFormat) copy = image.convertToFormat(d->imageFormat); d->images.insert(ImageContentsData::ImageIndex(index, level), copy); }
// Composites the zoomed base tile with four overlay images (each drawn
// with its own opacity) into an ARGB32_Premultiplied image of display
// size, caches the result and publishes it via setBigImage().
void App::applyEffect(QRectF rect, float zoom, float opacityA, float opacityB, float opacityC, float opacityD)
{
    QTime t;
    t.start();

    QRectF targetRect(0, 0, m_displayWidth, m_displayHeight);
    QRectF sourceRect(rect.x() / zoom, rect.y() / zoom, rect.width(), rect.height());

    QImage resultImage(QSize(m_displayWidth, m_displayHeight), QImage::Format_ARGB32_Premultiplied);

    // BUG FIX: QImage::convertToFormat() returns a converted copy and does
    // not modify the object it is called on; the previous code discarded
    // the return value, so all five conversions were silent no-ops. Assign
    // the result back so the painter really composites premultiplied data.
    QImage baseImage = Helper::createImageFromTile(m_tile, m_displayWidth, m_displayHeight);
    baseImage = baseImage.convertToFormat(QImage::Format_ARGB32_Premultiplied);

    QDir imageDir = QDir::current();
    imageDir.cd("app/native/assets/images");
    QImage overlay1(imageDir.absoluteFilePath("effect1.png"));
    overlay1 = overlay1.convertToFormat(QImage::Format_ARGB32_Premultiplied);
    QImage overlay2(imageDir.absoluteFilePath("effect2.png"));
    overlay2 = overlay2.convertToFormat(QImage::Format_ARGB32_Premultiplied);
    QImage overlay3(imageDir.absoluteFilePath("effect3.png"));
    overlay3 = overlay3.convertToFormat(QImage::Format_ARGB32_Premultiplied);
    QImage overlay4(imageDir.absoluteFilePath("effect4.png"));
    overlay4 = overlay4.convertToFormat(QImage::Format_ARGB32_Premultiplied);
    qDebug() << "TIME: loading images:" << t.elapsed();
    t.restart();

    // Draw the base tile, then blend each overlay with its own opacity.
    QPainter p(&resultImage);
    p.drawImage(targetRect, baseImage, sourceRect);
    p.setCompositionMode(QPainter::CompositionMode_SourceOver);
    p.setOpacity(opacityA);
    p.drawImage(0, 0, overlay1);
    p.setOpacity(opacityB);
    p.drawImage(0, 0, overlay2);
    p.setOpacity(opacityC);
    p.drawImage(0, 0, overlay3);
    p.setOpacity(opacityD);
    p.drawImage(0, 0, overlay4);
    p.end();
    qDebug() << "TIME drawing final image:" << t.elapsed();
    t.restart();

    m_bigImageCache = resultImage;
    bb::cascades::Image cimg = Helper::convertImage(resultImage);
    setBigImage(cimg);
    qDebug() << "TIME converting final image:" << t.elapsed();
    t.restart();
}
// Uploads the contents of `buffer` into this canvas' Ogre texture.
// No-op in headless mode or for empty buffers; non-ARGB32 inputs are
// converted first. When update_internals_ is set, the texture is also
// (re)attached to the material and the submeshes refreshed.
void EC_WidgetCanvas::Update(QImage buffer)
{
    if (framework->IsHeadless())
        return;
    if (buffer.width() <= 0 || buffer.height() <= 0)
        return;

    if (buffer.format() != QImage::Format_ARGB32 && buffer.format() != QImage::Format_ARGB32_Premultiplied)
    {
        LogWarning("EC_WidgetCanvas::Update(QImage buffer): Input format needs to be Format_ARGB32 or Format_ARGB32_Premultiplied, preforming auto conversion!");
        buffer = buffer.convertToFormat(QImage::Format_ARGB32);
        if (buffer.isNull())
        {
            LogError("-- Auto conversion failed, not updating!");
            return;
        }
    }

    try
    {
        Ogre::TexturePtr texture = Ogre::TextureManager::getSingleton().getByName(texture_name_);
        if (texture.isNull())
            return;

        // Set texture to material if need be
        if (update_internals_ && !material_name_.empty())
        {
            Ogre::MaterialPtr material = Ogre::MaterialManager::getSingleton().getByName(material_name_);
            if (material.isNull())
                return;
            // Just for good measure, this is done once in the ctor already if everything went well.
            OgreRenderer::SetTextureUnitOnMaterial(material, texture_name_);
            UpdateSubmeshes();
            update_internals_ = false;
        }

        // Recreate the GPU texture when the buffer size changed.
        if ((int)texture->getWidth() != buffer.width() || (int)texture->getHeight() != buffer.height())
        {
            texture->freeInternalResources();
            texture->setWidth(buffer.width());
            texture->setHeight(buffer.height());
            texture->createInternalResources();
        }

        Blit(buffer, texture);
    }
    catch (Ogre::Exception &e) // inherits std::exception
    {
        LogError("Exception occurred while blitting texture data from memory: " + std::string(e.what()));
    }
    catch (...)
    {
        LogError("Unknown exception occurred while blitting texture data from memory.");
    }
}
// Reads the MNIST training-image file `filename` (big-endian IDX format):
// validates the magic number, image count and dimensions, then reads
// NUMBER_OF_READING images. Each raw 8-bit pixel buffer is kept alive in
// trainingImageBuffer (owned by this reader — the Indexed8 wrapper does
// not copy it) and an RGB888 deep copy is stored in trainingImages.
// Returns false if the file cannot be opened or the magic number is wrong.
bool MNISTReader::readTrainingImage(QString filename)
{
    QFile file(filename);
    if (!file.open(QIODevice::ReadOnly)) {
        qDebug() << "Can't open training image file " << file.fileName();
        return false;
    }
    QDataStream in(&file); // QDataStream defaults to big-endian, matching IDX

    // Read and check the magic number.
    quint32 magicNumber;
    in >> magicNumber;
    if (magicNumber != TRAINING_IMAGE_MAGIC_NUMBER) {
        qDebug() << "This (" << magicNumber << ") is not the correct MNIST TRAINING image file!";
        return false;
    }
    qDebug("magic number = 0x%08x\n", magicNumber);

    // Read the number of images.
    quint32 numberOfImages;
    in >> numberOfImages;
    qDebug("number of items = %d\n", numberOfImages);
    Q_ASSERT(numberOfImages == TRAINING_IMAGE_CNT);

    // Read image dimensions.
    quint32 rowCnt, colCnt;
    in >> rowCnt >> colCnt;
    Q_ASSERT(rowCnt == MNIST_IMAGE_HEIGHT);
    Q_ASSERT(colCnt == MNIST_IMAGE_WIDTH);

    // 256-entry grayscale palette for the Indexed8 wrapper image.
    QVector<QRgb> grayscaleTable;
    grayscaleTable.reserve(256);
    for (int i = 0; i < 256; i++)
        grayscaleTable.push_back(qRgb(i, i, i));

    // Read NUMBER_OF_READING images (a subset of TRAINING_IMAGE_CNT).
    for (int i = 0; i < NUMBER_OF_READING; ++i) {
        // Raw pixel data; ownership is transferred to trainingImageBuffer.
        uchar *pImageBuffer = new uchar[rowCnt * colCnt];
        for (uint j = 0; j < rowCnt * colCnt; ++j)
            in >> pImageBuffer[j];

        // IMPROVED: the wrapper QImage was heap-allocated with a manual
        // delete; a stack object is exception-safe and removes the raw
        // new/delete pair. convertToFormat() makes a deep copy, so the
        // wrapper does not need to outlive this iteration.
        QImage indexed(pImageBuffer, colCnt, rowCnt, QImage::Format_Indexed8);
        indexed.setColorTable(grayscaleTable);
        trainingImageBuffer.push_back(pImageBuffer);
        trainingImages.push_back(indexed.convertToFormat(QImage::Format_RGB888));
    }
    return true;
}
/*! Sets the image that is associated with this texture to \a pixmap. This is a convenience that calls setImage() after converting \a pixmap into a QImage. It may be more efficient on some platforms than the application calling QPixmap::toImage(). \sa setImage() */ void QGLTexture2D::setPixmap(const QPixmap& pixmap) { QImage image = pixmap.toImage(); if (pixmap.depth() == 16 && !image.hasAlphaChannel()) { // If the system depth is 16 and the pixmap doesn't have an alpha channel // then we convert it to RGB16 in the hope that it gets uploaded as a 16 // bit texture which is much faster to access than a 32-bit one. image = image.convertToFormat(QImage::Format_RGB16); } setImage(image); }
// Constructs the QR-code image wrapper: loads the picture at
// FilePathName, converts it to RGB32 and caches its dimensions.
QRcodeImage::QRcodeImage(QString FilePathName)
{
    DECIMAL_POINT = 21;

    // Load the image from disk.
    QImage loaded;
    loaded.load(FilePathName);

    m_EncodeImage = loaded.convertToFormat(QImage::Format_RGB32);
    m_Width = m_EncodeImage.width();
    m_Height = m_EncodeImage.height();
}
// Converts a QImage to a single-channel grayscale cv::Mat.
// Note: `src` is passed by non-const reference and is converted to
// RGB888 in place when needed, so the caller's image may be modified.
cv::Mat QImage2Mat(QImage &src)
{
    assert(!src.isNull());
    if(src.format() != QImage::Format_RGB888){
        src = src.convertToFormat(QImage::Format_RGB888);
    }
    // Wrap the QImage pixel data without copying; bytesPerLine() accounts
    // for Qt's 32-bit scanline padding.
    cv::Mat tmp(src.height(), src.width(), CV_8UC3, (uchar*)src.bits(), src.bytesPerLine());
    cv::Mat result(src.height(), src.width(), CV_8UC1);
    // NOTE(review): the data is RGB888, but CV_BGR2GRAY weights channels in
    // BGR order, so red and blue are swapped in the luminance computation —
    // confirm whether CV_RGB2GRAY was intended.
    cvtColor(tmp, result, CV_BGR2GRAY, 1);
    return result;
}
void ZBarReaderTest::decodeIterative() { mTotalRead = 0; int totalFiles = mFiles.size(); mLogFile.setFileName(QString("zbar-reader-report-%1-%2-force-decode.txt").arg(totalFiles).arg(QDir(mInputDir).dirName())); if (!mLogFile.open(QIODevice::WriteOnly | QIODevice::Text | QIODevice::Truncate)) { QMessageBox::critical(this, tr("Error"), tr("Unable to open log file")); return; } QTextStream stream(&mLogFile); QVector<QRgb>colorTable; for (int x = 0; x < 256; x++) colorTable << qRgb(x,x,x); QTime timer; timer.start(); for (int x = 0; x < totalFiles; x++) { QString line = QString::number(x + 1) + " / " + QString::number(totalFiles) + " : " + mFiles[x].split("/").takeLast(); QImage img; if (!readFromFileCheck->isChecked()) img = QImage(mInputDir + "/" + mFiles[x]); else img = QImage(mFiles[x]); if (img.isNull()) { line += " | Invalid image"; continue; } QImage tmp = img.convertToFormat(QImage::Format_Indexed8, colorTable); *future = QtConcurrent::run(this, &ZBarReaderTest::decodeIterative, tmp); watcher->setFuture(*future); QString result = future->result(); if (result.isEmpty()) line += " | Unable to decode"; else line += result; // qDebug () << line.split("/").takeLast(); stream << line << endl; resultOutputEdit->append(line); resultOutputEdit->textCursor ().movePosition (QTextCursor::End); qApp->processEvents(); } int time = (double)timer.elapsed() / 1000.; QString res = QString("Total read: %1\nTime elapsed: %2 s.\n").arg(mTotalRead).arg(time); // qDebug () << res; stream << res; mLogFile.close(); resultOutputEdit->append(res); }
// Builds the backing QVolatileImage for this pixmap from `image`.
// Bitmap-type pixmaps are stored as MonoLSB; others use the ideal format
// for `flags`, converting in place when `inPlace` allows to avoid a copy.
// Refreshes cached size/alpha/depth state and drops any existing GL
// texture so it is recreated on next use.
void QGLPixmapData::createPixmapForImage(QImage &image, Qt::ImageConversionFlags flags, bool inPlace)
{
    if (image.size() == QSize(w, h))
        setSerialNumber(++qt_gl_pixmap_serial);

    resize(image.width(), image.height());

    if (pixelType() == BitmapType) {
        QImage convertedImage = image.convertToFormat(QImage::Format_MonoLSB);
        // convertToFormat won't copy when the format already matches, so
        // force a deep copy to keep the stored image independent of `image`.
        if (image.format() == QImage::Format_MonoLSB)
            convertedImage.detach();

        m_source = QVolatileImage(convertedImage);
    } else {
        QImage::Format format = idealFormat(image, flags);

        if (inPlace && image.data_ptr()->convertInPlace(format, flags)) {
            m_source = QVolatileImage(image);
        } else {
            QImage convertedImage = image.convertToFormat(format);

            // convertToFormat won't detach the image if format stays the same.
            if (image.format() == format)
                convertedImage.detach();

            m_source = QVolatileImage(convertedImage);
        }
    }

    m_dirty = true;
    m_hasFillColor = false;
    m_hasAlpha = m_source.hasAlphaChannel();
    w = image.width();
    h = image.height();
    is_null = (w <= 0 || h <= 0);
    d = m_source.depth();

    destroyTexture();
}
/*!
 * Adds an image to the pdf and return the pdf-object id. Returns -1 if adding the image failed.
 */
int QPdfEnginePrivate::addImage(const QImage &img, bool *bitmap, qint64 serial_no)
{
    if (img.isNull())
        return -1;

    // Reuse a previously written PDF image object for the same serial number.
    int object = imageCache.value(serial_no);
    if(object)
        return object;

    QImage image = img;
    QImage::Format format = image.format();
    // A 1-bit image with an exact black/white palette can be embedded as a
    // PDF bitmap; anything else is flattened to 32 bits.
    if (image.depth() == 1 && *bitmap && img.colorTable().size() == 2
        && img.colorTable().at(0) == QColor(Qt::black).rgba()
        && img.colorTable().at(1) == QColor(Qt::white).rgba())
    {
        if (format == QImage::Format_MonoLSB)
            image = image.convertToFormat(QImage::Format_Mono);
        format = QImage::Format_Mono;
    } else {
        // Not a pure black/white bitmap: tell the caller and use ARGB32.
        *bitmap = false;
        if (format != QImage::Format_RGB32 && format != QImage::Format_ARGB32) {
            image = image.convertToFormat(QImage::Format_ARGB32);
            format = QImage::Format_ARGB32;
        }
    }

    int w = image.width();
    int h = image.height();
    int d = image.depth();

    if (format == QImage::Format_Mono) {
        // Copy the monochrome scanlines into a tightly packed buffer,
        // one byte-aligned row at a time.
        int bytesPerLine = (w + 7) >> 3;
        QByteArray data;
        data.resize(bytesPerLine * h);
        char *rawdata = data.data();
        for (int y = 0; y < h; ++y) {
            memcpy(rawdata, image.scanLine(y), bytesPerLine);
            rawdata += bytesPerLine;
        }
        object = writeImage(data, w, h, d, 0, 0);
    } else {
// Replaces the cursor texture with one built from `image` (uploaded as
// ARGB32), releasing any previous GL texture first, then lets the base
// class record the new cursor shape and hotspot.
void QAhiGLCursor::set(const QImage &image, int hotx, int hoty)
{
    if (texture)
        glDeleteTextures(1, &texture);

    texture = image.isNull()
            ? 0
            : createTexture(image.convertToFormat(QImage::Format_ARGB32));

    QScreenCursor::set(image, hotx, hoty);
}
// Saves the album cover image downloaded in `reply` next to the album's
// files. The reply URL is matched against the album's known cover URLs to
// derive the file name (optionally renamed via createCoverName()).
void SearchDialog::saveCoverFromReply(QNetworkReply* reply){
    //image to be saved must exist in albumLineEdit.images
    QByteArray data = reply->readAll();
    QImage p;
    p.loadFromData(data);
    // BUG FIX: QImage::convertToFormat() returns the converted image and
    // leaves the original untouched; the return value was previously
    // discarded, making the conversion a no-op. Assign it back.
    p = p.convertToFormat(QImage::Format_RGB32);
    reply->deleteLater();
    int aind = searchResults->currentRow();
    if(aind==-1){
        return;
    }
    Album album = albums_[searchResults->currentItem()->data(Qt::UserRole).toString()];
    QList<QUrl> images = album.images();
    QString url = reply->url().toString();
    // Find which of the album's cover URLs this reply corresponds to.
    int cInd=-1;
    for(int i=0;i<images.size();i++){
        if(url==images[i].toString()){
            cInd=i;
            break;
        }
    }
    if(cInd==-1){
        return;
    }
    // Derive the target file name from the URL's last path component.
    QString name = images[cInd].toString();
    int ind = name.lastIndexOf("/");
    if(ind==-1){
        ind = name.lastIndexOf("\\");
    }
    name = name.remove(0,ind+1);
    if(coverFormatCheckBox->isChecked()){
        int suffInd = name.lastIndexOf(".");
        QString ext = name.right(name.size()-suffInd);
        name = createCoverName( cInd+1, ext );
    }
    // Save into the directory of the first item, with the configured quality.
    QString path = items_[0]->fileInfo().absoluteDir().absolutePath();
    name = path+"/"+name;
    int quality = settings->value("SearchDialog/coverQuality",-1).toInt();
    bool ok = p.save( name, 0, quality );
    if(!ok){
        QMessageBox::critical(this, "Could not save cover", "Could not save cover "+url+" as "+name,
                              QMessageBox::Ok, QMessageBox::Ok);
    }else{
        qDebug()<<"saved"<<name;
    }
}
// Attempts to decode the raw bytes in `ba` as an image. On success the
// picture is handed (premultiplied ARGB32) to the QImage overload and its
// result returned; otherwise the viewer is marked invalid and false returned.
bool imageViewer::openImage(QByteArray *ba)
{
    QImage decoded;
    QBuffer buffer(ba);
    buffer.open(QIODevice::ReadOnly);

    if (!decoded.load(&buffer, NULL)) {
        validImage = false;
        return false;
    }
    return openImage(decoded.convertToFormat(QImage::Format_ARGB32_Premultiplied));
}
// Produces the initial canvas command stream for m_filename.
// ".ora" files go through the OpenRaster loader (errors and warnings are
// stored in m_error / m_warning); any other file is read with
// QImageReader, creating one layer per frame for animated images.
QList<MessagePtr> ImageCanvasLoader::loadInitCommands()
{
    if(m_filename.endsWith(".ora", Qt::CaseInsensitive)) {
        // Load OpenRaster image
        // TODO identify by filetype magic?
        openraster::OraResult ora = openraster::loadOpenRaster(m_filename);

        if(!ora.error.isEmpty()) {
            m_error = ora.error;
            return QList<MessagePtr>();
        }

        // Collect human-readable warnings about unsupported ORA features.
        if(ora.warnings != openraster::OraResult::NO_WARNINGS) {
            QString text = QGuiApplication::tr("Drawpile does not support all the features used in this OpenRaster file. Saving this file may result in data loss.\n");
            if((ora.warnings & openraster::OraResult::ORA_EXTENDED))
                text += "\n- " + QGuiApplication::tr("Application specific extensions are used");
            if((ora.warnings & openraster::OraResult::ORA_NESTED))
                text += "\n- " + QGuiApplication::tr("Nested layers are not fully supported.");
            m_warning = text;
        }
        return ora.commands;

    } else {
        // Load an image using Qt's image loader.
        // If the image is animated, each frame is loaded as a layer
        QList<MessagePtr> msgs;
        QImageReader ir(m_filename);
        int layerId = 1;

        while(true) {
            QImage image = ir.read();

            if(image.isNull()) {
                // Failure after the first frame means the animation ended;
                // failure on the first frame is a real load error.
                if(layerId>1)
                    break;
                m_error = ir.errorString();
                return QList<MessagePtr>();
            }

            // The first frame determines the canvas size.
            if(layerId==1) {
                msgs << MessagePtr(new protocol::CanvasResize(1, 0, image.size().width(), image.size().height(), 0));
            }

            image = image.convertToFormat(QImage::Format_ARGB32);
            msgs << MessagePtr(new protocol::LayerCreate(1, layerId, 0, 0, 0, QStringLiteral("Layer %1").arg(layerId)));
            msgs << net::command::putQImage(1, layerId, 0, 0, image, paintcore::BlendMode::MODE_REPLACE);

            ++layerId;
        }

        return msgs;
    }
}
// Analyzes srcImage's alpha channel. Sets `validAlpha` when any pixel is
// not fully opaque, and `alphaAsMask` when the alpha channel is mostly
// binary (less than 5% translucent pixels). Returns the image as ARGB32
// when alpha is meaningful, otherwise converted to RGB888.
const QImage TextureUsage::process2DImageColor(const QImage& srcImage, bool& validAlpha, bool& alphaAsMask)
{
    QImage image = srcImage;
    validAlpha = false;
    alphaAsMask = true;
    const uint8 OPAQUE_ALPHA = 255;
    const uint8 TRANSPARENT_ALPHA = 0;
    if (image.hasAlphaChannel()) {
        std::map<uint8, uint32> alphaHistogram;

        if (image.format() != QImage::Format_ARGB32) {
            image = image.convertToFormat(QImage::Format_ARGB32);
        }

        // Actual alpha channel? create the histogram
        for (int y = 0; y < image.height(); ++y) {
            const QRgb* data = reinterpret_cast<const QRgb*>(image.constScanLine(y));
            for (int x = 0; x < image.width(); ++x) {
                auto alpha = qAlpha(data[x]);
                alphaHistogram[alpha] ++;
                validAlpha = validAlpha || (alpha != OPAQUE_ALPHA);
            }
        }

        // If alpha was meaningfull refine
        if (validAlpha && (alphaHistogram.size() > 1)) {
            auto totalNumPixels = image.height() * image.width();
            auto numOpaques = alphaHistogram[OPAQUE_ALPHA];
            auto numTransparents = alphaHistogram[TRANSPARENT_ALPHA];
            auto numTranslucents = totalNumPixels - numOpaques - numTransparents;

            // Treat the alpha channel as a mask when translucency is negligible.
            alphaAsMask = ((numTranslucents / (double)totalNumPixels) < 0.05);
        }
    }

    // Opaque images are flattened to 24-bit RGB.
    if (!validAlpha && image.format() != QImage::Format_RGB888) {
        image = image.convertToFormat(QImage::Format_RGB888);
    }

    return image;
}
// Loads the photo at `path`, crops a proportional 160x160 region anchored
// at the image's top-right corner, tints it as a translucent monochrome
// version of the widget's foreground color (the gray value becomes the
// opacity), paints a "glass" highlight across it and installs the result
// as the contact photo pixmap. Returns false only when loading fails.
bool BE::Contacts::setPhoto(const QString &path)
{
    QRect r(0,0,160,160);
    QImage img = QImage( path );
    if (img.isNull())
        return false;

    // Scale the target rect so it covers the same proportion of the source
    // image, anchored top-right, then crop and scale down to 160x160.
    const float f = qMin( float(img.width())/float(r.width()),
                          float(img.height())/float(r.height()) );
    r.setSize( r.size()*f );
    r.moveTopRight( img.rect().topRight() );
    img = img.copy(r).scaled( QSize(160,160), Qt::KeepAspectRatioByExpanding, Qt::SmoothTransformation);

    img = img.convertToFormat(QImage::Format_ARGB32);
    // NOTE(review): the condition below is commented out but its brace
    // block is live, so the tinting always runs — confirm that is intended.
//     if ( !MPC::setting("rgbCover").toBool() )
    {
        int r,g,b;
        palette().color(foregroundRole()).getRgb(&r,&g,&b);
        int n = img.width() * img.height();
        const uchar *bits = img.bits();
        QRgb *pixel = (QRgb*)(const_cast<uchar*>(bits));

        // this creates a (slightly) translucent monochromactic version of the
        // image using the foreground color
        // the gray value is turned into the opacity
#define ALPHA qAlpha(pixel[i])
#define GRAY qGray(pixel[i])
#define OPACITY 224
        if ( qMax( qMax(r,g), b ) > 128 ) // value > 50%, bright foreground
            for (int i = 0; i < n; ++i)
                pixel[i] = qRgba( r,g,b, ( ALPHA * ( (OPACITY*GRAY) / 255 ) ) / 255 );
        else // inverse
            for (int i = 0; i < n; ++i)
                pixel[i] = qRgba( r,g,b, ( ALPHA * ( (OPACITY*(255-GRAY)) / 255 ) ) / 255 );
    }
#if 1
    // Paint a translucent white "glass" reflection across the upper area.
    QPainterPath glasPath;
    glasPath.moveTo( img.rect().topLeft() );
    glasPath.lineTo( img.rect().topRight() );
    glasPath.quadTo( img.rect().center()/2, img.rect().bottomLeft() );

    QPainter p( &img );
    p.setRenderHint( QPainter::Antialiasing );
    p.setPen( Qt::NoPen );
    p.setBrush( QColor(255,255,255,64) );
    p.drawPath(glasPath);
    p.end();
#endif

    m_ui2->photo->setPixmap( QPixmap::fromImage( img ) );
    return true;
}
// Scales `pm` to w x h and centers the result on a transparent 20x20 canvas.
static QPixmap scalePixmap( const QPixmap& pm, int w, int h )
{
    QImage scaled = pm.toImage().scaled(w, h, Qt::IgnoreAspectRatio, Qt::SmoothTransformation);
    const bool is32Bit = scaled.format() == QImage::Format_ARGB32_Premultiplied
                      || scaled.format() == QImage::Format_ARGB32;
    if (!is32Bit)
        scaled = scaled.convertToFormat(QImage::Format_ARGB32_Premultiplied);

    QImage canvas(20, 20, QImage::Format_ARGB32_Premultiplied);
    QPainter painter(&canvas);
    painter.setCompositionMode(QPainter::CompositionMode_Source);
    painter.fillRect(canvas.rect(), Qt::transparent);
    painter.drawImage((20 - w) / 2, (20 - h) / 2, scaled, 0, 0, w, h);
    return QPixmap::fromImage(canvas);
}
// Uploads this surface buffer to the currently bound GL texture.
// Shared-memory buffers are uploaded with glTexImage2D as RGBA or RGB
// (the QImage is converted first when its format does not match); other
// buffer types are delegated to the compositor's client buffer integration.
void SurfaceBuffer::bindToTexture() const
{
    Q_ASSERT(m_compositor);
    if (isSharedMemory()) {
        QImage image = this->image();
        if (image.hasAlphaChannel()) {
            if (image.format() != QImage::Format_RGBA8888) {
                image = image.convertToFormat(QImage::Format_RGBA8888);
            }
            glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, image.width(), image.height(), 0, GL_RGBA, GL_UNSIGNED_BYTE, image.constBits());
        } else {
            // Opaque buffers go up as RGB with an ignored padding byte.
            if (image.format() != QImage::Format_RGBX8888) {
                image = image.convertToFormat(QImage::Format_RGBX8888);
            }
            glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, image.width(), image.height(), 0, GL_RGB, GL_UNSIGNED_BYTE, image.constBits());
        }
    } else {
        // Hardware (client) buffers are bound by the platform integration.
        if (QtWayland::ClientBufferIntegration *clientInt = QWaylandCompositorPrivate::get(m_compositor)->clientBufferIntegration()) {
            clientInt->bindTextureToBuffer(m_buffer);
        }
    }
}
// Treats the color of the top-left pixel as the "transparent" key color:
// every pixel equal to it is cleared to fully transparent in the returned
// premultiplied-ARGB32 copy. Null images are returned unchanged.
const QImage toAlpha(const QImage &image)
{
    if (image.isNull())
        return image;

    const QRgb keyColor = image.pixel(0, 0);
    QImage result = image.convertToFormat(QImage::Format_ARGB32_Premultiplied);
    QRgb *pixels = reinterpret_cast<QRgb *>(result.bits());
    const int pixelCount = image.width() * image.height();
    for (int i = 0; i < pixelCount; ++i) {
        if (pixels[i] == keyColor)
            pixels[i] = 0;
    }
    return result;
}
// Renders the glyph with the RGB-mask cache margin and returns it as a
// 32-bit image, converting only when the mask is not already 32bpp.
QImage QWindowsFontEngineDirectWrite::alphaRGBMapForGlyph(glyph_t t,
                                                          QFixed subPixelPosition,
                                                          const QTransform &xform)
{
    const QImage mask = imageForGlyph(t, subPixelPosition,
                                      glyphMargin(QFontEngineGlyphCache::Raster_RGBMask),
                                      xform);
    if (mask.depth() == 32)
        return mask;
    return mask.convertToFormat(QImage::Format_RGB32);
}
// Starts an asynchronous 800x480 grab of `item` (result delivered via the
// ready() signal) and, independently, grabs the whole window synchronously,
// converts it to RGB666 and saves it as "window.bmp".
void GuiManager::grabImg(QQuickItem *item)
{
    qDebug()<<qobject_cast<QQuickItem*>(item);
    QSize s(800, 480);
    gr = item->grabToImage(s);
    qDebug()<<gr.data();
    // NOTE(review): this connects to a zero-argument grabImg() slot —
    // confirm such an overload exists, otherwise the connect fails at runtime.
    connect( gr.data(), SIGNAL( ready() ), this, SLOT(grabImg() ), Qt::DirectConnection );
    // Using view can grab the whole picture
    // Format_Grayscale8
    // NOTE(review): the comment above mentions Grayscale8, but the code
    // converts to RGB666 — confirm which format is intended.
    QImage img = mpView->grabWindow();
    QImage copy = img.convertToFormat(QImage::Format_RGB666, Qt::AutoColor);
    copy.save("window.bmp");
}
// Returns a grayscale copy of `image`: each pixel's RGB channels are
// replaced by its luminance (qGray) while the alpha channel is preserved.
QImage toGray(const QImage& image)
{
    QImage gray = image.convertToFormat(QImage::Format_ARGB32);
    const int height = gray.height();
    const int width = gray.width();
    for (int row = 0; row < height; ++row) {
        QRgb *line = (QRgb*)gray.scanLine(row);
        for (int col = 0; col < width; ++col) {
            const QRgb px = line[col];
            const int luma = qGray(px);
            line[col] = qRgba(luma, luma, luma, qAlpha(px));
        }
    }
    return gray;
}
// Blits `image` to the screen surface for the rectangles in `reg` using
// the AHI draw API. Images in a pixel format AHI cannot handle are first
// converted to the screen's format (ARGB32 as a last resort) and the
// call recurses once with the converted image.
void QAhiScreen::blit(const QImage &image, const QPoint &topLeft,
                      const QRegion &reg)
{
    AhiPixelFormat_t pixFmt = pixelFormatForImageFormat(image.format());

    if (pixFmt >= AhiPixelFormatMax) { // generic fallback
        QImage::Format toFormat = pixelFormat();
        if (toFormat == QImage::Format_Invalid)
            toFormat = QImage::Format_ARGB32;
        blit(image.convertToFormat(toFormat), topLeft, reg);
        return;
    }

    AhiSts_t status;

    status = AhiDrawSurfDstSet(d_ptr->context, d_ptr->surface, 0);
    if (status != AhiStsOk) {
        qWarning("QAhiScreen::blit(): AhiDrawSurfDstSet failed: %x",
                 status);
        return;
    }

    // Clip to the visible screen region and build per-rectangle source
    // points and destination rects for the AHI blit calls.
    const QVector<QRect> rects = (reg & region()).rects();
    const int numRects = rects.size();
    QVarLengthArray<AhiPoint_t, 8> src(numRects);
    QVarLengthArray<AhiRect_t, 8> dest(numRects);

    for (int i = 0; i < numRects; ++i) {
        const QRect rect = rects.at(i);

        src[i].x = rect.x() - topLeft.x();
        src[i].y = rect.y() - topLeft.y();
        dest[i].left = rect.left();
        dest[i].top = rect.top();
        dest[i].right = rect.x() + rect.width();
        dest[i].bottom = rect.y() + rect.height();
    }

    // Wrap the QImage pixel data in an AHI bitmap descriptor (no copy).
    AhiSize_t bitmapSize = { image.width(), image.height() };
    AhiBitmap_t bitmap = { bitmapSize, (void*)(image.bits()),
                           image.bytesPerLine(), pixFmt };

    status = AhiDrawRopSet(d_ptr->context, AHIMAKEROP3(AHIROPSRCCOPY));
    if (status != AhiStsOk) {
        qWarning("QAhiScreen::blit(): AhiDrawRopSet failed: %x", status);
        return;
    }

    for (int i = 0; i < numRects; ++i) {
        status = AhiDrawBitmapBlt(d_ptr->context, &dest[i], &src[i],
                                  &bitmap, 0, 0);
        if (status != AhiStsOk) {
            qWarning("QAhiScreen::blit(): AhiDrawBitmapBlt failed: %x",
                     status);
            break;
        }
    }
}