Example #1
/// Returns a texture version of an image file
gpu::TexturePointer TextureCache::getImageTexture(const QString& path) {
    QImage image = QImage(path).mirrored(false, true);
    gpu::Element formatGPU = gpu::Element(gpu::VEC3, gpu::UINT8, gpu::RGB);
    gpu::Element formatMip = gpu::Element(gpu::VEC3, gpu::UINT8, gpu::RGB);
    if (image.hasAlphaChannel()) {
        formatGPU = gpu::Element(gpu::VEC4, gpu::UINT8, gpu::RGBA);
        formatMip = gpu::Element(gpu::VEC4, gpu::UINT8, gpu::BGRA);
    }
    gpu::TexturePointer texture = gpu::TexturePointer(
        gpu::Texture::create2D(formatGPU, image.width(), image.height(), 
            gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_MIP_LINEAR)));
    texture->assignStoredMip(0, formatMip, image.byteCount(), image.constBits());
    texture->autoGenerateMips(-1);
    return texture;
}
Example #2
color_image_t* SimilarityContent::makeGistImage(const VImage* vim) {
    // Convert to a color_image_t
    QImage small = vim->getQImage()->scaled(GIST_SIZE, GIST_SIZE);
    const uchar* data = small.constBits();

    int width = small.width(), height = small.height();
    int area = width*height;
    color_image_t *im = color_image_new(width, height);
    for(int i=0, i3=0; i<area; ++i, i3+=3) {
        im->c1[i]=data[i3+0];
        im->c2[i]=data[i3+1];
        im->c3[i]=data[i3+2];
    }

    return im;
}
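One caveat for the loop above: it indexes constBits() with a fixed 3-bytes-per-pixel stride, which only holds when the scaled image really is 24-bit RGB and its scanlines carry no padding (QImage aligns scanlines to 32 bits). A defensive variant along these lines (packRGB is a hypothetical helper, not part of SimilarityContent) copies scanline by scanline instead:

#include <QImage>
#include <algorithm>
#include <vector>

// Hypothetical helper: copy a QImage into a tightly packed 8-bit RGB buffer,
// walking constScanLine() so any per-row padding is skipped.
std::vector<uchar> packRGB(const QImage &input)
{
    const QImage rgb = input.convertToFormat(QImage::Format_RGB888);
    std::vector<uchar> out(static_cast<size_t>(rgb.width()) * rgb.height() * 3);

    for (int y = 0; y < rgb.height(); ++y) {
        const uchar *line = rgb.constScanLine(y);        // start of the (possibly padded) row
        std::copy(line, line + rgb.width() * 3,          // copy only the pixel bytes
                  out.begin() + static_cast<size_t>(y) * rgb.width() * 3);
    }
    return out;
}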
Example #3
static bool isEmpty(const QImage &image)
{
    if (image.format() == QImage::Format_RGB32)
        return false;

    Q_ASSERT(image.format() == QImage::Format_ARGB32 ||
             image.format() == QImage::Format_ARGB32_Premultiplied);

    const QRgb *rgb = reinterpret_cast<const QRgb*>(image.constBits());
    const QRgb * const last = rgb + image.byteCount() / 4;

    for (; rgb != last; ++rgb)
        if (qAlpha(*rgb) > 0)
            return false;

    return true;
}
Example #4
SkBitmap Clipboard::ReadImage(ClipboardType type) const
{
    // FIXME: Untested, pasting image data seems to only be supported through
    // FileReader.readAsDataURL in JavaScript and this isn't working down the pipe for some reason.
    const QMimeData *mimeData = QGuiApplication::clipboard()->mimeData(type == CLIPBOARD_TYPE_COPY_PASTE ? QClipboard::Clipboard : QClipboard::Selection);
    QImage image = qvariant_cast<QImage>(mimeData->imageData());

    Q_ASSERT(image.format() == QImage::Format_ARGB32);
    SkBitmap bitmap;
    bitmap.setConfig(SkBitmap::kARGB_8888_Config, image.width(), image.height());
    bitmap.setPixels(const_cast<uchar*>(image.constBits()));

    // Return a deep copy of the pixel data.
    SkBitmap copy;
    bitmap.copyTo(&copy, SkBitmap::kARGB_8888_Config);
    return copy;
}
Example #5
void QFrameConverter::timerEvent(QTimerEvent * ev)
{
    if (ev->timerId() != timer.timerId())
    {
        QObject::timerEvent(ev);
    }
    else
    {
        if (!pauseProcessing)
        {
            frameSource->getFrame(frame);

            if (!frame.empty())
            {
                QImage::Format format(QImage::Format_RGB888);
                switch (frame.channels())
                {
                case 1:
                    format = QImage::Format_Grayscale8;
                    break;
                case 3:
                    format = QImage::Format_RGB888;
                    break;
                default:
                    Q_ASSERT(false);
                }

                const QImage image(frame.data, frame.cols, frame.rows, static_cast<int>(frame.step), format);

                Q_ASSERT(image.constBits() == frame.data);

                emit imageReady(image);
            }
        }
    }

    if (stopTimer)
    {
        timer.stop();
        while(timer.isActive())
        {
            QThread::sleep(10);
        }
    }
}
Example #6
void ImageUtility::getColumn(const QImage &image, uchar *buf,
				    int x, int y, int height)
{
	assert(buf != 0);
	assert(x >= 0 && x < image.width());
	assert(y >= 0 && y + height <= image.height());
	assert(height > 0);

	int bytes = image.depth() / 8;
	const uchar *column = image.constBits() + bytes * (image.width() * y  + x);
	int offset = bytes * image.width();

	for (int i = 0; i != height; ++i) {
		memcpy(buf, column, bytes);
		column += offset;
		buf += bytes;
	}
}
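A minimal usage sketch for getColumn() (readColumn is a hypothetical caller, and it assumes getColumn is reachable as a static helper). Note that getColumn() derives its row stride from image.width(), so it implicitly assumes bytesPerLine() == width() * depth() / 8, i.e. scanlines without padding:

#include <QImage>
#include <vector>

// Hypothetical caller: read one full-height column of raw pixel bytes.
std::vector<uchar> readColumn(const QImage &image, int x)
{
    std::vector<uchar> column(static_cast<size_t>(image.height()) * (image.depth() / 8));
    ImageUtility::getColumn(image, column.data(), x, 0, image.height());
    return column;
}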
Example #7
void FaceDetector::process(cv::Mat frame)
{
    cv::Mat grey_image;
    cv::cvtColor(frame, grey_image, CV_BGR2GRAY);
    cv::equalizeHist(grey_image, grey_image);

    std::vector<cv::Rect> faces;
    // Detect faces no smaller than a quarter of the frame in each dimension
    faceCascade.detectMultiScale(grey_image, faces, 1.1, 2,  0|CV_HAAR_SCALE_IMAGE,
                                 cv::Size(frame.cols/4, frame.rows/4)); // Minimum size of obj
    //-- Draw rectangles around faces
    for( size_t i = 0; i < faces.size(); i++)
    {
        cv::rectangle(frame, faces[i], cv::Scalar( 255, 0, 255 ));
        /*
        cv::Point center( faces[i].x + faces[i].width*0.5,
                  faces[i].y + faces[i].height*0.5);

        ellipse( frame, center,
             cv::Size( faces[i].width*0.5, faces[i].height*0.5 ),
             0, 0, 360, cv::Scalar( 255, 0, 255 ), 4, 8, 0);

        cv::Mat faceROI = frameGray( faces[i] );
        std::vector<cv::Rect> eyes;

        //-- In each face, detect eyes
        eyeCascade.detectMultiScale( faceROI, eyes, 1.1, 2, 0|CV_HAAR_SCALE_IMAGE, cv::Size(30, 30) );

        for( size_t j = 0; j < eyes.size(); j++)
        {
            cv::Point center( faces[i].x + eyes[j].x + eyes[j].width*0.5,
                      faces[i].y + eyes[j].y + eyes[j].height*0.5 );
            int radius = cvRound( (eyes[j].width + eyes[j].height) *0.25);
            circle( frame, center, radius, cv::Scalar( 255, 0, 0 ), 4, 8, 0);
        }
        */

    }
    const QImage image((const unsigned char*)frame.data, frame.cols, frame.rows, frame.step,
                       QImage::Format_RGB888, &matDeleter, new cv::Mat(frame));
    Q_ASSERT(image.constBits() == frame.data);
    // rgbSwapped() returns a copy rather than swapping in place, so emit the
    // swapped copy to present OpenCV's BGR data as RGB.
    emit image_signal(image.rgbSwapped());
}
Example #8
gpu::Texture* TextureUsage::createNormalTextureFromNormalImage(const QImage& srcImage, const std::string& srcImageName) {
    QImage image = srcImage;

    if (image.format() != QImage::Format_RGB888) {
        image = image.convertToFormat(QImage::Format_RGB888);
    }

    gpu::Texture* theTexture = nullptr;
    if ((image.width() > 0) && (image.height() > 0)) {
        
        gpu::Element formatGPU = gpu::Element(gpu::VEC3, gpu::NUINT8, gpu::RGB);
        gpu::Element formatMip = gpu::Element(gpu::VEC3, gpu::NUINT8, gpu::RGB);

        theTexture = (gpu::Texture::create2D(formatGPU, image.width(), image.height(), gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_MIP_LINEAR)));
        theTexture->assignStoredMip(0, formatMip, image.byteCount(), image.constBits());
        generateMips(theTexture, image, formatMip);
    }

    return theTexture;
}
Example #9
bool QEglFSVBPageFlipper::displayBuffer(QPlatformScreenBuffer *buffer)
{
    QImage *frame = static_cast<QImage *>(buffer->handle());

#ifdef QT_EGLFSVB_ENABLE_ROTATION
    QImage rotatedFrame;
    if (!m_transform.isIdentity()) {
        rotatedFrame = frame->transformed(m_transform);
        frame = &rotatedFrame;
    }
#endif // QT_EGLFSVB_ENABLE_ROTATION

    QRect geometry = m_screen->geometry();
    geometry.setTopLeft(QPoint());
    int area = geometry.width() * geometry.height() * 4;
    quint8 *mapped = reinterpret_cast<quint8 *>(mmap(NULL, area, PROT_WRITE, MAP_SHARED, fd, 0));
    if (mapped == MAP_FAILED) {
        qWarning("Unable to map fbdev.\n");
        return false;
    }

    buffer->aboutToBeDisplayed();
    if (frame->width() * 4 != frame->bytesPerLine() || frame->rect() != geometry) {
        int stride = qMin(frame->bytesPerLine(), geometry.width() * 4);
        int height = qMin(frame->height(), geometry.height());
        for (int i = 0; i < height; ++i)
            memcpy(mapped + (stride * i), frame->scanLine(i), stride);
    } else {
        memcpy(mapped, frame->constBits(), area);
    }

    munmap(mapped, area);

    buffer->displayed();

    if (m_buffer)
        m_buffer->release();
    m_buffer = buffer;

    return true;
}
Example #10
    jobject createBitmap(QImage img, JNIEnv *env)
    {
        if (img.format() != QImage::Format_ARGB32 && img.format() != QImage::Format_RGB16)
            img = img.convertToFormat(QImage::Format_ARGB32);

        jobject bitmap = env->CallStaticObjectMethod(m_bitmapClass,
                                                     m_createBitmapMethodID,
                                                     img.width(),
                                                     img.height(),
                                                     img.format() == QImage::Format_ARGB32
                                                        ? m_ARGB_8888_BitmapConfigValue
                                                        : m_RGB_565_BitmapConfigValue);
        if (!bitmap)
            return 0;

        AndroidBitmapInfo info;
        if (AndroidBitmap_getInfo(env, bitmap, &info) < 0) {
            env->DeleteLocalRef(bitmap);
            return 0;
        }

        void *pixels;
        if (AndroidBitmap_lockPixels(env, bitmap, &pixels) < 0) {
            env->DeleteLocalRef(bitmap);
            return 0;
        }

        if (info.stride == uint(img.bytesPerLine())
                && info.width == uint(img.width())
                && info.height == uint(img.height())) {
            memcpy(pixels, img.constBits(), info.stride * info.height);
        } else {
            uchar *bmpPtr = static_cast<uchar *>(pixels);
            const unsigned width = qMin(info.width, (uint)img.width()); //should be the same
            const unsigned height = qMin(info.height, (uint)img.height()); //should be the same
            for (unsigned y = 0; y < height; y++, bmpPtr += info.stride)
                memcpy(bmpPtr, img.constScanLine(y), width);
        }
        AndroidBitmap_unlockPixels(env, bitmap);
        return bitmap;
    }
Example #11
Ogre::TexturePtr textureFromImage(const QImage &image,
                                  const std::string &name) {
  ROS_INFO("Loading a %i x %i texture", image.width(), image.height());
  //  convert to 24bit rgb
  QImage converted = image.convertToFormat(QImage::Format_RGB888).mirrored();

  //  create texture
  Ogre::TexturePtr texture;
  Ogre::DataStreamPtr data_stream;
  data_stream.bind(new Ogre::MemoryDataStream((void *)converted.constBits(),
                                              converted.byteCount()));

  const Ogre::String res_group =
      Ogre::ResourceGroupManager::DEFAULT_RESOURCE_GROUP_NAME;
  Ogre::TextureManager &texture_manager = Ogre::TextureManager::getSingleton();
  //  swap byte order when going from QImage to Ogre
  texture = texture_manager.loadRawData(name, res_group, data_stream,
                                        converted.width(), converted.height(),
                                        Ogre::PF_B8G8R8, Ogre::TEX_TYPE_2D, 0);
  return texture;
}
Example #12
static bool isEmpty(const QImage &image)
{
    if (image.format() == QImage::Format_RGB32)
        return false;

    Q_ASSERT(image.format() == QImage::Format_ARGB32 ||
             image.format() == QImage::Format_ARGB32_Premultiplied);

    const QRgb *rgb = reinterpret_cast<const QRgb*>(image.constBits());
#if QT_VERSION < QT_VERSION_CHECK(5, 10, 0)
    const QRgb * const last = rgb + image.byteCount() / 4;
#else
    const QRgb * const last = rgb + image.sizeInBytes() / 4;
#endif

    for (; rgb != last; ++rgb)
        if (qAlpha(*rgb) > 0)
            return false;

    return true;
}
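The version guard above exists because QImage::byteCount() was deprecated in favour of QImage::sizeInBytes() in Qt 5.10. When several call sites need the byte count, a small wrapper along these lines (imageByteCount is a hypothetical helper, not part of the snippet's project) keeps the #if in one place:

#include <QImage>
#include <QtGlobal>

// Hypothetical helper: size of the pixel data in bytes, across Qt versions.
inline qint64 imageByteCount(const QImage &image)
{
#if QT_VERSION < QT_VERSION_CHECK(5, 10, 0)
    return image.byteCount();       // deprecated since Qt 5.10
#else
    return image.sizeInBytes();     // returns qsizetype, added in Qt 5.10
#endif
}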
Example #13
void getGrayscaleImageMinMax(const QImage image, double *pmin, double *pmax, bool rescaleRange)
{
    double min = 0;
    double max = 1;

    if (image.format() != QImage::Format_Indexed8) {
        LWARNINGF("Image not grayscale; cannot compute min/max; returning default;");
    } else {
        const cv::Mat tmp(image.height(), image.width(), CV_8UC1, const_cast<uchar*>(image.constBits()), image.bytesPerLine());
        cv::minMaxLoc(tmp, &min, &max);
        if (rescaleRange) {
            min /= 255.;
            max /= 255.;
        }
    }

    if (pmin != 0) {
        *pmin = min;
    }
    if (pmax != 0) {
        *pmax = max;
    }
}
Example #14
const QImage TextureUsage::process2DImageColor(const QImage& srcImage, bool& validAlpha, bool& alphaAsMask) {
    QImage image = srcImage;
    validAlpha = false;
    alphaAsMask = true;
    const uint8 OPAQUE_ALPHA = 255;
    const uint8 TRANSPARENT_ALPHA = 0;
    if (image.hasAlphaChannel()) {
        if (image.format() != QImage::Format_ARGB32) {
            image = image.convertToFormat(QImage::Format_ARGB32);
        }

        // Figure out if we can use a mask for alpha or not
        int numOpaques = 0;
        int numTranslucents = 0;
        const int NUM_PIXELS = image.width() * image.height();
        const int MAX_TRANSLUCENT_PIXELS_FOR_ALPHAMASK = (int)(0.05f * (float)(NUM_PIXELS));
        const QRgb* data = reinterpret_cast<const QRgb*>(image.constBits());
        for (int i = 0; i < NUM_PIXELS; ++i) {
            auto alpha = qAlpha(data[i]);
            if (alpha == OPAQUE_ALPHA) {
                numOpaques++;
            } else if (alpha != TRANSPARENT_ALPHA) {
                if (++numTranslucents > MAX_TRANSLUCENT_PIXELS_FOR_ALPHAMASK) {
                    alphaAsMask = false;
                    break;
                }
            }
        }
        validAlpha = (numOpaques != NUM_PIXELS);
    }

    if (!validAlpha && image.format() != QImage::Format_RGB888) {
        image = image.convertToFormat(QImage::Format_RGB888);
    }

    return image;
}
Example #15
PremultipliedImage decodeImage(const std::string& string) {
    const uint8_t* data = reinterpret_cast<const uint8_t*>(string.data());
    const size_t size = string.size();

#if !defined(QT_IMAGE_DECODERS)
    if (size >= 12) {
        uint32_t riff_magic = (data[0] << 24) | (data[1] << 16) | (data[2] << 8) | data[3];
        uint32_t webp_magic = (data[8] << 24) | (data[9] << 16) | (data[10] << 8) | data[11];
        if (riff_magic == 0x52494646 && webp_magic == 0x57454250) {
            return decodeWebP(data, size);
        }
    }

    if (size >= 2) {
        uint16_t magic = ((data[0] << 8) | data[1]) & 0xffff;
        if (magic == 0xFFD8) {
            return decodeJPEG(data, size);
        }
    }
#endif

    QImage image =
        QImage::fromData(data, size)
        .rgbSwapped()
        .convertToFormat(QImage::Format_ARGB32_Premultiplied);

    if (image.isNull()) {
        throw std::runtime_error("Unsupported image type");
    }

    auto img = std::make_unique<uint8_t[]>(image.byteCount());
    memcpy(img.get(), image.constBits(), image.byteCount());

    return { { static_cast<uint32_t>(image.width()), static_cast<uint32_t>(image.height()) },
             std::move(img) };
}
Example #16
gpu::Texture* TextureUsage::createRoughnessTextureFromGlossImage(const QImage& srcImage, const std::string& srcImageName) {
    QImage image = srcImage;
    if (!image.hasAlphaChannel()) {
        if (image.format() != QImage::Format_RGB888) {
            image = image.convertToFormat(QImage::Format_RGB888);
        }
    } else {
        if (image.format() != QImage::Format_ARGB32) {
            image = image.convertToFormat(QImage::Format_ARGB32);
        }
    }

    // Gloss turned into Rough
    image.invertPixels(QImage::InvertRgba);
    
    image = image.convertToFormat(QImage::Format_Grayscale8);
    
    gpu::Texture* theTexture = nullptr;
    if ((image.width() > 0) && (image.height() > 0)) {
        
#ifdef COMPRESS_TEXTURES
        gpu::Element formatGPU = gpu::Element(gpu::SCALAR, gpu::NUINT8, gpu::COMPRESSED_R);
#else
        gpu::Element formatGPU = gpu::Element(gpu::SCALAR, gpu::NUINT8, gpu::RGB);
#endif
        gpu::Element formatMip = gpu::Element(gpu::SCALAR, gpu::NUINT8, gpu::RGB);

        theTexture = (gpu::Texture::create2D(formatGPU, image.width(), image.height(), gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_MIP_LINEAR)));
        theTexture->assignStoredMip(0, formatMip, image.byteCount(), image.constBits());
        generateMips(theTexture, image, formatMip);
        
        // FIXME queue for transfer to GPU and block on completion
    }
    
    return theTexture;
}
Example #17
gpu::Texture* TextureUsage::create2DTextureFromImage(const QImage& srcImage, const std::string& srcImageName) {
    QImage image = srcImage;
 
    int imageArea = image.width() * image.height();
    
    int opaquePixels = 0;
    int translucentPixels = 0;
    //bool isTransparent = false;
    int redTotal = 0, greenTotal = 0, blueTotal = 0, alphaTotal = 0;
    const int EIGHT_BIT_MAXIMUM = 255;
    QColor averageColor(EIGHT_BIT_MAXIMUM, EIGHT_BIT_MAXIMUM, EIGHT_BIT_MAXIMUM);
    
    if (!image.hasAlphaChannel()) {
        if (image.format() != QImage::Format_RGB888) {
            image = image.convertToFormat(QImage::Format_RGB888);
        }
        // int redTotal = 0, greenTotal = 0, blueTotal = 0;
        for (int y = 0; y < image.height(); y++) {
            for (int x = 0; x < image.width(); x++) {
                QRgb rgb = image.pixel(x, y);
                redTotal += qRed(rgb);
                greenTotal += qGreen(rgb);
                blueTotal += qBlue(rgb);
            }
        }
        if (imageArea > 0) {
            averageColor.setRgb(redTotal / imageArea, greenTotal / imageArea, blueTotal / imageArea);
        }
    } else {
        if (image.format() != QImage::Format_ARGB32) {
            image = image.convertToFormat(QImage::Format_ARGB32);
        }
        
        // check for translucency/false transparency
        // int opaquePixels = 0;
        // int translucentPixels = 0;
        // int redTotal = 0, greenTotal = 0, blueTotal = 0, alphaTotal = 0;
        for (int y = 0; y < image.height(); y++) {
            for (int x = 0; x < image.width(); x++) {
                QRgb rgb = image.pixel(x, y);
                redTotal += qRed(rgb);
                greenTotal += qGreen(rgb);
                blueTotal += qBlue(rgb);
                int alpha = qAlpha(rgb);
                alphaTotal += alpha;
                if (alpha == EIGHT_BIT_MAXIMUM) {
                    opaquePixels++;
                } else if (alpha != 0) {
                    translucentPixels++;
                }
            }
        }
        if (opaquePixels == imageArea) {
            qCDebug(modelLog) << "Image with alpha channel is completely opaque:" << QString(srcImageName.c_str());
            image = image.convertToFormat(QImage::Format_RGB888);
        }
        
        averageColor = QColor(redTotal / imageArea,
                              greenTotal / imageArea, blueTotal / imageArea, alphaTotal / imageArea);
        
        //isTransparent = (translucentPixels >= imageArea / 2);
    }
    
    gpu::Texture* theTexture = nullptr;
    if ((image.width() > 0) && (image.height() > 0)) {
        
        // bool isLinearRGB = true; //(_type == NORMAL_TEXTURE) || (_type == EMISSIVE_TEXTURE);
        bool isLinearRGB = true; //(_type == NORMAL_TEXTURE) || (_type == EMISSIVE_TEXTURE);
        
        gpu::Element formatGPU = gpu::Element(gpu::VEC3, gpu::UINT8, (isLinearRGB ? gpu::RGB : gpu::SRGB));
        gpu::Element formatMip = gpu::Element(gpu::VEC3, gpu::UINT8, (isLinearRGB ? gpu::RGB : gpu::SRGB));
        if (image.hasAlphaChannel()) {
            formatGPU = gpu::Element(gpu::VEC4, gpu::UINT8, (isLinearRGB ? gpu::RGBA : gpu::SRGBA));
            formatMip = gpu::Element(gpu::VEC4, gpu::UINT8, (isLinearRGB ? gpu::BGRA : gpu::SBGRA));
        }
        

        theTexture = (gpu::Texture::create2D(formatGPU, image.width(), image.height(), gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_MIP_LINEAR)));
        theTexture->assignStoredMip(0, formatMip, image.byteCount(), image.constBits());
        theTexture->autoGenerateMips(-1);
    }
    
    return theTexture;
}
Example #18
const unsigned char *imageConstBits(QImage_ *image)
{
    QImage *qimage = reinterpret_cast<QImage *>(image);
    return qimage->constBits();
}
Example #19
QImage MorphologyEffect::processImage(const QImage &image, const KoFilterEffectRenderContext &context) const
{
    QImage result = image;

    QPointF radius = context.toUserSpace(m_radius);

    const int rx = static_cast<int>(ceil(radius.x()));
    const int ry = static_cast<int>(ceil(radius.y()));

    const int w = result.width();
    const int h = result.height();

    // setup mask
    const int maskSize = (1+2*rx)*(1+2*ry);
    int * mask = new int[maskSize];
    int index = 0;
    for (int y = -ry; y <= ry; ++y) {
        for (int x = -rx; x <= rx; ++x) {
            mask[index] = y*w+x;
            index++;
        }
    }

    int dstPixel, srcPixel;
    uchar s0, s1, s2, s3;
#if QT_VERSION >= 0x040700
    const uchar * src = image.constBits();
#else
    const uchar * src = image.bits();
#endif
    uchar * dst = result.bits();

    const QRect roi = context.filterRegion().toRect();
    const int minX = qMax(rx, roi.left());
    const int maxX = qMin(w-rx, roi.right());
    const int minY = qMax(ry, roi.top());
    const int maxY = qMin(h-ry, roi.bottom());
    const int defValue = m_operator == Erode ? 255 : 0;

    uchar * d = 0;

    for (int row = minY; row < maxY; ++row) {
        for (int col = minX; col < maxX; ++col) {
            dstPixel = row * w + col;
            s0 = s1 = s2 = s3 = defValue;
            for (int i = 0; i < maskSize; ++i) {
                srcPixel = dstPixel+mask[i];
                const uchar *s = &src[4*srcPixel];
                if (m_operator == Erode ) {
                    s0 = qMin(s0, s[0]);
                    s1 = qMin(s1, s[1]);
                    s2 = qMin(s2, s[2]);
                    s3 = qMin(s3, s[3]);
                } else {
                    s0 = qMax(s0, s[0]);
                    s1 = qMax(s1, s[1]);
                    s2 = qMax(s2, s[2]);
                    s3 = qMax(s3, s[3]);
                }
            }
            d = &dst[4*dstPixel];
            d[0] = s0;
            d[1] = s1;
            d[2] = s2;
            d[3] = s3;
        }
    }

    delete [] mask;

    return result;
}
Example #20
void QGLPixmapData::ensureCreated() const
{
    if (!m_dirty)
        return;

    m_dirty = false;

    if (nativeImageHandleProvider && !nativeImageHandle)
        const_cast<QGLPixmapData *>(this)->createFromNativeImageHandleProvider();

    QGLShareContextScope ctx(qt_gl_share_widget()->context());
    m_ctx = ctx;

    const GLenum internal_format = m_hasAlpha ? GL_RGBA : GL_RGB;
#ifdef QT_OPENGL_ES_2
    const GLenum external_format = internal_format;
#else
    const GLenum external_format = qt_gl_preferredTextureFormat();
#endif
    const GLenum target = GL_TEXTURE_2D;

    GLenum type = GL_UNSIGNED_BYTE;
    // Avoid conversion when pixmap is created from CFbsBitmap of EColor64K.
    if (!m_source.isNull() && m_source.format() == QImage::Format_RGB16)
        type = GL_UNSIGNED_SHORT_5_6_5;

    m_texture.options &= ~QGLContext::MemoryManagedBindOption;

    if (!m_texture.id) {
        m_texture.id = QGLTexturePool::instance()->createTextureForPixmap(
                                                                target,
                                                                0, internal_format,
                                                                w, h,
                                                                external_format,
                                                                type,
                                                                const_cast<QGLPixmapData*>(this));
        if (!m_texture.id) {
            failedToAlloc = true;
            return;
        }
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);

        inTexturePool = true;
    } else if (inTexturePool) {
        glBindTexture(target, m_texture.id);
        QGLTexturePool::instance()->useTexture(const_cast<QGLPixmapData*>(this));
    }

    if (!m_source.isNull() && m_texture.id) {
        if (external_format == GL_RGB) {
            m_source.beginDataAccess();
            QImage tx;
            if (type == GL_UNSIGNED_BYTE)
                tx = m_source.imageRef().convertToFormat(QImage::Format_RGB888).mirrored(false, true);
            else if (type == GL_UNSIGNED_SHORT_5_6_5)
                tx = m_source.imageRef().mirrored(false, true);
            m_source.endDataAccess(true);

            glBindTexture(target, m_texture.id);
            if (!tx.isNull())
                glTexSubImage2D(target, 0, 0, 0, w, h, external_format,
                                type, tx.constBits());
            else
                qWarning("QGLPixmapData: Failed to create GL_RGB image of size %dx%d", w, h);
        } else {
            // do byte swizzling ARGB -> RGBA
            m_source.beginDataAccess();
            const QImage tx = ctx->d_func()->convertToGLFormat(m_source.imageRef(), true, external_format);
            m_source.endDataAccess(true);
            glBindTexture(target, m_texture.id);
            if (!tx.isNull())
                glTexSubImage2D(target, 0, 0, 0, w, h, external_format,
                                type, tx.constBits());
            else
                qWarning("QGLPixmapData: Failed to create GL_RGBA image of size %dx%d", w, h);
        }

        if (useFramebufferObjects())
            m_source = QVolatileImage();
    }
}
Example #21
	// copied from openframeworks superfast blur and modified
	void applyTo(QImage &mask, const QColor &color, int radius) {
		if (radius < 1 || mask.isNull())
			return;
		setSize(mask.size());
		setRadius(radius);
		const int w = s.width();
		const int h = s.height();

		uchar *a = valpha.data();
		const uchar *inv = vinv.constData();
		int *min = vmin.data();
		int *max = vmax.data();

		const int xmax = mask.width()-1;
		for (int x=0; x<w; ++x) {
			min[x] = qMin(x + radius + 1, xmax);
			max[x] = qMax(x - radius, 0);
		}

		const uchar *c_bits = mask.constBits()+3;
		uchar *it = a;
		for (int y=0; y<h; ++y, c_bits += (mask.width() << 2)) {
			int sum = 0;
			for(int i=-radius; i<=radius; ++i)
				sum += c_bits[qBound(0, i, xmax) << 2];
			for (int x=0; x<w; ++x, ++it) {
				sum += c_bits[min[x] << 2];
				sum -= c_bits[max[x] << 2];
				*it = inv[sum];
			}
		}

		const int ymax = mask.height()-1;
		for (int y=0; y<h; ++y){
			min[y] = qMin(y + radius + 1, ymax)*w;
			max[y] = qMax(y - radius, 0)*w;
		}

		uchar *bits = mask.bits();
		const double coef = color.alphaF();
		const double r = color.redF()*coef;
		const double g = color.greenF()*coef;
		const double b = color.blueF()*coef;
		const uchar *c_it = a;
		for (int x=0; x<w; ++x, ++c_it){
			int sum = 0;
			int yp = -radius*w;
			for(int i=-radius; i<=radius; ++i, yp += w)
				sum += c_it[qMax(0, yp)];
			uchar *p = bits + (x << 2);
			for (int y=0; y<h; ++y, p += (xmax << 2)){
				const uchar a = inv[sum];
				if (p[3] < 255) {
					*p++ = a*b;
					*p++ = a*g;
					*p++ = a*r;
					*p++ = a*coef;
				} else {
					p += 4;
				}
				sum += c_it[min[y]];
				sum -= c_it[max[y]];
			}
		}
	}
Example #22
// http://qtnode.net/wiki?title=Qt_with_cmake
int main(int argc, char *argv[])
{
	QApplication app(argc, argv);
	
	// Create the world and the viewer
	bool igt(app.arguments().size() > 1);
	QImage gt;
	if (igt)
		gt = QGLWidget::convertToGLFormat(QImage(app.arguments().last()));
	igt = !gt.isNull();
	#if QT_VERSION >= QT_VERSION_CHECK(4,7,0)
	World world(120, Color(0.9, 0.9, 0.9), igt ? World::GroundTexture(gt.width(), gt.height(), (const uint32_t*)gt.constBits()) : World::GroundTexture());
	#else
	World world(120, Color(0.9, 0.9, 0.9), igt ? World::GroundTexture(gt.width(), gt.height(), (uint32_t*)gt.bits()) : World::GroundTexture());
	#endif
	EnkiPlayground viewer(&world);
	
	viewer.show();
	
	return app.exec();
}
Example #23
void BillboardOverlay::render() {
    if (!_visible || !_isLoaded) {
        return;
    }
    
    if (!_billboard.isEmpty()) {
        if (_newTextureNeeded && _billboardTexture) {
            _billboardTexture.reset();
        }
        if (!_billboardTexture) {
            QImage image = QImage::fromData(_billboard);
            if (image.format() != QImage::Format_ARGB32) {
                image = image.convertToFormat(QImage::Format_ARGB32);
            }
            _size = image.size();
            if (_fromImage.x() == -1) {
                _fromImage.setRect(0, 0, _size.width(), _size.height());
            }
            _billboardTexture.reset(new Texture());
            _newTextureNeeded = false;
            glBindTexture(GL_TEXTURE_2D, _billboardTexture->getID());
            glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, _size.width(), _size.height(), 0,
                         GL_BGRA, GL_UNSIGNED_BYTE, image.constBits());
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
            
        } else {
            glBindTexture(GL_TEXTURE_2D, _billboardTexture->getID());
        }
    }
    
    glEnable(GL_ALPHA_TEST);
    glAlphaFunc(GL_GREATER, 0.5f);
    
    glEnable(GL_TEXTURE_2D);
    glDisable(GL_LIGHTING);
    
    glPushMatrix(); {
        glTranslatef(_position.x, _position.y, _position.z);
        glm::quat rotation;
        if (_isFacingAvatar) {
            // rotate about vertical to face the camera
            rotation = Application::getInstance()->getCamera()->getRotation();
            rotation *= glm::angleAxis(glm::pi<float>(), glm::vec3(0.0f, 1.0f, 0.0f));
        } else {
            rotation = getRotation();
        }
        glm::vec3 axis = glm::axis(rotation);
        glRotatef(glm::degrees(glm::angle(rotation)), axis.x, axis.y, axis.z);
        glScalef(_scale, _scale, _scale);
        
        if (_billboardTexture) {
            float maxSize = glm::max(_fromImage.width(), _fromImage.height());
            float x = _fromImage.width() / (2.0f * maxSize);
            float y = -_fromImage.height() / (2.0f * maxSize);
            
            const float MAX_COLOR = 255.0f;
            xColor color = getColor();
            float alpha = getAlpha();
            glColor4f(color.red / MAX_COLOR, color.green / MAX_COLOR, color.blue / MAX_COLOR, alpha);
            glBegin(GL_QUADS); {
                glTexCoord2f((float)_fromImage.x() / (float)_size.width(),
                             (float)_fromImage.y() / (float)_size.height());

                glVertex2f(-x, -y);
                glTexCoord2f(((float)_fromImage.x() + (float)_fromImage.width()) / (float)_size.width(),
                             (float)_fromImage.y() / (float)_size.height());
                glVertex2f(x, -y);
                glTexCoord2f(((float)_fromImage.x() + (float)_fromImage.width()) / (float)_size.width(),
                             ((float)_fromImage.y() + (float)_fromImage.height()) / _size.height());
                glVertex2f(x, y);
                glTexCoord2f((float)_fromImage.x() / (float)_size.width(),
                             ((float)_fromImage.y() + (float)_fromImage.height()) / (float)_size.height());
                glVertex2f(-x, y);
            } glEnd();
        }
    } glPopMatrix();
    
    glDisable(GL_TEXTURE_2D);
    glEnable(GL_LIGHTING);
    glDisable(GL_ALPHA_TEST);
    
    glBindTexture(GL_TEXTURE_2D, 0);
}
Example #24
void ImageComparator::Compare( QString const&filepath, QImage const &img )
{
  QSettings sett(SettingsValues::filename(),QSettings::IniFormat);
  sett.beginGroup(SettingsValues::comparatorGroupName());
  QString goldenPath = sett.value(SettingsValues::goldenPathValueName(), SettingsValues::goldenPathDefault()).toString() + "/";
  QString diffPath =  sett.value(SettingsValues::diffPathValueName(), SettingsValues::diffPathDefault()).toString() + "/";  
  sett.endGroup();
  QFileInfo info(filepath);
  QString const samplePath = goldenPath + info.fileName();
  if(!QFile::exists(samplePath))
  {
    QMessageBox::critical(NULL, "Error!", QString("Golden image \"%1\" does not exist!").arg(samplePath));
    return;
  }
  QImage sample(samplePath);
  sample = sample.convertToFormat(QImage::Format_ARGB32_Premultiplied);
  if(sample.isNull())
  {
    QMessageBox::critical(NULL, "Error!", QString("Could not open file \"%1\"!").arg(samplePath));
    return;
  }
  if(sample.size() != img.size())
  {
    QMessageBox::critical(NULL, "Error!", QString("Sample and current images have different sizes!"));
    return;
  }
  unsigned long long accum = 0;
  int sx = sample.width();
  int sy = sample.height();
  int bpl = sample.bytesPerLine();
  QImage diffImg(sample.size(), QImage::Format_ARGB32);
  for (int y = 0; y < sy; ++y) {
    for (int x = 0; x < sx; ++x) 
    {
      for (int c = 0; c < 3; ++c) 
      {
        unsigned idx = y * bpl + (x << 2) + c;
        uchar diff = abs((int)sample.constBits()[idx] - (int)img.constBits()[idx]);
        diffImg.bits()[idx] = diff;
        accum += diff;
      }
      diffImg.bits()[y * bpl + (x << 2) + 3] = sample.bits()[y * bpl + (x << 2) + 3];
    }
  }
  QString const diffName = diffPath + info.fileName();
  QDir diffDir(diffPath);
  if(!diffDir.exists(diffPath))
  {
    if(!QDir(diffPath + "/..").mkpath(diffDir.dirName()))
    {
      QMessageBox::critical(NULL, "Error!", QString("Could not create diff folder \"%1\"!").arg(diffPath));
      return;
    }
  }
  if(!diffImg.save(diffName, info.suffix().toAscii().data()))
  {
    QMessageBox::critical(NULL, "Error!", QString("Could not save the difference image \"%1\"!").arg(diffName));
    return;
  }
  double diff = ((double)accum / (img.size().width() * img.size().height() * 3) * 100.0 / 255.0);

  QDialog dlg;
  Ui::ImgDifferenceDialog ui;
  ui.setupUi(&dlg);
  dlg.setModal(true);
  dlg.setSizeGripEnabled(false);
  dlg.layout()->setSizeConstraint( QLayout::SetFixedSize );
  ui.label->setText(QString("The difference is: %1").arg(diff, 0, 'f', 2));
  QPixmap pxmp = QPixmap::fromImage(diffImg);
  QRect r = ui.frame->frameRect();
  if(r.width() < pxmp.width())
    pxmp = pxmp.scaledToWidth(r.width(), Qt::SmoothTransformation);
  if(r.height() < pxmp.height())
    pxmp = pxmp.scaledToHeight(r.height(), Qt::SmoothTransformation);
  //ui.differenceLabel->resize(pxmp.size());
  ui.differenceLabel->setPixmap(pxmp);
  
  dlg.exec();

}
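The comparison loop above indexes the constBits() of both images with the sample's bytesPerLine(), which assumes both are 32-bit formats with identical strides. A format-agnostic sketch of the same metric (meanAbsDiff is a hypothetical helper, not part of ImageComparator) would convert both images first and use QRgb access:

#include <QImage>
#include <cstdlib>

// Hypothetical helper: mean per-channel absolute difference of two same-sized images.
double meanAbsDiff(const QImage &a, const QImage &b)
{
    Q_ASSERT(a.size() == b.size());
    const QImage ia = a.convertToFormat(QImage::Format_ARGB32);
    const QImage ib = b.convertToFormat(QImage::Format_ARGB32);
    qint64 accum = 0;
    for (int y = 0; y < ia.height(); ++y) {
        const QRgb *pa = reinterpret_cast<const QRgb *>(ia.constScanLine(y));
        const QRgb *pb = reinterpret_cast<const QRgb *>(ib.constScanLine(y));
        for (int x = 0; x < ia.width(); ++x) {
            accum += std::abs(qRed(pa[x])   - qRed(pb[x]));
            accum += std::abs(qGreen(pa[x]) - qGreen(pb[x]));
            accum += std::abs(qBlue(pa[x])  - qBlue(pb[x]));
        }
    }
    const qint64 samples = qint64(ia.width()) * ia.height() * 3;
    return samples > 0 ? double(accum) / samples : 0.0;
}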
Example #25
void Head::init() {
    if (_irisProgram == 0) {
        switchToResourcesParentIfRequired();
        _irisProgram = new ProgramObject();
        _irisProgram->addShaderFromSourceFile(QGLShader::Vertex, "resources/shaders/iris.vert");
        _irisProgram->addShaderFromSourceFile(QGLShader::Fragment, "resources/shaders/iris.frag");
        _irisProgram->link();
    
        _irisProgram->setUniformValue("texture", 0);
        _eyePositionLocation = _irisProgram->uniformLocation("eyePosition");
        
        QImage image = QImage(IRIS_TEXTURE_FILENAME).convertToFormat(QImage::Format_ARGB32);
        
        glGenTextures(1, &_irisTextureID);
        glBindTexture(GL_TEXTURE_2D, _irisTextureID);
        glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, image.width(), image.height(), 1, GL_BGRA, GL_UNSIGNED_BYTE, image.constBits());
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_BORDER);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_BORDER);
        glBindTexture(GL_TEXTURE_2D, 0);
    }
}
Example #26
VideoFrame::VideoFrame(const QImage& image)
    : Frame(new VideoFramePrivate(image.width(), image.height(), VideoFormat(image.format())))
{
    setBits((uchar*)image.constBits(), 0);
    setBytesPerLine(image.bytesPerLine(), 0);
}
Example #27
gpu::Texture* TextureUsage::createNormalTextureFromBumpImage(const QImage& srcImage, const std::string& srcImageName) {
    QImage image = srcImage;
    

    #if 0
    // PR 5540 by AlessandroSigna
    // integrated here as a specialized TextureLoader for bumpmaps
    // The conversion is done using the Sobel Filter to calculate the derivatives from the grayscale image
    const double pStrength = 2.0;
    int width = image.width();
    int height = image.height();
    QImage result(width, height, image.format());
    
    for (int i = 0; i < width; i++) {
        const int iNextClamped = clampPixelCoordinate(i + 1, width - 1);
        const int iPrevClamped = clampPixelCoordinate(i - 1, width - 1);
    
        for (int j = 0; j < height; j++) {
            const int jNextClamped = clampPixelCoordinate(j + 1, height - 1);
            const int jPrevClamped = clampPixelCoordinate(j - 1, height - 1);
    
            // surrounding pixels
            const QRgb topLeft = image.pixel(iPrevClamped, jPrevClamped);
            const QRgb top = image.pixel(iPrevClamped, j);
            const QRgb topRight = image.pixel(iPrevClamped, jNextClamped);
            const QRgb right = image.pixel(i, jNextClamped);
            const QRgb bottomRight = image.pixel(iNextClamped, jNextClamped);
            const QRgb bottom = image.pixel(iNextClamped, j);
            const QRgb bottomLeft = image.pixel(iNextClamped, jPrevClamped);
            const QRgb left = image.pixel(i, jPrevClamped);
    
            // take their gray intensities
            // since it's a grayscale image, the value of each component RGB is the same
            const double tl = qRed(topLeft);
            const double t = qRed(top);
            const double tr = qRed(topRight);
            const double r = qRed(right);
            const double br = qRed(bottomRight);
            const double b = qRed(bottom);
            const double bl = qRed(bottomLeft);
            const double l = qRed(left);
    
            // apply the sobel filter
            const double dX = (tr + pStrength * r + br) - (tl + pStrength * l + bl);
            const double dY = (bl + pStrength * b + br) - (tl + pStrength * t + tr);
            const double dZ = RGBA_MAX / pStrength;
    
            glm::vec3 v(dX, dY, dZ);
            glm::normalize(v);
    
            // convert to rgb from the value obtained computing the filter
            QRgb qRgbValue = qRgb(mapComponent(v.x), mapComponent(v.y), mapComponent(v.z));
            result.setPixel(i, j, qRgbValue);
        }
    }
    #endif
    
    gpu::Texture* theTexture = nullptr;
    if ((image.width() > 0) && (image.height() > 0)) {
        
        // bool isLinearRGB = true; //(_type == NORMAL_TEXTURE) || (_type == EMISSIVE_TEXTURE);
        bool isLinearRGB = true; //(_type == NORMAL_TEXTURE) || (_type == EMISSIVE_TEXTURE);
        
        gpu::Element formatGPU = gpu::Element(gpu::VEC3, gpu::UINT8, (isLinearRGB ? gpu::RGB : gpu::SRGB));
        gpu::Element formatMip = gpu::Element(gpu::VEC3, gpu::UINT8, (isLinearRGB ? gpu::RGB : gpu::SRGB));
        if (image.hasAlphaChannel()) {
            formatGPU = gpu::Element(gpu::VEC4, gpu::UINT8, (isLinearRGB ? gpu::RGBA : gpu::SRGBA));
            formatMip = gpu::Element(gpu::VEC4, gpu::UINT8, (isLinearRGB ? gpu::BGRA : gpu::SBGRA));
        }
        
        
        theTexture = (gpu::Texture::create2D(formatGPU, image.width(), image.height(), gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_MIP_LINEAR)));
        theTexture->assignStoredMip(0, formatMip, image.byteCount(), image.constBits());
        theTexture->autoGenerateMips(-1);
    }
    
    return theTexture;
}
Example #28
// See https://code.woboq.org/qt5/qtbase/src/gui/image/qimage.cpp.html#_ZNK6QImage5pixelEii
inline QRgb getPixel(const QImage& image, int x, int y)
{
    uchar* line = const_cast<uchar*>(image.constBits()) + y * image.bytesPerLine();

    return reinterpret_cast<QRgb*>(line)[x];
}
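A usage sketch for the accessor above (averageAlpha is a hypothetical caller). Because getPixel() reinterprets a raw scanline as QRgb, it is only meaningful for 32-bit formats such as Format_ARGB32 or Format_RGB32:

#include <QImage>

// Hypothetical caller: average alpha of an image via the raw accessor above.
int averageAlpha(const QImage &src)
{
    const QImage image = src.convertToFormat(QImage::Format_ARGB32);
    qint64 total = 0;
    for (int y = 0; y < image.height(); ++y)
        for (int x = 0; x < image.width(); ++x)
            total += qAlpha(getPixel(image, x, y));
    const qint64 pixels = qint64(image.width()) * image.height();
    return pixels > 0 ? int(total / pixels) : 0;
}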
Example #29
inline void qt_copy_qimage_to_image2d(const QImage &qimage,
                                      image2d &image,
                                      command_queue &queue)
{
    queue.enqueue_write_image(image, image.origin(), image.size(), qimage.constBits());
}
Example #30
inline void sobel(const QImage &image,
                  QVector<int> &gradient,
                  QVector<int> &direction)
{
    int size = image.width() * image.height();
    gradient.resize(size);
    direction.resize(size);

    for (int y = 0; y < image.height(); y++) {
        size_t yOffset = y * image.width();
        const quint8 *grayLine = image.constBits() + yOffset;

        const quint8 *grayLine_m1 = y < 1? grayLine: grayLine - image.width();
        const quint8 *grayLine_p1 = y >= image.height() - 1? grayLine: grayLine + image.width();

        int *gradientLine = gradient.data() + yOffset;
        int *directionLine = direction.data() + yOffset;

        for (int x = 0; x < image.width(); x++) {
            int x_m1 = x < 1? x: x - 1;
            int x_p1 = x >= image.width() - 1? x: x + 1;

            int gradX = grayLine_m1[x_p1]
                      + 2 * grayLine[x_p1]
                      + grayLine_p1[x_p1]
                      - grayLine_m1[x_m1]
                      - 2 * grayLine[x_m1]
                      - grayLine_p1[x_m1];

            int gradY = grayLine_m1[x_m1]
                      + 2 * grayLine_m1[x]
                      + grayLine_m1[x_p1]
                      - grayLine_p1[x_m1]
                      - 2 * grayLine_p1[x]
                      - grayLine_p1[x_p1];

            gradientLine[x] = qAbs(gradX) + qAbs(gradY);

            /* Gradient directions are classified in 4 possible cases
             *
             * dir 0
             *
             * x x x
             * - - -
             * x x x
             *
             * dir 1
             *
             * x x /
             * x / x
             * / x x
             *
             * dir 2
             *
             * \ x x
             * x \ x
             * x x \
             *
             * dir 3
             *
             * x | x
             * x | x
             * x | x
             */
            if (gradX == 0 && gradY == 0)
                directionLine[x] = 0;
            else if (gradX == 0)
                directionLine[x] = 3;
            else {
                qreal a = 180. * atan(qreal(gradY) / gradX) / M_PI;

                if (a >= -22.5 && a < 22.5)
                    directionLine[x] = 0;
                else if (a >= 22.5 && a < 67.5)
                    directionLine[x] = 1;
                else if (a >= -67.5 && a < -22.5)
                    directionLine[x] = 2;
                else
                    directionLine[x] = 3;
            }
        }
    }
}
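A usage sketch for the Sobel routine above (runSobel is a hypothetical caller). The function indexes constBits() with a stride of image.width(), so it requires an 8-bit grayscale image whose scanlines carry no padding; since QImage pads scanlines to 4-byte boundaries, this holds whenever the width is a multiple of 4:

#include <QImage>
#include <QVector>

// Hypothetical caller: convert to 8-bit grayscale and reject padded scanlines,
// which the y * width indexing in sobel() cannot handle.
inline bool runSobel(const QImage &src, QVector<int> &gradient, QVector<int> &direction)
{
    const QImage gray = src.convertToFormat(QImage::Format_Grayscale8);
    if (gray.bytesPerLine() != gray.width())
        return false;
    sobel(gray, gradient, direction);
    return true;
}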