Example 1
void BlurEffect::drawWindow(EffectWindow *w, int mask, QRegion region, WindowPaintData &data)
{
    const QRect screen = effects->virtualScreenGeometry();
    if (shouldBlur(w, mask, data)) {
        QRegion shape = region & blurRegion(w).translated(w->pos()) & screen;

        const bool translated = data.xTranslation() || data.yTranslation();
        // let's do the evil parts - someone wants to blur behind a transformed window
        if (translated) {
            shape = shape.translated(data.xTranslation(), data.yTranslation());
            shape = shape & region;
        }

        if (!shape.isEmpty()) {
            if (m_shouldCache && !translated) {
                doCachedBlur(w, region, data.opacity());
            } else {
                doBlur(shape, screen, data.opacity());
            }
        }
    }

    // Draw the window over the blurred area
    effects->drawWindow(w, mask, region, data);
}
Example 2
void BlurEffect::slotPropertyNotify(EffectWindow *w, long atom)
{
    if (w && atom == net_wm_blur_region) {
        updateBlurRegion(w);
        CacheEntry it = windows.find(w);
        if (it != windows.end()) {
            const QRect screen = effects->virtualScreenGeometry();
            it->damagedRegion = expand(blurRegion(w).translated(w->pos())) & screen;
        }
    }
}
Example 3
void BlurEffect::doCachedBlur(EffectWindow *w, const QRegion& region, const float opacity)
{
    const QRect screen = effects->virtualScreenGeometry();
    const QRegion blurredRegion = blurRegion(w).translated(w->pos()) & screen;
    const QRegion expanded = expand(blurredRegion) & screen;
    const QRect r = expanded.boundingRect();

    // The background texture we get is only partially valid.

    CacheEntry it = windows.find(w);
    if (it == windows.end()) {
        BlurWindowInfo bwi;
        bwi.blurredBackground = GLTexture(r.width(),r.height());
        bwi.damagedRegion = expanded;
        bwi.dropCache = false;
        bwi.windowPos = w->pos();
        it = windows.insert(w, bwi);
    } else if (it->blurredBackground.size() != r.size()) {
        it->blurredBackground = GLTexture(r.width(),r.height());
        it->dropCache = false;
        it->windowPos = w->pos();
    } else if (it->windowPos != w->pos()) {
        it->dropCache = false;
        it->windowPos = w->pos();
    }

    GLTexture targetTexture = it->blurredBackground;
    targetTexture.setFilter(GL_LINEAR);
    targetTexture.setWrapMode(GL_CLAMP_TO_EDGE);
    shader->bind();
    QMatrix4x4 textureMatrix;
    QMatrix4x4 modelViewProjectionMatrix;

    /**
     * Which part of the background texture can be updated?
     *
     * This is a rather subtle question. We rely on the fact that a larger
     * background region must have been painted beforehand: to blur region A
     * we need the background region expand(A). That requirement is set up in
     * prePaintWindow:
     *          data.paint |= expand(damagedArea);
     *
     * "data.paint" then gets clipped and becomes what we receive here as the
     * "region" parameter. In theory only one function does this clipping,
     * namely paintSimpleScreen. The clipping has the effect that
     * "damagedRegion" is no longer a subset of "region", so we cannot fully
     * validate the cache within one rendering pass. If we were to update the
     * "damagedRegion & region" part of the cache now, we would wrongly update
     * the part of the cache that borders "region" and lies within
     * "damagedRegion", because we cannot assume that the framebuffer outside
     * of "region" is valid. Therefore the maximal damaged region of the cache
     * that can be repainted is given by:
     *          validUpdate = damagedRegion - expand(damagedRegion - region);
     * (see the worked region-arithmetic sketch after this function)
     *
     * You may ask what happens to the rest of "damagedRegion & region" that
     * is not part of "validUpdate" but might still end up on the screen.
     * Under the assumption that only occlusion culling can shrink
     * "data.paint", we control this by reducing the opaque area of every
     * window by a margin of the blur radius (c.f. prePaintWindow). This
     * guarantees that such an area is overpainted by a higher opaque window.
     *
     * However, paintSimpleScreen is not the only function that can influence
     * "region": every effect's paintWindow that is called before
     * Blur::paintWindow can do so as well (e.g. SlidingPopups). Hence we make
     * the compromise of updating "damagedRegion & region" of the cache while
     * only marking "validUpdate" as valid.
     **/
    const QRegion damagedRegion = it->damagedRegion;
    const QRegion updateBackground = damagedRegion & region;
    const QRegion validUpdate = damagedRegion - expand(damagedRegion - region);

    const QRegion horizontal = validUpdate.isEmpty() ? QRegion() : (updateBackground & screen);
    const QRegion vertical   = blurredRegion & region;

    const int horizontalOffset = 0;
    const int horizontalCount = horizontal.rectCount() * 6;

    const int verticalOffset = horizontalCount;
    const int verticalCount = vertical.rectCount() * 6;

    GLVertexBuffer *vbo = GLVertexBuffer::streamingBuffer();
    uploadGeometry(vbo, horizontal, vertical);

    vbo->bindArrays();

    if (!validUpdate.isEmpty()) {
        const QRect updateRect = (expand(updateBackground) & expanded).boundingRect();
        // First we have to copy the background from the frontbuffer
        // into a scratch texture (in this case "tex").
        tex.bind();

        glCopyTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, updateRect.x(), displayHeight() - updateRect.y() - updateRect.height(),
                            updateRect.width(), updateRect.height());

        // Draw the texture on the offscreen framebuffer object, while blurring it horizontally
        target->attachTexture(targetTexture);
        GLRenderTarget::pushRenderTarget(target);

        shader->setDirection(Qt::Horizontal);
        shader->setPixelDistance(1.0 / tex.width());

        modelViewProjectionMatrix.ortho(0, r.width(), r.height(), 0 , 0, 65535);
        modelViewProjectionMatrix.translate(-r.x(), -r.y(), 0);
        shader->setModelViewProjectionMatrix(modelViewProjectionMatrix);

        // Set up the texture matrix to transform from screen coordinates
        // to texture coordinates.
        textureMatrix.scale(1.0 / tex.width(), -1.0 / tex.height(), 1);
        textureMatrix.translate(-updateRect.x(), -updateRect.height() - updateRect.y(), 0);
        shader->setTextureMatrix(textureMatrix);

        vbo->draw(GL_TRIANGLES, horizontalOffset, horizontalCount);

        GLRenderTarget::popRenderTarget();
        tex.unbind();
        // mark the updated region as valid
        it->damagedRegion -= validUpdate;
    }

    // Now draw the horizontally blurred area back to the backbuffer, while
    // blurring it vertically and clipping it to the window shape.
    targetTexture.bind();

    shader->setDirection(Qt::Vertical);
    shader->setPixelDistance(1.0 / targetTexture.height());

    // Modulate the blurred texture with the window opacity if the window isn't opaque
    if (opacity < 1.0) {
        glEnable(GL_BLEND);
        glBlendColor(0, 0, 0, opacity);
        glBlendFunc(GL_CONSTANT_ALPHA, GL_ONE_MINUS_CONSTANT_ALPHA);
    }

    modelViewProjectionMatrix.setToIdentity();
    const QSize screenSize = effects->virtualScreenSize();
    modelViewProjectionMatrix.ortho(0, screenSize.width(), screenSize.height(), 0, 0, 65535);
    shader->setModelViewProjectionMatrix(modelViewProjectionMatrix);

    // Set up the texture matrix to transform from screen coordinates
    // to texture coordinates.
    textureMatrix.setToIdentity();
    textureMatrix.scale(1.0 / targetTexture.width(), -1.0 / targetTexture.height(), 1);
    textureMatrix.translate(-r.x(), -targetTexture.height() - r.y(), 0);
    shader->setTextureMatrix(textureMatrix);

    vbo->draw(GL_TRIANGLES, verticalOffset, verticalCount);
    vbo->unbindArrays();

    if (opacity < 1.0) {
        glDisable(GL_BLEND);
    }

    targetTexture.unbind();
    shader->unbind();
}
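A worked example of the region arithmetic derived in the comment above may help. The following standalone sketch uses a hypothetical expandBy() helper in place of BlurEffect::expand() (assumed to grow each rect of a region by the blur radius, which is not shown in this listing) and plugs in concrete rectangles:

#include <QRect>
#include <QRegion>
#include <QDebug>

// Hypothetical stand-in for BlurEffect::expand(): grow every rect of the
// region by the blur radius, so blurring the original region only reads
// pixels that lie inside the expanded one.
static QRegion expandBy(const QRegion &region, int radius)
{
    QRegion expanded;
    const auto rects = region.rects();
    for (const QRect &rect : rects) {
        expanded |= rect.adjusted(-radius, -radius, radius, radius);
    }
    return expanded;
}

int main()
{
    const int radius = 12;                               // arbitrary blur radius
    const QRegion damagedRegion(QRect(0, 0, 200, 200));  // what the cache needs repainted
    const QRegion region(QRect(0, 0, 150, 200));         // what this pass is allowed to paint
    // Everything within one blur radius of the damage that falls outside
    // "region" cannot be validated, because the framebuffer there is unknown.
    const QRegion validUpdate = damagedRegion - expandBy(damagedRegion - region, radius);
    qDebug() << validUpdate; // a single rect 0,0 138x200: one radius short of the clip border
    return 0;
}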
Example 4
void BlurEffect::prePaintWindow(EffectWindow* w, WindowPrePaintData& data, int time)
{
    // this effect relies on prePaintWindow being called in bottom-to-top order

    effects->prePaintWindow(w, data, time);

    if (!w->isPaintingEnabled()) {
        return;
    }
    if (!shader || !shader->isValid()) {
        return;
    }

    // to blur an area partially we have to shrink the opaque area of a window
    // (see the numeric sketch after this function)
    QRegion newClip;
    const QRegion oldClip = data.clip;
    const int radius = shader->radius();
    foreach (const QRect& rect, data.clip.rects()) {
        newClip |= rect.adjusted(radius,radius,-radius,-radius);
    }
    data.clip = newClip;

    const QRegion oldPaint = data.paint;

    // we don't have to blur a region we don't see
    m_currentBlur -= newClip;
    // if we have to paint a non-opaque part of this window that intersects with the
    // currently blurred region (which is not cached) we have to redraw the whole region
    if ((data.paint-oldClip).intersects(m_currentBlur)) {
        data.paint |= m_currentBlur;
    }

    // in case this window has regions to be blurred
    const QRect screen = effects->virtualScreenGeometry();
    const QRegion blurArea = blurRegion(w).translated(w->pos()) & screen;
    const QRegion expandedBlur = expand(blurArea) & screen;

    if (m_shouldCache) {
        // we are caching the horizontally blurred background texture

        // if a window underneath the blurred area is damaged we have to
        // update the cached texture
        QRegion damagedCache;
        CacheEntry it = windows.find(w);
        if (it != windows.end() && !it->dropCache &&
            it->windowPos == w->pos() &&
            it->blurredBackground.size() == expandedBlur.boundingRect().size()) {
            damagedCache = (expand(expandedBlur & m_damagedArea) |
                            (it->damagedRegion & data.paint)) & expandedBlur;
        } else {
            damagedCache = expandedBlur;
        }
        if (!damagedCache.isEmpty()) {
            // This is the area of the blurry window which really can change.
            const QRegion damagedArea = damagedCache & blurArea;
            // In order to be able to recalculate this area we have to make sure the
            // background area is painted before.
            data.paint |= expand(damagedArea);
            if (it != windows.end()) {
                // In case we already have a texture cache mark the dirty regions invalid.
                it->damagedRegion &= expandedBlur;
                it->damagedRegion |= damagedCache;
                // The valid part of the cache can be considered as being opaque
                // as long as we don't need to update a bordering part
                data.clip |= blurArea - expand(it->damagedRegion);
                it->dropCache = false;
            }
            // we keep track of the "damage propagation"
            m_damagedArea |= damagedArea;
            // we have to check again whether we do not damage a blurred area
            // of a window we do not cache
            if (expandedBlur.intersects(m_currentBlur)) {
                data.paint |= m_currentBlur;
            }
        }
    } else {
        // we are not caching the window

        // if this window or a window underneath the blurred area is painted again we have to
        // blur everything
        if (m_paintedArea.intersects(expandedBlur) || data.paint.intersects(blurArea)) {
            data.paint |= expandedBlur;
            // we keep track of the "damage propagation"
            m_damagedArea |= expand(expandedBlur & m_damagedArea) & blurArea;
            // we have to check again whether we do not damage a blurred area
            // of a window we do not cache
            if (expandedBlur.intersects(m_currentBlur)) {
                data.paint |= m_currentBlur;
            }
        }

        m_currentBlur |= expandedBlur;
    }

    // we don't consider damaged areas which are occluded and are not
    // explicitly damaged by this window
    m_damagedArea -= data.clip;
    m_damagedArea |= oldPaint;

    // in contrast to m_damagedArea, m_paintedArea keeps track of all repainted areas
    m_paintedArea -= data.clip;
    m_paintedArea |= data.paint;
}
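To see with numbers why the clip region is shrunk at the top of this function rather than left alone, here is a minimal standalone sketch; the 12 px radius is an arbitrary stand-in for shader->radius():

#include <QRect>
#include <QRegion>
#include <QDebug>

int main()
{
    const int radius = 12;
    // An opaque window covering the left half of an 800x600 screen.
    const QRegion opaque(QRect(0, 0, 400, 600));

    // Shrink the clip exactly as prePaintWindow does: the 12 px band along
    // the window border no longer counts as occluding.
    QRegion clip;
    const auto rects = opaque.rects();
    for (const QRect &rect : rects) {
        clip |= rect.adjusted(radius, radius, -radius, -radius);
    }

    // A blurred window underneath that pokes out at x >= 400 samples pixels
    // up to 12 px to its left, i.e. underneath the opaque window. Because
    // that band was removed from the clip, it still counts as visible and
    // keeps getting repainted, so the blur does not read stale content there.
    qDebug() << (opaque - clip); // the 12 px band just inside the opaque rect's border
    return 0;
}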
//------------------------------------------------------------------------------------------------------
void QOpencvProcessor::faceProcess(const cv::Mat &input)
{
    cv::Mat output;
    if(f_fill)
        output = input;
    else
        output = input.clone();

    cv::Mat gray; // Create an instance of cv::Mat for temporary image storage
    cv::cvtColor(input, gray, CV_BGR2GRAY);
    cv::equalizeHist(gray, gray);
    std::vector<cv::Rect> faces_vector;

    m_classifier.detectMultiScale(gray, faces_vector, 1.1, 7, cv::CASCADE_FIND_BIGGEST_OBJECT, cv::Size(OBJECT_MINSIZE, OBJECT_MINSIZE)); // Detect faces (list of flags CASCADE_DO_CANNY_PRUNING, CASCADE_DO_ROUGH_SEARCH, CASCADE_FIND_BIGGEST_OBJECT, CASCADE_SCALE_IMAGE )

    if(faces_vector.size() == 0) {
        m_emptyFrames++;
        if(m_emptyFrames > FRAMES_WITHOUT_FACE_TRESHOLD)
            setAverageFaceRect(0, 0, 0, 0);
    } else {
        m_emptyFrames = 0;
        enrollFaceRect(faces_vector[0]);
    }
    cv::Rect face = getAverageFaceRect();

    unsigned int X = face.x; // top-left corner, horizontal coordinate of the enrolled rectangle
    unsigned int Y = face.y; // top-left corner, vertical coordinate of the enrolled rectangle
    unsigned int rectwidth = face.width; // width of the enrolled rectangle
    unsigned int rectheight = face.height; // height of the enrolled rectangle
    unsigned long red = 0; // accumulator for the red color channel
    unsigned long green = 0; // accumulator for the green color channel
    unsigned long blue = 0; // accumulator for the blue color channel
    unsigned int dX = rectwidth/16; // horizontal margin used to shape m_ellipsRect below
    unsigned int dY = rectheight/30; // vertical margin used to shape m_ellipsRect below
    unsigned long area = 0; // number of accumulated pixels

    if(face.area() > 10000)
    {
        for(int i = 0; i < 256; i++)
            v_temphist[i] = 0;
        cv::Mat blurRegion(output, face);
        cv::blur(blurRegion, blurRegion, cv::Size(m_blurSize, m_blurSize));
        m_ellipsRect = cv::Rect(X + dX, Y - 6 * dY, rectwidth - 2 * dX, rectheight + 6 * dY);
        X = m_ellipsRect.x;
        rectwidth = m_ellipsRect.width;
        unsigned char *p; // this pointer will be used to store addresses of the image rows
        unsigned char tempBlue;
        unsigned char tempRed;
        unsigned char tempGreen;
        if(output.channels() == 3)
        {
            if(m_skinFlag)
            {
                if(f_fill){
                    for(unsigned int j = Y; j < Y + rectheight; j++) // note: unsigned indices happen to guard against reading above the top of the image, but not past the bottom; that bound should be checked explicitly (see the bounds-clamp sketch after this function)
                    {
                        p = output.ptr(j); // pointer to the beginning of row j
                        for(unsigned int i = X; i < X + rectwidth; i++)
                        {
                            tempBlue = p[3*i];
                            tempGreen = p[3*i+1];
                            tempRed = p[3*i+2];
                            if( isSkinColor(tempRed, tempGreen, tempBlue) && isInEllips(i, j)) {
                                area++;
                                blue += tempBlue;
                                green += tempGreen;
                                red += tempRed;
                                //p[3*i] = 255;
                                //p[3*i+1] %= LEVEL_SHIFT;
                                p[3*i+2] %= LEVEL_SHIFT;
                                v_temphist[tempGreen]++;
                            }
                        }
                    }
                } else {
                    for(unsigned int j = Y; j < Y + rectheight; j++) // as above: unsigned indices only guard against the top bound, not the bottom
                    {
                        p = output.ptr(j); // pointer to the beginning of row j
                        for(unsigned int i = X; i < X + rectwidth; i++)
                        {
                            tempBlue = p[3*i];
                            tempGreen = p[3*i+1];
                            tempRed = p[3*i+2];
                            if( isSkinColor(tempRed, tempGreen, tempBlue) && isInEllips(i, j)) {
                                area++;
                                blue += tempBlue;
                                green += tempGreen;
                                red += tempRed;
                                v_temphist[tempGreen]++;
                            }
                        }
                    }
                }
            } else {
                for(unsigned int j = Y; j < Y + rectheight; j++)
                {
                    p = output.ptr(j); // pointer to the beginning of row j
                    for(unsigned int i = X; i < X + rectwidth; i++)
                    {
                        blue += p[3*i];
                        green += p[3*i+1];
                        red += p[3*i+2];
                        if(f_fill)  {
                            //p[3*i] = 255;
                            //p[3*i+1] %= LEVEL_SHIFT;
                            p[3*i+2] %= LEVEL_SHIFT;
                        }
                        v_temphist[p[3*i+1]]++;
                    }
                }
                area = rectwidth*rectheight;
            }
        } else {
            for(unsigned int j = Y; j < Y + rectheight; j++)
            {
                p = output.ptr(j); // pointer to the beginning of row j
                for(unsigned int i = X; i < X + rectwidth; i++)
                {
                    green += p[i];
                    // marks the enrolled domain on the image when f_fill is set
                    if(f_fill)  {
                        p[i] %= LEVEL_SHIFT;
                    }
                    v_temphist[p[i]]++;
                }
            }
            blue = green;
            red = green;
            area = rectwidth*rectheight;
        }
    }


    //-----end of if(face.area() > 10000)-----
    m_framePeriod = ((double)cv::getTickCount() -  m_timeCounter)*1000.0 / cv::getTickFrequency();
    m_timeCounter = cv::getTickCount();
    if(area > 1000)
    {
        if(!f_fill)
            cv::rectangle( cv::Mat(input), face, cv::Scalar(15,15,250),1,CV_AA);

        emit dataCollected(red, green, blue, area, m_framePeriod);

        unsigned int mass = 0;
        for(int i = 0; i < 256; i++)
            mass += v_temphist[i];
        if(mass > 0) {
            for(int i = 0; i < 256; i++)
                v_hist[i] = (qreal)v_temphist[i]/mass;
        }
        emit histUpdated(v_hist, 256);
    }
    else
    {
        if(m_classifier.empty())
            emit selectRegion( QT_TRANSLATE_NOOP("QImageWidget", "Load cascade for detection") );
        else
            emit selectRegion( QT_TRANSLATE_NOOP("QImageWidget", "Come closer or change light") );
    }
    emit frameProcessed(input, m_framePeriod, area);
}
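The loop comments in faceProcess point out that the bottom image bound is not checked. A minimal, hypothetical clampToFrame() helper (not part of QOpencvProcessor) that intersects the enrolled rectangle with the frame before the pixel loops could look like this:

#include <opencv2/core.hpp>

// Hypothetical helper: clamp a detection rectangle to the frame so that the
// row/column loops in faceProcess()/rectProcess() stay within the image.
static cv::Rect clampToFrame(const cv::Rect &r, const cv::Mat &frame)
{
    // cv::Rect supports intersection via operator&.
    return r & cv::Rect(0, 0, frame.cols, frame.rows);
}

// Possible use before the pixel loops:
//     cv::Rect face = clampToFrame(getAverageFaceRect(), output);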
void QOpencvProcessor::rectProcess(const cv::Mat &input)
{
    cv::Mat output(input); // copy constructor: shares pixel data with input (no deep copy)
    unsigned int rectwidth = m_cvRect.width;
    unsigned int rectheight = m_cvRect.height;
    unsigned int X = m_cvRect.x;
    unsigned int Y = m_cvRect.y;

    if( (output.rows < (Y + rectheight)) || (output.cols < (X + rectwidth)) )
    {
        rectheight = 0;
        rectwidth = 0;
    }

    unsigned long red = 0;
    unsigned long green = 0;
    unsigned long blue = 0;
    unsigned long area = 0;
    //-------------------------------------------------------------------------
    if((rectheight > 0) && (rectwidth > 0))
    {
        for(int i = 0; i < 256; i++)
            v_temphist[i] = 0;

        cv::Mat blurRegion(output, m_cvRect);
        cv::blur(blurRegion, blurRegion, cv::Size(m_blurSize, m_blurSize));

        unsigned char *p; // a pointer to store the addresses of image rows
        if(output.channels() == 3)
        {
            if(m_seekCalibColors)
            {
                unsigned char tempRed;
                unsigned char tempGreen;
                unsigned char tempBlue;
                for(unsigned int j = Y; j < Y + rectheight; j++)
                {
                    p = output.ptr(j); // pointer to the beginning of row j
                    for(unsigned int i = X; i < X + rectwidth; i++)
                    {
                        tempBlue = p[3*i];
                        tempGreen = p[3*i+1];
                        tempRed = p[3*i+2];
                        if( isCalibColor(tempGreen) && isSkinColor(tempRed, tempGreen, tempBlue) ) {
                            area++;
                            blue += tempBlue;
                            green += tempGreen;
                            red += tempRed;
                            if(f_fill)  {
                                //p[3*i] = 255;
                                //p[3*i+1] %= LEVEL_SHIFT;
                                p[3*i+2] %= LEVEL_SHIFT;
                            }
                            v_temphist[tempGreen]++;
                        }
                    }
                }
            } else {
                if(m_skinFlag)
                {
                    unsigned char tempRed;
                    unsigned char tempGreen;
                    unsigned char tempBlue;
                    for(unsigned int j = Y; j < Y + rectheight; j++)
                    {
                        p = output.ptr(j); // pointer to the beginning of row j
                        for(unsigned int i = X; i < X + rectwidth; i++)
                        {
                            tempBlue = p[3*i];
                            tempGreen = p[3*i+1];
                            tempRed = p[3*i+2];
                            if( isSkinColor(tempRed, tempGreen, tempBlue)) {
                                area++;
                                blue += tempBlue;
                                green += tempGreen;
                                red += tempRed;
                                if(f_fill)  {
                                    //p[3*i] = 255;
                                    //p[3*i+1] %= LEVEL_SHIFT;
                                    p[3*i+2] %= LEVEL_SHIFT;
                                }
                                v_temphist[tempGreen]++;
                            }
                        }
                    }
                }
                else
                {
                    for(unsigned int j = Y; j < Y + rectheight; j++)
                    {
                        p = output.ptr(j); // pointer to the beginning of row j
                        for(unsigned int i = X; i < X + rectwidth; i++)
                        {
                            blue += p[3*i];
                            green += p[3*i+1];
                            red += p[3*i+2];
                            if(f_fill)  {
                                //p[3*i] = 255;
                                //p[3*i+1] %= LEVEL_SHIFT;
                                p[3*i+2] %= LEVEL_SHIFT;
                            }
                            v_temphist[p[3*i+1]]++;
                        }
                    }
                    area = rectwidth*rectheight;
                }
            }
        }
        else
        {
            for(unsigned int j = Y; j < Y + rectheight; j++)
            {
                p = output.ptr(j); // pointer to the beginning of row j
                for(unsigned int i = X; i < X + rectwidth; i++)
                {
                    green += p[i];
                    if(f_fill)  {
                        p[i] %= LEVEL_SHIFT;
                    }
                    v_temphist[p[i]]++;
                }
            }
            area = rectwidth*rectheight;
        }
    }
    //------end of if((rectheight > 0) && (rectwidth > 0))
    m_framePeriod = ((double)cv::getTickCount() -  m_timeCounter)*1000.0 / cv::getTickFrequency();
    m_timeCounter = cv::getTickCount();
    if( area > 0 )
    {
        cv::rectangle( output , m_cvRect, cv::Scalar(15,250,15), 1, CV_AA);
        emit dataCollected(red, green, blue, area, m_framePeriod);

        unsigned int mass = 0;
        for(int i = 0; i < 256; i++)
            mass += v_temphist[i];
        if(mass > 0) {
            for(int i = 0; i < 256; i++)
                v_hist[i] = (qreal)v_temphist[i]/mass;
        }
        emit histUpdated(v_hist, 256);

        if(m_calibFlag) // accumulate calibration samples; see the statistics sketch after this function
        {
            v_calibValues[m_calibSamples] = (qreal)green/area;
            m_calibMean += v_calibValues[m_calibSamples];
            m_calibSamples++;
            if(m_calibSamples == CALIBRATION_VECTOR_LENGTH)
            {
                m_calibMean /= CALIBRATION_VECTOR_LENGTH;
                m_calibError = 0.0;
                for(quint16 i = 0; i < CALIBRATION_VECTOR_LENGTH; i++)
                {
                    m_calibError += (v_calibValues[i] - m_calibMean)*(v_calibValues[i] - m_calibMean);
                }
                m_calibError = 10 * sqrt( m_calibError /(CALIBRATION_VECTOR_LENGTH - 1) );
                qWarning("mean: %f; error: %f; samples: %d", m_calibMean,m_calibError, m_calibSamples);
                m_calibSamples = 0;
                m_calibFlag = false;
                m_seekCalibColors = true;
                emit calibrationDone(m_calibMean, m_calibError/10, m_calibSamples);
            }
        }
    }
    else
    {
        emit selectRegion( QT_TRANSLATE_NOOP("QImageWidget", "Select region on image" ) );
    }
    emit frameProcessed(output, m_framePeriod, area);
}
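The calibration branch above reduces to a mean and a scaled sample standard deviation over CALIBRATION_VECTOR_LENGTH per-frame green averages. The same statistics in isolation, as a sketch with made-up sample values:

#include <cmath>
#include <cstdio>
#include <vector>

// Mean and 10 * sample standard deviation, matching the calibration code
// (calibrationDone() is later emitted with the error divided back by 10).
static void computeCalibration(const std::vector<double> &values,
                               double &mean, double &error)
{
    mean = 0.0;
    for (double v : values)
        mean += v;
    mean /= values.size();

    error = 0.0;
    for (double v : values)
        error += (v - mean) * (v - mean);
    error = 10.0 * std::sqrt(error / (values.size() - 1));
}

int main()
{
    const std::vector<double> samples = {120.4, 121.1, 119.8, 120.9}; // made-up green means
    double mean = 0.0, error = 0.0;
    computeCalibration(samples, mean, error);
    std::printf("mean: %f; error: %f; samples: %zu\n", mean, error, samples.size());
    return 0;
}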