cv::Mat connectedComponentsFilter(cv::Mat& curFrame, cv::Mat& img) {

    // Filters the binary mask 'img' by connected-component size: contours
    // whose perimeter is too small are discarded, the rest are replaced by
    // their convex hulls and redrawn (filled) into the returned mask.
    // Bounding rectangles of the kept components are painted onto 'curFrame'.
    // Side effect: the member 'maskContours' ends up holding the kept
    // (convex-hull) contours.

    if (!img.isContinuous()) {
        // Fixed typo in the error message ("Parammeter").
        throwError("Parameter 'img' in 'connectedComponentsFilter' must be continuous");
    }

    //morphology_(img);

    maskContours.clear();

    // Zero-initialized mask the surviving contours are drawn back into.
    cv::Mat result = cv::Mat::zeros(img.size(), img.type());

    cv::findContours(img, maskContours, RETR_EXTERNAL, CHAIN_APPROX_SIMPLE);
    size_t i = 0;

    while (i < maskContours.size()) {
        Contour& contour = maskContours[i];

        // Wrap the contour's points without copying them.
        cv::Mat contourMat(contour, false);
        double len = cv::arcLength(contourMat, true);

        if (len * PERIM_SCALE < img.size().height + img.size().width) {
            // Discard contours with too small a perimeter.
            // (note: 'i' is intentionally not incremented after erase)
            maskContours.erase(maskContours.begin() + i);
        } else {
            // Approximate sufficiently large contours with the chosen method.
            Contour newContour;

            // Alternative approximation method:
            //cv::approxPolyDP(contourMat, newContour, CHAIN_APPROX_SIMPLE, true);
            cv::convexHull(contourMat, newContour, true);

            cv::Mat newContourMat(newContour, false);
            Rect boundingRect = cv::boundingRect(newContourMat);
            cv::rectangle(curFrame, boundingRect, cv::Scalar(255));

            maskContours[i] = newContour;

            i++;

            //points.push_back(CvPoint(boundingRect.x + boundingRect.width / 2, boundingRect.y + boundingRect.height / 2));
        }
    }

    // Guard against an OpenCV 2.1.0 bug when drawing an empty contour set
    // (fixed in 2.3.1).
    if (!maskContours.empty()) {
        cv::drawContours(result, maskContours, -1, cv::Scalar(255), FILLED);
    }
    return result;
}
bool closeSurface(  fwVertexPosition &_vertex, fwVertexIndex &_vertexIndex )
{
    // An edge is a pair of vertex indices; a contour is the list of boundary
    // edges around one hole; findBorderEdges yields one contour per hole.
    typedef std::pair< int, int >   Edge;
    typedef std::vector< Edge >     Contour;
    typedef std::vector< Contour >  Contours;

    Contours holeBorders;
    findBorderEdges( _vertexIndex, holeBorders );

    // At least one open border means some closing work was done.
    const bool closurePerformed = !holeBorders.empty();

    // Cap every hole with a triangle fan around its barycenter.
    for ( std::size_t c = 0; c < holeBorders.size(); ++c )
    {
        const Contour& border = holeBorders[c];

        // Index the barycenter will receive once appended to _vertex below.
        const int centerIndex = static_cast<int>( _vertex.size() );

        std::vector< float > barycenter( 3, 0 );
        for ( std::size_t e = 0; e < border.size(); ++e )
        {
            const Edge& edge = border[e];

            // Accumulate both endpoints; each border vertex is visited twice
            // (once per incident edge), which the division below accounts for.
            for ( int axis = 0; axis < 3; ++axis )
            {
                barycenter[axis] += _vertex[edge.first][axis];
                barycenter[axis] += _vertex[edge.second][axis];
            }

            // One fan triangle per border edge, pointing at the barycenter.
            std::vector< int > triangle( 3 );
            triangle[0] = edge.first;
            triangle[1] = edge.second;
            triangle[2] = centerIndex;
            _vertexIndex.push_back( triangle );
        }

        // Average over all accumulated samples (two per edge).
        for ( int axis = 0; axis < 3; ++axis )
        {
            barycenter[axis] /= border.size() * 2;
        }
        _vertex.push_back( barycenter );
    }
    return closurePerformed;
}
Exemple #3
0
Mat ScreenDetector::getTransformationMatrix(Error& error)
{
    // Locates the screen in the member image 'img' via HSV thresholding and
    // contour approximation, then returns the perspective-transformed image
    // from transformImage(). On failure, fills 'error' and returns 'img'
    // unchanged.
    bool approxFound = false;

    // Convert to HSV so the screen colour can be isolated by hue.
    cvtColor(img, hsv, CV_BGR2HSV);

    // Keep only pixels within the configured HSV window.
    inRange(hsv, hsvMin, hsvMax, thresholded);

    // Morphological open (erode+dilate) then close (dilate+erode) to drop
    // speckle noise while preserving the screen blob, then a light blur.
    erode(thresholded, thresholded, getStructuringElement(MORPH_ELLIPSE, Size(erodeDilateSize, erodeDilateSize)) );
    dilate( thresholded, thresholded, getStructuringElement(MORPH_ELLIPSE, Size(erodeDilateSize, erodeDilateSize)) );
    dilate( thresholded, thresholded, getStructuringElement(MORPH_ELLIPSE, Size(erodeDilateSize, erodeDilateSize)) );
    erode(thresholded, thresholded, getStructuringElement(MORPH_ELLIPSE, Size(erodeDilateSize, erodeDilateSize)) );
    GaussianBlur(thresholded, thresholded, Size(3,3), 0);

    // findContours modifies its input, so work on a copy of the mask.
    Mat forContours;
    thresholded.copyTo(forContours);
    // find all contours
    Contours contours;
    Contour approximatedScreen;
    findContours(forContours, contours, CV_RETR_LIST, CV_CHAIN_APPROX_SIMPLE);
    int nbContours = contours.size();
    cout << nbContours << " contours found, debug: " << DEBUG << endl;

    if(nbContours == 0)
    {
        // BUGFIX: the two adjacent literals previously concatenated into
        // "...green element.Please..." (missing space), and the hint wrongly
        // said "directed toward the screen" instead of the camera.
        error.setError("Unable to find the screen",
                       "The camera doesn't detect any screen or green element. "
                       "Please check if your screen is turned on and directed toward the camera");
        return img;
    }

    // Largest areas first: the screen is expected to be the biggest blob.
    sort(contours.begin(), contours.end(), contour_compare_area);

    // Find the biggest contour that approximates to a quadrilateral —
    // our screen has 4 points when approximated.
    for(int i=0; i < nbContours; ++i)
    {
        approxPolyDP(contours.at(i), approximatedScreen, approximateEpsilon * arcLength(contours.at(i), true), true);
        if(approximatedScreen.size() == 4)
        {
            approxFound = true;
            break;
        }
    }

    if(!approxFound)
    {
        error.setError("Unable to find the screen properly",
                       "It seems that the screen is not fully detectable by the camera. Try to reduce light in your room");
        return img;
    }

    if(DEBUG)
    {
        // Visualize the approximated quad and the thresholded mask.
        namedWindow("debug", WINDOW_KEEPRATIO);
        namedWindow("thresholded_calibration", WINDOW_KEEPRATIO);
        Mat debug = Mat::zeros(img.rows, img.cols, CV_8UC3);
        polylines(debug, approximatedScreen, true, Scalar(0,0,255), 3);
        imshow("debug", debug);
        imshow("thresholded_calibration", thresholded);
    }

    return transformImage(approximatedScreen);
}