Example #1
File: ippe.cpp Project: lz89/IPPE
void IPPE::PoseSolver::solveGeneric(cv::InputArray _objectPoints, cv::InputArray _normalizedInputPoints,
                                    cv::OutputArray _Ma, cv::OutputArray _Mb)
{

    //argument checking:
    size_t n = _objectPoints.rows() * _objectPoints.cols(); //number of points
    int objType = _objectPoints.type();
    int type_input = _normalizedInputPoints.type();
    assert((objType == CV_32FC3) || (objType == CV_64FC3));
    assert((type_input == CV_32FC2) || (type_input == CV_64FC2));
    assert((_objectPoints.rows() == 1) || (_objectPoints.cols() == 1));
    assert((_objectPoints.rows() >= 4) || (_objectPoints.cols() >= 4));
    assert((_normalizedInputPoints.rows() == 1) || (_normalizedInputPoints.cols() == 1));
    assert(static_cast<size_t>(_normalizedInputPoints.rows() * _normalizedInputPoints.cols()) == n);

    cv::Mat normalizedInputPoints;
    if (type_input == CV_32FC2) {
        _normalizedInputPoints.getMat().convertTo(normalizedInputPoints, CV_64FC2);
    }
    else {
        normalizedInputPoints = _normalizedInputPoints.getMat();
    }

    cv::Mat objectInputPoints;
    if (objType == CV_32FC3) {
        _objectPoints.getMat().convertTo(objectInputPoints, CV_64FC3);
    }
    else {
        objectInputPoints = _objectPoints.getMat();
    }

    cv::Mat canonicalObjPoints;
    cv::Mat MmodelPoints2Canonical;

    //transform object points to the canonical position (zero centred and on the plane z=0):
    makeCanonicalObjectPoints(objectInputPoints, canonicalObjPoints, MmodelPoints2Canonical);

    //compute the homography mapping the model's points to normalizedInputPoints
    cv::Mat H;
    HomographyHO::homographyHO(canonicalObjPoints, _normalizedInputPoints, H);

    //now solve
    cv::Mat MaCanon, MbCanon;
    solveCanonicalForm(canonicalObjPoints, normalizedInputPoints, H, MaCanon, MbCanon);

    //transform computed poses to account for canonical transform:
    cv::Mat Ma = MaCanon * MmodelPoints2Canonical;
    cv::Mat Mb = MbCanon * MmodelPoints2Canonical;

    //output poses:
    Ma.copyTo(_Ma);
    Mb.copyTo(_Mb);
}
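
A minimal usage sketch for solveGeneric (not taken from the original project): it assumes IPPE::PoseSolver is default-constructible, that this overload is callable by user code (in the project it may be an internal helper), and that the header is named ippe.h. The model points lie on the plane z = 0 and the image points are already normalized, i.e. pre-multiplied by the inverse camera matrix.

#include <opencv2/core.hpp>
#include <vector>
#include "ippe.h" // assumed header name for the project above

int main()
{
    // four coplanar model points on z = 0 (seen as CV_32FC3 by cv::InputArray)
    std::vector<cv::Point3f> objectPoints = {
        { -0.5f, -0.5f, 0.f }, {  0.5f, -0.5f, 0.f },
        {  0.5f,  0.5f, 0.f }, { -0.5f,  0.5f, 0.f }
    };
    // illustrative normalized image coordinates of the same four points
    std::vector<cv::Point2f> normalizedPoints = {
        { -0.10f, -0.10f }, { 0.10f, -0.11f }, { 0.11f, 0.10f }, { -0.09f, 0.10f }
    };

    IPPE::PoseSolver solver;
    cv::Mat Ma, Mb; // the two candidate homogeneous poses produced by IPPE
    solver.solveGeneric(objectPoints, normalizedPoints, Ma, Mb);
    return 0;
}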
Example #2
    FeatureValue FeatureShiCorner::calculate( cv::InputArray image )
    {
        Mat image_gray;

        cvtColor( image, image_gray, CV_BGR2GRAY );

        Mat corner( image.rows(), image.cols(), CV_32SC1, Scalar( 0 ) );
        computeRawCornerMat( image_gray, corner );
        auto points = genPoints( corner );
        (void) points; // the corner points are not used yet: descriptor generation is still missing

        // return genDescriptor;
        return FeatureValue();
    }
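
The project-specific helpers computeRawCornerMat and genPoints are not shown on this page. As a rough equivalent of the corner-extraction step, the same Shi-Tomasi idea can be expressed with OpenCV's built-in cv::goodFeaturesToTrack; everything below is illustrative and not taken from the project.

#include <opencv2/imgproc.hpp>
#include <vector>

// Sketch: detect Shi-Tomasi corners with the standard OpenCV API instead of
// the project's computeRawCornerMat/genPoints pair.
static std::vector<cv::Point2f> detectShiCorners(const cv::Mat& bgr)
{
    cv::Mat gray;
    cv::cvtColor(bgr, gray, cv::COLOR_BGR2GRAY);

    std::vector<cv::Point2f> corners;
    // maxCorners, qualityLevel and minDistance are illustrative values
    cv::goodFeaturesToTrack(gray, corners, 100, 0.01, 10);
    return corners;
}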
Example #3
inline static void to_nv12(cv::InputArray bgr, cv::Mat & Y, cv::Mat & UV)
{
    const int height = bgr.rows();
    const int width = bgr.cols();
    Mat yuv;
    // convert to planar I420: a full-resolution Y plane followed by the
    // 2x2-subsampled U and V planes
    cvtColor(bgr, yuv, CV_BGR2YUV_I420);
    CV_Assert(yuv.isContinuous());
    // copy the luma plane (the first `height` rows) into Y
    Mat Y_(Y, Rect(0, 0, width, height));
    yuv.rowRange(0, height).copyTo(Y_);
    // view the U and V planes as one contiguous (height x width/2) buffer
    Mat UV_planar(height, width / 2, CV_8UC1, yuv.ptr(height));
    Mat u_and_v[2] = {
        UV_planar.rowRange(0, height / 2),      // U plane
        UV_planar.rowRange(height / 2, height), // V plane
    };
    // interleave U and V to obtain the semi-planar NV12 chroma plane
    Mat uv;
    cv::merge(u_and_v, 2, uv);
    Mat UV_(UV, Rect(0, 0, width, height / 2));
    uv.reshape(1).copyTo(UV_);
}
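
A short usage sketch for to_nv12 (assumed calling code, not from the original source): the input must be an 8-bit BGR image with even width and height, and the Y and UV planes must be pre-allocated with the NV12 sizes the function writes into.

#include <opencv2/imgproc.hpp>

void bgr_to_nv12_example(const cv::Mat& bgr)
{
    CV_Assert(bgr.type() == CV_8UC3 && bgr.cols % 2 == 0 && bgr.rows % 2 == 0);

    cv::Mat Y(bgr.rows, bgr.cols, CV_8UC1);      // full-resolution luma plane
    cv::Mat UV(bgr.rows / 2, bgr.cols, CV_8UC1); // interleaved U/V plane
    to_nv12(bgr, Y, UV);
    // Y and UV now hold the two NV12 planes and can be handed to an encoder
}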
Example #4
File: ippe.cpp Project: lz89/IPPE
void IPPE::PoseSolver::evalReprojError(cv::InputArray _objectPoints, cv::InputArray _imagePoints, cv::InputArray _cameraMatrix, cv::InputArray _distCoeffs, cv::InputArray _M, float& err)
{
    cv::Mat projectedPoints;
    cv::Mat imagePoints = _imagePoints.getMat();
    cv::Mat r;
    rot2vec(_M.getMat().colRange(0, 3).rowRange(0, 3), r);

    if (_cameraMatrix.empty()) {
        //there is no camera matrix and image points are in normalized pixel coordinates
        cv::Mat K = cv::Mat::eye(3, 3, CV_64FC1); //identity camera matrix for normalized coordinates
        cv::Mat kc;
        cv::projectPoints(_objectPoints, r, _M.getMat().colRange(3, 4).rowRange(0, 3), K, kc, projectedPoints);
    }
    else {
        cv::projectPoints(_objectPoints, r, _M.getMat().colRange(3, 4).rowRange(0, 3), _cameraMatrix, _distCoeffs, projectedPoints);
    }

    err = 0;
    size_t n = _objectPoints.rows() * _objectPoints.cols();

    float dx, dy;
    for (size_t i = 0; i < n; i++) {
        if (projectedPoints.depth() == CV_32F) {
            dx = projectedPoints.at<Vec2f>(i)[0] - imagePoints.at<Vec2f>(i)[0];
            dy = projectedPoints.at<Vec2f>(i)[1] - imagePoints.at<Vec2f>(i)[1];
        }
        else {
            dx = projectedPoints.at<Vec2d>(i)[0] - imagePoints.at<Vec2d>(i)[0];
            dy = projectedPoints.at<Vec2d>(i)[1] - imagePoints.at<Vec2d>(i)[1];
        }

        err += dx * dx + dy * dy;
    }
    err = sqrt(err / (2.0f * n));
}
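
A hedged usage sketch for evalReprojError (assuming the method is accessible to the caller; in the project it may be an internal helper): M is a double-precision pose with [R|t] in its first 3 rows and 4 columns, and err receives the root-mean-square reprojection error.

#include <opencv2/core.hpp>
#include <vector>
#include "ippe.h" // assumed header name for the project above

float checkPose(const std::vector<cv::Point3f>& objectPoints,
                const std::vector<cv::Point2f>& imagePoints,
                const cv::Mat& K, const cv::Mat& distCoeffs,
                const cv::Mat& M) // CV_64FC1, [R|t] in the first 3 rows and 4 columns
{
    IPPE::PoseSolver solver;
    float err = 0.0f;
    solver.evalReprojError(objectPoints, imagePoints, K, distCoeffs, M, err);
    return err; // RMS over the 2*N residual components, in pixels when K is given
}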
Example #5
    FeatureValue FeatureAverageColor::calculate( cv::InputArray _image )
    {
        Mat image_gray;
        Mat shiImg( _image.rows(), _image.cols(), CV_32SC1, Scalar( 0 ) );

        cvtColor( _image, image_gray, CV_BGR2GRAY );

        FeatureShiCorner shi;
        shi.computeRawCornerMat( image_gray, shiImg );
        auto points = shi.genPoints( shiImg );


        if( points.empty() )
        {
            return { 0, 0, 0 };
        }
        // ---------------------------------------------------------------------

        auto image = _image.getMat();

        auto ret = FeatureValue();
        uint64 r, g, b, n;

        const int radius = 15;

        r = g = b = 0;

        n = 0;

        const auto width  = _image.cols();
        const auto height = _image.rows();


        for( auto p : points )
        {
            int x = p.first, y = p.second;

            for( int dy = -radius; dy <= radius; dy++ )
            {
                for( int dx = -radius; dx <= radius; dx++ )
                {
                    int fx = x + dx;
                    if( (fx < 0) || (fx >= width) ) { continue; }
                    int fy = y + dy;
                    if( (fy < 0) || (fy >= height) ) { continue; }

                    Pixel ptr = image.at< Pixel >( fy, fx );
                    b += ptr.x;
                    g += ptr.y;
                    r += ptr.z;

                    n++;
                }
            }
        }

        ret.push_back( r / (n * 1.0) );
        ret.push_back( g / (n * 1.0) );
        ret.push_back( b / (n * 1.0) );

        return ret;
    }
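
An illustrative driver for the feature above, assuming FeatureValue behaves like a std::vector of doubles and that Pixel is a 3-channel 8-bit BGR pixel type, as the surrounding code suggests; nothing below is taken from the project itself.

#include <opencv2/imgcodecs.hpp>
#include <string>

void averageCornerColorExample(const std::string& path)
{
    cv::Mat img = cv::imread(path, cv::IMREAD_COLOR);
    if (img.empty()) { return; }

    FeatureAverageColor feature;              // hypothetical default construction
    FeatureValue v = feature.calculate(img);
    // v[0], v[1], v[2] hold the mean R, G and B over 31x31 windows centred on
    // the detected Shi-Tomasi corners (or {0, 0, 0} if no corner was found)
}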