Example #1
void globalMatting(cv::InputArray _image, cv::InputArray _trimap, cv::OutputArray _foreground, cv::OutputArray _alpha, cv::OutputArray _conf)
{
    cv::Mat image = _image.getMat();
    cv::Mat trimap = _trimap.getMat();

    if (image.empty())
        CV_Error(CV_StsBadArg, "image is empty");
    if (image.type() != CV_8UC3)
        CV_Error(CV_StsBadArg, "image mush have CV_8UC3 type");

    if (trimap.empty())
        CV_Error(CV_StsBadArg, "trimap is empty");
    if (trimap.type() != CV_8UC1)
        CV_Error(CV_StsBadArg, "trimap mush have CV_8UC1 type");

    if (image.size() != trimap.size())
        CV_Error(CV_StsBadArg, "image and trimap mush have same size");

    cv::Mat &foreground = _foreground.getMatRef();
    cv::Mat &alpha = _alpha.getMatRef();
    cv::Mat tempConf;

    globalMattingHelper(image, trimap, foreground, alpha, tempConf);

    if(_conf.needed())
        tempConf.copyTo(_conf);
}
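A minimal call sketch for globalMatting(), assuming the function above is in scope; the image file names are placeholders. Passing cv::noArray() as the last argument makes _conf.needed() return false, so the confidence map is never copied out.

#include <opencv2/opencv.hpp>

int main()
{
    cv::Mat image  = cv::imread("input.png", 1);    // 3-channel BGR input (placeholder path)
    cv::Mat trimap = cv::imread("trimap.png", 0);   // single-channel trimap (placeholder path)

    cv::Mat foreground, alpha, conf;
    globalMatting(image, trimap, foreground, alpha, conf);          // with confidence map
    globalMatting(image, trimap, foreground, alpha, cv::noArray()); // confidence map skipped

    cv::imwrite("alpha.png", alpha);
    return 0;
}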
Example #2
double
Camera::reprojectionError(const std::vector< std::vector<cv::Point3f> >& objectPoints,
                          const std::vector< std::vector<cv::Point2f> >& imagePoints,
                          const std::vector<cv::Mat>& rvecs,
                          const std::vector<cv::Mat>& tvecs,
                          cv::OutputArray _perViewErrors) const
{
    int imageCount = static_cast<int>(objectPoints.size());
    size_t pointsSoFar = 0;
    double totalErr = 0.0;

    bool computePerViewErrors = _perViewErrors.needed();
    cv::Mat perViewErrors;
    if (computePerViewErrors)
    {
        _perViewErrors.create(imageCount, 1, CV_64F);
        perViewErrors = _perViewErrors.getMat();
    }

    for (int i = 0; i < imageCount; ++i)
    {
        size_t pointCount = imagePoints.at(i).size();

        pointsSoFar += pointCount;

        std::vector<cv::Point2f> estImagePoints;
        projectPoints(objectPoints.at(i), rvecs.at(i), tvecs.at(i),
                      estImagePoints);

        double err = 0.0;
        for (size_t j = 0; j < imagePoints.at(i).size(); ++j)
        {
            err += cv::norm(imagePoints.at(i).at(j) - estImagePoints.at(j));
        }

        if (computePerViewErrors)
        {
            perViewErrors.at<double>(i) = err / pointCount;
        }

        totalErr += err;
    }

    return totalErr / pointsSoFar;
}
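A hedged usage sketch for the method above; reportCalibrationError() is a hypothetical helper, and the object points, image points and poses are assumed to come from an earlier calibration step.

#include <cstdio>

void reportCalibrationError(const Camera& camera,
                            const std::vector< std::vector<cv::Point3f> >& objectPoints,
                            const std::vector< std::vector<cv::Point2f> >& imagePoints,
                            const std::vector<cv::Mat>& rvecs,
                            const std::vector<cv::Mat>& tvecs)
{
    cv::Mat perViewErrors;
    double meanErr = camera.reprojectionError(objectPoints, imagePoints,
                                              rvecs, tvecs, perViewErrors);

    std::printf("mean reprojection error: %.4f px\n", meanErr);
    for (int i = 0; i < perViewErrors.rows; ++i)
        std::printf("  view %d: %.4f px\n", i, perViewErrors.at<double>(i));

    // Pass cv::noArray() when the per-view breakdown is not needed; the
    // CV_64F buffer above is then never allocated.
    camera.reprojectionError(objectPoints, imagePoints, rvecs, tvecs, cv::noArray());
}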
Example #3
arma::fvec
HsvExtractor::extract(const cv::Mat& image, bool normalize, cv::OutputArray& qimage) const {
    cv::Mat hsv = toHsv(image);
    cv::Mat quantized = quantize(hsv, hsvlevels);

    if (medfilt) {
        quantized = medfilt2(quantized);
    }

    BOOST_ASSERT(quantized.size() == image.size());

    if (qimage.needed()) {
        cv::Mat render = toBgr(quantized, hsvlevels);
        render.copyTo(qimage);
    }

    return computeHistogram<float>(quantized, levels, normalize);
}
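A hedged usage sketch; describeImage() is a hypothetical wrapper, and the HsvExtractor instance is assumed to be configured elsewhere. It shows the two ways the optional qimage output can be driven.

arma::fvec describeImage(const HsvExtractor& extractor, const cv::Mat& image)
{
    cv::Mat preview;   // filled only because it is passed in, i.e. qimage.needed() is true
    arma::fvec histogram = extractor.extract(image, /*normalize=*/true, preview);

    // Passing cv::noArray() instead skips the BGR rendering of the quantized image:
    // arma::fvec histogramOnly = extractor.extract(image, true, cv::noArray());

    return histogram;
}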
Example #4
void findPose1pt(cv::InputArray _points1, cv::InputArray _points2,
                 double focal, cv::Point2d pp,
                 cv::OutputArray _rvec, cv::OutputArray _tvec,
                 int method, double prob, double threshold, cv::OutputArray _mask)
{
    cv::Mat points1, points2;
    _points1.getMat().copyTo(points1);
    _points2.getMat().copyTo(points2);

    int npoints = points1.checkVector(2);
    CV_Assert(npoints >= 1 && points2.checkVector(2) == npoints &&
              points1.type() == points2.type());

    if (points1.channels() > 1)
    {
        points1 = points1.reshape(1, npoints);
        points2 = points2.reshape(1, npoints);
    }
    points1.convertTo(points1, CV_64F);
    points2.convertTo(points2, CV_64F);

    // Normalize pixel coordinates with the focal length and principal point
    points1.col(0) = (points1.col(0) - pp.x) / focal;
    points2.col(0) = (points2.col(0) - pp.x) / focal;
    points1.col(1) = (points1.col(1) - pp.y) / focal;
    points2.col(1) = (points2.col(1) - pp.y) / focal;

    // Reshape data to fit the OpenCV RANSAC functions
    points1 = points1.reshape(2, 1);
    points2 = points2.reshape(2, 1);

    CvOnePointEstimator estimator;
    cv::Mat theta(1, 1, CV_64F);

    CvMat p1 = points1;
    CvMat p2 = points2;
    CvMat _theta = theta;
    CvMat* tempMask = cvCreateMat(1, npoints, CV_8U);

    threshold /= focal;
    if (method == CV_RANSAC)
    {
        estimator.runRANSAC(&p1, &p2, &_theta, tempMask, threshold, prob);
    }
    else
    {
        estimator.runLMeDS(&p1, &p2, &_theta, tempMask, prob);
    }

    if (_mask.needed())
    {
        _mask.create(1, npoints, CV_8U, -1, true);
        cv::Mat mask = _mask.getMat();
        cv::Mat(tempMask).copyTo(mask);
    }
    cvReleaseMat(&tempMask);    // release the C-API mask buffer to avoid a memory leak

    _rvec.create(3, 2, CV_64F, -1, true);
    _tvec.create(3, 2, CV_64F, -1, true);

    double t = theta.at<double>(0);
    cv::Mat rvec = (cv::Mat_<double>(3, 2) <<  0,  0,
                                              -t, -t,
                                               0,  0);
    cv::Mat tvec = (cv::Mat_<double>(3, 2) << -sin(t / 2.0),  sin(t / 2.0),
                                                          0,             0,
                                               cos(t / 2.0), -cos(t / 2.0));

    rvec.copyTo(_rvec.getMat());
    tvec.copyTo(_tvec.getMat());
}
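A hedged call sketch for findPose1pt(); the correspondences, focal length and principal point below are synthetic placeholders, and CV_RANSAC comes from the same legacy headers the function itself relies on.

#include <vector>

void onePointPoseDemo()
{
    std::vector<cv::Point2f> pts1, pts2;
    pts1.push_back(cv::Point2f(320.f, 240.f));  pts2.push_back(cv::Point2f(324.f, 240.f));
    pts1.push_back(cv::Point2f(410.f, 255.f));  pts2.push_back(cv::Point2f(415.f, 256.f));

    double focal = 700.0;
    cv::Point2d pp(320.0, 240.0);

    cv::Mat rvec, tvec, inliers;
    findPose1pt(pts1, pts2, focal, pp, rvec, tvec, CV_RANSAC, 0.99, 1.0, inliers);

    // rvec and tvec each hold two pose hypotheses, one per column of a 3x2 matrix;
    // pass cv::noArray() for the last argument if the inlier mask is not needed.
}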