Example #1
TEST_P(UMatBasicTests, GetUMat)
{
    if(useRoi)
    {
        a = Mat(a, roi);
        ua = UMat(ua,roi);
    }
    {
        UMat ub;
        ub = a.getUMat(ACCESS_RW);
        EXPECT_MAT_NEAR(ub, ua, 0);
    }
    {
        UMat u = a.getUMat(ACCESS_RW);
        {
            Mat b = u.getMat(ACCESS_RW);
            EXPECT_MAT_NEAR(b, a, 0);
        }
    }
    {
        Mat b;
        b = ua.getMat(ACCESS_RW);
        EXPECT_MAT_NEAR(b, a, 0);
    }
    {
        Mat m = ua.getMat(ACCESS_RW);
        {
            UMat ub = m.getUMat(ACCESS_RW);
            EXPECT_MAT_NEAR(ub, ua, 0);
        }
    }
}
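The nested scopes in Example #1 are the point of the test: getUMat()/getMat() return a temporary view of the other object's buffer, and the original is meant to be left untouched until that view is destroyed. A minimal standalone sketch of the same pattern in the getMat() direction (assumed includes, not part of the test suite):

#include <opencv2/core.hpp>
using namespace cv;

int main()
{
    UMat ua(4, 4, CV_8UC1, Scalar(0));
    {
        // Temporary Mat view of ua's buffer; keep it in its own scope and
        // leave ua alone until the view goes away.
        Mat a = ua.getMat(ACCESS_WRITE);
        a.setTo(Scalar(1));
    }   // view destroyed here, data synchronized back to ua

    CV_Assert(countNonZero(ua) == 16);
    return 0;
}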
Example #2
TEST(Core_OutputArrayAssign, _Matxd_UMatd)
{
    Mat expected = (Mat_<double>(2,3) << 1, 2, 3, .1, .2, .3);
    UMat uexpected = expected.getUMat(ACCESS_READ);
    Matx23d actualx;

    {
        OutputArray oa(actualx);
        oa.assign(uexpected);
    }

    Mat actual = (Mat) actualx;

    EXPECT_LE(maxAbsDiff(expected, actual), 0.0);
}
Example #3
#include <opencv2/opencv.hpp>
#include <opencv2/core/ocl.hpp>
#include <iostream>

using namespace cv;
using namespace std;

int main(int argc, const char* argv[]) {
    //ocl::setUseOpenCL(0);
    Mat image = imread("img/h4.jpg", 0);   // 0 = IMREAD_GRAYSCALE
    CascadeClassifier faceCascade("haarcascade_frontalface_alt.xml");
    vector<Rect> facesM, facesU;

    // Detection on the plain Mat (CPU path).
    faceCascade.detectMultiScale(image, facesM);
    cout << "Mat" << endl << Mat(facesM) << endl << endl;

    // Detection on a UMat view of the same image (OpenCL path when available).
    UMat imageUMat = image.getUMat(cv::ACCESS_READ);
    faceCascade.detectMultiScale(imageUMat, facesU);
    cout << "UMat" << endl << Mat(facesU) << endl << endl;

    cout << cv::getBuildInformation();
    return 0;
}
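The commented-out ocl::setUseOpenCL(0) line above is the switch that decides whether UMat operations in Example #3 may dispatch to OpenCL at all. A small standalone sketch of checking and toggling that switch (not part of the original program):

#include <opencv2/core/ocl.hpp>
#include <iostream>

int main()
{
    // Report whether an OpenCL device is available and currently in use.
    std::cout << "haveOpenCL: " << cv::ocl::haveOpenCL() << std::endl;
    std::cout << "useOpenCL:  " << cv::ocl::useOpenCL() << std::endl;

    // Disable the OpenCL path globally; UMat operations then fall back to the
    // CPU, which makes the Mat and UMat runs above directly comparable.
    cv::ocl::setUseOpenCL(false);
    std::cout << "useOpenCL:  " << cv::ocl::useOpenCL() << std::endl;
    return 0;
}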
Example #4
UMat Mat::getUMat(int accessFlags, UMatUsageFlags usageFlags) const
{
    UMat hdr;
    if(!data)
        return hdr;
    Size wholeSize;
    Point ofs;
    locateROI(wholeSize, ofs);
    Size sz(cols, rows);
    if (ofs.x != 0 || ofs.y != 0)
    {
        Mat src = *this;
        int dtop = ofs.y;
        int dbottom = wholeSize.height - src.rows - ofs.y;
        int dleft = ofs.x;
        int dright = wholeSize.width - src.cols - ofs.x;
        src.adjustROI(dtop, dbottom, dleft, dright);
        return src.getUMat(accessFlags, usageFlags)(cv::Rect(ofs.x, ofs.y, sz.width, sz.height));
    }
    CV_Assert(data == datastart);
    // ... snippet truncated here; Example #9 below gives the full implementation ...
Example #5
int main(int argc, char *argv[]) {
    VideoCapture v(0);
    Mat frame;
    UMat sqr;
    Target target;
    OCRTess tess;
    bool found = false;
    int result = STOP;


    if (MODE == TARGET_TEXT) {
        tess = OCRTess(true, REG_MSER, GR_EX);
        tess.init(10);
    }

    if (!v.isOpened()) {
        printf("ERR: camera is not opened !!\n\n");
        return 0;
    }

    while (true) {
        result = STOP;
        v.read(frame);
        resize(frame, frame, Size(frame.cols / 2, frame.rows / 2));
        target.init(frame.getUMat(ACCESS_READ));
        if (target.find_square(&sqr)) {
            if (MODE == TARGET_TEXT) {
                tess.set(sqr);
                found = tess.loop();
                tess.show(found);
            } else {
                found = target.is_inside(sqr);
            }
            result = target.found(found);
        }
        target.serial(result);
//        target.show();
        if ((char) waitKey(1) == 27) break;
    }
    return 0;
}
Example #6
TEST(Core_UMat, getUMat)
{
    {
        int a[3] = { 1, 2, 3 };
        Mat m = Mat(1, 1, CV_32SC3, a);
        UMat u = m.getUMat(ACCESS_READ);
        EXPECT_NE((void*)NULL, u.u);
    }

    {
        Mat m(10, 10, CV_8UC1), ref;
        for (int y = 0; y < m.rows; ++y)
        {
            uchar * const ptr = m.ptr<uchar>(y);
            for (int x = 0; x < m.cols; ++x)
                ptr[x] = (uchar)(x + y * 2);
        }

        ref = m.clone();
        Rect r(1, 1, 8, 8);
        ref(r).setTo(17);

        {
            UMat um = m(r).getUMat(ACCESS_WRITE);
            um.setTo(17);
        }

        double err = norm(m, ref, NORM_INF);
        if (err > 0)
        {
            std::cout << "m: " << std::endl << m << std::endl;
            std::cout << "ref: " << std::endl << ref << std::endl;
        }
        EXPECT_EQ(0., err);
    }
}
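The second block of Example #6 exercises the ROI path of Mat::getUMat shown in Examples #4 and #9 below: a ROI header has a non-zero offset, so getUMat() re-bases the header to the whole matrix and returns a cropped UMat that still aliases the parent buffer. A minimal sketch of the observable effect (assumed includes, mirrors the test above):

#include <opencv2/core.hpp>
using namespace cv;

int main()
{
    Mat big(100, 100, CV_8UC1, Scalar(0));
    Mat roi = big(Rect(10, 10, 20, 20));       // non-zero offset into big's buffer
    {
        UMat uroi = roi.getUMat(ACCESS_WRITE); // re-based to the whole matrix,
                                               // then cropped back to the ROI
        uroi.setTo(Scalar(255));
    }                                          // temporary UMat released here
    CV_Assert(countNonZero(big) == 20 * 20);   // writes landed in the parent Mat
    return 0;
}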
Example #7
bool OCRTess::detectAndRecog() {
    UMat grey = UMat::zeros(this->img.rows + 2, this->img.cols + 2, CV_8UC1);
    cvtColor(this->img.clone(), grey, COLOR_RGB2GRAY);

    vector<UMat> channels;
    channels.clear();
    channels.push_back(grey);
    Mat m = 255 - grey.getMat(ACCESS_READ | ACCESS_WRITE);
    channels.push_back(m.getUMat(ACCESS_READ));

    vector<vector<ERStat>> regions(2);
    regions[0].clear();
    regions[1].clear();

    switch (this->REGION) {
        case REG_CSER: {
            parallel_for_(Range(0, (int) channels.size()), Parallel_extractCSER(channels, regions, this->erf1, this->erf2));
            break;
        }
        case REG_MSER: {
            vector<vector<Point> > contours;
            vector<Rect> bboxes;
            Ptr<MSER> mser = MSER::create(21, (int) (0.00002 * grey.cols * grey.rows), (int) (0.05 * grey.cols * grey.rows), 1, 0.7);
            mser->detectRegions(grey, contours, bboxes);
            if (contours.size() > 0)
                MSERsToERStats(grey, contours, regions);
            break;
        }
        default: {
            break;
        }
    }

    /*Text Recognition (OCR)*/
    vector<vector<Vec2i> > nm_region_groups;
    vector<Rect> nm_boxes;
    switch (this->GROUP) {
        case 0:
            erGrouping(this->img, channels, regions, nm_region_groups, nm_boxes, ERGROUPING_ORIENTATION_HORIZ);
            break;
        case 1:
        default:
            erGrouping(this->img, channels, regions, nm_region_groups, nm_boxes, ERGROUPING_ORIENTATION_ANY, DIR + TR_GRP, 0.5);
            break;
    }

    if (!nm_boxes.size() || nm_boxes.size() > 1) return false;

    vector<string> words_detection;
    float min_confidence1 = 51.f, min_confidence2 = 60.f;

    vector<UMat> detections;
    for (int i = 0; i < (int) nm_boxes.size(); i++) {
//        rectangle(this->out, nm_boxes[i].tl(), nm_boxes[i].br(), Scalar(255, 255, 0), 3);
        UMat group_img = UMat::zeros(this->img.rows + 2, this->img.cols + 2, CV_8UC1);
        er_draw(channels, regions, nm_region_groups[i], group_img);
        group_img = group_img(nm_boxes[i]);
        copyMakeBorder(group_img.clone(), group_img, 15, 15, 15, 15, BORDER_CONSTANT, Scalar(0));
        detections.push_back(group_img);
    }
    vector<string> outputs((int) detections.size());
    vector<vector<Rect> > boxes((int) detections.size());
    vector<vector<string> > words((int) detections.size());
    vector<vector<float> > confidences((int) detections.size());

    if (!detections.size() || detections.size() > 1) return false;

    for (int i = 0; i < (int) detections.size(); i = i + this->num) {
        Range r;
        if (i + this->num <= (int) detections.size()) r = Range(i, i + this->num);
        else r = Range(i, (int) detections.size());
        parallel_for_(r, Parallel_OCR<OCRTesseract>(detections, outputs, boxes, words, confidences, this->ocrs));
    }

    for (int i = 0; i < (int) detections.size(); i++) {
        outputs[i].erase(remove(outputs[i].begin(), outputs[i].end(), '\n'), outputs[i].end());
        if (outputs[i].size() < 3) {
            continue;
        }
        for (int j = 0; j < (int) boxes[i].size(); j++) {
            boxes[i][j].x += nm_boxes[i].x - 15;
            boxes[i][j].y += nm_boxes[i].y - 15;
            if ((words[i][j].size() < 2) || (confidences[i][j] < min_confidence1) ||
                ((words[i][j].size() == 2) && (words[i][j][0] == words[i][j][1])) ||
                ((words[i][j].size() < 4) && (confidences[i][j] < min_confidence2)) ||
                isRepetitive(words[i][j]))
                continue;
            words_detection.push_back(words[i][j]);
//            rectangle(this->out, boxes[i][j].tl(), boxes[i][j].br(), Scalar(255, 0, 255), 3);
//            Size word_size = getTextSize(words[i][j], FONT_HERSHEY_SIMPLEX, (double) scale_font, (int) (3 * scale_font), NULL);
//            rectangle(this->out, boxes[i][j].tl() - Point(3, word_size.height + 3), boxes[i][j].tl() + Point(word_size.width, 0), Scalar(255, 0, 255), -1);
//            putText(this->out, words[i][j], boxes[i][j].tl() - Point(1, 1), FONT_HERSHEY_SIMPLEX, scale_font, Scalar(255, 255, 255), (int) (3 * scale_font));
        }
    }

    if (!words_detection.size() || words_detection.size() > 1) return false;
    return (words_detection[0].compare(WORD) == 0);
}
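Example #7 inverts the grey channel by pulling the data back to the host with getMat() and re-wrapping the result with getUMat(). If the goal is to stay on the UMat side, the same inversion can be expressed directly through the T-API with bitwise_not; a sketch of that alternative (the helper name is ours, not from the original code):

#include <opencv2/core.hpp>
#include <vector>
using namespace cv;

// Hypothetical helper: collect the grey channel and its inverse without a
// Mat round trip; both stay as UMat and can run through OpenCL kernels.
static void pushBothPolarities(const UMat& grey, std::vector<UMat>& channels)
{
    channels.push_back(grey);

    UMat inverted;
    bitwise_not(grey, inverted);   // equivalent to 255 - grey for CV_8U
    channels.push_back(inverted);
}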
Example #8
/**
 * Main entry called from Matlab
 * @param nlhs number of left-hand-side arguments
 * @param plhs pointers to mxArrays in the left-hand-side
 * @param nrhs number of right-hand-side arguments
 * @param prhs pointers to mxArrays in the right-hand-side
 */
void mexFunction(int nlhs, mxArray *plhs[], int nrhs, const mxArray *prhs[])
{
    // Check the number of arguments
    nargchk(nrhs>=2 && nlhs<=1);

    // Argument vector
    vector<MxArray> rhs(prhs, prhs+nrhs);
    int id = rhs[0].toInt();
    string method(rhs[1].toString());

    // Constructor is called. Create a new object from argument
    if (method == "new") {
        nargchk(nrhs>=3 && nlhs<=1);
        obj_[++last_id] = createFeaturesMatcher(
            rhs[2].toString(), rhs.begin() + 3, rhs.end());
        plhs[0] = MxArray(last_id);
        return;
    }
    // static methods
    else if (method == "matchesGraphAsString") {
        nargchk(nrhs==4 && nlhs<=1);
        vector<MatchesInfo> pairwise_matches(MxArrayToVectorMatchesInfo(rhs[2]));
        float conf_threshold = rhs[3].toFloat();
        vector<String> pathes;
        pathes.reserve(pairwise_matches.size());
        for (int i=0; i<pairwise_matches.size(); ++i) {
            ostringstream ss;
            ss << "img" << (i+1);
            pathes.push_back(ss.str());
        }
        string str = matchesGraphAsString(
            pathes, pairwise_matches, conf_threshold);
        plhs[0] = MxArray(str);
        return;
    }
    else if (method == "leaveBiggestComponent") {
        nargchk(nrhs==5 && nlhs<=1);
        vector<ImageFeatures> features(MxArrayToVectorImageFeatures(rhs[2]));
        vector<MatchesInfo> pairwise_matches(MxArrayToVectorMatchesInfo(rhs[3]));
        float conf_threshold = rhs[4].toFloat();
        vector<int> indices = leaveBiggestComponent(
            features, pairwise_matches, conf_threshold);
        plhs[0] = MxArray(indices);
        return;
    }

    // Big operation switch
    Ptr<FeaturesMatcher> obj = obj_[id];
    if (method == "delete") {
        nargchk(nrhs==2 && nlhs==0);
        obj_.erase(id);
    }
    else if (method == "typeid") {
        nargchk(nrhs==2 && nlhs<=1);
        plhs[0] = MxArray(string(typeid(*obj).name()));
    }
    else if (method == "collectGarbage") {
        nargchk(nrhs==2 && nlhs==0);
        obj->collectGarbage();
    }
    else if (method == "isThreadSafe") {
        nargchk(nrhs==2 && nlhs<=1);
        bool tf = obj->isThreadSafe();
        plhs[0] = MxArray(tf);
    }
    else if (method == "match") {
        nargchk(nrhs==4 && nlhs<=1);
        ImageFeatures features1(MxArrayToImageFeatures(rhs[2])),
                      features2(MxArrayToImageFeatures(rhs[3]));
        MatchesInfo matches_info;
        obj->operator()(features1, features2, matches_info);
        plhs[0] = toStruct(matches_info);
    }
    else if (method == "match_pairwise") {
        nargchk(nrhs>=3 && (nrhs%2)==1 && nlhs<=1);
        Mat mask;
        for (int i=3; i<nrhs; i+=2) {
            string key(rhs[i].toString());
            if (key == "Mask")
                mask = rhs[i+1].toMat(CV_8U);
            else
                mexErrMsgIdAndTxt("mexopencv:error","Unrecognized option");
        }
        vector<ImageFeatures> features(MxArrayToVectorImageFeatures(rhs[2]));
        vector<MatchesInfo> pairwise_matches;
        obj->operator()(features, pairwise_matches, mask.getUMat(ACCESS_READ));
        plhs[0] = toStruct(pairwise_matches);
    }
    else
        mexErrMsgIdAndTxt("mexopencv:error",
            "Unrecognized operation %s", method.c_str());
}
Example #9
UMat Mat::getUMat(int accessFlags, UMatUsageFlags usageFlags) const
{
    UMat hdr;
    if(!data)
        return hdr;
    if (data != datastart)
    {
        Size wholeSize;
        Point ofs;
        locateROI(wholeSize, ofs);
        Size sz(cols, rows);
        if (ofs.x != 0 || ofs.y != 0)
        {
            Mat src = *this;
            int dtop = ofs.y;
            int dbottom = wholeSize.height - src.rows - ofs.y;
            int dleft = ofs.x;
            int dright = wholeSize.width - src.cols - ofs.x;
            src.adjustROI(dtop, dbottom, dleft, dright);
            return src.getUMat(accessFlags, usageFlags)(cv::Rect(ofs.x, ofs.y, sz.width, sz.height));
        }
    }
    CV_Assert(data == datastart);

    accessFlags |= ACCESS_RW;
    UMatData* new_u = NULL;
    {
        MatAllocator *a = allocator, *a0 = getDefaultAllocator();
        if(!a)
            a = a0;
        new_u = a->allocate(dims, size.p, type(), data, step.p, accessFlags, usageFlags);
    }
    bool allocated = false;
    try
    {
        allocated = UMat::getStdAllocator()->allocate(new_u, accessFlags, usageFlags);
    }
    catch (const cv::Exception& e)
    {
        fprintf(stderr, "Exception: %s\n", e.what());
    }
    if (!allocated)
    {
        allocated = getDefaultAllocator()->allocate(new_u, accessFlags, usageFlags);
        CV_Assert(allocated);
    }
    if (u != NULL)
    {
#ifdef HAVE_OPENCL
        if (ocl::useOpenCL() && new_u->currAllocator == ocl::getOpenCLAllocator())
        {
            CV_Assert(new_u->tempUMat());
        }
#endif
        new_u->originalUMatData = u;
        CV_XADD(&(u->refcount), 1);
        CV_XADD(&(u->urefcount), 1);
    }
    hdr.flags = flags;
    setSize(hdr, dims, size.p, step.p);
    finalizeHdr(hdr);
    hdr.u = new_u;
    hdr.offset = 0; //data - datastart;
    hdr.addref();
    return hdr;
}