Example #1
0
void FuncTest::in1dTest()
{
    // Verify in1d(): for each element of the query vector, report whether
    // it occurs anywhere in the haystack vector.
    int query[] = {1, 2, 3, 4, 5};

    // One haystack per test case, paired row-for-row with the expected
    // membership mask for the query above.
    int haystacks[3][4] = {{1, 1, 3, 2}, {5, 11, 12, 12}, {10, 11, 12, 13}};
    bool expected[3][5] = {{true,  true,  true,  false, false},
                           {false, false, false, false, true},
                           {false, false, false, false, false}};

    for (int row = 0; row < 3; ++row) {
        std::vector<int> a(query, query + 5);
        std::vector<int> b(haystacks[row], haystacks[row] + 4);
        std::vector<bool> want(expected[row], expected[row] + 5);
        std::vector<bool> got = in1d(a, b);
        QCOMPARE(want, got);
    }
}
Example #2
0
void CMT::processFrame(cv::Mat im_gray)
{
    // Process one frame of the CMT pipeline:
    //  1. optically track last frame's keypoints into this frame,
    //  2. estimate the object's center / scale / rotation from them,
    //  3. re-detect keypoints and match their descriptors (globally, then
    //     under the structural spring constraint),
    //  4. fuse matched and tracked keypoints,
    //  5. update the bounding-box state (hasResult, corners, boundingbox).

    //Track keypoints from the previous frame into the current one
    trackedKeypoints = std::vector<std::pair<cv::KeyPoint, int> >();
    std::vector<unsigned char> status;
    track(im_prev, im_gray, activeKeypoints, trackedKeypoints, status);

    //Estimate the object transform; estimate() also filters outliers,
    //returning the surviving keypoints in trackedKeypoints2
    cv::Point2f center;
    float scaleEstimate;
    float rotationEstimate;
    std::vector<std::pair<cv::KeyPoint, int> > trackedKeypoints2;
    estimate(trackedKeypoints, center, scaleEstimate, rotationEstimate, trackedKeypoints2);
    trackedKeypoints = trackedKeypoints2;

    //Detect keypoints, compute descriptors
    std::vector<cv::KeyPoint> keypoints;
    cv::Mat features;
    detector->detect(im_gray, keypoints);
    descriptorExtractor->compute(im_gray, keypoints, features);

    //Create list of active keypoints
    activeKeypoints = std::vector<std::pair<cv::KeyPoint, int> >();

    //For each keypoint and its descriptor
    for(size_t i = 0; i < keypoints.size(); i++)
    {
        cv::KeyPoint keypoint = keypoints[i];

        //First: Match over whole image
        //Compute distances to all database descriptors
        std::vector<cv::DMatch> matches;
        descriptorMatcher->match(featuresDatabase, features.row((int)i), matches);

        //Convert distances to confidences, do not weight
        std::vector<float> combined;
        for(size_t j = 0; j < matches.size(); j++)
            combined.push_back(1 - matches[j].distance / descriptorLength);

        std::vector<int>& classes = classesDatabase;

        //Sort in descending order of confidence.
        //BUGFIX: the original sorted via &sorted_conf[0], which is undefined
        //behavior when the vector is empty; iterators are always valid.
        std::vector<PairFloat> sorted_conf;
        for(size_t j = 0; j < combined.size(); j++)
            sorted_conf.push_back(std::make_pair(combined[j], (int)j));
        std::sort(sorted_conf.begin(), sorted_conf.end(), comparatorPairDesc<float>);

        //BUGFIX: Lowe's ratio test needs a best AND a second-best candidate;
        //the original indexed sorted_conf[0]/[1] unconditionally, which is
        //out-of-bounds when the database holds fewer than two descriptors.
        if(sorted_conf.size() >= 2)
        {
            //Get best and second best index
            int bestInd = sorted_conf[0].second;
            int secondBestInd = sorted_conf[1].second;

            //Compute distance ratio according to Lowe
            float ratio = (1-combined[bestInd]) / (1-combined[secondBestInd]);

            //Extract class of best match
            int keypoint_class = classes[bestInd];

            //If distance ratio is ok and absolute distance is ok and keypoint class is not background
            if(ratio < thrRatio && combined[bestInd] > thrConf && keypoint_class != 0)
                activeKeypoints.push_back(std::make_pair(keypoint, keypoint_class));
        }

        //In a second step, try to match difficult keypoints
        //If structural constraints are applicable (a valid center estimate exists)
        if(!(isnan(center.x) || isnan(center.y)))
        {
            //Compute distances to the initially selected descriptors
            std::vector<cv::DMatch> selMatches;
            descriptorMatcher->match(selectedFeatures, features.row((int)i), selMatches);

            //Convert distances to confidences
            std::vector<float> confidences;
            for(size_t j = 0; j < selMatches.size(); j++)
                confidences.push_back(1 - selMatches[j].distance / descriptorLength);

            //Compute the keypoint location relative to the object center
            cv::Point2f relative_location = keypoint.pt - center;

            //Compute the distances to all springs (expected keypoint offsets
            //under the current scale/rotation estimate)
            std::vector<float> displacements;
            for(size_t j = 0; j < springs.size(); j++)
            {
                cv::Point2f p = (scaleEstimate * rotate(springs[j], -rotationEstimate) - relative_location);
                displacements.push_back(sqrt(p.dot(p)));
            }

            //Zero out confidences whose spring displacement marks them as outliers
            //NOTE(review): assumes displacements.size() == confidences.size(),
            //i.e. one spring per selected descriptor — same assumption as the original
            std::vector<float> weighted;
            for(size_t j = 0; j < confidences.size(); j++)
                weighted.push_back((displacements[j] < thrOutlier)*confidences[j]);

            std::vector<int>& selClasses = selectedClasses;

            //Sort in descending order (iterator form, see BUGFIX above)
            std::vector<PairFloat> sortedSel;
            for(size_t j = 0; j < weighted.size(); j++)
                sortedSel.push_back(std::make_pair(weighted[j], (int)j));
            std::sort(sortedSel.begin(), sortedSel.end(), comparatorPairDesc<float>);

            //Same guard as above: the ratio test needs two candidates
            if(sortedSel.size() >= 2)
            {
                //Get best and second best index
                int bestInd = sortedSel[0].second;
                int secondBestInd = sortedSel[1].second;

                //Compute distance ratio according to Lowe
                float ratio = (1-weighted[bestInd]) / (1-weighted[secondBestInd]);

                //Extract class of best match
                int keypoint_class = selClasses[bestInd];

                //If distance ratio is ok and absolute distance is ok and keypoint class is not background
                if(ratio < thrRatio && weighted[bestInd] > thrConf && keypoint_class != 0)
                {
                    //A structurally-matched keypoint supersedes any earlier
                    //whole-image match of the same class
                    for(int k = (int)activeKeypoints.size()-1; k >= 0; k--)
                        if(activeKeypoints[k].second == keypoint_class)
                            activeKeypoints.erase(activeKeypoints.begin()+k);
                    activeKeypoints.push_back(std::make_pair(keypoint, keypoint_class));
                }
            }
        }
    }

    //If some keypoints have been tracked
    if(trackedKeypoints.size() > 0)
    {
        //Extract the keypoint classes
        std::vector<int> tracked_classes;
        for(size_t i = 0; i < trackedKeypoints.size(); i++)
            tracked_classes.push_back(trackedKeypoints[i].second);
        //If there already are some active keypoints
        if(activeKeypoints.size() > 0)
        {
            //Add all tracked keypoints that have not been matched
            std::vector<int> associated_classes;
            for(size_t i = 0; i < activeKeypoints.size(); i++)
                associated_classes.push_back(activeKeypoints[i].second);
            std::vector<bool> notmissing = in1d(tracked_classes, associated_classes);
            for(size_t i = 0; i < trackedKeypoints.size(); i++)
                if(!notmissing[i])
                    activeKeypoints.push_back(trackedKeypoints[i]);
        }
        else activeKeypoints = trackedKeypoints;
    }

    //Update object state estimate.
    //(Removed unused local copy 'activeKeypointsBefore' from the original.)
    im_prev = im_gray;
    topLeft = cv::Point2f(NAN,NAN);
    topRight = cv::Point2f(NAN,NAN);
    bottomLeft = cv::Point2f(NAN,NAN);
    bottomRight = cv::Point2f(NAN,NAN);

    boundingbox = cv::Rect_<float>(NAN,NAN,NAN,NAN);
    hasResult = false;
    //Report a result only when the transform estimate is valid and enough of
    //the initial keypoints survive
    if(!(isnan(center.x) || isnan(center.y)) && activeKeypoints.size() > nbInitialKeypoints / 10)
    {
        hasResult = true;

        //Project the four stored center-to-corner vectors through the
        //estimated similarity transform
        topLeft = center + scaleEstimate*rotate(centerToTopLeft, rotationEstimate);
        topRight = center + scaleEstimate*rotate(centerToTopRight, rotationEstimate);
        bottomLeft = center + scaleEstimate*rotate(centerToBottomLeft, rotationEstimate);
        bottomRight = center + scaleEstimate*rotate(centerToBottomRight, rotationEstimate);

        //Axis-aligned bounding box of the rotated rectangle
        float minx = std::min(std::min(topLeft.x,topRight.x),std::min(bottomRight.x, bottomLeft.x));
        float miny = std::min(std::min(topLeft.y,topRight.y),std::min(bottomRight.y, bottomLeft.y));
        float maxx = std::max(std::max(topLeft.x,topRight.x),std::max(bottomRight.x, bottomLeft.x));
        float maxy = std::max(std::max(topLeft.y,topRight.y),std::max(bottomRight.y, bottomLeft.y));

        boundingbox = cv::Rect_<float>(minx, miny, maxx-minx, maxy-miny);
    }
}