Example #1
void BundlerMatcher::open(const std::string& inputPath, const std::string& inputFilename, const std::string& outMatchFilename)
{
    mInputPath = inputPath;

    if (!mIsInitialized)
    {
        std::cout << "Error : can not initialize opengl context for SiftGPU" <<std::endl;
        return;
    }

    if (!parseListFile(inputFilename))
    {
        std::cout << "Error : can not open file : " <<inputFilename.c_str() <<std::endl;
        return;
    }

    //Sift Feature Extraction
    for (unsigned int i=0; i<mFilenames.size(); ++i)
    {
        int percent = (int)(((i+1)*100.0f) / (1.0f*mFilenames.size()));
        int nbFeature = extractSiftFeature(i);
        clearScreen();
        std::cout << "[Extracting Sift Feature : " << percent << "%] - ("<<i+1<<"/"<<mFilenames.size()<<", #"<< nbFeature <<" features)";
    }
    clearScreen();
    std::cout << "[Sift Feature extracted]"<<std::endl;

    for (unsigned int i=0; i<mFilenames.size(); ++i)
    {
        int percent = (int)(((i+1)*100.0f) / (1.0f*mFilenames.size()));
        saveAsciiKeyFile(i);
        if (mBinaryKeyFileWritingEnabled)
            saveBinaryKeyFile(i);
        clearScreen();
        std::cout << "[Saving Sift Key files: " << percent << "%] - ("<<i+1<<"/"<<mFilenames.size()<<")";
    }
    saveVector();
    clearScreen();
    std::cout << "[Sift Key files saved]"<<std::endl;

    delete mSift;
    mSift = NULL;

    mMatcher->VerifyContextGL();

    //Sift Matching
    int currentIteration = 0;

    if (mSequenceMatchingEnabled) //sequence matching (video input)
    {
        std::cout << "[Sequence matching enabled: length " << mSequenceMatchingLength << "]" << std::endl;
        int maxIterations = ((int) mFilenames.size() - mSequenceMatchingLength)*mSequenceMatchingLength + mSequenceMatchingLength*(mSequenceMatchingLength-1)/2; // (N-m).m + m(m-1)/2
        for (unsigned int i=0; i<mFilenames.size()-1; ++i)
        {
            for (int j=1; j<=mSequenceMatchingLength; ++j)
            {
                int indexA = i;
                int indexB = i+j;

                if (indexB >= (int) mFilenames.size())
                    continue;
                else
                {
                    clearScreen();
                    int percent = (int) (currentIteration*100.0f / maxIterations);
                    std::cout << "[Matching Sift Feature : " << percent << "%] - (" << indexA << "/" << indexB << ")";
                    matchSiftFeature(indexA, indexB);
                    currentIteration++;
                }
            }
        }
    }
    else //classic quadratic matching
    {
        int maxIterations = (int) mFilenames.size()*((int) mFilenames.size()-1)/2; // n(n-1)/2 unordered pairs
        for (unsigned int i=0; i<mFilenames.size(); ++i)
        {
            for (unsigned int j=i+1; j<mFilenames.size(); ++j)
            {
                clearScreen();
                int percent = (int) (currentIteration*100.0f / maxIterations);
                std::cout << "[Matching Sift Feature : " << percent << "%] - (" << i << "/" << j << ")";
                matchSiftFeature(i, j);
                currentIteration++;
            }
        }
    }

    clearScreen();
    std::cout << "[Sift Feature matched]"<<std::endl;

    delete mMatcher;
    mMatcher = NULL;

    saveMatches(outMatchFilename);
    saveMatrix();
}
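
The two maxIterations formulas above ((N-m)*m + m*(m-1)/2 for sequence matching and n*(n-1)/2 for quadratic matching) only drive the progress percentage, but they are easy to sanity-check. The standalone sketch below is not part of BundlerMatcher: it brute-force counts the pairs each loop structure visits and asserts that the counts match the closed forms; the values of N and m are purely illustrative.

#include <cassert>
#include <iostream>

// Pairs (i, i+j) visited by the sequence-matching loops: j = 1..m, i+j < N.
static int countSequencePairs(int N, int m)
{
    int count = 0;
    for (int i = 0; i < N - 1; ++i)
        for (int j = 1; j <= m; ++j)
            if (i + j < N)
                ++count;
    return count;
}

int main()
{
    const int N = 30; // number of images (illustrative)
    const int m = 5;  // sequence matching length (illustrative)

    int sequencePairs  = (N - m) * m + m * (m - 1) / 2; // (N-m).m + m(m-1)/2
    int quadraticPairs = N * (N - 1) / 2;               // all unordered pairs

    assert(countSequencePairs(N, m) == sequencePairs);
    assert(countSequencePairs(N, N) == quadraticPairs); // a window of N (or more) covers every pair

    std::cout << sequencePairs << " sequence pairs, "
              << quadraticPairs << " quadratic pairs" << std::endl;
    return 0;
}
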
    /*
     * The visualization code is from : Rob Hess <*****@*****.**>
     */
    void SIFT_ADAPTER::visualizeSiftFeature(Mat* sift_img)
    {
        check_image();

        if (!m_has_extracted)
            extractSiftFeature();

        int m_img_width = m_org_img.cols;
        int m_img_height = m_org_img.rows;

        if (m_sift_img.data)
            m_sift_img.release();
        m_sift_img.create(m_img_height, m_img_width, CV_8UC3);

        for (int y = 0; y < m_img_height; ++y)
        {
            cv::Vec3b* p_line = m_sift_img.ptr<cv::Vec3b>(y);
            const float* p_gray_data = m_gray_data + y * m_img_width;

            for (int x = 0; x < m_img_width; ++x)
            {
                // Replicate the grayscale value into all three BGR channels.
                for (int i = 0; i < 3; ++i)
                    p_line[x].val[i] = cv::saturate_cast<uchar>(p_gray_data[x]);
            }
        }

        const double scale = 5.0;
        const double hscale = 0.75;
        const cv::Scalar color = cv::Scalar(255, 0, 255);

        for (int i = 0; i < m_num_frames; ++i)
        {
            int len, hlen, blen, start_x, start_y, end_x, end_y, h1_x, h1_y,
                    h2_x, h2_y;
            double scl, ori;

            cv::Point start, end, h1, h2;

            const SIFT_Frame& cur_frame = m_frames[i];

            start_x = cur_frame.x;
            start_y = cur_frame.y;
            scl = cur_frame.scale;
            ori = cur_frame.angle;

            len = cvRound(scl * scale);
            hlen = cvRound(scl * hscale);
            blen = len - hlen;
            end_x = cvRound(len * cos(ori)) + start_x;
            end_y = cvRound(len * -sin(ori)) + start_y;
            h1_x = cvRound(blen * cos(ori + CV_PI / 18.0)) + start_x;
            h1_y = cvRound(blen * -sin(ori + CV_PI / 18.0)) + start_y;
            h2_x = cvRound(blen * cos(ori - CV_PI / 18.0)) + start_x;
            h2_y = cvRound(blen * -sin(ori - CV_PI / 18.0)) + start_y;
            start = cv::Point(start_x, start_y);
            end = cv::Point(end_x, end_y);
            h1 = cv::Point(h1_x, h1_y);
            h2 = cv::Point(h2_x, h2_y);

            cv::line(m_sift_img, start, end, color, 1, 8, 0);
            cv::line(m_sift_img, end, h1, color, 1, 8, 0);
            cv::line(m_sift_img, end, h2, color, 1, 8, 0);
        }

        if (sift_img)
            m_sift_img.copyTo(*sift_img);
    }
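
For reference, the arrow drawing above (the style credited to Rob Hess) can be exercised outside the adapter. The minimal sketch below is a standalone program, not part of SIFT_ADAPTER: it draws a single oriented keypoint arrow on a blank canvas with the same scale/hscale constants and the same three cv::line calls; the keypoint position, scale, and orientation are made-up values for illustration.

#include <cmath>
#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/highgui.hpp>

int main()
{
    cv::Mat canvas(256, 256, CV_8UC3, cv::Scalar::all(0));

    // Illustrative keypoint: position, scale, and orientation in radians.
    const double x = 128.0, y = 128.0, scl = 8.0, ori = CV_PI / 4.0;

    const double scale = 5.0, hscale = 0.75;
    const cv::Scalar color(255, 0, 255);

    // Shaft endpoints; the image y axis points down, hence the -sin terms.
    int len  = cvRound(scl * scale);
    int hlen = cvRound(scl * hscale);
    int blen = len - hlen;
    cv::Point start(cvRound(x), cvRound(y));
    cv::Point end(cvRound(len * std::cos(ori)) + start.x,
                  cvRound(len * -std::sin(ori)) + start.y);

    // Arrowhead endpoints, rotated +/- 10 degrees off the orientation.
    cv::Point h1(cvRound(blen * std::cos(ori + CV_PI / 18.0)) + start.x,
                 cvRound(blen * -std::sin(ori + CV_PI / 18.0)) + start.y);
    cv::Point h2(cvRound(blen * std::cos(ori - CV_PI / 18.0)) + start.x,
                 cvRound(blen * -std::sin(ori - CV_PI / 18.0)) + start.y);

    cv::line(canvas, start, end, color, 1, 8, 0);
    cv::line(canvas, end, h1, color, 1, 8, 0);
    cv::line(canvas, end, h2, color, 1, 8, 0);

    cv::imshow("keypoint arrow", canvas);
    cv::waitKey(0);
    return 0;
}
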