Example #1
// OpenCV 1.x C API and libsiftfast headers (exact install paths may vary).
#include <opencv/cv.h>
#include <opencv/highgui.h>
#include <siftfast/siftfast.h>

int main() {

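    // libsiftfast operates on its own single-channel float Image type with
    // pixel values in [0, 1], so copy the OpenCV image into that format first.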
    IplImage* lena = cvLoadImage("lena.tif", 0);  // flag 0: load as 8-bit grayscale
    Image img_sift = CreateImage(lena->height, lena->width);
    for(int r = 0; r < lena->height; ++r)
    {
        for(int c = 0; c < lena->width; ++c)
        {
            // cvGet2D takes (row, column) indices; the grayscale value is in
            // val[0]. Scale 0-255 intensities to the [0, 1] range.
            CvScalar s = cvGet2D(lena, r, c);
            img_sift->pixels[r*img_sift->stride+c] = s.val[0] / 255.;
        }
        }
    }
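    // Run the SIFT detector; GetKeypoints returns a linked list of keypoints.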
    Keypoint keypts = GetKeypoints(img_sift);
    Keypoint key = keypts;
    while(key)
    {
        // key->row/col are floats; cvPoint expects (x, y) = (column, row).
        cvRectangle(lena,
                    cvPoint(cvRound(key->col)-1, cvRound(key->row)-1),
                    cvPoint(cvRound(key->col)+1, cvRound(key->row)+1),
                    cvScalar(255,0,0), 1);
        key = key->next;
    }

    FreeKeypoints(keypts);
    DestroyAllResources();
    cvNamedWindow("lena", 1);
    cvMoveWindow("lena", 800, 200);

    cvShowImage("lena", lena);

    cvWaitKey(0);

    cvDestroyAllWindows();
    cvReleaseImage(&lena);


    return 0;
}
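
For reference, the core libsiftfast calling sequence shared by the examples on this page is short. Below is a minimal sketch, not taken from any of the examples; the header path and image size are assumptions for illustration, and the caller is expected to fill in the pixel data.

// Minimal libsiftfast usage sketch (header path and dimensions are assumed).
#include <siftfast/siftfast.h>

int main() {
    Image img = CreateImage(480, 640);    // rows, cols; pixels are floats in [0, 1]

    // ... fill img->pixels; element (r, c) lives at pixels[r*img->stride + c] ...

    Keypoint keypts = GetKeypoints(img);  // linked list of detected keypoints
    for (Keypoint k = keypts; k != NULL; k = k->next) {
        // k->row, k->col, k->scale, k->ori and k->descrip[0..127] describe each feature.
    }

    FreeKeypoints(keypts);
    DestroyAllResources();                // releases images and internal buffers
    return 0;
}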
Example #2
#include <algorithm>
#include <cassert>
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <iostream>
#include <string>
#include <vector>

#include <opencv/cxcore.h>  // CvMat, cvCreateMat, cvInvert
// Project-specific headers declaring Image, ReadPGMFile, GetKeypoints, Timer,
// KeypointFl, the scale-bound/OutsideSource predicates, ReadTransform and
// WriteKeypointsFl are assumed to be included here.

int main( int argc, char** argv )
{
    if (argc == 1)
    {
        std::cerr << "Usage: ./sift_detect -i image.pgm -o image.key "
                  << "[-t warp.xfm -i2 warped.pgm -o2 warped.key] [options]\n"
                  << "  number of points to take: -p 800\n"
                  << "  scale upper bound 1:      -ub1 6\n"
                  << "  scale lower bound 1:      -lb1 2\n"
                  << "  scale upper bound 2:      -ub2 9\n"
                  << "  scale lower bound 2:      -lb2 2.8\n";
        return 0;
    }

    char *image_file = NULL, *warped_file = NULL, *transform_file = NULL;
    int num_points = 800;
    std::string key_file = "source.key", warped_key_file = "warped.key";
    float scale_ub1 = -1, scale_lb1 = -1, scale_ub2 = -1, scale_lb2 = -1;

    int arg = 0;
    while (++arg < argc)
    {
        if (! strcmp(argv[arg], "-i"))
            image_file = argv[++arg];
        else if (! strcmp(argv[arg], "-i2"))
            warped_file = argv[++arg];
        else if (! strcmp(argv[arg], "-t"))
            transform_file = argv[++arg];
        else if (! strcmp(argv[arg], "-o"))
            key_file = argv[++arg];
        else if (! strcmp(argv[arg], "-o2"))
            warped_key_file = argv[++arg];
        else if (! strcmp(argv[arg], "-p"))
            num_points = atoi(argv[++arg]);
        else if (! strcmp(argv[arg], "-ub1"))
            scale_ub1 = atof(argv[++arg]);
        else if (! strcmp(argv[arg], "-lb1"))
            scale_lb1 = atof(argv[++arg]);
        else if (! strcmp(argv[arg], "-ub2"))
            scale_ub2 = atof(argv[++arg]);
        else if (! strcmp(argv[arg], "-lb2"))
            scale_lb2 = atof(argv[++arg]);
    }

    assert(image_file);
    assert(!warped_file || transform_file);

    // Load the source image
    Image im = ReadPGMFile(image_file);
    int W = im->cols;
    int H = im->rows;

    // Find keypoints in source image
    LoweKeypoint lowe_keypts = NULL;
    {
        Timer t("SIFT detector");
        lowe_keypts = GetKeypoints(im);
    }
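    // Copy the detector's linked-list output into a vector of KeypointFl so
    // the keypoints can be filtered by scale and sorted by strength below.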
    std::vector<KeypointFl> keypts;
    for (LoweKeypoint pt = lowe_keypts; pt != NULL; pt = pt->next)
    {
        keypts.push_back(KeypointFl(pt->col, pt->row, pt->scale, pt->strength));
    }
    if (scale_ub1 > 0)
        keypts.erase(std::remove_if(keypts.begin(), keypts.end(),
                                    ScaleUpperBound(scale_ub1)),
                     keypts.end());
    if (scale_lb1 > 0)
        keypts.erase(std::remove_if(keypts.begin(), keypts.end(),
                                    ScaleLowerBound(scale_lb1)),
                     keypts.end());

    Image warped = NULL;
    CvMat *transform = NULL, *inv = NULL;
    if (warped_file)
    {
        warped = ReadPGMFile(warped_file);

        // Get inverse transform (warped -> source)
        transform = cvCreateMat(3, 3, CV_32FC1);
        ReadTransform(transform_file, transform);
        inv = cvCreateMat(3, 3, CV_32FC1);
        cvInvert(transform, inv);

        keypts.erase(std::remove_if(keypts.begin(), keypts.end(),
                                    OutsideSource(warped->cols, warped->rows, transform)),
                     keypts.end());
    }
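    // Sort so the strongest keypoints come first (assuming KeypointFl orders
    // by strength); the list is trimmed to num_points before being written.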
    if ((int)keypts.size() < num_points)
    {
        num_points = keypts.size();
        printf("WARNING: Only taking %d points!\n", num_points);
        std::sort(keypts.begin(), keypts.end());
    }
    else
    {
        std::partial_sort(keypts.begin(), keypts.begin()+ num_points, keypts.end());
    }

    if (warped_file)
    {
        // Find keypoints in warped image
        LoweKeypoint lowe_warp_keypts = NULL;
        {
            Timer t("SIFT detector (warped)");
            lowe_warp_keypts = GetKeypoints(warped);
        }
        std::vector<KeypointFl> warp_keypts;
        for (LoweKeypoint pt = lowe_warp_keypts; pt != NULL; pt = pt->next)
        {
            warp_keypts.push_back(KeypointFl(pt->col, pt->row, pt->scale, pt->strength));
        }
        if (scale_ub2 > 0)
            warp_keypts.erase(std::remove_if(warp_keypts.begin(), warp_keypts.end(),
                                             ScaleUpperBound(scale_ub2)),
                              warp_keypts.end());
        if (scale_lb2 > 0)
            warp_keypts.erase(std::remove_if(warp_keypts.begin(), warp_keypts.end(),
                                             ScaleLowerBound(scale_lb2)),
                              warp_keypts.end());
        warp_keypts.erase(std::remove_if(warp_keypts.begin(), warp_keypts.end(),
                                         OutsideSource(W, H, inv)),
                          warp_keypts.end());
        if ((int)warp_keypts.size() < num_points)
        {
            num_points = warp_keypts.size();
            printf("WARNING: Only taking %d points!\n", num_points);
            std::sort(warp_keypts.begin(), warp_keypts.end());
        }
        else
        {
            std::partial_sort(warp_keypts.begin(), warp_keypts.begin() + num_points,
                              warp_keypts.end());
            warp_keypts.erase(warp_keypts.begin() + num_points, warp_keypts.end());
        }

        WriteKeypointsFl(warped_key_file, warp_keypts);
    }

    keypts.erase(keypts.begin() + num_points, keypts.end());
    WriteKeypointsFl(key_file, keypts);

    cvReleaseMat(&inv);
    cvReleaseMat(&transform);
    delete warped;
    delete im;

    return 0;
}
Example #3
Status KeyframeVisualOdometry::Update(const Image& image) {
  // Extract keypoints from the frame.
  std::vector<Keypoint> keypoints;
  Status status = GetKeypoints(image, &keypoints);
  if (!status.ok()) return status;

  // Extract features and descriptors from the keypoints.
  std::vector<Feature> features;
  std::vector<Descriptor> descriptors;
  status = GetDescriptors(image, &keypoints, &features, &descriptors);
  if (!status.ok()) return status;

  // Set the annotator's image.
  if (options_.draw_tracks || options_.draw_features) {
    annotator_.SetImage(image);
  }

  // Initialize the very first view if we don't have one yet.
  if (current_keyframe_ == kInvalidView) {
    Landmark::SetRequiredObservations(2);
    InitializeFirstView(features, descriptors);
    return Status::Ok();
  }

  // Initialize the second view if we don't have one yet using 2D<-->2D
  // matching.
  if (view_indices_.size() == 1) {
    status = InitializeSecondView(features, descriptors);
    if (status.ok()) {
      Landmark::SetRequiredObservations(options_.num_observations_to_triangulate);
    }
    return status;
  }

  // Create a camera with an unknown pose.
  Camera camera;
  camera.SetIntrinsics(intrinsics_);
  View::Ptr new_view = View::Create(camera);

  // Is this new image going to be a keyframe?
  const bool is_keyframe =
      initialize_new_keyframe_ ||
      NumEstimatedTracks() < options_.min_num_feature_tracks;
  if (is_keyframe) {
    initialize_new_keyframe_ = false;
  }

  // Update feature tracks and add matched features to the view.
  status = UpdateFeatureTracks(features,
                               descriptors,
                               new_view->Index(),
                               is_keyframe);
  if (!status.ok()) {
    View::DeleteMostRecentView();
    return status;
  }

  // Compute the new camera pose.
  status = EstimatePose(new_view->Index());
  if (!status.ok()) {
    View::DeleteMostRecentView();
  } else {
    view_indices_.push_back(new_view->Index());
  }

  if (is_keyframe && options_.perform_bundle_adjustment) {
    // Bundle adjust views in the sliding window.
    BundleAdjuster bundle_adjuster;
    if (!bundle_adjuster.Solve(options_.bundle_adjustment_options,
                               SlidingWindowViewIndices()))
      return Status::Cancelled("Failed to perform bundle adjustment.");
  }

  // Annotate tracks and features.
  if (options_.draw_features) {
    annotator_.AnnotateFeatures(features);
  }
  if (options_.draw_tracks) {
    annotator_.AnnotateTracks(tracks_);
  }
  if (options_.draw_tracks || options_.draw_features) {
    annotator_.Draw();
  }

  return status;
}
Example #4
    bool SiftNode::detect(posedetection_msgs::Feature0D& features, const sensor_msgs::Image& imagemsg,
                          const sensor_msgs::Image::ConstPtr& mask_ptr)
    {
        boost::mutex::scoped_lock lock(_mutex);
        Image imagesift = NULL;
        cv::Rect region;
        try {
            cv::Mat image;
            cv_bridge::CvImagePtr framefloat;

            if (!(framefloat = cv_bridge::toCvCopy(imagemsg, "mono8")) )
                return false;
            
            if(imagesift != NULL && (imagesift->cols!=imagemsg.width || imagesift->rows!=imagemsg.height)) {
                ROS_INFO("clear sift resources");
                DestroyAllImages();
                imagesift = NULL;
            }
            
            image = framefloat->image;

            if (mask_ptr) {
                cv::Mat mask = cv_bridge::toCvShare(mask_ptr, mask_ptr->encoding)->image;
                region = jsk_perception::boundingRectOfMaskImage(mask);
                image = image(region);
            }
            else {
                region = cv::Rect(0, 0, imagemsg.width, imagemsg.height);
            }
            
            if(imagesift == NULL)
                imagesift = CreateImage(image.rows, image.cols);

            // Copy the (possibly mask-cropped) 8-bit image into siftfast's
            // float format, scaling intensities to [0, 1]; using image.rows
            // and image.cols keeps the copy inside the cropped region.
            for(int i = 0; i < image.rows; ++i) {
                uint8_t* psrc = (uint8_t*)image.data+image.step*i;
                float* pdst = imagesift->pixels+i*imagesift->stride;
                for(int j = 0; j < image.cols; ++j)
                    pdst[j] = (float)psrc[j]*(1.0f/255.0f);
                //memcpy(imagesift->pixels+i*imagesift->stride,framefloat->imageData+framefloat->widthStep*i,imagemsg.width*sizeof(float));
            }
        }
        catch (cv_bridge::Exception& error) {
            ROS_WARN("bad frame: %s", error.what());
            return false;
        }

        // compute SIFT
        ros::Time siftbasetime = ros::Time::now();
        Keypoint keypts = GetKeypoints(imagesift);
        // write the keys to the output
        int numkeys = 0;
        Keypoint key = keypts;
        while(key) {
            numkeys++;
            key = key->next;
        }

        // publish
        features.header = imagemsg.header;
        features.positions.resize(numkeys*2);
        features.scales.resize(numkeys);
        features.orientations.resize(numkeys);
        features.confidences.resize(numkeys);
        features.descriptors.resize(numkeys*128);
        features.descriptor_dim = 128;
        features.type = "libsiftfast";

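        // Copy each keypoint's 128-float descriptor, position (offset back
        // into the full image by the mask region), scale and orientation
        // into the Feature0D message.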
        int index = 0;
        key = keypts;
        while(key) {

            for(int j = 0; j < 128; ++j)
                features.descriptors[128*index+j] = key->descrip[j];

            features.positions[2*index+0] = key->col + region.x;
            features.positions[2*index+1] = key->row + region.y;
            features.scales[index] = key->scale;
            features.orientations[index] = key->ori;
            features.confidences[index] = 1.0; // SIFT has no confidence?

            key = key->next;
            ++index;
        }

        FreeKeypoints(keypts);
        DestroyAllImages();

        ROS_INFO("imagesift: image: %d(size=%lu), num: %d, sift time: %.3fs, total: %.3fs", imagemsg.header.seq,
                 imagemsg.data.size(),  numkeys,
                 (float)(ros::Time::now()-siftbasetime).toSec(), (float)(ros::Time::now()-lasttime).toSec());
        lasttime = ros::Time::now();
        return true;
    }