Example #1
0
// Align two RGB-D images with several relative-pose estimation strategies
// (ICP, Levenberg ICP, RGBD features, and combinations) and dump the two
// input clouds as PLY meshes for visual inspection.
int
main (int argc, char* argv [])
{
    // Parse command line options; -h prints usage.
    ntk::arg_base::set_help_option("-h");
    ntk::arg_parse(argc, argv);
    ntk::ntk_debug_level = cmd::debug_level();
    cv::setBreakOnError(true); // break into the debugger on OpenCV errors

    QApplication app (argc, argv);

    pcl::console::setVerbosityLevel (pcl::console::L_DEBUG);

    // Load the depth/rgb calibration shared by both input images.
    RGBDCalibration calibration;
    calibration.loadFromFile(cmd::calibration_file());

    OpenniRGBDProcessor processor;

    // Load and postprocess the two images to align.
    // FIX: removed a stray extra ';' after the image2 declaration.
    RGBDImage image1 (cmd::input_image1(), &calibration, &processor);
    RGBDImage image2 (cmd::input_image2(), &calibration, &processor);

    // Build colored triangle meshes from both images for debug output.
    MeshGenerator generator;
    generator.setMeshType(MeshGenerator::TriangleMesh);
    generator.setUseColor(true);

    generator.generate(image1);
    global::mesh1 = generator.mesh();
    global::mesh1.computeNormalsFromFaces();

    generator.generate(image2);
    global::mesh2 = generator.mesh();
    global::mesh2.computeNormalsFromFaces();

    // Run each alignment strategy in turn; each logs its own results.
    ntk_dbg(0) << "============ ALIGN WITH ICP ============";
    alignWithICP(image1, image2);

    ntk_dbg(0) << "============ ALIGN WITH LEVENBERG ICP ============";
    alignWithLevenbergICP(image1, image2);

    ntk_dbg(0) << "============ ALIGN WITH RGBD ============";
    alignWithRGBD(image1, image2);

    ntk_dbg(0) << "============ ALIGN WITH RGBD THEN ICP ============";
    alignWithRGBDThenICP(image1, image2);

    ntk_dbg(0) << "============ ALIGN WITH RGBD-ICP ============";
    alignWithRGBDICP(image1, image2);

    // Drop normals before saving: the PLY debug dumps do not need them.
    global::mesh1.normals.clear();
    global::mesh1.saveToPlyFile("debug_mesh1.ply");

    global::mesh2.normals.clear();
    global::mesh2.saveToPlyFile("debug_mesh2.ply");

    return 0;
}
Example #2
0
int main(int argc, char **argv)
{
    // Parse command line options.
    arg_base::set_help_option("-h");
    arg_parse(argc, argv);

    // Set debug level to 1.
    ntk::ntk_debug_level = 1;

    // Set current directory to application directory.
    // This is to find Nite config in config/ directory.
    QApplication app (argc, argv);
    QDir::setCurrent(QApplication::applicationDirPath());

    // Declare the global OpenNI driver. Only one can be instantiated in a program.
    OpenniDriver ni_driver;

    // Declare the frame grabber.
    OpenniGrabber grabber(ni_driver, opt::kinect_id());

    // High resolution 1280x1024 RGB Image.
    if (opt::high_resolution())
        grabber.setHighRgbResolution(true);

    // Start the grabber.
    grabber.connectToDevice();
    grabber.start();

    // Holder for the current image.
    RGBDImage image;

    // Image post processor. Compute mappings when RGB resolution is 1280x1024.
    OpenniRGBDProcessor post_processor;

    // Wait for a new frame, get a local copy and postprocess it.
    grabber.waitForNextFrame();
    grabber.copyImageTo(image);
    post_processor.processImage(image);

    ntk_ensure(image.calibration(), "Uncalibrated rgbd image, cannot project to 3D!");

    // Generate a mesh out of the rgbd image.
    MeshGenerator generator;
    generator.setUseColor(true);
    generator.setMeshType(MeshGenerator::PointCloudMesh);
    generator.generate(image, *image.calibration()->depth_pose, *image.calibration()->rgb_pose);

    // Save the grabber mesh to a ply file.
    ntk_dbg(0) << "Saving mesh file to output.ply...";
    generator.mesh().saveToPlyFile("output.ply");
    grabber.stop();
}
Example #3
0
int main(int argc, char **argv)
{
    // Parse command line options.
    arg_base::set_help_option("-h");
    arg_parse(argc, argv);

    // Set debug level to 1.
    ntk::ntk_debug_level = 1;

    // Set current directory to application directory.
    // This is to find Nite config in config/ directory.
    QApplication app (argc, argv);
    QDir::setCurrent(QApplication::applicationDirPath());

    // Declare the global OpenNI driver. Only one can be instantiated in a program.
    OpenniDriver ni_driver;

    // Declare the frame grabber.
    OpenniGrabber grabber(ni_driver, opt::kinect_id());

    // Start the grabber.
    grabber.connectToDevice();
    grabber.start();

    // Holder for the current image.
    RGBDImage ref_image, transformed_image;

    // Image post processor. Compute mappings when RGB resolution is 1280x1024.
    OpenniRGBDProcessor post_processor;

    // Wait for a new frame, get a local copy and postprocess it.
    grabber.waitForNextFrame();
    grabber.copyImageTo(ref_image);
    post_processor.processImage(ref_image);

    // Stop the grabber, don't need the kinect anymore.
    grabber.stop();
    grabber.disconnectFromDevice();

    // Artificially apply an in-plane rotation of 5 degrees.
    ref_image.copyTo(transformed_image);
    cv::warpAffine(ref_image.depth(),
                   transformed_image.depthRef(),
                   cv::getRotationMatrix2D(Point2f(320,240), 5, 1),
                   cv::Size(640,480),
                   cv::INTER_NEAREST);
    cv::warpAffine(ref_image.rgb(),
                   transformed_image.rgbRef(),
                   cv::getRotationMatrix2D(Point2f(320,240), 5, 1),
                   cv::Size(640,480),
                   cv::INTER_LINEAR);

    imshow_normalized("depth", ref_image.depth());
    imshow_normalized("rotated_depth", transformed_image.depth());
    cv::waitKey(0);

    // Check if ICP can find the applied transformation.
    RelativePoseEstimatorICP estimator;
    estimator.setDistanceThreshold(0.5); // points are associated if distance is < 50cm
    estimator.setVoxelSize(0.01); // points are clustered into 1cm cells.
    estimator.setMaxIterations(100); // no more than 100 ICP iterations
    estimator.setReferenceImage(ref_image);
    estimator.estimateNewPose(transformed_image);

    // Should be almost 0
    ntk_dbg_print(estimator.currentPose().cvTranslation(), 1);

    // Should be around 5 degrees around the z axis.
    ntk_dbg_print(estimator.currentPose().cvEulerRotation()*float(180/ntk::math::pi), 1);

    // Save the processed images as point clouds.
    MeshGenerator generator;
    generator.setUseColor(true);
    generator.setMeshType(MeshGenerator::PointCloudMesh);

    generator.generate(ref_image);
    generator.mesh().saveToPlyFile("ref.ply");

    generator.generate(transformed_image);
    generator.mesh().saveToPlyFile("transformed.ply");

    generator.generate(transformed_image, estimator.currentPose(), estimator.currentPose());
    generator.mesh().saveToPlyFile("transformed_corrected.ply");
}
Example #4
0
// Grab one Kinect frame, synthesize a rotated copy, then check that the
// marker-based relative pose estimator recovers the (artificially altered)
// target pose. Writes ref.ply, transformed.ply and transformed_corrected.ply.
int main(int argc, char **argv)
{
    // Parse command line options.
    arg_base::set_help_option("-h");
    arg_parse(argc, argv);

    // Set debug level to 1.
    ntk::ntk_debug_level = 1;

    // Set current directory to application directory.
    // This is to find Nite config in config/ directory.
    QApplication app (argc, argv);
    QDir::setCurrent(QApplication::applicationDirPath());

    // Declare the global OpenNI driver. Only one can be instantiated in a program.
    OpenniDriver ni_driver;

    // Declare the frame grabber. No kinect id given: uses the default device.
    OpenniGrabber grabber(ni_driver);

    // Configure and start the grabber: standard RGB resolution,
    // custom Bayer decoding enabled.
    grabber.setHighRgbResolution(false);
    grabber.setCustomBayerDecoding(true);
    grabber.connectToDevice();
    grabber.start();

    // Holder for the current image.
    RGBDImage ref_image, transformed_image;

    // Image post processor. Compute mappings when RGB resolution is 1280x1024.
    OpenniRGBDProcessor post_processor;

    // Skip the first frames (presumably to let the sensor stabilize —
    // TODO confirm), then get a local copy and postprocess it.
    for (int i = 0; i < 5; ++i)
        grabber.waitForNextFrame();
    grabber.copyImageTo(ref_image);
    post_processor.processImage(ref_image);

    // Stop the grabber, don't need the kinect anymore.
    grabber.stop();
    grabber.disconnectFromDevice();

    // Artificially apply an in-plane rotation of 5 degrees.
    ref_image.copyTo(transformed_image);
#if 1
    // Debug toggle: rotate both depth and rgb around the image center.
    cv::warpAffine(ref_image.depth(),
                   transformed_image.depthRef(),
                   cv::getRotationMatrix2D(Point2f(320,240), 5, 1),
                   cv::Size(640,480),
                   cv::INTER_NEAREST);
    cv::warpAffine(ref_image.rgb(),
                   transformed_image.rgbRef(),
                   cv::getRotationMatrix2D(Point2f(320,240), 5, 1),
                   cv::Size(640,480),
                   cv::INTER_LINEAR);
#endif

#if 0
    // Disabled debug visualization of the original and rotated rgb images.
    imshow_normalized("rgb", ref_image.rgb());
    imshow_normalized("rotated_rgb", transformed_image.rgb());
    cv::waitKey(0);
#endif

    Pose3D target_pose = *ref_image.calibration()->depth_pose;
    // Artificially apply some changes: translate 1m along y and rotate
    // (M_PI*4.0, 0, M_PI/2) — NOTE(review): M_PI*4.0 is two full turns,
    // presumably intentional as a no-op rotation component; confirm.
    target_pose.applyTransformBefore(cv::Vec3f(0,1.0,0), cv::Vec3f(M_PI*4.0,0,M_PI/2.0));

    // Estimate the pose of the rotated (source) image relative to the
    // reference image using fiducial markers of the given physical size.
    RelativePoseEstimatorMarkers estimator;
    estimator.setMarkerSize(opt::marker_size());
    estimator.setSourceImage(transformed_image);
    estimator.setTargetImage(ref_image);
    estimator.setTargetPose(target_pose);
    estimator.estimateNewPose();

    // Should be almost -0.0874827 0.997087 0.000867769
    ntk_dbg_print(estimator.estimatedSourcePose().cvTranslation(), 1);

    // Should be almost -0.350163 -0.0560269 95.1153
    ntk_dbg_print(estimator.estimatedSourcePose().cvEulerRotation()*float(180/ntk::math::pi), 1);

    // Save the processed images as point clouds.
    MeshGenerator generator;
    generator.setUseColor(true);
    generator.setMeshType(MeshGenerator::PointCloudMesh);

    generator.generate(ref_image, target_pose, target_pose);
    generator.mesh().saveToPlyFile("ref.ply");

    generator.generate(transformed_image, target_pose, target_pose);
    generator.mesh().saveToPlyFile("transformed.ply");

    // Re-project the rotated image with the estimated pose; if the
    // estimate is good, this cloud should line up with ref.ply.
    generator.generate(transformed_image, estimator.estimatedSourcePose(), estimator.estimatedSourcePose());
    generator.mesh().saveToPlyFile("transformed_corrected.ply");
}