void BoundingBoxMarker::create(const BoundingBox& bbox, const Vector3f& color, float transparency, double width)
{
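    // Place a small cube marker at each of the eight corners of the given bounding box.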
    SgShape* shape = new SgShape;
    MeshGenerator meshGenerator;
    shape->setMesh(meshGenerator.generateBox(Vector3(width, width, width)));
    
    SgMaterial* material = shape->setMaterial(new SgMaterial);
    material->setTransparency(transparency);
    material->setDiffuseColor(color);
    material->setEmissiveColor(color);

    const double& x0 = bbox.min().x();
    const double& x1 = bbox.max().x();
    const double& y0 = bbox.min().y();
    const double& y1 = bbox.max().y();
    const double& z0 = bbox.min().z();
    const double& z1 = bbox.max().z();
    
    addMarker(shape, x0, y0, z0);
    addMarker(shape, x0, y0, z1);
    addMarker(shape, x0, y1, z0);
    addMarker(shape, x0, y1, z1);

    addMarker(shape, x1, y0, z0);
    addMarker(shape, x1, y0, z1);
    addMarker(shape, x1, y1, z0);
    addMarker(shape, x1, y1, z1);
}
ForceSensorVisualizerItem::ForceSensorVisualizerItem()
    : SensorVisualizerItemBase(this)
{
    setName("ForceSensor");
    
    scene = new SgGroup;

    SgMaterial* material = new SgMaterial;
    Vector3f color(1.0f, 0.2f, 0.2f);
    material->setDiffuseColor(Vector3f::Zero());
    material->setEmissiveColor(color);
    material->setAmbientIntensity(0.0f);
    material->setTransparency(0.6f);

    MeshGenerator meshGenerator;
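    // Cone (arrow head) and cylinder (arrow shaft) used to render the sensor output.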
    cone = new SgShape;
    cone->setMesh(meshGenerator.generateCone(0.03, 0.04));
    cone->setMaterial(material);

    cylinder = new SgShape;
    cylinder->setMesh(meshGenerator.generateCylinder(0.01, 1.0));
    cylinder->setMaterial(material);

    visualRatio = 0.002;
}
Example #3
int main(int argc, char **argv)
{
    // Parse command line options.
    arg_base::set_help_option("-h");
    arg_parse(argc, argv);

    // Set debug level to 1.
    ntk::ntk_debug_level = 1;

    // Set current directory to application directory.
    // This is to find Nite config in config/ directory.
    QApplication app (argc, argv);
    QDir::setCurrent(QApplication::applicationDirPath());

    // Declare the global OpenNI driver. Only one can be instantiated in a program.
    OpenniDriver ni_driver;

    // Declare the frame grabber.
    OpenniGrabber grabber(ni_driver, opt::kinect_id());

    // High resolution 1280x1024 RGB Image.
    if (opt::high_resolution())
        grabber.setHighRgbResolution(true);

    // Start the grabber.
    grabber.connectToDevice();
    grabber.start();

    // Holder for the current image.
    RGBDImage image;

    // Image post processor. Compute mappings when RGB resolution is 1280x1024.
    OpenniRGBDProcessor post_processor;

    // Wait for a new frame, get a local copy and postprocess it.
    grabber.waitForNextFrame();
    grabber.copyImageTo(image);
    post_processor.processImage(image);

    ntk_ensure(image.calibration(), "Uncalibrated rgbd image, cannot project to 3D!");

    // Generate a mesh out of the rgbd image.
    MeshGenerator generator;
    generator.setUseColor(true);
    generator.setMeshType(MeshGenerator::PointCloudMesh);
    generator.generate(image, *image.calibration()->depth_pose, *image.calibration()->rgb_pose);

    // Save the grabber mesh to a ply file.
    ntk_dbg(0) << "Saving mesh file to output.ply...";
    generator.mesh().saveToPlyFile("output.ply");
    grabber.stop();
}
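A small variation (a sketch, not one of the collected examples; the generator variable and output filename are illustrative): switching the mesh type to MeshGenerator::TriangleMesh turns the same image into a triangle mesh instead of a point cloud, reusing the image and calibration prepared above.

    MeshGenerator triangle_generator;
    triangle_generator.setUseColor(true);
    triangle_generator.setMeshType(MeshGenerator::TriangleMesh);
    triangle_generator.generate(image, *image.calibration()->depth_pose, *image.calibration()->rgb_pose);
    triangle_generator.mesh().saveToPlyFile("output_triangles.ply");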
void SphereMarker::initialize(double radius, const Vector3f& color, float transparency)
{
    SgShapePtr shape = new SgShape;
    MeshGenerator meshGenerator;
    shape->setMesh(meshGenerator.generateSphere(1.0));
    material = shape->setMaterial(new SgMaterial);
    material->setDiffuseColor(color);
    material->setEmissiveColor(color);
    material->setTransparency(transparency);
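    // Wrap the unit sphere in a scale transform so the marker radius can be adjusted later.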
    scale = new SgScaleTransform;
    scale->addChild(shape);
    scale->setScale(radius);
    addChild(scale);
}
Mesh* COLMeshConverter::convert(const COLSphere& sphere)
{
	MeshGenerator gen;
	float* vertices;
	float* normals;
	uint32_t* indices;
	unsigned int vertexCount, indexCount;
	gen.createSphere(vertices, normals, vertexCount, indices, indexCount, sphere.getRadius(), 4, 4);

	const COLSurface& surface = sphere.getSurface();
	uint8_t matNum = surface.getMaterial();
	uint8_t* colors = new uint8_t[vertexCount*4];
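	// One RGBA color per vertex, filled below with the collision material color.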
	uint8_t r, g, b;
	getMaterialColors(matNum, r, g, b);

	for (unsigned int i = 0 ; i < vertexCount ; i++) {
		colors[i*4] = r;
		colors[i*4 + 1] = g;
		colors[i*4 + 2] = b;
		colors[i*4 + 3] = 255;
	}

	//printf("glBufferData COLMeshConverter 1\n");
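	// Single VBO: vertex positions (3 floats each) first, then per-vertex RGBA colors.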

	GLuint dataBuffer;
	glGenBuffers(1, &dataBuffer);
	glBindBuffer(GL_ARRAY_BUFFER, dataBuffer);

	glBufferData(GL_ARRAY_BUFFER, vertexCount*3*4 + vertexCount*4, NULL, GL_STATIC_DRAW);
	Engine::getInstance()->increaseTestMem(vertexCount*3*4 + vertexCount*4, __FILE__, __LINE__);

	glBufferSubData(GL_ARRAY_BUFFER, 0, vertexCount*3*4, vertices);
	glBufferSubData(GL_ARRAY_BUFFER, vertexCount*3*4, vertexCount*4, colors);

	Mesh* mesh = new Mesh(vertexCount, VertexFormatTriangles, 0, dataBuffer, -1, -1, vertexCount*3*4);

	Submesh* submesh = new Submesh(mesh, indexCount, indices);
	mesh->link();

	const Vector3& center = sphere.getCenter();
	mesh->setBounds(center[0], center[1], center[2], sphere.getRadius());

	delete[] vertices;
	delete[] normals;
	delete[] indices;
	delete[] colors;

	return mesh;
}
Example #6
TranslationDragger::TranslationDragger(bool setDefaultAxes)
{
    draggableAxes_ = TX | TY | TZ;
    
    axisCylinderNormalizedRadius = 0.04;

    defaultAxesScale = new SgScaleTransform;
    customAxes = new SgGroup;

    for(int i=0; i < 3; ++i){
        SgMaterial* material = new SgMaterial;
        Vector3f color(0.2f, 0.2f, 0.2f);
        color[i] = 1.0f;
        material->setDiffuseColor(Vector3f::Zero());
        material->setEmissiveColor(color);
        material->setAmbientIntensity(0.0f);
        material->setTransparency(0.6f);
        axisMaterials[i] = material;
    }

    if(setDefaultAxes){
        MeshGenerator meshGenerator;
        SgMeshPtr mesh = meshGenerator.generateArrow(1.8, 0.08, 0.1, 2.5);
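        // The generated arrow points along the y-axis; copies for the x- and z-axes are rotated into place below.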
        for(int i=0; i < 3; ++i){
            SgShape* shape = new SgShape;
            shape->setMesh(mesh);
            shape->setMaterial(axisMaterials[i]);
            
            SgPosTransform* arrow = new SgPosTransform;
            arrow->addChild(shape);
            if(i == 0){
                arrow->setRotation(AngleAxis(-PI / 2.0, Vector3::UnitZ()));
            } else if(i == 2){
                arrow->setRotation(AngleAxis( PI / 2.0, Vector3::UnitX()));
            }
            SgInvariantGroup* invariant = new SgInvariantGroup;
            invariant->setName(axisNames[i]);
            invariant->addChild(arrow);
            SgSwitch* axis = new SgSwitch;
            axis->addChild(invariant);
            defaultAxesScale->addChild(axis);
        }
        addChild(defaultAxesScale);
    }

    isContainerMode_ = false;
}
Example #7
int
main (int argc, char* argv [])
{
    ntk::arg_base::set_help_option("-h");
    ntk::arg_parse(argc, argv);
    ntk::ntk_debug_level = cmd::debug_level();
    cv::setBreakOnError(true);

    QApplication app (argc, argv);

    pcl::console::setVerbosityLevel (pcl::console::L_DEBUG);

    RGBDCalibration calibration;
    calibration.loadFromFile(cmd::calibration_file());

    OpenniRGBDProcessor processor;

    RGBDImage image1 (cmd::input_image1(), &calibration, &processor);
    RGBDImage image2 (cmd::input_image2(), &calibration, &processor);

    MeshGenerator generator;
    generator.setMeshType(MeshGenerator::TriangleMesh);
    generator.setUseColor(true);

    generator.generate(image1);
    global::mesh1 = generator.mesh();
    global::mesh1.computeNormalsFromFaces();

    generator.generate(image2);
    global::mesh2 = generator.mesh();
    global::mesh2.computeNormalsFromFaces();

    ntk_dbg(0) << "============ ALIGN WITH ICP ============";
    alignWithICP(image1, image2);

    ntk_dbg(0) << "============ ALIGN WITH LEVENBERG ICP ============";
    alignWithLevenbergICP(image1, image2);

    ntk_dbg(0) << "============ ALIGN WITH RGBD ============";
    alignWithRGBD(image1, image2);

    ntk_dbg(0) << "============ ALIGN WITH RGBD THEN ICP ============";
    alignWithRGBDThenICP(image1, image2);

    ntk_dbg(0) << "============ ALIGN WITH RGBD-ICP ============";
    alignWithRGBDICP(image1, image2);

    global::mesh1.normals.clear();
    global::mesh1.saveToPlyFile("debug_mesh1.ply");

    global::mesh2.normals.clear();
    global::mesh2.saveToPlyFile("debug_mesh2.ply");

    return 0;
}
Example #8
int main(int argc, char **argv)
{
    // Parse command line options.
    arg_base::set_help_option("-h");
    arg_parse(argc, argv);

    // Set debug level to 1.
    ntk::ntk_debug_level = 1;

    // Set current directory to application directory.
    // This is to find Nite config in config/ directory.
    QApplication app (argc, argv);
    QDir::setCurrent(QApplication::applicationDirPath());

    // Declare the global OpenNI driver. Only one can be instantiated in a program.
    OpenniDriver ni_driver;

    // Declare the frame grabber.
    OpenniGrabber grabber(ni_driver, opt::kinect_id());

    // Start the grabber.
    grabber.connectToDevice();
    grabber.start();

    // Holder for the current image.
    RGBDImage ref_image, transformed_image;

    // Image post processor. Compute mappings when RGB resolution is 1280x1024.
    OpenniRGBDProcessor post_processor;

    // Wait for a new frame, get a local copy and postprocess it.
    grabber.waitForNextFrame();
    grabber.copyImageTo(ref_image);
    post_processor.processImage(ref_image);

    // Stop the grabber, don't need the kinect anymore.
    grabber.stop();
    grabber.disconnectFromDevice();

    // Artificially apply an in-plane rotation of 5 degrees.
    ref_image.copyTo(transformed_image);
    cv::warpAffine(ref_image.depth(),
                   transformed_image.depthRef(),
                   cv::getRotationMatrix2D(Point2f(320,240), 5, 1),
                   cv::Size(640,480),
                   cv::INTER_NEAREST);
    cv::warpAffine(ref_image.rgb(),
                   transformed_image.rgbRef(),
                   cv::getRotationMatrix2D(Point2f(320,240), 5, 1),
                   cv::Size(640,480),
                   cv::INTER_LINEAR);

    imshow_normalized("depth", ref_image.depth());
    imshow_normalized("rotated_depth", transformed_image.depth());
    cv::waitKey(0);

    // Check if ICP can find the applied transformation.
    RelativePoseEstimatorICP estimator;
    estimator.setDistanceThreshold(0.5); // points are associated if distance is < 50cm
    estimator.setVoxelSize(0.01); // points are clustered into 1cm cells.
    estimator.setMaxIterations(100); // no more than 100 ICP iterations
    estimator.setReferenceImage(ref_image);
    estimator.estimateNewPose(transformed_image);

    // Should be almost 0
    ntk_dbg_print(estimator.currentPose().cvTranslation(), 1);

    // Should be around 5 degrees around the z axis.
    ntk_dbg_print(estimator.currentPose().cvEulerRotation()*float(180/ntk::math::pi), 1);

    // Save the processed images as point clouds.
    MeshGenerator generator;
    generator.setUseColor(true);
    generator.setMeshType(MeshGenerator::PointCloudMesh);

    generator.generate(ref_image);
    generator.mesh().saveToPlyFile("ref.ply");

    generator.generate(transformed_image);
    generator.mesh().saveToPlyFile("transformed.ply");

    generator.generate(transformed_image, estimator.currentPose(), estimator.currentPose());
    generator.mesh().saveToPlyFile("transformed_corrected.ply");
}
Example #9
// handle default argument
bool initialize(MeshGenerator &self, const Cube *cube, Mesh *mesh, float iso)
{
  return self.initialize(cube, mesh, iso);
}
Example #10
RotationDragger::RotationDragger()
{
    draggableAxes_ = RX | RY | RZ;

    MeshGenerator meshGenerator;
    meshGenerator.setDivisionNumber(36);
    SgMeshPtr beltMesh1 = meshGenerator.generateDisc(1.0, 1.0 - 0.2);
    meshGenerator.setDivisionNumber(24);
    SgMeshPtr beltMesh2 = meshGenerator.generateCylinder(1.0, 0.2, false, false);
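    // Each axis belt combines a flat ring (beltMesh1) and a short cylindrical band (beltMesh2).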

    scale = new SgScaleTransform;
    
    // make dragger belts
    for(int i=0; i < 3; ++i){
        
        SgMaterial* material = new SgMaterial;
        Vector3f color(0.2f, 0.2f, 0.2f);
        color[i] = 1.0f;
        material->setDiffuseColor(Vector3f::Zero());
        material->setEmissiveColor(color);
        material->setAmbientIntensity(0.0f);
        material->setTransparency(0.6f);

        SgShape* beltShape1 = new SgShape;
        beltShape1->setMesh(beltMesh1);
        beltShape1->setMaterial(material);

        SgShape* beltShape2 = new SgShape;
        beltShape2->setMesh(beltMesh2);
        beltShape2->setMaterial(material);
        
        ViewpointDependentRenderingSelector* selector = new ViewpointDependentRenderingSelector;
        
        SgPosTransform* transform1 = new SgPosTransform;
        if(i == 0){ // x-axis
            selector->setAxis(Vector3::UnitX());
            transform1->setRotation(AngleAxis(PI / 2.0, Vector3::UnitY()));
        } else if(i == 1){ // y-axis
            selector->setAxis(Vector3::UnitY());
            transform1->setRotation(AngleAxis(-PI / 2.0, Vector3::UnitX()));
        } else if(i == 2) { // z-axis
            selector->setAxis(Vector3::UnitZ());
        }
        transform1->addChild(beltShape1);
        SgInvariantGroup* belt1 = new SgInvariantGroup;
        belt1->setName(axisNames[i]);
        belt1->addChild(transform1);
        selector->addChild(belt1);

        SgPosTransform* transform2 = new SgPosTransform;
        if(i == 0){ // x-axis
            transform2->setRotation(AngleAxis(-PI / 2.0, Vector3::UnitZ()));
        } else if(i == 2) { // z-axis
            transform2->setRotation(AngleAxis(PI / 2.0, Vector3::UnitX()));
        }
        transform2->addChild(beltShape2);
        SgInvariantGroup* belt2 = new SgInvariantGroup;
        belt2->setName(axisNames[i]);
        belt2->addChild(transform2);
        selector->addChild(belt2);

        SgSwitch* axis = new SgSwitch;
        axis->addChild(selector);

        scale->addChild(axis);
    }

    addChild(scale);

    isContainerMode_ = false;
}
Mesh* COLMeshConverter::convert(const COLModel& model)
{
	const COLSphere* spheres = model.getSphereCount() != 0 ? &model.getSpheres()[0] : NULL;
	const COLBox* boxes = model.getBoxCount() != 0 ? &model.getBoxes()[0] : NULL;

	GLuint dataBuffer;
    glGenBuffers(1, &dataBuffer);
    glBindBuffer(GL_ARRAY_BUFFER, dataBuffer);

    unsigned int vertexCount = 0;
    uint32_t sphereCount = model.getSphereCount();
    uint32_t boxCount = model.getBoxCount();
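    // Tessellate each sphere and box first so the total vertex count is known before allocating the shared VBO.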

    float** boxVertexArrays = new float*[boxCount];
    float** sphereVertexArrays = new float*[sphereCount];
    uint32_t** boxIndexArrays = new uint32_t*[boxCount];
    uint32_t** sphereIndexArrays = new uint32_t*[sphereCount];
    unsigned int* boxVertexCounts = new unsigned int[boxCount];
    unsigned int* sphereVertexCounts = new unsigned int[sphereCount];
    unsigned int* boxIndexCounts = new unsigned int[boxCount];
    unsigned int* sphereIndexCounts = new unsigned int[sphereCount];

    MeshGenerator gen;

    for (uint32_t i = 0 ; i < sphereCount ; i++) {
		const COLSphere& sphere = spheres[i];
		float* tmpNormals;
		gen.createSphere(sphereVertexArrays[i], tmpNormals, sphereVertexCounts[i], sphereIndexArrays[i],
				sphereIndexCounts[i], sphere.getRadius(), 4, 4, sphere.getCenter());
		delete[] tmpNormals;
        vertexCount += sphereVertexCounts[i];
    }

    for (uint32_t i = 0 ; i < boxCount ; i++) {
        const COLBox& box = boxes[i];
        gen.createBox(boxVertexArrays[i], boxVertexCounts[i], boxIndexArrays[i], boxIndexCounts[i],
                  box.getMinimum(), box.getMaximum());
        vertexCount += boxVertexCounts[i];
    }

    unsigned int modelVertexCount;
    float* modelVertices;
    uint8_t* modelColors;
    uint32_t* modelIndices;
    unsigned int modelIndexCount;

    convertVertexModel(model.getVertices(), model.getVertexCount(),
    		model.getFaceCount() != 0 ? &model.getFaces()[0] : NULL, model.getFaceCount(),
    		modelVertexCount, modelVertices, modelColors, modelIndices, modelIndexCount);
    vertexCount += modelVertexCount;

    //printf("glBufferData COLMeshConverter 3\n");
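    // Shared VBO layout: all vertex positions (3 floats per vertex) first, then all RGBA colors.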

    glBufferData(GL_ARRAY_BUFFER, vertexCount*3*4 + vertexCount*4, NULL, GL_STATIC_DRAW);
    Engine::getInstance()->increaseTestMem(vertexCount*3*4 + vertexCount*4, __FILE__, __LINE__);
    int vertexOffset = 0;
    int colorOffset = vertexCount*3*4;

    glBufferSubData(GL_ARRAY_BUFFER, 0, modelVertexCount*3*4, modelVertices);
    glBufferSubData(GL_ARRAY_BUFFER, colorOffset, modelVertexCount*4, modelColors);
    vertexOffset = modelVertexCount;

    /*Mesh* mesh = new Mesh(vertexCount, VertexFormatTriangles, MeshVertexColors, dataBuffer, -1, -1,
    		colorOffset);*/

    Mesh* mesh = new Mesh(vertexCount, VertexFormatTriangles, 0, dataBuffer, vertexCount*3*4 + vertexCount*4,
			0, 0, -1, -1, -1, -1, vertexCount*3*4, 0);

    delete[] modelVertices;
    delete[] modelColors;

    uint8_t r, g, b;

    for (uint32_t i = 0 ; i < sphereCount ; i++) {
    	const COLSphere& sphere = spheres[i];
    	unsigned int vertexCount = sphereVertexCounts[i];

    	// Create the vertex colors array
    	uint8_t* colors = new uint8_t[vertexCount*4];
    	const COLSurface& surface = sphere.getSurface();
    	uint8_t matNum = surface.getMaterial();
    	getMaterialColors(matNum, r, g, b);

        for (unsigned int j = 0 ; j < vertexCount ; j++) {
        	colors[j*4] = r;
        	colors[j*4+1] = g;
        	colors[j*4+2] = b;
        	colors[j*4+3] = 255;
        }

        // Fill the buffers with vertex data
        glBufferSubData(GL_ARRAY_BUFFER, vertexOffset*3*4, vertexCount*3*4, sphereVertexArrays[i]);
        glBufferSubData(GL_ARRAY_BUFFER, colorOffset + vertexOffset*4, vertexCount*4, colors);
        vertexOffset += vertexCount;

        delete[] colors;
    }

    for (uint32_t i = 0 ; i < boxCount ; i++) {
        const COLBox& box = boxes[i];
    	unsigned int vertexCount = boxVertexCounts[i];

    	// Create the vertex colors array
    	uint8_t* colors = new uint8_t[vertexCount*4];
    	const COLSurface& surface = box.getSurface();
    	uint8_t matNum = surface.getMaterial();
    	getMaterialColors(matNum, r, g, b);

        for (unsigned int j = 0 ; j < vertexCount ; j++) {
        	colors[j*4] = r;
        	colors[j*4+1] = g;
        	colors[j*4+2] = b;
        	colors[j*4+3] = 255;
        }

        // Fill the buffers with vertex data
        glBufferSubData(GL_ARRAY_BUFFER, vertexOffset*3*4, vertexCount*3*4, boxVertexArrays[i]);
        glBufferSubData(GL_ARRAY_BUFFER, colorOffset + vertexOffset*4, vertexCount*4, colors);
        vertexOffset += vertexCount;

        delete[] colors;
    }
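
    // One submesh for the raw face mesh, then one per sphere and per box; indices are offset into the shared vertex buffer.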

    Submesh* submesh = new Submesh(mesh, modelIndexCount, modelIndices);
    vertexOffset = modelVertexCount;

    delete[] modelIndices;

    for (uint32_t i = 0 ; i < sphereCount ; i++) {
    	unsigned int indexCount = sphereIndexCounts[i];
    	uint32_t* indices = sphereIndexArrays[i];
    	for (unsigned int j = 0 ; j < indexCount ; j++) {
    		indices[j] += vertexOffset;
    	}

    	Submesh* submesh = new Submesh(mesh, indexCount, indices);
    	vertexOffset += sphereVertexCounts[i];

    	delete[] sphereVertexArrays[i];
    	delete[] sphereIndexArrays[i];
    }

    delete[] sphereVertexArrays;
    delete[] sphereIndexArrays;
    delete[] sphereVertexCounts;
    delete[] sphereIndexCounts;

    for (uint32_t i = 0 ; i < boxCount ; i++) {
    	unsigned int indexCount = boxIndexCounts[i];
    	uint32_t* indices = boxIndexArrays[i];
    	for (unsigned int j = 0 ; j < indexCount ; j++) {
    		indices[j] += vertexOffset;
    	}

        Submesh* submesh = new Submesh(mesh, indexCount, indices);
        vertexOffset += boxVertexCounts[i];

        delete[] boxVertexArrays[i];
        delete[] boxIndexArrays[i];
    }

    delete[] boxVertexArrays;
    delete[] boxIndexArrays;
    delete[] boxVertexCounts;
    delete[] boxIndexCounts;

    mesh->link();

    const COLBounds& bounds = model.getBounds();
    const Vector3& center = bounds.getCenter();
    mesh->setBounds(center.getX(), center.getY(), center.getZ(), bounds.getRadius());

    return mesh;
}
Example #12
int main(int argc, char **argv)
{
    // Parse command line options.
    arg_base::set_help_option("-h");
    arg_parse(argc, argv);

    // Set debug level to 1.
    ntk::ntk_debug_level = 1;

    // Set current directory to application directory.
    // This is to find Nite config in config/ directory.
    QApplication app (argc, argv);
    QDir::setCurrent(QApplication::applicationDirPath());

    // Declare the global OpenNI driver. Only one can be instantiated in a program.
    OpenniDriver ni_driver;

    // Declare the frame grabber.
    OpenniGrabber grabber(ni_driver);

    // Start the grabber.
    grabber.setHighRgbResolution(false);
    grabber.setCustomBayerDecoding(true);
    grabber.connectToDevice();
    grabber.start();

    // Holder for the current image.
    RGBDImage ref_image, transformed_image;

    // Image post processor. Compute mappings when RGB resolution is 1280x1024.
    OpenniRGBDProcessor post_processor;

    // Wait for a new frame, get a local copy and postprocess it.
    for (int i = 0; i < 5; ++i)
        grabber.waitForNextFrame();
    grabber.copyImageTo(ref_image);
    post_processor.processImage(ref_image);

    // Stop the grabber, don't need the kinect anymore.
    grabber.stop();
    grabber.disconnectFromDevice();

    // Artificially apply an in-plane rotation of 5 degrees.
    ref_image.copyTo(transformed_image);
#if 1
    cv::warpAffine(ref_image.depth(),
                   transformed_image.depthRef(),
                   cv::getRotationMatrix2D(Point2f(320,240), 5, 1),
                   cv::Size(640,480),
                   cv::INTER_NEAREST);
    cv::warpAffine(ref_image.rgb(),
                   transformed_image.rgbRef(),
                   cv::getRotationMatrix2D(Point2f(320,240), 5, 1),
                   cv::Size(640,480),
                   cv::INTER_LINEAR);
#endif

#if 0
    imshow_normalized("rgb", ref_image.rgb());
    imshow_normalized("rotated_rgb", transformed_image.rgb());
    cv::waitKey(0);
#endif

    Pose3D target_pose = *ref_image.calibration()->depth_pose;
    // Artificially apply some changes.
    target_pose.applyTransformBefore(cv::Vec3f(0,1.0,0), cv::Vec3f(M_PI*4.0,0,M_PI/2.0));

    RelativePoseEstimatorMarkers estimator;
    estimator.setMarkerSize(opt::marker_size());
    estimator.setSourceImage(transformed_image);
    estimator.setTargetImage(ref_image);
    estimator.setTargetPose(target_pose);
    estimator.estimateNewPose();

    // Should be almost -0.0874827 0.997087 0.000867769
    ntk_dbg_print(estimator.estimatedSourcePose().cvTranslation(), 1);

    // Should be almost -0.350163 -0.0560269 95.1153
    ntk_dbg_print(estimator.estimatedSourcePose().cvEulerRotation()*float(180/ntk::math::pi), 1);

    // Save the processed images as point clouds.
    MeshGenerator generator;
    generator.setUseColor(true);
    generator.setMeshType(MeshGenerator::PointCloudMesh);

    generator.generate(ref_image, target_pose, target_pose);
    generator.mesh().saveToPlyFile("ref.ply");

    generator.generate(transformed_image, target_pose, target_pose);
    generator.mesh().saveToPlyFile("transformed.ply");

    generator.generate(transformed_image, estimator.estimatedSourcePose(), estimator.estimatedSourcePose());
    generator.mesh().saveToPlyFile("transformed_corrected.ply");
}