int main(int argc, char * argv[]) try
{
    // Create a simple OpenGL window for rendering:
    window app(1280, 720, "RealSense Post Processing Example");
    ImGui_ImplGlfw_Init(app, false);

    // Construct objects to manage view state
    glfw_state original_view_orientation{};
    glfw_state filtered_view_orientation{};

    // Declare pointcloud objects, for calculating pointclouds and texture mappings
    rs2::pointcloud original_pc;
    rs2::pointcloud filtered_pc;

    // Declare RealSense pipeline, encapsulating the actual device and sensors
    rs2::pipeline pipe;
    rs2::config cfg;
    // Use a configuration object to request only depth from the pipeline
    cfg.enable_stream(RS2_STREAM_DEPTH, 640, 480, RS2_FORMAT_Z16, 30);
    // Start streaming with the above configuration
    pipe.start(cfg);

    // Declare filters
    rs2::decimation_filter dec_filter;  // Decimation - reduces depth frame density
    rs2::spatial_filter spat_filter;    // Spatial    - edge-preserving spatial smoothing
    rs2::temporal_filter temp_filter;   // Temporal   - reduces temporal noise

    // Declare disparity transform from depth to disparity and vice versa
    const std::string disparity_filter_name = "Disparity";
    rs2::disparity_transform depth_to_disparity(true);
    rs2::disparity_transform disparity_to_depth(false);

    // Initialize a vector that holds filters and their options
    std::vector<filter_options> filters;

    // The following order of emplacement will dictate the order in which filters are applied
    filters.emplace_back("Decimate", dec_filter);
    filters.emplace_back(disparity_filter_name, depth_to_disparity);
    filters.emplace_back("Spatial", spat_filter);
    filters.emplace_back("Temporal", temp_filter);

    // Declare two concurrent queues that will be used to push and pop frames from different threads
    rs2::frame_queue original_data;
    rs2::frame_queue filtered_data;

    // Declare depth colorizer for pretty visualization of depth data
    rs2::colorizer color_map;

    // Atomic boolean to allow thread-safe stopping of the thread
    std::atomic_bool stopped(false);

    // Create a thread for getting frames from the device and processing them,
    // to prevent the UI thread from blocking due to long computations.
    std::thread processing_thread([&]() {
        while (!stopped) // While application is running
        {
            rs2::frameset data = pipe.wait_for_frames(); // Wait for next set of frames from the camera
            rs2::frame depth_frame = data.get_depth_frame(); // Take the depth frame from the frameset
            if (!depth_frame) // Should not happen, but if the pipeline is configured differently
                return;       // it might not provide depth, and we don't want to crash

            rs2::frame filtered = depth_frame; // Does not copy the frame, only adds a reference

            /* Apply filters.
               The implemented flow of the filters pipeline is in the following order:
               1. apply decimation filter
               2. transform the scene into the disparity domain
               3. apply spatial filter
               4. apply temporal filter
               5. revert the results back to the depth domain, if the disparity transform was applied
               (each post-processing block is optional and can be applied independently).
            */
            bool revert_disparity = false;
            for (auto&& filter : filters)
            {
                if (filter.is_enabled)
                {
                    filtered = filter.filter.process(filtered);
                    if (filter.filter_name == disparity_filter_name)
                    {
                        revert_disparity = true;
                    }
                }
            }
            if (revert_disparity)
            {
                filtered = disparity_to_depth.process(filtered);
            }

            // Push filtered & original data to their respective queues
            // Note: pushing to two different queues might cause the application to display
            // original and filtered pointclouds from different depth frames.
            // To make sure they are synchronized you need to push them together or add some
            // synchronization mechanism.
            filtered_data.enqueue(filtered);
            original_data.enqueue(depth_frame);
        }
    });

    // Declare objects that will hold the calculated pointclouds and colored frames
    // We save the last set of data to minimize flickering of the view
    rs2::frame colored_depth;
    rs2::frame colored_filtered;
    rs2::points original_points;
    rs2::points filtered_points;

    // Save the time of the last frame's arrival
    auto last_time = std::chrono::high_resolution_clock::now();
    // Maximum angle for the rotation of the pointcloud
    const double max_angle = 15.0;
    // We'll use rotation_velocity to rotate the pointcloud for a better view of the filters' effects
    float rotation_velocity = 0.3f;

    while (app)
    {
        float w = static_cast<float>(app.width());
        float h = static_cast<float>(app.height());

        // Render the GUI
        render_ui(w, h, filters);

        // Try to get new data from the queues and update the view with a new texture
        update_data(original_data, colored_depth, original_points, original_pc, original_view_orientation, color_map);
        update_data(filtered_data, colored_filtered, filtered_points, filtered_pc, filtered_view_orientation, color_map);

        draw_text(10, 50, "Original");
        draw_text(static_cast<int>(w / 2), 50, "Filtered");

        // Draw the pointclouds of the original and the filtered frames (if they are available already)
        if (colored_depth && original_points)
        {
            glViewport(0, h / 2, w / 2, h / 2);
            draw_pointcloud(w / 2, h / 2, original_view_orientation, original_points);
        }
        if (colored_filtered && filtered_points)
        {
            glViewport(w / 2, h / 2, w / 2, h / 2);
            draw_pointcloud(w / 2, h / 2, filtered_view_orientation, filtered_points);
        }

        // Update time of current frame's arrival
        auto curr = std::chrono::high_resolution_clock::now();
        // Time interval which must pass between movements of the pointcloud
        const std::chrono::milliseconds rotation_delta(40);

        // In order to calibrate the velocity of the rotation to the actual display speed, rotate
        // the pointcloud only when enough time has passed between frames
        if (curr - last_time > rotation_delta)
        {
            if (fabs(filtered_view_orientation.yaw) > max_angle)
            {
                rotation_velocity = -rotation_velocity;
            }
            original_view_orientation.yaw += rotation_velocity;
            filtered_view_orientation.yaw += rotation_velocity;

            last_time = curr;
        }
    }

    // Signal the processing thread to stop, and join
    // (Not the safest way to join a thread, please wrap your threads in some RAII manner)
    stopped = true;
    processing_thread.join();

    return EXIT_SUCCESS;
}
catch (const rs2::error & e)
{
    std::cerr << "RealSense error calling " << e.get_failed_function() << "(" << e.get_failed_args() << "):\n    " << e.what() << std::endl;
    return EXIT_FAILURE;
}
catch (const std::exception& e)
{
    std::cerr << e.what() << std::endl;
    return EXIT_FAILURE;
}
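/* The example above relies on a `filter_options` helper type that is not defined
   in this listing. Below is a minimal sketch of what it could look like. The
   member names (filter_name, filter, is_enabled) and the two-argument constructor
   are inferred from the call sites in the example; the rest of the body is an
   assumption, not a definitive implementation. Assumes <string>, <atomic>, and
   the librealsense headers are included. */
struct filter_options
{
    filter_options(const std::string& name, rs2::filter& flt)
        : filter_name(name), filter(flt), is_enabled(true) {}

    // std::atomic_bool is not movable, so a move constructor is needed
    // for std::vector<filter_options>::emplace_back to work
    filter_options(filter_options&& other)
        : filter_name(std::move(other.filter_name)),
          filter(other.filter),
          is_enabled(other.is_enabled.load()) {}

    std::string filter_name;      // Name shown in the UI and compared against disparity_filter_name
    rs2::filter& filter;          // The post-processing block itself
    std::atomic_bool is_enabled;  // Toggled by the UI thread, read by the processing thread
};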
int main(int, char**)
{
    // Setup window
    glfwSetErrorCallback(error_callback);
    if (!glfwInit())
        exit(1);
    GLFWwindow* window = glfwCreateWindow(1280, 720, "ImGui OpenGL2 example", NULL, NULL);
    glfwMakeContextCurrent(window);

    // Setup ImGui binding
    ImGui_ImplGlfw_Init(window, true);

    // Load Fonts
    // (see extra_fonts/README.txt for more details)
    //ImGuiIO& io = ImGui::GetIO();
    //io.Fonts->AddFontDefault();
    //io.Fonts->AddFontFromFileTTF("../../extra_fonts/Cousine-Regular.ttf", 15.0f);
    //io.Fonts->AddFontFromFileTTF("../../extra_fonts/DroidSans.ttf", 16.0f);
    //io.Fonts->AddFontFromFileTTF("../../extra_fonts/ProggyClean.ttf", 13.0f);
    //io.Fonts->AddFontFromFileTTF("../../extra_fonts/ProggyTiny.ttf", 10.0f);
    //io.Fonts->AddFontFromFileTTF("c:\\Windows\\Fonts\\ArialUni.ttf", 18.0f, NULL, io.Fonts->GetGlyphRangesJapanese());

    // Merge glyphs from multiple fonts into one (e.g. combine default font with another with Chinese glyphs, or add icons)
    //ImWchar icons_ranges[] = { 0xf000, 0xf3ff, 0 };
    //ImFontConfig icons_config; icons_config.MergeMode = true; icons_config.PixelSnapH = true;
    //io.Fonts->AddFontFromFileTTF("../../extra_fonts/DroidSans.ttf", 18.0f);
    //io.Fonts->AddFontFromFileTTF("../../extra_fonts/fontawesome-webfont.ttf", 18.0f, &icons_config, icons_ranges);

    bool show_test_window = true;
    bool show_another_window = false;
    ImVec4 clear_color = ImColor(114, 144, 154);

    // Main loop
    while (!glfwWindowShouldClose(window))
    {
        glfwPollEvents();
        ImGui_ImplGlfw_NewFrame();

        // 1. Show a simple window
        // Tip: if we don't call ImGui::Begin()/ImGui::End(), the widgets appear in a window automatically called "Debug"
        {
            static float f = 0.0f;
            ImGui::Text("Hello, world!");
            ImGui::SliderFloat("float", &f, 0.0f, 1.0f);
            ImGui::ColorEdit3("clear color", (float*)&clear_color);
            if (ImGui::Button("Test Window")) show_test_window ^= 1;
            if (ImGui::Button("Another Window")) show_another_window ^= 1;
            ImGui::Text("Application average %.3f ms/frame (%.1f FPS)", 1000.0f / ImGui::GetIO().Framerate, ImGui::GetIO().Framerate);
        }

        // 2. Show another simple window, this time using an explicit Begin/End pair
        if (show_another_window)
        {
            ImGui::SetNextWindowSize(ImVec2(200, 100), ImGuiSetCond_FirstUseEver);
            ImGui::Begin("Another Window", &show_another_window);
            ImGui::Text("Hello");
            ImGui::End();
        }

        // 3. Show the ImGui test window. Most of the sample code is in ImGui::ShowTestWindow()
        if (show_test_window)
        {
            ImGui::SetNextWindowPos(ImVec2(650, 20), ImGuiSetCond_FirstUseEver);
            ImGui::ShowTestWindow(&show_test_window);
        }

        // Rendering
        int display_w, display_h;
        glfwGetFramebufferSize(window, &display_w, &display_h);
        glViewport(0, 0, display_w, display_h);
        glClearColor(clear_color.x, clear_color.y, clear_color.z, clear_color.w);
        glClear(GL_COLOR_BUFFER_BIT);
        ImGui::Render();
        glfwSwapBuffers(window);
    }

    // Cleanup
    ImGui_ImplGlfw_Shutdown();
    glfwTerminate();

    return 0;
}
int main(int, char**)
{
    // Setup window
    glfwSetErrorCallback(error_callback);
    if (!glfwInit())
        exit(1);
    GLFWwindow* window = glfwCreateWindow(1280, 720, "ImGui OpenGL2 example", NULL, NULL);
    glfwMakeContextCurrent(window);

    // Setup ImGui binding
    ImGui_ImplGlfw_Init(window, true);

    bool show_test_window = true;
    bool show_another_window = false;
    ImVec4 clear_color = ImColor(114, 144, 154);

    // Main loop
    while (!glfwWindowShouldClose(window))
    {
        glfwPollEvents();
        ImGui_ImplGlfw_NewFrame();

        // 1. Show a simple window
        // Tip: if we don't call ImGui::Begin()/ImGui::End(), the widgets appear in a window automatically called "Debug"
        {
            static float f = 0.0f;
            ImGui::Text("Hello, world!");
            ImGui::SliderFloat("float", &f, 0.0f, 1.0f);
            ImGui::ColorEdit3("clear color", (float*)&clear_color);
            if (ImGui::Button("Test Window")) show_test_window ^= 1;
            if (ImGui::Button("Another Window")) show_another_window ^= 1;
            ImGui::Text("Application average %.3f ms/frame (%.1f FPS)", 1000.0f / ImGui::GetIO().Framerate, ImGui::GetIO().Framerate);
        }

        // 2. Show another simple window, this time using an explicit Begin/End pair
        if (show_another_window)
        {
            ImGui::SetNextWindowSize(ImVec2(200, 100), ImGuiSetCond_FirstUseEver);
            ImGui::Begin("Another Window");
            ImGui::Text("Hello");
            ImGui::End();
        }

        // 3. Show the ImGui test window. Most of the sample code is in ImGui::ShowTestWindow()
        if (show_test_window)
        {
            ImGui::SetNextWindowPos(ImVec2(650, 20), ImGuiSetCond_FirstUseEver);
            ImGui::ShowTestWindow();
        }

        // Rendering
        int display_w, display_h;
        glfwGetFramebufferSize(window, &display_w, &display_h);
        glViewport(0, 0, display_w, display_h);
        glClearColor(clear_color.x, clear_color.y, clear_color.z, clear_color.w);
        glClear(GL_COLOR_BUFFER_BIT);

        // Needed to disable the shader pipeline when running OpenGL 2 code on an OpenGL 3+ context
        glUseProgram(0);
        ImGui::Render();
        glfwSwapBuffers(window);
    }

    // Cleanup
    ImGui_ImplGlfw_Shutdown();
    glfwTerminate();

    return 0;
}
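/* Both examples above register `error_callback` with GLFW but never define it.
   A minimal sketch of what it could be -- the name matches the call sites, but
   the body is an assumption, not the examples' actual code. Assumes <cstdio>: */
#include <cstdio>

static void error_callback(int error, const char* description)
{
    // GLFW passes an error code and a human-readable description
    std::fprintf(stderr, "GLFW Error %d: %s\n", error, description);
}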
bool ImGui_ImplGlfw_InitForVulkan(GLFWwindow* window, bool install_callbacks)
{
    return ImGui_ImplGlfw_Init(window, install_callbacks, GlfwClientApi_Vulkan);
}
bool ImGui_ImplGlfw_InitForOpenGL(GLFWwindow* window, bool install_callbacks)
{
    return ImGui_ImplGlfw_Init(window, install_callbacks, GlfwClientApi_OpenGL);
}
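/* The two wrappers above select the client API (Vulkan vs. OpenGL) so the GLFW
   backend knows which rendering path it serves. A minimal, hypothetical call
   site is sketched below -- the helper function and window setup are mine, not
   part of the backend; it assumes GLFW is already initialized: */
static bool setup_imgui_for_opengl(GLFWwindow* window)
{
    // `true` asks the backend to install its own GLFW callbacks
    // (keyboard, mouse, scroll); pass `false` to forward events manually.
    return ImGui_ImplGlfw_InitForOpenGL(window, true);
}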
int main(int argc, char * argv[]) try
{
    // Create and initialize GUI related objects
    window app(1280, 720, "CPP - Align Example"); // Simple window handling
    ImGui_ImplGlfw_Init(app, false);              // ImGui library initialization
    rs2::colorizer c;                             // Helper to colorize depth images
    texture renderer;                             // Helper for rendering images

    const rs2_stream align_to = RS2_STREAM_COLOR;
    // Create a rs2::align object.
    // rs2::align allows you to perform alignment of depth frames to other frames
    rs2::align align(align_to);

    // Create a pipeline to easily configure and start the camera
    rs2::pipeline pipe;
    // Calling pipeline's start() without any additional parameters will start the first device
    // with its default streams.
    // The start function returns the pipeline profile which the pipeline used to start the device
    rs2::pipeline_profile profile = pipe.start();

    // Each depth camera might have different units for depth pixels, so we get it here
    float depth_scale;
    // Using the pipeline's profile, we can retrieve the device that the pipeline uses
    if (!try_get_depth_scale(profile.get_device(), depth_scale))
    {
        std::cerr << "Device does not have a depth sensor" << std::endl;
        return EXIT_FAILURE;
    }

    // Define a variable for controlling the distance to clip
    float depth_clipping_distance = 1.f;

    while (app) // Application still alive?
    {
        // Using the align object, we block the application until a frameset is available
        rs2::frameset frameset;
        while (!frameset.first_or_default(RS2_STREAM_DEPTH) || !frameset.first_or_default(align_to))
        {
            frameset = pipe.wait_for_frames();
        }

        auto processed = align.process(frameset);

        // Trying to get both color and aligned depth frames
        rs2::video_frame color_frame = processed.get_color_frame();
        rs2::depth_frame aligned_depth_frame = processed.get_depth_frame();

        // If one of them is unavailable, continue iteration
        if (!aligned_depth_frame || !color_frame)
        {
            continue;
        }

        // Passing both frames to remove_background so it will "strip" the background
        // NOTE: in this example, we alter the buffer of the color frame, instead of copying it and altering the copy.
        // This behavior is not recommended in a real application since the color frame could be used elsewhere
        remove_background(color_frame, aligned_depth_frame, depth_scale, depth_clipping_distance);

        // Taking dimensions of the window for rendering purposes
        float w = static_cast<float>(app.width());
        float h = static_cast<float>(app.height());

        // At this point, "color_frame" is an altered color frame, stripped of its background
        // Calculating the position to place the frame in the window
        rect altered_color_frame_rect{ 0, 0, w, h };
        altered_color_frame_rect = altered_color_frame_rect.adjust_ratio({ static_cast<float>(color_frame.get_width()), static_cast<float>(color_frame.get_height()) });

        // Render aligned color
        renderer.render(color_frame, altered_color_frame_rect);

        // The example also renders the depth frame, as a picture-in-picture
        // Calculating the position to place the depth frame in the window
        rect pip_stream{ 0, 0, w / 5, h / 5 };
        pip_stream = pip_stream.adjust_ratio({ static_cast<float>(aligned_depth_frame.get_width()), static_cast<float>(aligned_depth_frame.get_height()) });
        pip_stream.x = altered_color_frame_rect.x + altered_color_frame_rect.w - pip_stream.w - (std::max(w, h) / 25);
        pip_stream.y = altered_color_frame_rect.y + altered_color_frame_rect.h - pip_stream.h - (std::max(w, h) / 25);

        // Render depth (as picture-in-picture)
        renderer.upload(c(aligned_depth_frame));
        renderer.show(pip_stream);

        // Using the ImGui library to provide a slider for selecting the depth clipping distance
        ImGui_ImplGlfw_NewFrame(1);
        render_slider({ 5.f, 0, w, h }, depth_clipping_distance);
        ImGui::Render();
    }
    return EXIT_SUCCESS;
}
catch (const rs2::error & e)
{
    std::cerr << "RealSense error calling " << e.get_failed_function() << "(" << e.get_failed_args() << "):\n    " << e.what() << std::endl;
    return EXIT_FAILURE;
}
catch (const std::exception & e)
{
    std::cerr << e.what() << std::endl;
    return EXIT_FAILURE;
}
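/* `try_get_depth_scale` is called above but not defined in this listing. A
   minimal sketch of what it might do: iterate the device's sensors, find the
   first depth sensor, and read its depth scale. The function name and the
   bool-plus-out-parameter shape come from the call site; the body is an
   assumption built from the standard librealsense sensor API: */
#include <librealsense2/rs.hpp>

bool try_get_depth_scale(rs2::device dev, float& scale)
{
    // Go over the device's sensors looking for a depth sensor
    for (rs2::sensor& sensor : dev.query_sensors())
    {
        if (rs2::depth_sensor dpt = sensor.as<rs2::depth_sensor>())
        {
            // The depth scale converts raw Z16 units into meters
            scale = dpt.get_depth_scale();
            return true;
        }
    }
    return false; // No depth sensor found on this device
}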
bool ImGui_ImplGlfw_InitForVulkan(GLFWwindow* window)
{
    return ImGui_ImplGlfw_Init(window, GlfwClientApi_Vulkan);
}
bool ImGui_ImplGlfw_InitForOpenGL(GLFWwindow* window)
{
    return ImGui_ImplGlfw_Init(window, GlfwClientApi_OpenGL);
}
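/* This second pair of wrappers forwards to an ImGui_ImplGlfw_Init overload that
   drops the install_callbacks flag, so in this variant callback installation is
   presumably decided inside the backend rather than by the caller. A hypothetical
   call site, for comparison with the two-argument overloads above: */
static bool setup_imgui_for_opengl_v2(GLFWwindow* window)
{
    // No install_callbacks parameter here, unlike the earlier overloads
    return ImGui_ImplGlfw_InitForOpenGL(window);
}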