/// Push the current camera parameters to the underlying OSPRay camera
/// handle — lazily creating that handle on first use — and commit them.
void PerspectiveCamera::commit()
{
  // Lazily instantiate the OSPRay-side camera object.
  if (ospCamera == nullptr)
    create();

  // The view direction is derived from the look-at point and eye position.
  const auto viewDir = at - from;

  ospSetVec3f(ospCamera, "pos", from);
  ospSetVec3f(ospCamera, "dir", viewDir);
  ospSetVec3f(ospCamera, "up", up);
  ospSetf(ospCamera, "fovy", fovy);
  ospSetf(ospCamera, "aspect", aspect);
  ospCommit(ospCamera);
}
/// Handle a window resize: rebuild the OSPRay framebuffer at the new
/// resolution, reset the OpenGL viewport and orthographic projection, and
/// keep the arcball camera's aspect ratio in sync.
void GLFWOSPRayWindow::reshape(const ospcommon::vec2i &newWindowSize)
{
  windowSize = newWindowSize;

  // Drop any previously allocated framebuffer before making a new one.
  if (framebuffer != nullptr)
    ospRelease(framebuffer);

  // Allocate a framebuffer matching the new window dimensions.
  framebuffer = ospNewFrameBuffer(*reinterpret_cast<osp::vec2i *>(&windowSize),
                                  OSP_FB_SRGBA,
                                  OSP_FB_COLOR | OSP_FB_ACCUM);

  // Re-establish the OpenGL viewport and an orthographic projection that
  // maps framebuffer pixels 1:1 onto window coordinates.
  glViewport(0, 0, windowSize.x, windowSize.y);
  glMatrixMode(GL_PROJECTION);
  glLoadIdentity();
  glOrtho(0.0, windowSize.x, 0.0, windowSize.y, -1.0, 1.0);

  // Propagate the new size to the interaction model and the camera.
  arcballCamera->updateWindowSize(windowSize);
  const float newAspect = windowSize.x / static_cast<float>(windowSize.y);
  ospSetf(camera, "aspect", newAspect);
  ospCommit(camera);
}
/// Mouse-move handler implementing arcball interaction: left drag rotates,
/// right drag zooms, middle drag pans. Any change clears the accumulation
/// buffer and re-commits the camera.
void GLFWOSPRayWindow::motion(const ospcommon::vec2f &position)
{
  // Sentinel (-1,-1) marks "no previous sample yet" (first event after
  // the cursor enters the window).
  static ospcommon::vec2f previousMouse(-1);

  const ospcommon::vec2f mouse(position.x, position.y);

  if (previousMouse != ospcommon::vec2f(-1)) {
    const ospcommon::vec2f prev = previousMouse;

    const bool leftDown =
        glfwGetMouseButton(glfwWindow, GLFW_MOUSE_BUTTON_LEFT) == GLFW_PRESS;
    const bool rightDown =
        glfwGetMouseButton(glfwWindow, GLFW_MOUSE_BUTTON_RIGHT) == GLFW_PRESS;
    const bool middleDown =
        glfwGetMouseButton(glfwWindow, GLFW_MOUSE_BUTTON_MIDDLE) == GLFW_PRESS;

    const bool cameraChanged = leftDown || rightDown || middleDown;

    // Map a pixel position to normalized device coordinates in [-1, 1].
    auto toNDC = [&](const ospcommon::vec2f &p) {
      return ospcommon::vec2f(
          ospcommon::clamp(p.x * 2.f / windowSize.x - 1.f, -1.f, 1.f),
          ospcommon::clamp(p.y * 2.f / windowSize.y - 1.f, -1.f, 1.f));
    };

    if (leftDown) {
      arcballCamera->rotate(toNDC(prev), toNDC(mouse));
    } else if (rightDown) {
      arcballCamera->zoom(mouse.y - prev.y);
    } else if (middleDown) {
      // Note the flipped y: screen y grows downward, pan y grows upward.
      arcballCamera->pan(ospcommon::vec2f(mouse.x - prev.x, prev.y - mouse.y));
    }

    if (cameraChanged) {
      // Any camera motion invalidates the accumulated image.
      ospFrameBufferClear(framebuffer, OSP_FB_COLOR | OSP_FB_ACCUM);
      ospSetf(camera, "aspect", windowSize.x / float(windowSize.y));
      ospSetVec3f(camera,
                  "pos",
                  osp::vec3f{arcballCamera->eyePos().x,
                             arcballCamera->eyePos().y,
                             arcballCamera->eyePos().z});
      ospSetVec3f(camera,
                  "dir",
                  osp::vec3f{arcballCamera->lookDir().x,
                             arcballCamera->lookDir().y,
                             arcballCamera->lookDir().z});
      ospSetVec3f(camera,
                  "up",
                  osp::vec3f{arcballCamera->upDir().x,
                             arcballCamera->upDir().y,
                             arcballCamera->upDir().z});
      ospCommit(camera);
    }
  }

  previousMouse = mouse;
}
// Construct the single application window: boots GLFW and the ImGui GL3
// binding, configures initial OpenGL state, wires input callbacks, and does
// the initial OSPRay camera/renderer setup. Only one instance may exist at
// a time (callbacks route through the static activeWindow pointer).
// Throws std::runtime_error on duplicate construction or GLFW failure.
GLFWOSPRayWindow::GLFWOSPRayWindow(const ospcommon::vec2i &windowSize,
                                   const ospcommon::box3f &worldBounds,
                                   OSPModel model,
                                   OSPRenderer renderer)
    : windowSize(windowSize),
      worldBounds(worldBounds),
      model(model),
      renderer(renderer)
{
  // Enforce the singleton: the capture-free GLFW lambdas below can only
  // reach the window through activeWindow.
  if (activeWindow != nullptr)
    throw std::runtime_error("Cannot create more than one GLFWOSPRayWindow!");

  activeWindow = this;

  // initialize GLFW
  if (!glfwInit())
    throw std::runtime_error("Failed to initialize GLFW!");

  // create GLFW window
  glfwWindow = glfwCreateWindow(
      windowSize.x, windowSize.y, "OSPRay Tutorial", NULL, NULL);

  if (!glfwWindow) {
    glfwTerminate();
    throw std::runtime_error("Failed to create GLFW window!");
  }

  // make the window's context current (must precede ImGui init and GL calls)
  glfwMakeContextCurrent(glfwWindow);

  ImGui_ImplGlfwGL3_Init(glfwWindow, true);

  // set initial OpenGL state
  glEnable(GL_TEXTURE_2D);
  glDisable(GL_LIGHTING);

  // create OpenGL frame buffer texture (the rendered OSPRay framebuffer is
  // uploaded into this texture for display)
  glGenTextures(1, &framebufferTexture);
  glEnable(GL_TEXTURE_2D);
  glBindTexture(GL_TEXTURE_2D, framebufferTexture);
  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);

  // set GLFW callbacks

  // window resize -> rebuild framebuffer and camera aspect
  glfwSetFramebufferSizeCallback(
      glfwWindow, [](GLFWwindow *, int newWidth, int newHeight) {
        activeWindow->reshape(ospcommon::vec2i{newWidth, newHeight});
      });

  // mouse motion -> camera interaction, but only when ImGui does not want
  // the mouse (e.g. the cursor is over a UI widget)
  glfwSetCursorPosCallback(glfwWindow, [](GLFWwindow *, double x, double y) {
    ImGuiIO &io = ImGui::GetIO();
    if (!io.WantCaptureMouse)
      activeWindow->motion(ospcommon::vec2f{float(x), float(y)});
  });

  // 'g' key toggles the UI overlay
  glfwSetKeyCallback(
      glfwWindow, [](GLFWwindow *, int key, int, int action, int) {
        if (action == GLFW_PRESS) {
          switch (key) {
          case GLFW_KEY_G:
            activeWindow->showUi = !(activeWindow->showUi);
            break;
          }
        }
      });

  // OSPRay setup

  // set the model on the renderer
  ospSetObject(renderer, "model", model);

  // create the arcball camera model, framing the world bounds
  arcballCamera = std::unique_ptr<ArcballCamera>(
      new ArcballCamera(worldBounds, windowSize));

  // create camera
  camera = ospNewCamera("perspective");
  ospSetf(camera, "aspect", windowSize.x / float(windowSize.y));
  ospSetVec3f(camera,
              "pos",
              osp::vec3f{arcballCamera->eyePos().x,
                         arcballCamera->eyePos().y,
                         arcballCamera->eyePos().z});
  ospSetVec3f(camera,
              "dir",
              osp::vec3f{arcballCamera->lookDir().x,
                         arcballCamera->lookDir().y,
                         arcballCamera->lookDir().z});
  ospSetVec3f(camera,
              "up",
              osp::vec3f{arcballCamera->upDir().x,
                         arcballCamera->upDir().y,
                         arcballCamera->upDir().z});
  ospCommit(camera);

  // set camera on the renderer
  ospSetObject(renderer, "camera", camera);

  // finally, commit the renderer
  ospCommit(renderer);

  // trigger window reshape events with current window size (the actual
  // framebuffer size may differ from the requested window size, e.g. on
  // high-DPI displays)
  glfwGetFramebufferSize(glfwWindow, &this->windowSize.x, &this->windowSize.y);

  reshape(this->windowSize);
}
int main(int ac, const char **av) { // image size osp_vec2i imgSize; imgSize.x = 1024; // width imgSize.y = 768; // height // camera float cam_pos[] = {0.f, 0.f, 0.f}; float cam_up [] = {0.f, 1.f, 0.f}; float cam_view [] = {0.1f, 0.f, 1.f}; // triangle mesh data float vertex[] = { -1.0f, -1.0f, 3.0f, 0.f, -1.0f, 1.0f, 3.0f, 0.f, 1.0f, -1.0f, 3.0f, 0.f, 0.1f, 0.1f, 0.3f, 0.f }; float color[] = { 0.9f, 0.5f, 0.5f, 1.0f, 0.8f, 0.8f, 0.8f, 1.0f, 0.8f, 0.8f, 0.8f, 1.0f, 0.5f, 0.9f, 0.5f, 1.0f }; int32_t index[] = { 0, 1, 2, 1, 2, 3 }; // initialize OSPRay; OSPRay parses (and removes) its commandline parameters, e.g. "--osp:debug" ospInit(&ac, av); // create and setup camera OSPCamera camera = ospNewCamera("perspective"); ospSetf(camera, "aspect", imgSize.x/(float)imgSize.y); ospSet3fv(camera, "pos", cam_pos); ospSet3fv(camera, "dir", cam_view); ospSet3fv(camera, "up", cam_up); ospCommit(camera); // commit each object to indicate modifications are done // create and setup model and mesh OSPGeometry mesh = ospNewGeometry("triangles"); OSPData data = ospNewData(4, OSP_FLOAT3A, vertex, 0); // OSP_FLOAT3 format is also supported for vertex positions (currently not on MIC) ospCommit(data); ospSetData(mesh, "vertex", data); data = ospNewData(4, OSP_FLOAT4, color, 0); ospCommit(data); ospSetData(mesh, "vertex.color", data); data = ospNewData(2, OSP_INT3, index, 0); // OSP_INT4 format is also supported for triangle indices ospCommit(data); ospSetData(mesh, "index", data); ospCommit(mesh); OSPModel world = ospNewModel(); ospAddGeometry(world, mesh); ospCommit(world); // create and setup renderer OSPRenderer renderer = ospNewRenderer("scivis"); // choose Scientific Visualization renderer ospSet1f(renderer, "aoWeight", 1.0f); // with full Ambient Occlusion ospSet1i(renderer, "aoSamples", 1); ospSetObject(renderer, "model", world); ospSetObject(renderer, "camera", camera); ospCommit(renderer); // create and setup framebuffer OSPFrameBuffer framebuffer = ospNewFrameBuffer(&imgSize, 
OSP_FB_SRGBA, OSP_FB_COLOR | /*OSP_FB_DEPTH |*/ OSP_FB_ACCUM); ospFrameBufferClear(framebuffer, OSP_FB_COLOR | OSP_FB_ACCUM); // render one frame ospRenderFrame(framebuffer, renderer, OSP_FB_COLOR | OSP_FB_ACCUM); // access framebuffer and write its content as PPM file const uint32_t * fb = (uint32_t*)ospMapFrameBuffer(framebuffer, OSP_FB_COLOR); writePPM("firstFrameC.ppm", &imgSize, fb); ospUnmapFrameBuffer(fb, framebuffer); // render 10 more frames, which are accumulated to result in a better converged image for (int frames = 0; frames < 10; frames++) ospRenderFrame(framebuffer, renderer, OSP_FB_COLOR | OSP_FB_ACCUM); fb = (uint32_t*)ospMapFrameBuffer(framebuffer, OSP_FB_COLOR); writePPM("accumulatedFrameC.ppm", &imgSize, fb); ospUnmapFrameBuffer(fb, framebuffer); return 0; }