float *getOpenGLDepthFromOSPPerspective(OSPFrameBuffer frameBuffer,
                                        const osp::vec2i &frameBufferSize)
{
  // compute fovy, aspect, zNear, zFar from the OpenGL projection matrix
  // this assumes fovy and aspect match the values set on the OSPRay
  // perspective camera!
  GLdouble glProjectionMatrix[16];
  glGetDoublev(GL_PROJECTION_MATRIX, glProjectionMatrix);

  const double m0  = glProjectionMatrix[0];
  const double m5  = glProjectionMatrix[5];
  const double m10 = glProjectionMatrix[10];
  const double m14 = glProjectionMatrix[14];

  const double k      = (m10 - 1.0) / (m10 + 1.0);
  const double fovy   = 2. * atan(1.0 / m5) * 180. / M_PI;
  const double aspect = m5 / m0;
  const double zNear  = (m14 * (1.0 - k)) / (2.0 * k);
  const double zFar   = k * zNear;

  // get camera direction and up vectors from the model view matrix
  // again, this assumes these values match those set on the OSPRay camera!
  GLdouble glModelViewMatrix[16];
  glGetDoublev(GL_MODELVIEW_MATRIX, glModelViewMatrix);

  const ospray::vec3f cameraUp(glModelViewMatrix[1],
                               glModelViewMatrix[5],
                               glModelViewMatrix[9]);

  const ospray::vec3f cameraDir(-glModelViewMatrix[2],
                                -glModelViewMatrix[6],
                                -glModelViewMatrix[10]);

  // map OSPRay depth buffer from provided frame buffer
  const float *ospDepthBuffer =
      (const float *)ospMapFrameBuffer(frameBuffer, OSP_FB_DEPTH);

  // get OpenGL depth from OSPRay depth
  float *glDepth = getOpenGLDepthFromOSPPerspective(fovy,
                                                    aspect,
                                                    zNear,
                                                    zFar,
                                                    (osp::vec3f &)cameraDir,
                                                    (osp::vec3f &)cameraUp,
                                                    ospDepthBuffer,
                                                    frameBufferSize);

  // unmap OSPRay depth buffer
  ospUnmapFrameBuffer(ospDepthBuffer, frameBuffer);

  return glDepth;
}
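// Hypothetical usage sketch (not part of the helper above): one way the
// returned buffer could be written into the OpenGL depth buffer so that
// locally rasterized geometry composites against the OSPRay image. The name
// uploadOSPDepthToOpenGL and the ownership convention for glDepth are
// assumptions made here for illustration.
void uploadOSPDepthToOpenGL(OSPFrameBuffer frameBuffer,
                            const osp::vec2i &frameBufferSize)
{
  float *glDepth =
      getOpenGLDepthFromOSPPerspective(frameBuffer, frameBufferSize);

  // write the converted depth values into the current depth buffer;
  // GL_ALWAYS ensures every written fragment passes the depth test
  glDepthFunc(GL_ALWAYS);
  glRasterPos2i(0, 0);
  glDrawPixels(frameBufferSize.x,
               frameBufferSize.y,
               GL_DEPTH_COMPONENT,
               GL_FLOAT,
               glDepth);
  glDepthFunc(GL_LESS);

  delete[] glDepth; // assuming the helper allocates the buffer with new[]
}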
void GLFWOSPRayWindow::display()
{
  // clock used to compute frame rate
  static auto displayStart = std::chrono::high_resolution_clock::now();

  if (showUi && uiCallback) {
    ImGuiWindowFlags flags = ImGuiWindowFlags_AlwaysAutoResize;
    ImGui::Begin(
        "Tutorial Controls (press 'g' to hide / show)", nullptr, flags);
    uiCallback();
    ImGui::End();
  }

  // if a display callback has been registered, call it
  if (displayCallback) {
    displayCallback(this);
  }

  // render OSPRay frame
  ospRenderFrame(framebuffer, renderer, OSP_FB_COLOR | OSP_FB_ACCUM);

  // map OSPRay frame buffer, update OpenGL texture with its contents, then
  // unmap
  uint32_t *fb = (uint32_t *)ospMapFrameBuffer(framebuffer, OSP_FB_COLOR);

  glBindTexture(GL_TEXTURE_2D, framebufferTexture);
  glTexImage2D(GL_TEXTURE_2D,
               0,
               GL_RGBA,
               windowSize.x,
               windowSize.y,
               0,
               GL_RGBA,
               GL_UNSIGNED_BYTE,
               fb);

  ospUnmapFrameBuffer(fb, framebuffer);

  // clear current OpenGL color buffer
  glClear(GL_COLOR_BUFFER_BIT);

  // render textured quad with OSPRay frame buffer contents
  glBegin(GL_QUADS);

  glTexCoord2f(0.f, 0.f);
  glVertex2f(0.f, 0.f);

  glTexCoord2f(0.f, 1.f);
  glVertex2f(0.f, windowSize.y);

  glTexCoord2f(1.f, 1.f);
  glVertex2f(windowSize.x, windowSize.y);

  glTexCoord2f(1.f, 0.f);
  glVertex2f(windowSize.x, 0.f);

  glEnd();

  if (showUi && uiCallback) {
    ImGui::Render();
  }

  // swap buffers
  glfwSwapBuffers(glfwWindow);

  // display frame rate in window title
  auto displayEnd = std::chrono::high_resolution_clock::now();
  auto durationMilliseconds =
      std::chrono::duration_cast<std::chrono::milliseconds>(displayEnd -
                                                            displayStart);
  displayStart = displayEnd;

  const float frameRate = 1000.f / float(durationMilliseconds.count());

  std::stringstream windowTitle;
  windowTitle << "OSPRay: " << std::setprecision(3) << frameRate << " fps";

  glfwSetWindowTitle(glfwWindow, windowTitle.str().c_str());
}
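// Minimal render-loop sketch that drives display() above, assuming the class
// owns the GLFW window handle (glfwWindow) used in display(). The mainLoop
// method name and the ImGui new-frame call are assumptions; the latter depends
// on which ImGui backend the application actually uses.
void GLFWOSPRayWindow::mainLoop()
{
  while (!glfwWindowShouldClose(glfwWindow)) {
    ImGui_ImplGlfwGL3_NewFrame(); // assumed ImGui GLFW/GL3 binding
    display();
    glfwPollEvents();
  }
}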
void FrameBuffer::unmap(const void *mem)
{
  ospUnmapFrameBuffer(mem, ospFrameBuffer);
}
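// A minimal sketch of the matching map() wrapper, assuming the class stores
// the underlying handle in ospFrameBuffer as unmap() above suggests.
const void *FrameBuffer::map(OSPFrameBufferChannel channel)
{
  return ospMapFrameBuffer(ospFrameBuffer, channel);
}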
int main(int ac, const char **av)
{
  // image size
  osp_vec2i imgSize;
  imgSize.x = 1024; // width
  imgSize.y = 768;  // height

  // camera
  float cam_pos[]  = {0.f, 0.f, 0.f};
  float cam_up[]   = {0.f, 1.f, 0.f};
  float cam_view[] = {0.1f, 0.f, 1.f};

  // triangle mesh data
  float vertex[] = { -1.0f, -1.0f, 3.0f, 0.f,
                     -1.0f,  1.0f, 3.0f, 0.f,
                      1.0f, -1.0f, 3.0f, 0.f,
                      0.1f,  0.1f, 0.3f, 0.f };
  float color[] =  { 0.9f, 0.5f, 0.5f, 1.0f,
                     0.8f, 0.8f, 0.8f, 1.0f,
                     0.8f, 0.8f, 0.8f, 1.0f,
                     0.5f, 0.9f, 0.5f, 1.0f };
  int32_t index[] = { 0, 1, 2,
                      1, 2, 3 };

  // initialize OSPRay; OSPRay parses (and removes) its commandline
  // parameters, e.g. "--osp:debug"
  ospInit(&ac, av);

  // create and setup camera
  OSPCamera camera = ospNewCamera("perspective");
  ospSetf(camera, "aspect", imgSize.x / (float)imgSize.y);
  ospSet3fv(camera, "pos", cam_pos);
  ospSet3fv(camera, "dir", cam_view);
  ospSet3fv(camera, "up",  cam_up);
  ospCommit(camera); // commit each object to indicate modifications are done

  // create and setup model and mesh
  OSPGeometry mesh = ospNewGeometry("triangles");
  OSPData data = ospNewData(4, OSP_FLOAT3A, vertex, 0); // OSP_FLOAT3 format is also supported for vertex positions (currently not on MIC)
  ospCommit(data);
  ospSetData(mesh, "vertex", data);

  data = ospNewData(4, OSP_FLOAT4, color, 0);
  ospCommit(data);
  ospSetData(mesh, "vertex.color", data);

  data = ospNewData(2, OSP_INT3, index, 0); // OSP_INT4 format is also supported for triangle indices
  ospCommit(data);
  ospSetData(mesh, "index", data);

  ospCommit(mesh);

  OSPModel world = ospNewModel();
  ospAddGeometry(world, mesh);
  ospCommit(world);

  // create and setup renderer
  OSPRenderer renderer = ospNewRenderer("scivis"); // choose Scientific Visualization renderer
  ospSet1f(renderer, "aoWeight", 1.0f);            // with full Ambient Occlusion
  ospSet1i(renderer, "aoSamples", 1);
  ospSetObject(renderer, "model",  world);
  ospSetObject(renderer, "camera", camera);
  ospCommit(renderer);

  // create and setup framebuffer
  OSPFrameBuffer framebuffer = ospNewFrameBuffer(&imgSize, OSP_FB_SRGBA,
                                                 OSP_FB_COLOR | /*OSP_FB_DEPTH |*/ OSP_FB_ACCUM);
  ospFrameBufferClear(framebuffer, OSP_FB_COLOR | OSP_FB_ACCUM);

  // render one frame
  ospRenderFrame(framebuffer, renderer, OSP_FB_COLOR | OSP_FB_ACCUM);

  // access framebuffer and write its content as PPM file
  const uint32_t *fb = (uint32_t *)ospMapFrameBuffer(framebuffer, OSP_FB_COLOR);
  writePPM("firstFrameC.ppm", &imgSize, fb);
  ospUnmapFrameBuffer(fb, framebuffer);

  // render 10 more frames, which are accumulated to result in a better converged image
  for (int frames = 0; frames < 10; frames++)
    ospRenderFrame(framebuffer, renderer, OSP_FB_COLOR | OSP_FB_ACCUM);

  fb = (uint32_t *)ospMapFrameBuffer(framebuffer, OSP_FB_COLOR);
  writePPM("accumulatedFrameC.ppm", &imgSize, fb);
  ospUnmapFrameBuffer(fb, framebuffer);

  return 0;
}
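// writePPM() is called above but not defined in this listing. A minimal sketch
// of such a helper, assuming the mapped OSP_FB_COLOR buffer holds 8-bit RGBA
// pixels with the origin in the lower-left corner (osp_vec2i comes from
// <ospray/ospray.h>, already required by main()):
#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>

void writePPM(const char *fileName, const osp_vec2i *size, const uint32_t *pixel)
{
  FILE *file = fopen(fileName, "wb");
  fprintf(file, "P6\n%i %i\n255\n", size->x, size->y);

  unsigned char *out = (unsigned char *)malloc(3 * size->x);
  for (int y = 0; y < size->y; y++) {
    // flip vertically: PPM stores the top row first
    const unsigned char *in =
        (const unsigned char *)&pixel[(size->y - 1 - y) * size->x];
    for (int x = 0; x < size->x; x++) {
      out[3 * x + 0] = in[4 * x + 0]; // R
      out[3 * x + 1] = in[4 * x + 1]; // G
      out[3 * x + 2] = in[4 * x + 2]; // B (alpha is dropped)
    }
    fwrite(out, 3 * size->x, sizeof(char), file);
  }

  free(out);
  fclose(file);
}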