void get_model_matrix(float result[16])
{
    mat4f_identity(result);
    if(FIT_TO_VIEW_AND_ROTATE == 0)
    {
        /* Translate the model to where we were asked to put it */
        float translate[16];
        mat4f_translateVec_new(translate, placeToPutModel);

        /* Do inches to meters conversion if we are asked to. */
        float scale[16];
        mat4f_identity(scale);
        if(INCHES_TO_METERS)
        {
            float inchesToMeters = 1/39.3701;
            mat4f_scale_new(scale, inchesToMeters, inchesToMeters, inchesToMeters);
        }
        mat4f_mult_mat4f_new(result, translate, scale);
        return;
    }

    /* Change angle for animation. */
    int count = glutGet(GLUT_ELAPSED_TIME) % 10000; // get a counter that repeats every 10 seconds

    /* Animate the model if there is animation information available. */
    kuhl_update_model_file_ogl3(modelFilename, 0, count/1000.0);
    dgr_setget("count", &count, sizeof(int));

    /* Calculate the width/height/depth of the bounding box and
     * determine which one of the three is the largest. Then, scale
     * the scene by 1/(largest value) to ensure that it fits in our
     * view frustum. */
    float bb_min[3], bb_max[3], bb_center[3];
    kuhl_model_bounding_box(modelFilename, bb_min, bb_max, bb_center);
#define mymax(a,b) (a>b?a:b)
    float tmp;
    tmp = bb_max[0] - bb_min[0];
    tmp = mymax(bb_max[1] - bb_min[1], tmp);
    tmp = mymax(bb_max[2] - bb_min[2], tmp);
    tmp = 1.f / tmp;
#undef mymax

    float scaleBoundBox[16], moveToOrigin[16], moveToLookPoint[16];
    mat4f_translate_new(moveToOrigin, -bb_center[0], -bb_center[1], -bb_center[2]); // move to origin
    // printf("Scaling by factor %f\n", tmp);
    mat4f_scale_new(scaleBoundBox, tmp, tmp, tmp); // scale model based on bounding box size
    mat4f_translateVec_new(moveToLookPoint, placeToPutModel);

    mat4f_mult_mat4f_new(result, moveToOrigin, result);
    mat4f_mult_mat4f_new(result, scaleBoundBox, result);
    mat4f_mult_mat4f_new(result, moveToLookPoint, result);
}
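/* Illustrative sketch (not called by the viewer): this hypothetical
 * helper, fit_scale_factor(), makes the 1/(largest dimension) logic
 * from get_model_matrix() explicit. A model scaled by this factor
 * fits inside a unit cube, which keeps it inside the view frustum
 * regardless of the model's original size. */
static float fit_scale_factor(const float bb_min[3], const float bb_max[3])
{
    float largest = bb_max[0] - bb_min[0]; // start with the width
    for(int i=1; i<3; i++) // then check height and depth
    {
        float dim = bb_max[i] - bb_min[i];
        if(dim > largest)
            largest = dim;
    }
    return 1.0f / largest;
}
/* Example: a 2x4x1 bounding box yields 1/4, so the scaled box is
 * .5 x 1 x .25 and its largest side is exactly 1. */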
static void viewmat_get_vrpn(float viewmatrix[16], int viewportNum)
{
    if(viewmat_vrpn_obj == NULL)
        return;

    float pos[3] = { 0,0,0 };
    float rotMat[16], posMat[16];
    vrpn_get(viewmat_vrpn_obj, NULL, pos, rotMat);
    mat4f_translate_new(posMat, -pos[0], -pos[1], -pos[2]); // position
    viewmat_fix_rotation(rotMat);
    mat4f_transpose(rotMat); /* orientation sensor rotates camera, not world */

    float cyclopsViewMatrix[16];
    mat4f_mult_mat4f_new(cyclopsViewMatrix, rotMat, posMat);

    viewmat_get_generic(viewmatrix, cyclopsViewMatrix, viewportNum);
}
/** Given a view matrix for a single eye, return a new view matrix. If
 * there are two viewports (one for each eye), different matrices will
 * be returned depending on the viewport.
 *
 * @param viewmatrix The new view matrix (filled in by this function).
 *
 * @param cyclopsViewMatrix A single view matrix. If there are two
 * eyes, this view matrix represents the point between the eyes.
 *
 * @param viewportNum The viewport number.
 */
static void viewmat_get_generic(float viewmatrix[16], const float cyclopsViewMatrix[16], const int viewportNum)
{
    /* Update the view matrix based on which eye we are rendering */
    float eyeDist = 0.055; // TODO: Make this configurable.
    viewmat_eye eye = viewmat_viewport_to_eye(viewportNum);

    float eyeShift = 0;
    if(eye == VIEWMAT_EYE_LEFT)
        eyeShift = -eyeDist/2.0;
    else if(eye == VIEWMAT_EYE_RIGHT)
        eyeShift = eyeDist/2.0;

    float shiftMatrix[16];
    /* Negate eyeShift because the matrix would shift the world, not
     * the eye, by default. */
    mat4f_translate_new(shiftMatrix, -eyeShift, 0, 0);

    /* Adjust the view matrix by the eye offset */
    mat4f_mult_mat4f_new(viewmatrix, shiftMatrix, cyclopsViewMatrix);
}
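/* Illustrative sketch (hypothetical helper, not part of viewmat):
 * derive a per-eye view matrix from a cyclops view matrix exactly as
 * viewmat_get_generic() does, but with the matrix product written
 * out. Assuming OpenGL's column-major layout and a view matrix whose
 * bottom row is (0,0,0,1), premultiplying by translate(tx,0,0) only
 * changes the X translation entry at index 12. */
static void sketch_eye_view(float out[16], const float cyclops[16],
                            float eyeDist, int isLeftEye)
{
    /* Half of the interpupillary distance, negated for the left eye. */
    float eyeShift = (isLeftEye ? -1.0f : 1.0f) * eyeDist/2.0f;

    for(int i=0; i<16; i++)
        out[i] = cyclops[i];

    /* shiftMatrix * cyclops: translating the world by -eyeShift moves
     * the eye by +eyeShift along the camera's right vector. */
    out[12] += -eyeShift;
}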
void display()
{
    /* If we are using DGR, send or receive data to keep multiple
     * processes/computers synchronized. */
    dgr_update();

    /* Get current frames per second calculations. */
    float fps = kuhl_getfps(&fps_state);

    if(dgr_is_enabled() == 0 || dgr_is_master())
    {
        // If DGR is being used, only display the dgr counter if we
        // are the master process.

        // Check if FPS value was just updated by kuhl_getfps()
        if(fps_state.frame == 0)
        {
            char label[1024];
            snprintf(label, 1024, "FPS: %0.1f", fps);

            /* Delete old label if it exists */
            if(fpsLabel != 0)
                glDeleteTextures(1, &fpsLabel);

            /* Make a new label */
            float labelColor[3] = { 1,1,1 };
            float labelBg[4] = { 0,0,0,.3 };

            /* Change the last parameter (point size) to adjust the
             * size of the texture that the text is rendered into. */
            fpsLabelAspectRatio = kuhl_make_label(label, &fpsLabel,
                                                  labelColor, labelBg, 24);

            if(fpsLabel != 0)
                kuhl_geometry_texture(&labelQuad, fpsLabel, "tex", 1);
        }
    }

    /* Ensure the slaves use the same render style as the master
     * process. */
    dgr_setget("style", &renderStyle, sizeof(int));

    /* Render the scene once for each viewport. Frequently one
     * viewport will fill the entire screen. However, this loop will
     * run twice for HMDs (once for the left eye and once for the
     * right). */
    viewmat_begin_frame();
    for(int viewportID=0; viewportID<viewmat_num_viewports(); viewportID++)
    {
        viewmat_begin_eye(viewportID);

        /* Where is the viewport that we are drawing onto and what is its size? */
        int viewport[4]; // x,y of lower left corner, width, height
        viewmat_get_viewport(viewport, viewportID);
        glViewport(viewport[0], viewport[1], viewport[2], viewport[3]);

        /* Clear the current viewport. Without glScissor(), glClear()
         * clears the entire screen. We could call glClear() before
         * this viewport loop---but in order for all variations of
         * this code to work (Oculus support, etc.), we can only draw
         * after viewmat_begin_eye(). */
        glScissor(viewport[0], viewport[1], viewport[2], viewport[3]);
        glEnable(GL_SCISSOR_TEST);
        glClearColor(.2,.2,.2,0); // set clear color to grey
        glClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT);
        glDisable(GL_SCISSOR_TEST);
        glEnable(GL_DEPTH_TEST); // turn on depth testing
        kuhl_errorcheck();

        /* Turn on blending (note: if you are using transparent
         * textures, the transparency may not look correct unless you
         * draw farther items before closer items). */
        glEnable(GL_BLEND);
        glBlendEquationSeparate(GL_FUNC_ADD, GL_FUNC_ADD);
        glBlendFuncSeparate(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA,
                            GL_ONE, GL_ZERO);

        /* Get the view or camera matrix; update the frustum values if needed. */
        float viewMat[16], perspective[16];
        viewmat_get(viewMat, perspective, viewportID);

        glUseProgram(program);
        kuhl_errorcheck();

        /* Send the perspective projection matrix to the vertex program. */
        glUniformMatrix4fv(kuhl_get_uniform("Projection"),
                           1, // number of 4x4 float matrices
                           0, // transpose
                           perspective); // value

        float modelMat[16];
        get_model_matrix(modelMat);
        float modelview[16];
        mat4f_mult_mat4f_new(modelview, viewMat, modelMat); // modelview = view * model

        /* Send the modelview matrix to the vertex program. */
        glUniformMatrix4fv(kuhl_get_uniform("ModelView"),
                           1, // number of 4x4 float matrices
                           0, // transpose
                           modelview); // value

        glUniform1i(kuhl_get_uniform("renderStyle"), renderStyle);

        /* Copy far plane value into vertex program so we can render
         * the depth buffer. */
        float f[6]; // left, right, bottom, top, near>0, far>0
        projmat_get_frustum(f, viewport[2], viewport[3]);
        glUniform1f(kuhl_get_uniform("farPlane"), f[5]);

        kuhl_errorcheck();
        kuhl_geometry_draw(modelgeom); /* Draw the model */
        kuhl_errorcheck();

        if(showOrigin)
        {
            /* Save current line width */
            GLfloat origLineWidth;
            glGetFloatv(GL_LINE_WIDTH, &origLineWidth);
            glLineWidth(4); // make lines thick

            /* Object coordinate system origin */
            kuhl_geometry_draw(origingeom); /* Draw the origin marker */

            /* World coordinate origin */
            mat4f_copy(modelview, viewMat);
            glUniformMatrix4fv(kuhl_get_uniform("ModelView"),
                               1, // number of 4x4 float matrices
                               0, // transpose
                               modelview); // value
            kuhl_geometry_draw(origingeom); /* Draw the origin marker */

            /* Restore line width */
            glLineWidth(origLineWidth);
        }

        if(dgr_is_enabled() == 0 || dgr_is_master())
        {
            /* The shape of the frames per second quad depends on the
             * aspect ratio of the label texture and the aspect ratio
             * of the window (because we are placing the quad in
             * normalized device coordinates). */
            int windowWidth, windowHeight;
            viewmat_window_size(&windowWidth, &windowHeight);
            float windowAspect = windowWidth / (float)windowHeight;
            float stretchLabel[16];
            mat4f_scale_new(stretchLabel,
                            1/8.0 * fpsLabelAspectRatio / windowAspect,
                            1/8.0, 1);

            /* Position label in the upper left corner of the screen */
            float transLabel[16];
            mat4f_translate_new(transLabel, -.9, .8, 0);
            mat4f_mult_mat4f_new(modelview, transLabel, stretchLabel);
            glUniformMatrix4fv(kuhl_get_uniform("ModelView"), 1, 0, modelview);

            /* Make sure we don't use a projection matrix */
            float identity[16];
            mat4f_identity(identity);
            glUniformMatrix4fv(kuhl_get_uniform("Projection"), 1, 0, identity);

            /* Don't use depth testing and make sure we use the texture
             * rendering style */
            glDisable(GL_DEPTH_TEST);
            glUniform1i(kuhl_get_uniform("renderStyle"), 1);
            kuhl_geometry_draw(&labelQuad); /* Draw the quad */
            glEnable(GL_DEPTH_TEST);
            kuhl_errorcheck();
        }

        glUseProgram(0); // stop using a GLSL program.

    } // finish viewport loop
    viewmat_end_frame();

    /* Update the model for the next frame based on the time. We
     * convert the time to seconds and then use mod to cause the
     * animation to repeat. */
    int time = glutGet(GLUT_ELAPSED_TIME);
    dgr_setget("time", &time, sizeof(int));
    kuhl_update_model(modelgeom, 0, ((time%10000)/1000.0));

    /* Check for errors. If there are errors, consider adding more
     * calls to kuhl_errorcheck() in your code. */
    kuhl_errorcheck();

    //kuhl_video_record("videoout", 30);

    /* Ask GLUT to call display() again. We shouldn't call display()
     * ourselves recursively because it will not leave time for GLUT
     * to call other callback functions for when a key is pressed,
     * the window is resized, etc. */
    glutPostRedisplay();
}
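/* Illustrative sketch: the scale factors display() uses to keep the
 * FPS label undistorted. Normalized device coordinates span -1..1 in
 * both axes regardless of window shape, so the quad must be divided
 * by the window aspect ratio and multiplied by the label texture's
 * own aspect ratio. label_scale() is a hypothetical helper that
 * isolates that arithmetic. */
static void label_scale(float out[2], float labelAspect,
                        int windowWidth, int windowHeight)
{
    float windowAspect = windowWidth / (float)windowHeight;
    out[0] = 1/8.0f * labelAspect / windowAspect; // X scale in NDC
    out[1] = 1/8.0f;                              // Y scale in NDC
}
/* Example: a 4:1 label texture in a 16:9 window gets an X scale of
 * (1/8)*4/(16/9) = 0.28125, so the on-screen quad stays 4x wider
 * than it is tall. */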
/** Get view and projection matrices appropriate for the Oculus HMD */
static void viewmat_get_hmd_oculus(float viewmatrix[16], float projmatrix[16],
                                   int viewportID)
{
#ifndef MISSING_OVR
    /* Oculus recommends the order that we should render eyes. We
     * assume that smaller viewportIDs are rendered first. So, we need
     * to map the viewportIDs to the specific Oculus HMD eye. The
     * "eye" variable will be set to either ovrEye_Left (if we are
     * rendering the left eye) or ovrEye_Right (if we are rendering
     * the right eye). */
    ovrEyeType eye = hmd->EyeRenderOrder[viewportID];

    /* Oculus doesn't provide us with easy access to the view frustum
     * information. We get the projection matrix directly from
     * libovr. */
    ovrMatrix4f ovrpersp = ovrMatrix4f_Projection(hmd->DefaultEyeFov[eye],
                                                  0.5, 500, 1);
    mat4f_setRow(projmatrix, &(ovrpersp.M[0][0]), 0);
    mat4f_setRow(projmatrix, &(ovrpersp.M[1][0]), 1);
    mat4f_setRow(projmatrix, &(ovrpersp.M[2][0]), 2);
    mat4f_setRow(projmatrix, &(ovrpersp.M[3][0]), 3);

    float offsetMat[16], rotMat[16], posMat[16], initPosMat[16];
    mat4f_identity(offsetMat);  // viewpoint offset (IPD, etc.)
    mat4f_identity(rotMat);     // tracking system rotation
    mat4f_identity(posMat);     // tracking system position
    mat4f_identity(initPosMat); // camera starting location

    /* Construct posMat and rotMat matrices which indicate the
     * position and orientation of the HMD. */
    if(viewmat_vrpn_obj) // get position from VRPN
    {
        /* Get the offset for the left and right eyes from Oculus. If
         * you are using a separate tracking system, you may also want
         * to apply an offset here between the tracked point and the
         * eye location. */
        mat4f_translate_new(offsetMat,
                            eye_rdesc[eye].HmdToEyeViewOffset.x, // left & right IPD offset
                            eye_rdesc[eye].HmdToEyeViewOffset.y, // vertical offset
                            eye_rdesc[eye].HmdToEyeViewOffset.z); // forward/back offset

        float pos[3] = { 0,0,0 };
        vrpn_get(viewmat_vrpn_obj, NULL, pos, rotMat);
        mat4f_translate_new(posMat, -pos[0], -pos[1], -pos[2]); // position
        viewmat_fix_rotation(rotMat);
    }
    else // get position from Oculus tracker
    {
        pose[eye] = ovrHmd_GetHmdPosePerEye(hmd, eye);
        mat4f_translate_new(posMat, // position (includes IPD offset)
                            -pose[eye].Position.x,
                            -pose[eye].Position.y,
                            -pose[eye].Position.z);
        mat4f_rotateQuat_new(rotMat, // rotation
                             pose[eye].Orientation.x,
                             pose[eye].Orientation.y,
                             pose[eye].Orientation.z,
                             pose[eye].Orientation.w);

        /* Starting point: translate the world based on the initial
         * camera position specified in viewmat_init(). You may choose
         * to initialize the camera position with y=1.5 meters to
         * approximate a normal standing eyeheight. */
        float initPosVec[3];
        vec3f_scalarMult_new(initPosVec, oculus_initialPos, -1.0f);
        mat4f_translateVec_new(initPosMat, initPosVec);
        // TODO: Could also get eyeheight via ovrHmd_GetFloat(hmd, OVR_KEY_EYE_HEIGHT, 1.65)
    }
    mat4f_transpose(rotMat); /* orientation sensor rotates camera, not world */

    // viewmatrix = offsetMat * rotMat * posMat * initPosMat
    mat4f_mult_mat4f_new(viewmatrix, offsetMat, rotMat); // offsetMat is identity if we are using the Oculus tracker
    mat4f_mult_mat4f_new(viewmatrix, viewmatrix, posMat);
    mat4f_mult_mat4f_new(viewmatrix, viewmatrix, initPosMat);

    if(0)
    {
        printf("ViewportID=%d; eye=%s\n", viewportID,
               eye == ovrEye_Left ? "left" : "right");
        printf("Eye offset according to OVR (only used if VRPN is used): ");
        mat4f_print(offsetMat);
        printf("Rotation sensing (from OVR or VRPN): ");
        mat4f_print(rotMat);
        printf("Position tracking (from OVR or VRPN): ");
        mat4f_print(posMat);
        printf("Initial position (as set in viewmat_init()): ");
        mat4f_print(initPosMat);
        printf("Final view matrix: ");
        mat4f_print(viewmatrix);
    }

#else
    /* We shouldn't ever get here, but we'll generate a generic view
     * and projection matrix just in case... */
    mat4f_lookat_new(viewmatrix, 0,1.55,0, 0,1.55,-1, 0,1,0);
    mat4f_perspective_new(projmatrix, 50, 1, 0.5, 500);
#endif
}