Code example #1
File: viewer.c Project: runewake2/opengl-examples
void get_model_matrix(float result[16])
{
	mat4f_identity(result);
	if(fitToView == 0)
	{
		/* Translate the model to where we were asked to put it */
		float translate[16];
		mat4f_translateVec_new(translate, placeToPutModel);

		/* Do inches to meters conversion if we are asked to. */
		float scale[16];
		mat4f_identity(scale);
		if(INCHES_TO_METERS)
		{
			float inchesToMeters=1/39.3701;
			mat4f_scale_new(scale, inchesToMeters, inchesToMeters, inchesToMeters);
		}
		mat4f_mult_mat4f_new(result, translate, scale);
		return;
	}
	
	/* Get a matrix to scale+translate the model based on the bounding
	 * box. If the last parameter is 1, the bounding box will sit on
	 * the XZ plane. If it is set to 0, the bounding box will be
	 * centered at the specified point. */
	float fitMatrix[16];
	kuhl_bbox_fit(fitMatrix, bbox, 1);

	/* Get a matrix that moves the model to the correct location. */
	float moveToLookPoint[16];
	mat4f_translateVec_new(moveToLookPoint, placeToPutModel);

	/* Create a single model matrix. */
	mat4f_mult_mat4f_new(result, moveToLookPoint, fitMatrix);
}
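
The composition above uses the project's column-major mat4f_* helpers. As a minimal, self-contained sketch (not project code; mul4() and the hand-built matrices are stand-ins for mat4f_mult_mat4f_new() and friends), the same translate*scale composition can be written out in plain C:

#include <stdio.h>

/* Multiply two column-major 4x4 matrices: out = a*b. */
static void mul4(float out[16], const float a[16], const float b[16])
{
	float r[16];
	for(int c=0; c<4; c++)     /* column of the result */
		for(int i=0; i<4; i++) /* row of the result */
		{
			r[c*4+i] = 0;
			for(int k=0; k<4; k++)
				r[c*4+i] += a[k*4+i] * b[c*4+k];
		}
	for(int i=0; i<16; i++)
		out[i] = r[i];
}

int main(void)
{
	float inchesToMeters = 1/39.3701; /* 0.0254 meters per inch */
	float scale[16] = { 0 }, translate[16] = { 0 }, model[16];
	scale[0] = scale[5] = scale[10] = inchesToMeters;
	scale[15] = 1;
	translate[0] = translate[5] = translate[10] = translate[15] = 1;
	translate[12] = 1; translate[13] = 2; translate[14] = 3; /* place at (1,2,3) */
	mul4(model, translate, scale); /* scale first, then move into place */
	printf("translation part: %f %f %f\n", model[12], model[13], model[14]);
	return 0;
}

Because the scale sits on the right of the product, the model is scaled about its own origin before being translated; reversing the order would scale the translation as well.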
Code example #2
File: viewmat.c Project: sborar/opengl-examples
/** Some VRPN orientation sensors may be rotated differently than what we
 * expect them to be (for example, orientation is correct except that
 * the camera is pointing in the wrong direction). This function will
 * adjust the orientation matrix so that the camera is pointing in the
 * correct direction. */
static void viewmat_fix_rotation(float orient[16])
{
	// Fix rotation for a hard-wired orientation sensor.
	if(viewmat_control_mode == VIEWMAT_CONTROL_ORIENT)
	{
		float adjustLeft[16] = { 0, 1, 0, 0,
		                         0, 0, 1, 0,
		                         1, 0, 0, 0,
		                         0, 0, 0, 1 };
		mat4f_transpose(adjustLeft); // transpose to column-major order

		float adjustRight[16] = { 0, 0, -1, 0,
		                         -1, 0,  0, 0,
		                          0, 1,  0, 0,
		                          0, 0,  0, 1 };
		mat4f_transpose(adjustRight);

		mat4f_mult_mat4f_new(orient, adjustLeft, orient);
		mat4f_mult_mat4f_new(orient, orient, adjustRight);
		return;
	}

	// Fix rotation for VRPN
	if(viewmat_vrpn_obj == NULL || strlen(viewmat_vrpn_obj) == 0)
		return;

	char *hostname = vrpn_default_host();
	if(hostname == NULL)
		return;
	
	/* Some objects in the IVS lab need to be rotated to match the
	 * orientation that we expect. Apply the fix here. */
	if(vrpn_is_vicon(hostname)) // MTU vicon tracker
	{
		/* Note, orient has not been transposed/inverted yet. Doing
		 * orient*offset will effectively be rotating the
		 * camera---not the world. */ 
		if(strcmp(viewmat_vrpn_obj, "DK2") == 0)
		{
			float offsetVicon[16];
			mat4f_identity(offsetVicon);
			mat4f_rotateAxis_new(offsetVicon, 90, 1,0,0);
			mat4f_mult_mat4f_new(orient, orient, offsetVicon);
		}

		if(strcmp(viewmat_vrpn_obj, "DSight") == 0)
		{
			float offsetVicon1[16];
			mat4f_identity(offsetVicon1);
			mat4f_rotateAxis_new(offsetVicon1, 90, 1,0,0);
			float offsetVicon2[16];
			mat4f_identity(offsetVicon2);
			mat4f_rotateAxis_new(offsetVicon2, 180, 0,1,0);
			
			// orient = orient * offsetVicon1 * offsetVicon2
			mat4f_mult_mat4f_many(orient, orient, offsetVicon1, offsetVicon2, NULL);
		}
	}
}
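
The adjustment matrices above are written out in row-major source order for readability and then converted with mat4f_transpose(). A minimal in-place 4x4 transpose (an assumed stand-in for the project's implementation, which is not shown here):

/* Swap element (r,c) with (c,r) for the strictly upper triangle. */
static void transpose4(float m[16])
{
	for(int r=0; r<4; r++)
		for(int c=r+1; c<4; c++)
		{
			float tmp = m[r*4+c];
			m[r*4+c] = m[c*4+r];
			m[c*4+r] = tmp;
		}
}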
Code example #3
void get_model_matrix(float result[16])
{
	mat4f_identity(result);
	if(FIT_TO_VIEW_AND_ROTATE == 0)
	{
		/* Translate the model to where we were asked to put it */
		float translate[16];
		mat4f_translateVec_new(translate, placeToPutModel);

		/* Do inches to meters conversion if we are asked to. */
		float scale[16];
		mat4f_identity(scale);
		if(INCHES_TO_METERS)
		{
			float inchesToMeters=1/39.3701;
			mat4f_scale_new(scale, inchesToMeters, inchesToMeters, inchesToMeters);
		}
		mat4f_mult_mat4f_new(result, translate, scale);
		return;
	}
	
	/* Change angle for animation. */
	int count = glutGet(GLUT_ELAPSED_TIME) % 10000; // get a counter that repeats every 10 seconds
	/* Make sure all computers/processes use the same counter. */
	dgr_setget("count", &count, sizeof(int));
	/* Animate the model if there is animation information available. */
	kuhl_update_model_file_ogl3(modelFilename, 0, count/1000.0);

	/* Calculate the width/height/depth of the bounding box and
	 * determine which one of the three is the largest. Then, scale
	 * the scene by 1/(largest value) to ensure that it fits in our
	 * view frustum. */
	float bb_min[3], bb_max[3], bb_center[3];
	kuhl_model_bounding_box(modelFilename, bb_min, bb_max, bb_center);
#define mymax(a,b) ((a)>(b)?(a):(b))
	float tmp;
	tmp = bb_max[0] - bb_min[0];
	tmp = mymax(bb_max[1] - bb_min[1], tmp);
	tmp = mymax(bb_max[2] - bb_min[2], tmp);
	tmp = 1.f / tmp;
#undef mymax
	float scaleBoundBox[16], moveToOrigin[16], moveToLookPoint[16];
	mat4f_translate_new(moveToOrigin, -bb_center[0], -bb_center[1], -bb_center[2]); // move to origin
//	printf("Scaling by factor %f\n", tmp); 
	mat4f_scale_new(scaleBoundBox, tmp, tmp, tmp); // scale model based on bounding box size
	mat4f_translateVec_new(moveToLookPoint, placeToPutModel);

	mat4f_mult_mat4f_new(result, moveToOrigin, result);
	mat4f_mult_mat4f_new(result, scaleBoundBox, result);
	mat4f_mult_mat4f_new(result, moveToLookPoint, result);
}
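
The bounding-box logic above reduces to: take the largest of the box's three dimensions and scale by its reciprocal so the model fits inside a unit cube. A self-contained sketch of just that computation, with the kuhl_* helpers replaced by plain C (fit_scale() is a hypothetical name):

#include <stdio.h>

static float fit_scale(const float bb_min[3], const float bb_max[3])
{
	float largest = bb_max[0] - bb_min[0];
	for(int i=1; i<3; i++)
	{
		float d = bb_max[i] - bb_min[i];
		if(d > largest)
			largest = d;
	}
	return 1.0f/largest;
}

int main(void)
{
	float lo[3] = { -1, 0, -2 };
	float hi[3] = {  1, 4,  2 };
	printf("scale = %f\n", fit_scale(lo, hi)); /* 0.250000: largest extent is 4 */
	return 0;
}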
Code example #4
File: viewer.c Project: runewake2/opengl-examples
void display()
{
	/* If we are using DGR, send or receive data to keep multiple
	 * processes/computers synchronized. */
	dgr_update();

	/* Get current frames per second calculations. */
	float fps = kuhl_getfps(&fps_state);

	if(dgr_is_enabled() == 0 || dgr_is_master())
	{
		// If DGR is being used, only display the FPS counter if we
		// are the master process.

		// Check if FPS value was just updated by kuhl_getfps()
		if(fps_state.frame == 0)
		{
			char label[1024];
			snprintf(label, 1024, "FPS: %0.1f", fps);

			/* Delete old label if it exists */
			if(fpsLabel != 0) 
				glDeleteTextures(1, &fpsLabel);

			/* Make a new label */
			float labelColor[3] = { 1,1,1 };
			float labelBg[4] = { 0,0,0,.3 };
			/* Change the last parameter (point size) to adjust the
			 * size of the texture that the text is rendered into. */
			fpsLabelAspectRatio = kuhl_make_label(label,
			                                      &fpsLabel,
			                                      labelColor, labelBg, 24);

			if(fpsLabel != 0)
				kuhl_geometry_texture(&labelQuad, fpsLabel, "tex", 1);
		}
	}
	
	/* Ensure the slaves use the same render style as the master
	 * process. */
	dgr_setget("style", &renderStyle, sizeof(int));

	
	/* Render the scene once for each viewport. Frequently one
	 * viewport will fill the entire screen. However, this loop will
	 * run twice for HMDs (once for the left eye and once for the
	 * right). */
	viewmat_begin_frame();
	for(int viewportID=0; viewportID<viewmat_num_viewports(); viewportID++)
	{
		viewmat_begin_eye(viewportID);

		/* Where is the viewport that we are drawing onto and what is its size? */
		int viewport[4]; // x,y of lower left corner, width, height
		viewmat_get_viewport(viewport, viewportID);
		glViewport(viewport[0], viewport[1], viewport[2], viewport[3]);

		/* Clear the current viewport. Without glScissor(), glClear()
		 * clears the entire screen. We could call glClear() before
		 * this viewport loop---but in order for all variations of
		 * this code to work (Oculus support, etc), we can only draw
		 * after viewmat_begin_eye(). */
		glScissor(viewport[0], viewport[1], viewport[2], viewport[3]);
		glEnable(GL_SCISSOR_TEST);
		glClearColor(.2,.2,.2,0); // set clear color to grey
		glClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT);
		glDisable(GL_SCISSOR_TEST);
		glEnable(GL_DEPTH_TEST); // turn on depth testing
		kuhl_errorcheck();

		/* Turn on blending (note: if you are using transparent textures,
		   the transparency may not look correct unless you draw farther
		   items before closer items). */
		glEnable(GL_BLEND);
		glBlendEquationSeparate(GL_FUNC_ADD, GL_FUNC_ADD);
		glBlendFuncSeparate(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA, GL_ONE, GL_ZERO);

		/* Get the view or camera matrix; update the frustum values if needed. */
		float viewMat[16], perspective[16];
		viewmat_get(viewMat, perspective, viewportID);

		glUseProgram(program);
		kuhl_errorcheck();
		/* Send the perspective projection matrix to the vertex program. */
		glUniformMatrix4fv(kuhl_get_uniform("Projection"),
		                   1, // number of 4x4 float matrices
		                   0, // transpose
		                   perspective); // value

		float modelMat[16];
		get_model_matrix(modelMat);
		float modelview[16];
		mat4f_mult_mat4f_new(modelview, viewMat, modelMat); // modelview = view * model

		/* Send the modelview matrix to the vertex program. */
		glUniformMatrix4fv(kuhl_get_uniform("ModelView"),
		                   1, // number of 4x4 float matrices
		                   0, // transpose
		                   modelview); // value

		glUniform1i(kuhl_get_uniform("renderStyle"), renderStyle);
		// Copy far plane value into vertex program so we can render depth buffer.
		float f[6]; // left, right, bottom, top, near>0, far>0
		projmat_get_frustum(f, viewport[2], viewport[3]);
		glUniform1f(kuhl_get_uniform("farPlane"), f[5]);

		kuhl_errorcheck();
		kuhl_geometry_draw(modelgeom); /* Draw the model */
		kuhl_errorcheck();
		if(showOrigin)
		{
			/* Save current line width */
			GLfloat origLineWidth;
			glGetFloatv(GL_LINE_WIDTH, &origLineWidth);
			glLineWidth(4); // make lines thick
			
			/* Object coordinate system origin */
			kuhl_geometry_draw(origingeom); /* Draw the origin marker */

			/* World coordinate origin */
			mat4f_copy(modelview, viewMat);
			glUniformMatrix4fv(kuhl_get_uniform("ModelView"),
			                   1, // number of 4x4 float matrices
			                   0, // transpose
			                   modelview); // value
			kuhl_geometry_draw(origingeom); /* Draw the origin marker */

			/* Restore line width */
			glLineWidth(origLineWidth);
		}


		if(dgr_is_enabled() == 0 || dgr_is_master())
		{

			/* The shape of the frames per second quad depends on the
			 * aspect ratio of the label texture and the aspect ratio of
			 * the window (because we are placing the quad in normalized
			 * device coordinates). */
			int windowWidth, windowHeight;
			viewmat_window_size(&windowWidth, &windowHeight);
			float windowAspect = windowWidth / (float)windowHeight;
			
			float stretchLabel[16];
			mat4f_scale_new(stretchLabel, 1/8.0 * fpsLabelAspectRatio / windowAspect, 1/8.0, 1);

			/* Position label in the upper left corner of the screen */
			float transLabel[16];
			mat4f_translate_new(transLabel, -.9, .8, 0);
			mat4f_mult_mat4f_new(modelview, transLabel, stretchLabel);
			glUniformMatrix4fv(kuhl_get_uniform("ModelView"), 1, 0, modelview);

			/* Make sure we don't use a projection matrix */
			float identity[16];
			mat4f_identity(identity);
			glUniformMatrix4fv(kuhl_get_uniform("Projection"), 1, 0, identity);

			/* Don't use depth testing and make sure we use the texture
			 * rendering style */
			glDisable(GL_DEPTH_TEST);
			glUniform1i(kuhl_get_uniform("renderStyle"), 1);
			kuhl_geometry_draw(&labelQuad); /* Draw the quad */
			glEnable(GL_DEPTH_TEST);
			kuhl_errorcheck();
		}

		glUseProgram(0); // stop using a GLSL program.

	} // finish viewport loop
	viewmat_end_frame();
	
	/* Update the model for the next frame based on the time. We
	 * convert the time to seconds and then use mod to cause the
	 * animation to repeat. */
	int time = glutGet(GLUT_ELAPSED_TIME);
	dgr_setget("time", &time, sizeof(int));
	kuhl_update_model(modelgeom, 0, ((time%10000)/1000.0));

	/* Check for errors. If there are errors, consider adding more
	 * calls to kuhl_errorcheck() in your code. */
	kuhl_errorcheck();

	//kuhl_video_record("videoout", 30);
	
	/* Ask GLUT to call display() again. We shouldn't call display()
	 * ourselves recursively because it will not leave time for GLUT
	 * to call other callback functions for when a key is pressed, the
	 * window is resized, etc. */
	glutPostRedisplay();
}
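
The FPS label near the end of display() is placed in normalized device coordinates, so its quad is stretched by the label texture's aspect ratio and divided by the window's aspect ratio to keep the text undistorted. A small worked example of that sizing math (the numbers are illustrative assumptions, not values from the program):

#include <stdio.h>

int main(void)
{
	float labelAspect  = 5.0f;            /* label texture 5x wider than tall */
	float windowAspect = 1920.0f/1080.0f; /* assumed window size */
	float h = 1/8.0f;                     /* quad height in NDC, as in display() */
	float w = h * labelAspect / windowAspect;
	printf("label quad: %.3f wide x %.3f tall in NDC\n", w, h);
	return 0;
}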
Code example #5
File: picker.c Project: runewake2/opengl-examples
/* Called by GLUT whenever the window needs to be redrawn. This
 * function should not be called directly by the programmer. Instead,
 * we can call glutPostRedisplay() to request that GLUT call display()
 * at some point. */
void display()
{
	/* If we are using DGR, send or receive data to keep multiple
	 * processes/computers synchronized. */
	dgr_update();

	/* Render the scene once for each viewport. Frequently one
	 * viewport will fill the entire screen. However, this loop will
	 * run twice for HMDs (once for the left eye and once for the
	 * right). */
	viewmat_begin_frame();
	for(int viewportID=0; viewportID<viewmat_num_viewports(); viewportID++)
	{
		viewmat_begin_eye(viewportID);

		/* Where is the viewport that we are drawing onto and what is its size? */
		int viewport[4]; // x,y of lower left corner, width, height
		viewmat_get_viewport(viewport, viewportID);
		glViewport(viewport[0], viewport[1], viewport[2], viewport[3]);

		/* Clear the current viewport. Without glScissor(), glClear()
		 * clears the entire screen. We could call glClear() before
		 * this viewport loop---but in order for all variations of
		 * this code to work (Oculus support, etc), we can only draw
		 * after viewmat_begin_eye(). */
		glScissor(viewport[0], viewport[1], viewport[2], viewport[3]);
		glEnable(GL_SCISSOR_TEST);
		glClearColor(.2,.2,.2,0); // set clear color to grey
		glClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT|GL_STENCIL_BUFFER_BIT);
		glDisable(GL_SCISSOR_TEST);
		glEnable(GL_DEPTH_TEST); // turn on depth testing
		kuhl_errorcheck();

		/* Get the view or camera matrix; update the frustum values if needed. */
		float viewMat[16], perspective[16];
		viewmat_get(viewMat, perspective, viewportID);

		/* Calculate an angle to rotate the
		 * object. glutGet(GLUT_ELAPSED_TIME) is the number of
		 * milliseconds since glutInit() was called. */
		int count = glutGet(GLUT_ELAPSED_TIME) % 10000; // get a counter that repeats every 10 seconds
		float angle = count / 10000.0 * 360; // rotate 360 degrees every 10 seconds
		/* Make sure all computers/processes use the same angle */
		dgr_setget("angle", &angle, sizeof(GLfloat));
		/* Create a 4x4 rotation matrix based on the angle we computed. */
		float rotateMat[16];
		mat4f_rotateAxis_new(rotateMat, angle, 0,1,0);

		/* Create a scale matrix. */
		float scaleMatrix[16];
		mat4f_scale_new(scaleMatrix, 3, 3, 3);

		// Modelview = (viewMatrix * scaleMatrix) * rotationMatrix
		float modelview[16];
		mat4f_mult_mat4f_new(modelview, viewMat, scaleMatrix);
		mat4f_mult_mat4f_new(modelview, modelview, rotateMat);

		kuhl_errorcheck();
		glUseProgram(program);
		kuhl_errorcheck();
		/* Send the perspective projection matrix to the vertex program. */
		glUniformMatrix4fv(kuhl_get_uniform("Projection"),
		                   1, // number of 4x4 float matrices
		                   0, // transpose
		                   perspective); // value
		/* Send the modelview matrix to the vertex program. */
		glUniformMatrix4fv(kuhl_get_uniform("ModelView"),
		                   1, // number of 4x4 float matrices
		                   0, // transpose
		                   modelview); // value
		kuhl_errorcheck();

		/* Draw the geometry using the matrices that we sent to the
		 * vertex programs immediately above. Use the stencil buffer
		 * to keep track of which object appears on top. */
		if(viewportID == 0)
			glEnable(GL_STENCIL_TEST);
		glStencilOp(GL_KEEP, GL_KEEP, GL_REPLACE);
		glStencilFunc(GL_ALWAYS, 1, -1);
		kuhl_geometry_draw(&triangle);
		
		glStencilFunc(GL_ALWAYS, 2, -1);
		kuhl_geometry_draw(&quad);
		glDisable(GL_STENCIL_TEST);
		
		/* If we have multiple viewports, only draw cursor in the
		 * first viewport. */
		if(viewportID == 0)
		{
			/* Draw the cursor in normalized device coordinates. Don't
			 * use any matrices. */
			float identity[16];
			mat4f_identity(identity);
			glUniformMatrix4fv(kuhl_get_uniform("Projection"),
			                   1, 0, identity);
			glUniformMatrix4fv(kuhl_get_uniform("ModelView"),
			                   1, 0, identity);

			/* Disable depth testing so the cursor isn't occluded by
			 * anything. */
			glDisable(GL_DEPTH_TEST);
			kuhl_geometry_draw(&cursor);
			glEnable(GL_DEPTH_TEST);

			/* When we render images on the Oculus, we are rendering
			 * into a multisampled framebuffer object, and we can't
			 * read from the multisample FBO until we have blitted it
			 * into a normal FBO. Here, we get the blitted FBO for the
			 * *previous* frame. */
			GLint fb = viewmat_get_blitted_framebuffer(viewportID);
			glBindFramebuffer(GL_FRAMEBUFFER, fb);
			
			GLuint stencilVal = 0;
			kuhl_errorcheck();
			glReadPixels(viewport[0]+viewport[2]/2, viewport[1]+viewport[3]/2,
			             1,1, // get data for 1x1 area (i.e., a pixel)
			             GL_STENCIL_INDEX, // query the stencil buffer
			             GL_UNSIGNED_INT,
			             &stencilVal);
			kuhl_errorcheck();
			if(stencilVal == 1)
				printf("Cursor is on triangle.\n");
			else if(stencilVal == 2)
				printf("Cursor is on quad.\n");
			else
				printf("Cursor isn't on anything.\n");
		}

		glUseProgram(0); // stop using a GLSL program.
		
	} // finish viewport loop
	viewmat_end_frame();

	/* Check for errors. If there are errors, consider adding more
	 * calls to kuhl_errorcheck() in your code. */
	kuhl_errorcheck();
    
	/* Ask GLUT to call display() again. We shouldn't call display()
	 * ourselves recursively because it will not leave time for GLUT
	 * to call other callback functions for when a key is pressed, the
	 * window is resized, etc. */
	glutPostRedisplay();
}
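
The picking technique above draws each object with a distinct stencil id and then reads one pixel back to see what is under the cursor. A condensed sketch of just the read-back step (assumes a current GL context, a framebuffer with a stencil attachment, and objects already drawn with glStencilFunc(GL_ALWAYS, id, -1) as in display(); pick_stencil() is a hypothetical helper):

#include <GL/gl.h>

static GLuint pick_stencil(int x, int y)
{
	GLuint id = 0; /* 0 means the cursor is on the background */
	glReadPixels(x, y, 1, 1, GL_STENCIL_INDEX, GL_UNSIGNED_INT, &id);
	return id;     /* otherwise, the id written when the object was drawn */
}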
Code example #6
/** Uses the VRPN library to get the position and orientation of a
 * tracked object.
 *
 * @param object The name of the object being tracked.
 *
 * @param hostname The IP address or hostname of the VRPN server or
 * tracking system computer. If hostname is set to NULL, the
 * ~/.vrpn-server file is consulted.
 *
 * @param pos An array to be filled in with the position information
 * for the tracked object. If we are unable to track the object, a
 * message may be printed and pos will be set to a fixed value.
 *
 * @param orient An array to be filled in with the orientation matrix
 * for the tracked object. The orientation matrix is in row-major
 * order and can be used with OpenGL. If the tracking system is moving an
 * object around on the screen, this matrix can be used directly. If
 * the tracking system is moving the OpenGL camera, this matrix may
 * need to be inverted. If we are unable to track the object, a
 * message may be printed and orient will be set to the identity
 * matrix.
 *
 * @return 1 if we returned data from the tracker. 0 if there were
 * problems connecting to the tracker.
 */
int vrpn_get(const char *object, const char *hostname, float pos[3], float orient[16])
{
	/* Set to default values */
	vec3f_set(pos, 10000,10000,10000);
	mat4f_identity(orient);
#ifdef MISSING_VRPN
	printf("You are missing VRPN support.\n");
	return 0;
#else
	if(object == NULL || strlen(object) == 0)
	{
		msg(WARNING, "Empty or NULL object name was passed into this function.\n");
		return 0;
	}
	if(hostname != NULL && strlen(hostname) == 0)
	{
		msg(WARNING, "Hostname is an empty string.\n");
		return 0;
	}
	
	/* Construct an object@hostname string. */
	std::string hostnamecpp;
	std::string objectcpp;
	if(hostname == NULL)
	{
		char *hostnameInFile = vrpn_default_host();
		if(hostnameInFile)
			hostnamecpp = hostnameInFile;
		else
		{
			msg(ERROR, "Failed to find hostname of VRPN server.\n");
			exit(EXIT_FAILURE);
		}
		
	}
	else
		hostnamecpp = hostname;

	objectcpp = object;
	std::string fullname = objectcpp + "@" + hostnamecpp;

	/* Check if we have a tracker object for that string in our map. */
	if(nameToTracker.count(fullname))
	{
		/* If we already have a tracker object, ask it to run the main
		 * loop (and therefore call our handle_tracker() function if
		 * there is new data). */
		nameToTracker[fullname]->mainloop();

		/* If our callback has been called, get the callback object
		 * and get the data out of it. */
		if(nameToCallbackData.count(fullname))
		{
			vrpn_TRACKERCB t = nameToCallbackData[fullname];
			float pos4[4];
			for(int i=0; i<3; i++)
				pos4[i] = t.pos[i];
			pos4[3]=1;

			double orientd[16];
			// Convert quaternion into orientation matrix.
			q_to_ogl_matrix(orientd, t.quat);
			for(int i=0; i<16; i++)
				orient[i] = (float) orientd[i];

			/* VICON in the MTU IVS lab is typically calibrated so that:
			 * X = points to the right (while facing screen)
			 * Y = points into the screen
			 * Z = up
			 * (left-handed coordinate system)
			 *
			 * PPT is typically calibrated so that:
			 * X = points to the wall that has two closets at both corners
			 * Y = up
			 * Z = points to the door
			 * (right-handed coordinate system)
			 *
			 * By default, OpenGL assumes that:
			 * X = points to the right (while facing screen in the IVS lab)
			 * Y = up
			 * Z = points OUT of the screen (i.e., -Z points into the screen in the IVS lab)
			 * (right-handed coordinate system)
			 *
			 * Below, we convert the position and orientation
			 * information into the OpenGL convention.
			 */
			if(strlen(hostnamecpp.c_str()) > 14 && strncmp(hostnamecpp.c_str(), "tcp://141.219.", 14) == 0) // MTU vicon tracker
			{
				float viconTransform[16] = { 1,0,0,0,  // column major order!
				                             0,0,-1,0,
				                             0,1,0,0,
				                             0,0,0,1 };
				mat4f_mult_mat4f_new(orient, viconTransform, orient);
				mat4f_mult_vec4f_new(pos4, viconTransform, pos4);
				vec3f_copy(pos,pos4);
				return 1; // we successfully collected some data
			}
			else // Non-Vicon tracker
			{
				/* Don't transform other tracking systems */
				// orient is already filled in
				vec3f_copy(pos, pos4);
				return 1; // we successfully collected some data
			}
		}
	}
	else
	{
		/* If this is our first time, create a tracker for the object@hostname string, register the callback handler. */
		msg(INFO, "Connecting to VRPN server: %s\n", hostnamecpp.c_str());
		// If we are making a TCP connection and the server isn't up, the following function call may hang for a long time
		vrpn_Connection *connection = vrpn_get_connection_by_name(hostnamecpp.c_str());

		/* Wait for a bit to see if we can connect. Sometimes we don't immediately connect! */
		for(int i=0; i<1000 && !connection->connected(); i++)
		{
		    usleep(1000); // 1000 microseconds * 1000 = up to 1 second of waiting.
		    connection->mainloop();
		}
		/* If connection failed, exit. */
		if(!connection->connected())
		{
		    delete connection;
		    msg(ERROR, "Failed to connect to tracker: %s\n", fullname.c_str());
		    return 0;
		}
		vrpn_Tracker_Remote *tkr = new vrpn_Tracker_Remote(fullname.c_str(), connection);
		nameToTracker[fullname] = tkr;
		tkr->register_change_handler((void*) fullname.c_str(), handle_tracker);
		kuhl_getfps_init(&fps_state);
		kalman_initialize(&kalman, 0.1, 0.1);
	}
	return 0;
#endif
}
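
The viconTransform applied above maps Vicon's Z-up convention into OpenGL's Y-up convention: (x, y, z) becomes (x, z, -y). A self-contained check of that mapping, with mat4f_mult_vec4f_new() written out in plain C:

#include <stdio.h>

int main(void)
{
	/* Same column-major matrix as viconTransform in vrpn_get(). */
	float T[16] = { 1,0,0,0,
	                0,0,-1,0,
	                0,1,0,0,
	                0,0,0,1 };
	float p[4] = { 1, 2, 3, 1 }; /* Vicon: x right, y into screen, z up */
	float out[4];
	for(int i=0; i<4; i++)
	{
		out[i] = 0;
		for(int k=0; k<4; k++)
			out[i] += T[k*4+i] * p[k];
	}
	printf("OpenGL position: %g %g %g\n", out[0], out[1], out[2]); /* 1 3 -2 */
	return 0;
}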
Code example #7
File: viewmat.c Project: sborar/opengl-examples
/** Get view and projection matrices appropriate for the Oculus HMD */
static void viewmat_get_hmd_oculus(float viewmatrix[16], float projmatrix[16], int viewportID)
{
#ifndef MISSING_OVR
	/* Oculus recommends the order that we should render eyes. We
	 * assume that smaller viewportIDs are rendered first. So, we need
	 * to map the viewportIDs to the specific Oculus HMD eye. The
	 * "eye" variable will be set to either ovrEye_Left (if we are
	 * rendering the left eye) or ovrEye_Right (if we are rendering
	 * the right eye). */
	ovrEyeType eye = hmd->EyeRenderOrder[viewportID];

	/* Oculus doesn't provide us with easy access to the view
	 * frustum information. We get the projection matrix directly
	 * from libovr. */
	ovrMatrix4f ovrpersp = ovrMatrix4f_Projection(hmd->DefaultEyeFov[eye], 0.5, 500, 1);
	mat4f_setRow(projmatrix, &(ovrpersp.M[0][0]), 0);
	mat4f_setRow(projmatrix, &(ovrpersp.M[1][0]), 1);
	mat4f_setRow(projmatrix, &(ovrpersp.M[2][0]), 2);
	mat4f_setRow(projmatrix, &(ovrpersp.M[3][0]), 3);
	
	float offsetMat[16], rotMat[16], posMat[16], initPosMat[16];
	mat4f_identity(offsetMat);  // Viewpoint offset (IPD, etc.)
	mat4f_identity(rotMat);     // tracking system rotation
	mat4f_identity(posMat);     // tracking system position
	mat4f_identity(initPosMat); // camera starting location
	
	/* Construct posMat and rotMat matrices which indicate the
	 * position and orientation of the HMD. */
	if(viewmat_vrpn_obj) // get position from VRPN
	{
		/* Get the offset for the left and right eyes from
		 * Oculus. If you are using a separate tracking system, you
		 * may also want to apply an offset here between the tracked
		 * point and the eye location. */
		mat4f_translate_new(offsetMat,
		                    eye_rdesc[eye].HmdToEyeViewOffset.x, // left & right IPD offset
		                    eye_rdesc[eye].HmdToEyeViewOffset.y, // vertical offset
		                    eye_rdesc[eye].HmdToEyeViewOffset.z); // forward/back offset

		float pos[3] = { 0,0,0 };
		vrpn_get(viewmat_vrpn_obj, NULL, pos, rotMat);
		mat4f_translate_new(posMat, -pos[0], -pos[1], -pos[2]); // position
		viewmat_fix_rotation(rotMat);
	}
	else // get position from Oculus tracker
	{
		pose[eye] = ovrHmd_GetHmdPosePerEye(hmd, eye);
		mat4f_translate_new(posMat,                           // position (includes IPD offset)
		                    -pose[eye].Position.x,
		                    -pose[eye].Position.y,
		                    -pose[eye].Position.z);
		mat4f_rotateQuat_new(rotMat,                          // rotation
		                     pose[eye].Orientation.x,
		                     pose[eye].Orientation.y,
		                     pose[eye].Orientation.z,
		                     pose[eye].Orientation.w);

		// Starting point:
		
		// Translate the world based on the initial camera position
		// specified in viewmat_init(). You may choose to initialize the
		// camera position with y=1.5 meters to approximate a normal
		// standing eyeheight.
		float initPosVec[3];
		vec3f_scalarMult_new(initPosVec, oculus_initialPos, -1.0f);
		mat4f_translateVec_new(initPosMat, initPosVec);
		// TODO: Could also get eyeheight via ovrHmd_GetFloat(hmd, OVR_KEY_EYE_HEIGHT, 1.65)
	}
	mat4f_transpose(rotMat); /* orientation sensor rotates camera, not world */

	// viewmatrix = offsetMat * rotMat * posMat * initPosMat
	mat4f_mult_mat4f_new(viewmatrix, offsetMat, rotMat); // offset is identity if we are using Oculus tracker
	mat4f_mult_mat4f_new(viewmatrix, viewmatrix, posMat);
	mat4f_mult_mat4f_new(viewmatrix, viewmatrix, initPosMat);

	if(0)
	{
		printf("ViewportID=%d; eye=%s\n", viewportID, eye == ovrEye_Left ? "left" : "right");
		printf("Eye offset according to OVR (only used if VRPN is used): ");
		mat4f_print(offsetMat);
		printf("Rotation sensing (from OVR or VRPN): ");
		mat4f_print(rotMat);
		printf("Position tracking (from OVR or VRPN): ");
		mat4f_print(posMat);
		printf("Initial position (from set in viewmat_init()): ");
		mat4f_print(initPosMat);
		printf("Final view matrix: ");
		mat4f_print(viewmatrix);
	}
#else
	/* We shouldn't ever get here, but we'll generate a generic view
	 * and projection matrix just in case... */
	mat4f_lookat_new(viewmatrix,
	                 0,1.55,0,
	                 0,1.55,-1,
	                 0,1,0);
	mat4f_perspective_new(projmatrix, 50, 1, 0.5, 500);
#endif
}
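
The transpose-and-negate pattern above is the standard inverse of a rigid camera pose: if a sensor reports the camera's rotation R and position p in the world, the view matrix is R^T * translate(-p), which is why the code transposes rotMat and negates the tracked position before composing. A self-contained check of that identity (plain C stand-ins for the mat4f_* helpers; column-major matrices assumed):

#include <stdio.h>
#include <math.h>

/* Multiply two column-major 4x4 matrices: out = a*b. */
static void mul4(float out[16], const float a[16], const float b[16])
{
	float r[16];
	for(int c=0; c<4; c++)
		for(int i=0; i<4; i++)
		{
			r[c*4+i] = 0;
			for(int k=0; k<4; k++)
				r[c*4+i] += a[k*4+i] * b[c*4+k];
		}
	for(int i=0; i<16; i++)
		out[i] = r[i];
}

int main(void)
{
	float p[3] = { 0, 1.5f, 2 };   /* assumed camera position (1.5m eye height) */
	float a = 1.5707963f;          /* 90-degree yaw, in radians */
	/* R^T for a rotation of angle a about the Y axis (column-major). */
	float Rt[16] = { cosf(a),0,sinf(a),0,  0,1,0,0,  -sinf(a),0,cosf(a),0,  0,0,0,1 };
	float Tneg[16] = { 1,0,0,0, 0,1,0,0, 0,0,1,0, -p[0],-p[1],-p[2],1 };
	float view[16];
	mul4(view, Rt, Tneg);          /* view = R^T * translate(-p) */
	/* The camera's own position must map to the origin: */
	float out[3];
	for(int i=0; i<3; i++)
		out[i] = view[0+i]*p[0] + view[4+i]*p[1] + view[8+i]*p[2] + view[12+i];
	printf("camera maps to: %g %g %g\n", out[0], out[1], out[2]); /* ~0 0 0 */
	return 0;
}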