/* Build a vector r from v by rotating v 90 degrees within one
 * coordinate plane, choosing the plane based on which component of v
 * is non-zero (this avoids a degenerate all-zero result).
 *
 * NOTE(review): r is NOT perpendicular to v in general -- dot(r, v)
 * equals the square of the component left untouched by the rotation.
 * Presumably callers only need a vector linearly independent of v;
 * confirm against call sites.
 *
 * @param r Output vector (written via vec3f_set).
 * @param v Input vector; must not be the zero vector for a useful result. */
static void calc_r(struct vec3f_s *r, const struct vec3f_s *v)
{
    if (!almost_zero(v->x))
        vec3f_set(r, -v->y, v->x, v->z); /* take x-y plane */
    else if (!almost_zero(v->y))
        vec3f_set(r, v->x, -v->z, v->y); /* take y-z plane */
    else
        vec3f_set(r, v->z, v->y, -v->x); /* take z-x plane */
}
/** Sets the currently used camera position, the point the camera
 * looks at, and the up vector, from individual scalar components.
 * Note that the look values name a point in space that the camera
 * faces -- not a direction vector. This function is typically called
 * once at the beginning of a program to place the camera.
 *
 * @param posX The X position to place the camera.
 * @param posY The Y position to place the camera.
 * @param posZ The Z position to place the camera.
 * @param lookX The X component of a point that the camera is looking at.
 * @param lookY The Y component of a point that the camera is looking at.
 * @param lookZ The Z component of a point that the camera is looking at.
 * @param upX The X component of the camera up vector.
 * @param upY The Y component of the camera up vector.
 * @param upZ The Z component of the camera up vector.
 * @see mousemove_setVec()
 */
void mousemove_set(float posX, float posY, float posZ,
                   float lookX, float lookY, float lookZ,
                   float upX, float upY, float upZ)
{
    /* Pack the scalar components into the array form that the
     * vector-based setter expects, then delegate to it. */
    float position[3]  = { posX, posY, posZ };
    float lookPoint[3] = { lookX, lookY, lookZ };
    float upVector[3]  = { upX, upY, upZ };
    mousemove_setVec(position, lookPoint, upVector);
}
/** A callback function that will get called whenever the tracker
 * provides us with new data. This may be called repeatedly for each
 * record that we have missed if many records have been delivered
 * since the last call to the VRPN mainloop() function.
 *
 * @param name Callback userdata; here a C string holding the
 *             "object@hostname" key for the tracked object.
 * @param t    The VRPN tracker record (position, quaternion, timestamp).
 */
static void VRPN_CALLBACK handle_tracker(void *name, vrpn_TRACKERCB t)
{
    /* Measure how frequently VRPN delivers records; print the rate
     * once per measurement cycle (presumably fps_state.frame cycles
     * back to 0 -- confirm kuhl_getfps() semantics). */
    float fps = kuhl_getfps(&fps_state);
    if(fps_state.frame == 0)
        msg(INFO, "VRPN records per second: %.1f\n", fps);

    /* Some tracking systems return large values when a point gets
     * lost. If the tracked point seems to be lost, ignore this
     * update. */
    float pos[3];
    vec3f_set(pos, t.pos[0], t.pos[1], t.pos[2]);

    /* Record timestamp in microseconds (used only by the debug
     * printout below). */
    long microseconds = (t.msg_time.tv_sec* 1000000L) + t.msg_time.tv_usec;
    if(0) /* debugging output; change to if(1) to enable */
    {
        printf("Current time %ld; VRPN record time: %ld\n", kuhl_microseconds(), microseconds);
        printf("Received position from vrpn: ");
        vec3f_print(pos);
    }

    /* Drop implausible positions (more than 100 units from origin). */
    if(vec3f_norm(pos) > 100)
        return;

    // Store the data in our map so that someone can use it later.
    std::string s = (char*)name;
    nameToCallbackData[s] = t;
    smooth(nameToCallbackData[s]); /* smooth the stored record in place */
}
/* Create and initialize a particle engine holding `size` particles.
 *
 * @param size     Number of particles to allocate. Must be >= 0.
 * @param life     Base lifetime assigned to newly spawned particles.
 * @param spray    Randomization parameter for particle life/force.
 * @param floor    Floor height stored on the engine.
 * @param texture  OpenGL texture handle used when rendering.
 * @param position Optional start position (copied); NULL -> zeroed.
 * @param force    Optional start force (copied); NULL -> zeroed.
 * @param gravity  NOTE(review): accepted but unused by this function --
 *                 confirm whether it should be stored on the engine.
 *
 * @return Newly allocated engine (caller owns both the engine and
 *         p->particles), or NULL on allocation failure / negative size.
 *
 * FIX: the original allocated sizeof(ParticleEngine) +
 * sizeof(Particle)*size for the engine AND then separately malloc'd
 * p->particles, leaving the trailing Particle array unused. It also
 * never checked the allocation results. */
ParticleEngine *ParticleEngine_Init(int size, int life, int spray,
                                    float floor, GLuint texture,
                                    float position[3], float force[3],
                                    float gravity[3])
{
    if (size < 0)
        return NULL;

    ParticleEngine *p = malloc(sizeof *p);
    if (!p)
        return NULL;
    memset(p, 0, sizeof *p);

    /* vec3f_set() here follows this module's (src, dst) copy
     * convention -- see Particle_Spawn(). */
    if (position)
        vec3f_set(position, p->startPosition);
    else
        vec3f_clear(p->startPosition);
    if (force)
        vec3f_set(force, p->startForce);
    else
        vec3f_clear(p->startForce);

    /* calloc zero-initializes the array and checks the size product
     * for overflow. */
    p->particles = calloc((size_t)size, sizeof(Particle));
    if (!p->particles) {
        free(p);
        return NULL;
    }

    p->size = size;
    p->floor = floor;
    p->startLife = life;
    p->spray = spray;
    p->texture = texture;
    return p;
}
/*! Inverse Operation of \c sphere<T,N>_getSub_box2f().
 * Expresses sphere \p b relative to box \p a and stores the result in
 * \p ba: the x/y center coordinates are normalized by a's extents and
 * the radius is divided by the mean of a's two dimensions. The z
 * coordinate is passed through unchanged. */
inline void sphere<T,N>_getRel_box2f(const box2f& a, const sphere<T,N>& b, sphere<T,N>& ba)
{
    vec2f ad;
    box2f_rdDim(a, &ad); /* read a's dimensions into ad */
    /* Normalize b's center into a's unit coordinate frame. */
    vec3f_set(&ba.c(),
              (b.c()(0) - a.l(0)) / ad(0),
              (b.c()(1) - a.l(1)) / ad(1),
              b.c()(2)); /* z is ignored */
    /* A single scalar radius is only exact when a's dimensions are
     * equal; otherwise the true image of the sphere is an ellipse. */
    ba.r() = b.r() / mean(ad(0), ad(1));
    if (ad(0) != ad(1)) {
        PTODO("ad's components not all equal => may result in an ellipse\n");
    }
}
/* Respawn a single particle: place it at the engine's start position,
 * give it a randomized lifetime, and perturb the start force per axis.
 *
 * NOTE(review): vec3f_set() here follows this module's (src, dst) copy
 * convention -- see ParticleEngine_Init(). */
static void Particle_Spawn(ParticleEngine *pengine, Particle *particle)
{
    int i;

    /* Set position */
    vec3f_set(pengine->startPosition, particle->position);

    /* Randomize life.
     * NOTE(review): "rand() % pengine->spray * 3" parses as
     * "(rand() % spray) * 3"; if "rand() % (spray * 3)" was intended
     * the parentheses are missing -- confirm the desired range.
     * Also undefined behavior if pengine->spray == 0 (mod by zero). */
    particle->life = pengine->startLife - (rand() % pengine->spray * 3);

    /* Randomize direction: nudge each axis of the start force by a
     * small random offset scaled by the spray parameter. */
    for (i = 0; i < 3; i++) {
        particle->force[i] = pengine->startForce[i] +
            (double)((rand() % pengine->spray) - ((double)pengine->spray/50)) /
            ((double)pengine->spray * 20);
    }
}
/*! Sub-Sphere when \p ba is viewed relative to \p a and put
 * result in \p b.
 * The inverse operation of \c sphere<T,N>_getRel(): maps ba's
 * normalized center back into a's coordinate frame and rescales the
 * radius by the mean of a's three dimensions. */
inline void sphere<T,N>_getSub(const box<T,N>& a, const sphere<T,N>& ba, sphere<T,N>& b)
{
    vec3f ad;
    box3f_rdDim(a, &ad); /* read a's dimensions into ad */
    /* De-normalize the center: a.l() is a's lower corner. */
    vec3f_set(&b.c(),
              a.l(0) + ad(0) * ba.c()(0),
              a.l(1) + ad(1) * ba.c()(1),
              a.l(2) + ad(2) * ba.c()(2));
    /* A single scalar radius is only exact for a cube-shaped box;
     * otherwise the true image of the sphere is an ellipsoid. */
    b.r() = ba.r() * pnw::mean(ad(0), ad(1), ad(2));
    if (ad(0) != ad(1) or ad(1) != ad(2) or ad(2) != ad(0)) {
        PTODO("ad's components not all equal => may result in an ellipse\n");
    }
}
/* GLFW cursor-position callback: converts mouse motion into yaw/pitch
 * updates and recomputes the camera front vector.
 *
 * Reads/writes file-scope globals: first_mouse, last_x, last_y, yaw,
 * pitch, camera_front.
 *
 * FIX: removed the no-op self-casts "xpos = (float)xpos;" -- assigning
 * a float-cast of a double parameter back to itself does nothing.
 * Locals are now declared at first use in the smallest scope.
 *
 * @param window Unused (required by the GLFW callback signature).
 * @param xpos   Cursor x in screen coordinates.
 * @param ypos   Cursor y in screen coordinates (grows downward). */
void mouse_callback(GLFWwindow *window, double xpos, double ypos)
{
    UNUSED(window);

    /* First event: just record the position so the initial delta is
     * zero instead of a huge jump. */
    if(first_mouse) {
        last_x = xpos;
        last_y = ypos;
        first_mouse = false;
    }

    GLfloat x_offset = xpos - last_x;
    GLfloat y_offset = last_y - ypos; /* inverted: screen y grows downward */
    last_x = xpos;
    last_y = ypos;

    const GLfloat sensitivity = 0.001f; /* radians per pixel of motion */
    x_offset *= sensitivity;
    y_offset *= sensitivity;

    yaw += x_offset;
    pitch += y_offset;

    /* Clamp pitch to +/- 90 degrees to avoid flipping at the poles. */
    if(pitch > M_PI/2) {
        pitch = M_PI/2;
    }
    if(pitch < -M_PI/2) {
        pitch = -M_PI/2;
    }

    /* Spherical (yaw, pitch) -> Cartesian view direction. */
    GLfloat x = cosf(yaw) * cosf(pitch);
    GLfloat y = sinf(pitch);
    GLfloat z = sinf(yaw) * cosf(pitch);
    vec3f_set(camera_front, x, y, z);
    vec3f_normalize(camera_front, camera_front);
}
int main(void) { if(!glfwInit()) { fprintf(stderr, "Could not load GLFW, aborting.\n"); return(EXIT_FAILURE); } int WIDTH, HEIGHT; WIDTH = 800; HEIGHT = 600; GLFWwindow *window; window = glfwCreateWindow(WIDTH,HEIGHT,"05 camera.", NULL, NULL); if(!window) { fprintf(stderr, "Could not create main window, aborting.\n"); return(EXIT_FAILURE); } glfwMakeContextCurrent(window); glfwSetKeyCallback(window, key_callback); glfwSetCursorPosCallback(window, mouse_callback); glfwSetScrollCallback(window, scroll_callback); glewExperimental = GL_TRUE; if(glewInit() != GLEW_OK) { fprintf(stderr, "Could not initialize GLEW, aborting.\n"); return(EXIT_FAILURE); } glEnable(GL_DEPTH_TEST); glfwSetInputMode(window, GLFW_CURSOR, GLFW_CURSOR_DISABLED); GLuint VAO, VBO, lightVAO; glGenVertexArrays(1, &VAO); glGenBuffers(1, &VBO); glBindVertexArray(VAO); glBindBuffer(GL_ARRAY_BUFFER, VBO); glBufferData(GL_ARRAY_BUFFER, sizeof(vertices), vertices, GL_STATIC_DRAW); glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, stride, (GLvoid*)0); glEnableVertexAttribArray(0); glVertexAttribPointer(1, 3, GL_FLOAT, GL_FALSE, stride, (GLvoid*)(3 * sizeof(GLfloat))); glEnableVertexAttribArray(1); //glVertexAttribPointer(1, 2, GL_FLOAT, GL_FALSE, stride, // (GLvoid*)(3*sizeof(GLfloat))); //glEnableVertexAttribArray(1); glBindVertexArray(0); /* LightVAO definition. */ glGenVertexArrays(1, &lightVAO); glBindVertexArray(lightVAO); glBindBuffer(GL_ARRAY_BUFFER, VBO); // Same vertex data as cube. 
glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, stride, (GLvoid*)0); glEnableVertexAttribArray(0); glBindVertexArray(0); char* vertex_source = read_shader("shaders/07.vert"); char* fragment_source = read_shader("shaders/07.frag"); char* lamp_fragment_source = read_shader("shaders/07_lamp.frag"); GLuint shader_program, lamp_program; shader_program = create_shader_program(vertex_source, fragment_source); lamp_program = create_shader_program(vertex_source, lamp_fragment_source); free(vertex_source); free(fragment_source); free(lamp_fragment_source); //GLuint texture; //glActiveTexture(GL_TEXTURE0); //glGenTextures(1, &texture); //glBindTexture(GL_TEXTURE_2D, texture); //load_texture("textures/02_container.jpg"); //glUniform1i(glGetUniformLocation(shader_program, "texture_sampler"), 0); //glBindTexture(GL_TEXTURE_2D, 0); Mat4f *projection, *model, *view, *temp, *temp2; mat4f_allocate(&projection); mat4f_allocate(&model); mat4f_allocate(&view); mat4f_allocate(&temp); mat4f_allocate(&temp2); mat4f_translate(view, 0.0f, 0.0f, -3.0f); //mat4f_rotate_x(model, -M_PI/4); mat4f_rotate_x(model, 0.0f); Vec3f *light_position; vec3f_allocate(&light_position); vec3f_allocate(&camera_pos); vec3f_allocate(&camera_target); vec3f_allocate(&camera_up); vec3f_allocate(&camera_front); vec3f_allocate(&temp_vec3f); vec3f_set(camera_target, 0.0f, 0.0f, 0.0f); vec3f_set(camera_up, 0.0f, 1.0f, 0.0f); vec3f_set(camera_front, 0.0f, 0.0f, -1.0f); vec3f_set(camera_pos, 0.0f, 0.0f, 3.0f); vec3f_set(light_position, 1.2f, 1.0f, 2.0f); /* shader locations */ GLuint model_location, projection_location, view_location, light_position_location, view_position_location; glUseProgram(shader_program); model_location = glGetUniformLocation(shader_program, "model"); projection_location = glGetUniformLocation(shader_program, "perspective"); view_location = glGetUniformLocation(shader_program, "view"); light_position_location = glGetUniformLocation(shader_program, "light_position"); view_position_location = 
glGetUniformLocation(shader_program, "view_position"); GLuint object_color_location, light_color_location; object_color_location = glGetUniformLocation(shader_program, "object_color"); light_color_location = glGetUniformLocation(shader_program, "light_color"); glUniform3f(object_color_location, 1.0f, 0.5f, 0.31f); glUniform3f(light_color_location, 1.0f, 1.0f, 1.0); glUniform3f(light_position_location, light_position->data[0], light_position->data[1], light_position->data[2]); glUseProgram(0); glUseProgram(lamp_program); GLuint lamp_model_location, lamp_projection_location, lamp_view_location; lamp_model_location = glGetUniformLocation(lamp_program, "model"); lamp_projection_location = glGetUniformLocation(lamp_program, "perspective"); lamp_view_location = glGetUniformLocation(lamp_program, "view"); glUseProgram(0); glClearColor(0.0f, 0.0f, 0.0f, 1.0f); current_frame = 0.0f; last_frame = 0.0f; last_x = WIDTH / 2.0f; last_y = HEIGHT / 2.0f; fov = M_PI/4; yaw = -M_PI/2; pitch = 0.0f; first_mouse = true; while(!glfwWindowShouldClose(window)) { glfwPollEvents(); current_frame = glfwGetTime(); delta_time = current_frame - last_frame; last_frame = current_frame; do_movement(); glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); glUseProgram(shader_program); // glActiveTexture(GL_TEXTURE0); // glBindTexture(GL_TEXTURE_2D, texture); // cam_x = sinf(time) * radius; // cam_z = cosf(time) * radius; vec3f_add(camera_target, camera_pos, camera_front); mat4f_look_at(view, camera_pos, camera_target, camera_up); mat4f_perspective(projection, fov, (float)WIDTH/(float)HEIGHT, 0.1f, 100.0f); //mat4f_rotate_x(model, sinf(time) * M_PI); glUniformMatrix4fv(model_location, 1, GL_TRUE, mat4f_pointer(model)); glUniformMatrix4fv(view_location, 1, GL_TRUE, mat4f_pointer(view)); glUniformMatrix4fv(projection_location, 1, GL_TRUE, mat4f_pointer(projection)); glUniform3f(view_position_location, camera_pos->data[0], camera_pos->data[1], camera_pos->data[2]); glBindVertexArray(VAO); 
glDrawArrays(GL_TRIANGLES, 0, 36); // glBindTexture(GL_TEXTURE_2D, 0); glBindVertexArray(0); glUseProgram(lamp_program); //glUseProgram(shader_program); mat4f_scale(temp, 0.2f, 0.2f, 0.2f); mat4f_mul(temp, temp, model); mat4f_translate_vec3f(temp2, light_position); mat4f_mul(temp2, temp2, temp); //mat4f_print(temp); glUniformMatrix4fv(lamp_model_location, 1, GL_TRUE, mat4f_pointer(temp2)); glUniformMatrix4fv(lamp_view_location, 1, GL_TRUE, mat4f_pointer(view)); glUniformMatrix4fv(lamp_projection_location, 1, GL_TRUE, mat4f_pointer(projection)); glBindVertexArray(lightVAO); glDrawArrays(GL_TRIANGLES, 0, 36); glBindVertexArray(0); glfwSwapBuffers(window); } glfwTerminate(); return(EXIT_SUCCESS); }
/** Uses the VRPN library to get the position and orientation of a * tracked object. * * @param object The name of the object being tracked. * * @param hostname The IP address or hostname of the VRPN server or * tracking system computer. If hostname is set to NULL, the * ~/.vrpn-server file is consulted. * * @param pos An array to be filled in with the position information * for the tracked object. If we are unable to track the object, a * message may be printed and pos will be set to a fixed value. * * @param orient An array to be filled in with the orientation matrix * for the tracked object. The orientation matrix is in row-major * order can be used with OpenGL. If the tracking system is moving an * object around on the screen, this matrix can be used directly. If * the tracking system is moving the OpenGL camera, this matrix may * need to be inverted. If we are unable to track the object, a * message may be printed and orient will be set to the identity * matrix. * * @return 1 if we returned data from the tracker. 0 if there was * problems connecting to the tracker. */ int vrpn_get(const char *object, const char *hostname, float pos[3], float orient[16]) { /* Set to default values */ vec3f_set(pos, 10000,10000,10000); mat4f_identity(orient); #ifdef MISSING_VRPN printf("You are missing VRPN support.\n"); return 0; #else if(object == NULL || strlen(object) == 0) { msg(WARNING, "Empty or NULL object name was passed into this function.\n"); return 0; } if(hostname != NULL && strlen(hostname) == 0) { msg(WARNING, "Hostname is an empty string.\n"); return 0; } /* Construct an object@hostname string. 
*/ std::string hostnamecpp; std::string objectcpp; if(hostname == NULL) { char *hostnameInFile = vrpn_default_host(); if(hostnameInFile) hostnamecpp = hostnameInFile; else { msg(ERROR, "Failed to find hostname of VRPN server.\n"); exit(EXIT_FAILURE); } } else hostnamecpp = hostname; objectcpp = object; std::string fullname = objectcpp + "@" + hostnamecpp; /* Check if we have a tracker object for that string in our map. */ if(nameToTracker.count(fullname)) { /* If we already have a tracker object, ask it to run the main * loop (and therefore call our handle_tracker() function if * there is new data). */ nameToTracker[fullname]->mainloop(); /* If our callback has been called, get the callback object * and get the data out of it. */ if(nameToCallbackData.count(fullname)) { vrpn_TRACKERCB t = nameToCallbackData[fullname]; float pos4[4]; for(int i=0; i<3; i++) pos4[i] = t.pos[i]; pos4[3]=1; double orientd[16]; // Convert quaternion into orientation matrix. q_to_ogl_matrix(orientd, t.quat); for(int i=0; i<16; i++) orient[i] = (float) orientd[i]; /* VICON in the MTU IVS lab is typically calibrated so that: * X = points to the right (while facing screen) * Y = points into the screen * Z = up * (left-handed coordinate system) * * PPT is typically calibrated so that: * X = the points to the wall that has two closets at both corners * Y = up * Z = points to the door * (right-handed coordinate system) * * By default, OpenGL assumes that: * X = points to the right (while facing screen in the IVS lab) * Y = up * Z = points OUT of the screen (i.e., -Z points into the screen in te IVS lab) * (right-handed coordinate system) * * Below, we convert the position and orientation * information into the OpenGL convention. */ if(strlen(hostnamecpp.c_str()) > 14 && strncmp(hostnamecpp.c_str(), "tcp://141.219.", 14) == 0) // MTU vicon tracker { float viconTransform[16] = { 1,0,0,0, // column major order! 
0,0,-1,0, 0,1,0,0, 0,0,0,1 }; mat4f_mult_mat4f_new(orient, viconTransform, orient); mat4f_mult_vec4f_new(pos4, viconTransform, pos4); vec3f_copy(pos,pos4); return 1; // we successfully collected some data } else // Non-Vicon tracker { /* Don't transform other tracking systems */ // orient is already filled in vec3f_copy(pos, pos4); return 1; // we successfully collected some data } } } else { /* If this is our first time, create a tracker for the object@hostname string, register the callback handler. */ msg(INFO, "Connecting to VRPN server: %s\n", hostnamecpp.c_str()); // If we are making a TCP connection and the server isn't up, the following function call may hang for a long time vrpn_Connection *connection = vrpn_get_connection_by_name(hostnamecpp.c_str()); /* Wait for a bit to see if we can connect. Sometimes we don't immediately connect! */ for(int i=0; i<1000 && !connection->connected(); i++) { usleep(1000); // 1000 microseconds * 1000 = up to 1 second of waiting. connection->mainloop(); } /* If connection failed, exit. */ if(!connection->connected()) { delete connection; msg(ERROR, "Failed to connect to tracker: %s\n", fullname.c_str()); return 0; } vrpn_Tracker_Remote *tkr = new vrpn_Tracker_Remote(fullname.c_str(), connection); nameToTracker[fullname] = tkr; tkr->register_change_handler((void*) fullname.c_str(), handle_tracker); kuhl_getfps_init(&fps_state); kalman_initialize(&kalman, 0.1, 0.1); } return 0; #endif }
int main(int argc, char** argv) { char *modelFilename = NULL; char *modelTexturePath = NULL; if(argc == 2) { modelFilename = argv[1]; modelTexturePath = NULL; } else if(argc == 3) { modelFilename = argv[1]; modelTexturePath = argv[2]; } else { printf("Usage:\n" "%s modelFile - Textures are assumed to be in the same directory as the model.\n" "- or -\n" "%s modelFile texturePath\n", argv[0], argv[0]); exit(1); } /* set up our GLUT window */ glutInit(&argc, argv); glutInitWindowSize(512, 512); /* Ask GLUT to for a double buffered, full color window that * includes a depth buffer */ #ifdef FREEGLUT glutSetOption(GLUT_MULTISAMPLE, 4); // set msaa samples; default to 4 glutInitDisplayMode(GLUT_DOUBLE | GLUT_RGB | GLUT_DEPTH | GLUT_MULTISAMPLE); glutInitContextVersion(3,2); glutInitContextProfile(GLUT_CORE_PROFILE); #else glutInitDisplayMode(GLUT_3_2_CORE_PROFILE | GLUT_DOUBLE | GLUT_RGB | GLUT_DEPTH | GLUT_MULTISAMPLE); #endif glutCreateWindow(argv[0]); // set window title to executable name glEnable(GL_MULTISAMPLE); /* Initialize GLEW */ glewExperimental = GL_TRUE; GLenum glewError = glewInit(); if(glewError != GLEW_OK) { fprintf(stderr, "Error initializing GLEW: %s\n", glewGetErrorString(glewError)); exit(EXIT_FAILURE); } /* When experimental features are turned on in GLEW, the first * call to glGetError() or kuhl_errorcheck() may incorrectly * report an error. So, we call glGetError() to ensure that a * later call to glGetError() will only see correct errors. For * details, see: * http://www.opengl.org/wiki/OpenGL_Loading_Library */ glGetError(); // setup callbacks glutDisplayFunc(display); glutKeyboardFunc(keyboard); /* Compile and link a GLSL program composed of a vertex shader and * a fragment shader. */ program = kuhl_create_program(GLSL_VERT_FILE, GLSL_FRAG_FILE); dgr_init(); /* Initialize DGR based on environment variables. 
*/ projmat_init(); /* Figure out which projection matrix we should use based on environment variables */ float initCamPos[3] = {0,1.55,2}; // 1.55m is a good approx eyeheight float initCamLook[3] = {0,0,0}; // a point the camera is facing at float initCamUp[3] = {0,1,0}; // a vector indicating which direction is up viewmat_init(initCamPos, initCamLook, initCamUp); // Clear the screen while things might be loading glClearColor(.2,.2,.2,1); glClear(GL_COLOR_BUFFER_BIT); // Load the model from the file modelgeom = kuhl_load_model(modelFilename, modelTexturePath, program, bbox); /* Count the number of kuhl_geometry objects for this model */ unsigned int geomCount = kuhl_geometry_count(modelgeom); /* Allocate an array of particle arrays */ particles = malloc(sizeof(particle*)*geomCount); int i = 0; for(kuhl_geometry *g = modelgeom; g != NULL; g=g->next) { /* allocate space to store velocity information for all of the * vertices in this kuhl_geometry */ particles[i] = malloc(sizeof(particle)*g->vertex_count); for(unsigned int j=0; j<g->vertex_count; j++) vec3f_set(particles[i][j].velocity, 0,0,0); /* Change the geometry to be drawn as points */ g->primitive_type = GL_POINTS; // Comment out this line to default to triangle rendering. i++; } /* Tell GLUT to start running the main loop and to call display(), * keyboard(), etc callback methods as needed. */ glutMainLoop(); /* // An alternative approach: while(1) glutMainLoopEvent(); */ exit(EXIT_SUCCESS); }