void
rut_graphable_fully_transform_point (RutObject *graphable,
                                     RutCamera *camera,
                                     float *x,
                                     float *y,
                                     float *z)
{
  CoglMatrix modelview;
  const CoglMatrix *projection;
  const float *viewport;
  float point[3] = { *x, *y, *z };

  rut_graphable_get_modelview (graphable, camera, &modelview);
  projection = rut_camera_get_projection (camera);
  viewport = rut_camera_get_viewport (camera);

  rut_util_fully_transform_vertices (&modelview,
                                     projection,
                                     viewport,
                                     point,
                                     point,
                                     1);

  *x = point[0];
  *y = point[1];
  *z = point[2];
}
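/* A minimal usage sketch (not part of the original source): mapping a
 * graphable's local-space origin into window coordinates with the helper
 * above. The function name is illustrative. */
static void
example_window_position_of_origin (RutObject *graphable,
                                   RutCamera *camera,
                                   float *window_x,
                                   float *window_y)
{
  float x = 0, y = 0, z = 0;

  rut_graphable_fully_transform_point (graphable, camera, &x, &y, &z);

  *window_x = x;
  *window_y = y;
}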
void
rut_camera_unproject_coord (RutCamera *camera,
                            const CoglMatrix *modelview,
                            const CoglMatrix *inverse_modelview,
                            float object_coord_z,
                            float *x,
                            float *y)
{
  const CoglMatrix *projection = rut_camera_get_projection (camera);
  const CoglMatrix *inverse_projection =
    rut_camera_get_inverse_projection (camera);
  //float z;
  float ndc_x, ndc_y, ndc_z, ndc_w;
  float eye_x, eye_y, eye_z, eye_w;
  const float *viewport = rut_camera_get_viewport (camera);

  /* Convert item z into NDC z */
  {
    //float x = 0, y = 0, z = 0, w = 1;
    float z = 0, w = 1;
    float tmp_x, tmp_y, tmp_z;
    const CoglMatrix *m = modelview;

    tmp_x = m->xw;
    tmp_y = m->yw;
    tmp_z = m->zw;

    m = projection;
    z = m->zx * tmp_x + m->zy * tmp_y + m->zz * tmp_z + m->zw;
    w = m->wx * tmp_x + m->wy * tmp_y + m->wz * tmp_z + m->ww;

    ndc_z = z / w;
  }

  /* Undo the Viewport transform, putting us in Normalized Device Coords */
  ndc_x = (*x - viewport[0]) * 2.0f / viewport[2] - 1.0f;
  ndc_y = ((viewport[3] - 1 + viewport[1] - *y) * 2.0f / viewport[3] - 1.0f);

  /* Undo the Projection, putting us in Eye Coords. */
  ndc_w = 1;
  cogl_matrix_transform_point (inverse_projection,
                               &ndc_x, &ndc_y, &ndc_z, &ndc_w);
  eye_x = ndc_x / ndc_w;
  eye_y = ndc_y / ndc_w;
  eye_z = ndc_z / ndc_w;
  eye_w = 1;

  /* Undo the Modelview transform, putting us in Object Coords */
  cogl_matrix_transform_point (inverse_modelview,
                               &eye_x, &eye_y, &eye_z, &eye_w);

  *x = eye_x;
  *y = eye_y;
  //*z = eye_z;
}
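/* A minimal caller-side sketch (not part of the original source): given a
 * graphable and a camera, derive the inverse modelview with
 * cogl_matrix_get_inverse() and then map a window-space pointer position
 * back into object coordinates with rut_camera_unproject_coord(). The
 * function name is illustrative; note that the implementation above derives
 * the unprojection depth from the modelview's translation, so the point is
 * mapped onto the plane containing the object's local origin. */
static void
example_unproject_pointer (RutObject *graphable,
                           RutCamera *camera,
                           float *window_x,
                           float *window_y)
{
  CoglMatrix modelview;
  CoglMatrix inverse_modelview;

  rut_graphable_get_modelview (graphable, camera, &modelview);

  if (!cogl_matrix_get_inverse (&modelview, &inverse_modelview))
    return; /* singular modelview; nothing sensible to unproject to */

  rut_camera_unproject_coord (camera,
                              &modelview,
                              &inverse_modelview,
                              0, /* object_coord_z */
                              window_x,
                              window_y);
}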
static void
_rut_camera_flush_transforms (RutCamera *camera)
{
  const CoglMatrix *projection;
  CoglFramebuffer *fb = camera->fb;
  CameraFlushState *state;

  /* While a camera is in a suspended state then we don't expect to
   * _flush() and use that camera before it is restored. */
  g_return_if_fail (camera->suspended == FALSE);

  state = cogl_object_get_user_data (COGL_OBJECT (fb), &fb_camera_key);
  if (!state)
    {
      state = g_slice_new (CameraFlushState);
      cogl_object_set_user_data (COGL_OBJECT (fb),
                                 &fb_camera_key,
                                 state,
                                 free_camera_flush_state);
    }
  else if (state->current_camera == camera &&
           camera->transform_age == state->transform_age)
    goto done;

  if (camera->in_frame)
    {
      g_warning ("Un-balanced rut_camera_flush/_end calls: "
                 "repeat _flush() calls before _end()");
    }

  cogl_framebuffer_set_viewport (fb,
                                 camera->viewport[0],
                                 camera->viewport[1],
                                 camera->viewport[2],
                                 camera->viewport[3]);

  projection = rut_camera_get_projection (camera);
  cogl_framebuffer_set_projection_matrix (fb, projection);

  cogl_framebuffer_set_modelview_matrix (fb, &camera->view);

  state->current_camera = camera;
  state->transform_age = camera->transform_age;

done:
  camera->in_frame = TRUE;
}
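/* The flush path above caches per-framebuffer state so that re-flushing the
 * same camera with an unchanged transform_age can be skipped. For reference,
 * a definition consistent with the fields referenced above would look
 * roughly like this; it is a sketch, not the real declarations, which live
 * elsewhere in the source. */
typedef struct
{
  RutCamera *current_camera;
  unsigned int transform_age;
} CameraFlushState;

static void
free_camera_flush_state (void *user_data)
{
  CameraFlushState *state = user_data;
  g_slice_free (CameraFlushState, state);
}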
const CoglMatrix *
rut_camera_get_inverse_projection (RutCamera *camera)
{
  const CoglMatrix *projection;

  if (camera->inverse_projection_age == camera->projection_age)
    return &camera->inverse_projection;

  projection = rut_camera_get_projection (camera);

  if (!cogl_matrix_get_inverse (projection, &camera->inverse_projection))
    return NULL;

  camera->inverse_projection_age = camera->projection_age;

  return &camera->inverse_projection;
}
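/* Illustration of the age-counter caching used above (a sketch, not the
 * actual Rut setter): code that changes the camera's projection is expected
 * to bump projection_age, which leaves the cached inverse stale until the
 * next call to rut_camera_get_inverse_projection() recomputes it. The
 * function name is hypothetical. */
static void
example_mark_projection_dirty (RutCamera *camera)
{
  /* ...update whatever state the projection is derived from... */

  camera->projection_age++; /* cached inverse_projection is now stale */
  camera->transform_age++;  /* _rut_camera_flush_transforms() re-flushes */
}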
void
update_control_point_positions(rig_selection_tool_t *tool,
                               rut_object_t *paint_camera) /* 2d ui camera */
{
    rut_object_t *camera = tool->camera_component;
    c_llist_t *l;

    for (l = tool->selected_entities; l; l = l->next) {
        entity_state_t *entity_state = l->data;
        c_matrix_t transform;
        const c_matrix_t *projection;
        float screen_space[4], x, y;
        const float *viewport;
        c_llist_t *l2;

        get_modelview_matrix(tool->camera, entity_state->entity, &transform);

        projection = rut_camera_get_projection(camera);
        viewport = rut_camera_get_viewport(camera);

        for (l2 = entity_state->control_points; l2; l2 = l2->next) {
            control_point_t *point = l2->data;

            point->position[0] = point->x;
            point->position[1] = point->y;
            point->position[2] = point->z;

            c_matrix_transform_points(&transform,
                                      3, /* num components for input */
                                      sizeof(float) * 3, /* input stride */
                                      point->position,
                                      sizeof(float) * 3, /* output stride */
                                      point->position,
                                      1 /* n_points */);

            /* update the input region; we need to project the transformed
             * point and then apply the viewport transform */
            screen_space[0] = point->position[0];
            screen_space[1] = point->position[1];
            screen_space[2] = point->position[2];
            c_matrix_project_points(projection,
                                    3, /* num components for input */
                                    sizeof(float) * 3, /* input stride */
                                    screen_space,
                                    sizeof(float) * 4, /* output stride */
                                    screen_space,
                                    1 /* n_points */);

            /* perspective divide */
            screen_space[0] /= screen_space[3];
            screen_space[1] /= screen_space[3];

            /* apply viewport transform */
            x = VIEWPORT_TRANSFORM_X(screen_space[0], viewport[0], viewport[2]);
            y = VIEWPORT_TRANSFORM_Y(screen_space[1], viewport[1], viewport[3]);

            point->screen_pos[0] = x;
            point->screen_pos[1] = y;

            map_window_coords_to_overlay_coord(paint_camera,
                                               tool->tool_overlay, &x, &y);

            rut_transform_init_identity(point->transform);
            rut_transform_translate(point->transform, x, y, 0);
            rut_input_region_set_circle(point->input_region, x, y, 10);
        }
    }
}
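/* VIEWPORT_TRANSFORM_X/Y are defined elsewhere in the source; they are
 * expected to implement the usual GL-style NDC-to-window mapping, with y
 * flipped for a y-down window space. Roughly the following, given here as a
 * sketch under that assumption rather than the actual definitions: */
#define EXAMPLE_VIEWPORT_TRANSFORM_X(x, vp_origin_x, vp_width) \
    ((((x) + 1.0f) * 0.5f * (vp_width)) + (vp_origin_x))
#define EXAMPLE_VIEWPORT_TRANSFORM_Y(y, vp_origin_y, vp_height) \
    (((1.0f - (y)) * 0.5f * (vp_height)) + (vp_origin_y))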