/* Computes the screen-space bounding box (in global-viewport pixel
 * coordinates) of the geometry bounds stored in ICET_GEOMETRY_BOUNDS, along
 * with the normalized depth range the geometry covers.
 *
 * contained_viewport - filled with {x, y, width, height} of the containing
 *                      viewport, clipped to ICET_GLOBAL_VIEWPORT.
 * znear, zfar        - filled with the min/max normalized z of the projected
 *                      bounds, clamped to [-1, 1].
 *
 * Vertices behind the near plane are handled by intersecting, in homogeneous
 * coordinates, each segment from such a vertex to every vertex in front of
 * the near plane. */
static void drawFindContainedViewport(IceTInt contained_viewport[4],
                                      IceTDouble *znear,
                                      IceTDouble *zfar)
{
    IceTDouble total_transform[16];
    IceTDouble left, right, bottom, top;
    IceTDouble *transformed_verts;
    IceTInt global_viewport[4];
    IceTInt num_bounding_verts;
    int i;

    icetGetIntegerv(ICET_GLOBAL_VIEWPORT, global_viewport);

    {
        IceTDouble projection_matrix[16];
        IceTDouble modelview_matrix[16];
        IceTDouble viewport_matrix[16];
        IceTDouble tmp_matrix[16];
        icetGetDoublev(ICET_PROJECTION_MATRIX, projection_matrix);
        icetGetDoublev(ICET_MODELVIEW_MATRIX, modelview_matrix);
        /* Strange projection matrix that transforms the x and y of normalized
           screen coordinates into viewport coordinates that may be cast to
           integers. */
        /* Column-major 4x4; w stays scaled by 2 so that x/w and y/w land
           directly in pixel coordinates while z/w stays normalized. */
        viewport_matrix[ 0] = global_viewport[2];
        viewport_matrix[ 1] = 0.0;
        viewport_matrix[ 2] = 0.0;
        viewport_matrix[ 3] = 0.0;

        viewport_matrix[ 4] = 0.0;
        viewport_matrix[ 5] = global_viewport[3];
        viewport_matrix[ 6] = 0.0;
        viewport_matrix[ 7] = 0.0;

        viewport_matrix[ 8] = 0.0;
        viewport_matrix[ 9] = 0.0;
        viewport_matrix[10] = 2.0;
        viewport_matrix[11] = 0.0;

        viewport_matrix[12] = global_viewport[2] + global_viewport[0]*2.0;
        viewport_matrix[13] = global_viewport[3] + global_viewport[1]*2.0;
        viewport_matrix[14] = 0.0;
        viewport_matrix[15] = 2.0;

        /* total_transform = viewport * projection * modelview. */
        icetMatrixMultiply(tmp_matrix,
                           (const IceTDouble *)projection_matrix,
                           (const IceTDouble *)modelview_matrix);
        icetMatrixMultiply(total_transform,
                           (const IceTDouble *)viewport_matrix,
                           (const IceTDouble *)tmp_matrix);
    }

    icetGetIntegerv(ICET_NUM_BOUNDING_VERTS, &num_bounding_verts);
    transformed_verts = icetGetStateBuffer(
                                  ICET_TRANSFORMED_BOUNDS,
                                  sizeof(IceTDouble)*num_bounding_verts*4);

    /* Transform each vertex to find where it lies in the global viewport and
       normalized z.  Leave the results in homogeneous coordinates for now. */
    {
        const IceTDouble *bound_vert
            = icetUnsafeStateGetDouble(ICET_GEOMETRY_BOUNDS);
        for (i = 0; i < num_bounding_verts; i++) {
            IceTDouble bound_vert_4vec[4];
            /* Promote the packed xyz vertex to homogeneous (x, y, z, 1). */
            bound_vert_4vec[0] = bound_vert[3*i+0];
            bound_vert_4vec[1] = bound_vert[3*i+1];
            bound_vert_4vec[2] = bound_vert[3*i+2];
            bound_vert_4vec[3] = 1.0;
            icetMatrixVectorMultiply(transformed_verts + 4*i,
                                     (const IceTDouble *)total_transform,
                                     (const IceTDouble *)bound_vert_4vec);
        }
    }

    /* Set absolute mins and maxes. */
    /* Initialized inverted (min > max) so the first vertex always wins. */
    left   = global_viewport[0] + global_viewport[2];
    right  = global_viewport[0];
    bottom = global_viewport[1] + global_viewport[3];
    top    = global_viewport[1];
    *znear = 1.0;
    *zfar  = -1.0;

    /* Now iterate over all the transformed verts and adjust the absolute mins
       and maxs to include them all. */
    for (i = 0; i < num_bounding_verts; i++) {
        IceTDouble *vert = transformed_verts + 4*i;

        /* Check to see if the vertex is in front of the near cut plane.  This
           is true when z/w >= -1 or z + w >= 0.  The second form is better just
           in case w is 0. */
        if (vert[2] + vert[3] >= 0.0) {
            /* Normalize homogeneous coordinates. */
            IceTDouble invw = 1.0/vert[3];
            IceTDouble x = vert[0]*invw;
            IceTDouble y = vert[1]*invw;
            IceTDouble z = vert[2]*invw;

            /* Update contained region. */
            if (left   > x) left   = x;
            if (right  < x) right  = x;
            if (bottom > y) bottom = y;
            if (top    < y) top    = y;
            if (*znear > z) *znear = z;
            if (*zfar  < z) *zfar  = z;
        } else {
            /* The vertex is being clipped by the near plane.  In perspective
               mode, vertices behind the near clipping plane can sometimes give
               misleading projections.  Instead, find all the other vertices on
               the other side of the near plane, compute the intersection of the
               segment between the two points and the near plane (in homogeneous
               coordinates) and use that as the projection. */
            int j;
            for (j = 0; j < num_bounding_verts; j++) {
                IceTDouble *vert2 = transformed_verts + 4*j;
                double t;
                IceTDouble x, y, invw;
                if (vert2[2] + vert2[3] < 0.0) {
                    /* Ignore other points behind near plane. */
                    continue;
                }

                /* Let the two points in question be v_i and v_j.  Define the
                   segment between them with the parametric equation
                   p(t) = (vert - vert2)t + vert2.  First, find t where the z
                   and w coordinates of p(t) sum to zero. */
                t = (vert2[2]+vert2[3])/(  vert2[2]-vert[2]
                                         + vert2[3]-vert[3]);
                /* Use t to find the intersection point.  While we are at it,
                   normalize the resulting coordinates.  We don't need z because
                   we know it is going to be -1. */
                invw = 1.0/((vert[3] - vert2[3])*t + vert2[3] );
                x = ((vert[0] - vert2[0])*t + vert2[0] ) * invw;
                y = ((vert[1] - vert2[1])*t + vert2[1] ) * invw;

                /* Update contained region. */
                if (left   > x) left   = x;
                if (right  < x) right  = x;
                if (bottom > y) bottom = y;
                if (top    < y) top    = y;
                /* The intersection lies exactly on the near plane. */
                *znear = -1.0;
            }
        }
    }

    /* Expand the bounds outward to whole-pixel boundaries. */
    left   = floor(left);
    right  = ceil(right);
    bottom = floor(bottom);
    top    = ceil(top);

    /* Clip bounds to global viewport. */
    if (left   < global_viewport[0]) left = global_viewport[0];
    if (right  > global_viewport[0] + global_viewport[2])
        right  = global_viewport[0] + global_viewport[2];
    if (bottom < global_viewport[1]) bottom = global_viewport[1];
    if (top    > global_viewport[1] + global_viewport[3])
        top    = global_viewport[1] + global_viewport[3];
    if (*znear < -1.0) *znear = -1.0;
    if (*zfar  >  1.0) *zfar = 1.0;

    /* Use this information to build a containing viewport. */
    contained_viewport[0] = (IceTInt)left;
    contained_viewport[1] = (IceTInt)bottom;
    contained_viewport[2] = (IceTInt)(right - left);
    contained_viewport[3] = (IceTInt)(top - bottom);
}
/* Draw callback for the background-correction test.  Fills this process's
 * horizontal strip of the scene (rank*PROC_REGION_HEIGHT tall) with
 * g_foreground_color blended over background_color, writing only pixels that
 * fall inside readback_viewport of the given RGBA-float image.
 *
 * projection_matrix, modelview_matrix - current transforms; combined with a
 *     scale/translate pair to map object coordinates to window pixels.
 * background_color  - expected to be all zeros; blending then leaves
 *     blended_color equal to g_foreground_color (IceT does the real blend).
 * readback_viewport - {x, y, width, height} region that will be read back.
 * result            - image to render into (4 floats per pixel). */
static void BackgroundCorrectDraw(const IceTDouble *projection_matrix,
                                  const IceTDouble *modelview_matrix,
                                  const IceTFloat *background_color,
                                  const IceTInt *readback_viewport,
                                  IceTImage result)
{
    IceTDouble full_transform[16];
    IceTSizeType width;
    IceTSizeType height;
    IceTFloat *colors;
    IceTFloat blended_color[4];

    /* This is mostly done for completeness.  Because we are blending and
       correcting the color, we totally expect background_color to be all
       zeros, and therefore blended_color should be equal to
       g_foreground_color.  The real blending will happen within IceT under
       the covers. */
    ICET_BLEND_FLOAT(g_foreground_color, background_color, blended_color);

    width = icetImageGetWidth(result);
    height = icetImageGetHeight(result);
    colors = icetImageGetColorf(result);

    /* Get full transform all the way to window coordinates (pixels). */
    {
        IceTDouble scale_transform[16];
        IceTDouble translate_transform[16];
        /* Map normalized device coordinates [-1,1] to [0,width]x[0,height]. */
        icetMatrixScale(0.5*width, 0.5*height, 0.5, scale_transform);
        icetMatrixTranslate(1.0, 1.0, 1.0, translate_transform);
        icetMatrixMultiply(full_transform,scale_transform,translate_transform);
        icetMatrixPostMultiply(full_transform, projection_matrix);
        icetMatrixPostMultiply(full_transform, modelview_matrix);
    }

    /* Clear out the image (testing purposes only). */
    {
        IceTSizeType pixel;
        /* The buffer holds 4 floats (RGBA) per pixel, so iterate over
           4*width*height entries.  Iterating only width*height would leave
           three quarters of the image uncleared. */
        for (pixel = 0; pixel < 4*width*height; pixel++) {
            colors[pixel] = -1.0;
        }
    }

    /* Set my pixels. */
    {
        IceTInt rank;
        IceTSizeType region_y_start;
        IceTSizeType region_x;
        IceTSizeType region_y;

        icetGetIntegerv(ICET_RANK, &rank);
        /* Each rank owns a PROC_REGION_WIDTH x PROC_REGION_HEIGHT strip
           stacked vertically by rank. */
        region_y_start = rank*PROC_REGION_HEIGHT;

        for (region_y = 0; region_y < PROC_REGION_HEIGHT; region_y++) {
            for (region_x = 0; region_x < PROC_REGION_WIDTH; region_x++) {
                IceTDouble object_coord[4];
                IceTDouble window_coord[4];
                IceTSizeType window_pixel[2];
                IceTSizeType readback_lower[2];
                IceTSizeType readback_upper[2];
                IceTBoolean in_readback;

                object_coord[0] = (IceTDouble)region_x;
                object_coord[1] = (IceTDouble)(region_y + region_y_start);
                object_coord[2] = 0.0;
                object_coord[3] = 1.0;

                icetMatrixVectorMultiply(window_coord,
                                         full_transform,
                                         object_coord);
                /* Perspective divide, then truncate to a pixel index. */
                window_pixel[0]=(IceTSizeType)(window_coord[0]/window_coord[3]);
                window_pixel[1]=(IceTSizeType)(window_coord[1]/window_coord[3]);

                readback_lower[0] = readback_viewport[0];
                readback_lower[1] = readback_viewport[1];
                readback_upper[0] = readback_viewport[0] + readback_viewport[2];
                readback_upper[1] = readback_viewport[1] + readback_viewport[3];

                /* Only pixels inside the readback viewport are written;
                   anything else would never be read back anyway. */
                in_readback  = (readback_lower[0] <= window_pixel[0]);
                in_readback &= (readback_lower[1] <= window_pixel[1]);
                in_readback &= (window_pixel[0] < readback_upper[0]);
                in_readback &= (window_pixel[1] < readback_upper[1]);
                if (in_readback) {
                    /* NOTE(review): the row stride used here is
                       PROC_REGION_WIDTH, which is only correct if the image
                       width equals PROC_REGION_WIDTH — presumably true for
                       this test's tile layout; confirm against the caller
                       (using `width` would be the general form). */
                    IceTSizeType pixel_idx
                        = window_pixel[0] + window_pixel[1]*PROC_REGION_WIDTH;
                    colors[4*pixel_idx + 0] = blended_color[0];
                    colors[4*pixel_idx + 1] = blended_color[1];
                    colors[4*pixel_idx + 2] = blended_color[2];
                    colors[4*pixel_idx + 3] = blended_color[3];
                }
            }
        }
    }
}