/**
 * Initialize \a params and \a data for a camera "view frame fit" computation.
 *
 * Sets up camera parameters from \a ob, computes the view-plane / projection
 * matrix, extracts the four side frustum planes, and rotates them into world
 * orientation ready for the per-vertex fitting callback.
 *
 * \param scene: Used for render aspect/size when computing the view-plane; may be NULL
 *               (falls back to a square 1:1 view-plane).
 * \param ob: The camera object (its obmat supplies the rotation).
 * \param params: Output camera parameters (fully re-initialized here).
 * \param data: Output callback data (planes, normals, distances).
 */
static void camera_frame_fit_data_init(
        const Scene *scene, const Object *ob, CameraParams *params, CameraViewFrameData *data)
{
	float camera_rotmat_transposed_inversed[4][4];
	unsigned int i;

	/* setup parameters */
	BKE_camera_params_init(params);
	BKE_camera_params_from_object(params, ob);

	/* compute matrix, viewplane, .. */
	if (scene) {
		BKE_camera_params_compute_viewplane(params, scene->r.xsch, scene->r.ysch, scene->r.xasp, scene->r.yasp);
	}
	else {
		/* No scene: use a unit-sized, square-aspect view-plane. */
		BKE_camera_params_compute_viewplane(params, 1, 1, 1.0f, 1.0f);
	}
	BKE_camera_params_compute_matrix(params);

	/* initialize callback data */
	/* Take only the rotation part of the object matrix (normalized to strip scale). */
	copy_m3_m4(data->camera_rotmat, (float (*)[4])ob->obmat);
	normalize_m3(data->camera_rotmat);
	/* To transform a plane which is in its homogeneous representation (4d vector),
	 * we need the inverse of the transpose of the transform matrix... */
	copy_m4_m3(camera_rotmat_transposed_inversed, data->camera_rotmat);
	transpose_m4(camera_rotmat_transposed_inversed);
	invert_m4(camera_rotmat_transposed_inversed);

	/* Extract frustum planes from projection matrix.
	 * Note the index remapping: planes_from_projmat() outputs
	 * (left, right, top, bottom) which land in plane_tx[2, 0, 3, 1];
	 * near/far planes are not needed for the fit. */
	planes_from_projmat(params->winmat,
	                    /* left right top bottom near far */
	                    data->plane_tx[2], data->plane_tx[0], data->plane_tx[3], data->plane_tx[1], NULL, NULL);

	/* Rotate planes and get normals from them */
	for (i = 0; i < CAMERA_VIEWFRAME_NUM_PLANES; i++) {
		mul_m4_v4(camera_rotmat_transposed_inversed, data->plane_tx[i]);
		normalize_v3_v3(data->normal_tx[i], data->plane_tx[i]);
	}

	/* Start with "infinite" distances; the callback shrinks these. */
	copy_v4_fl(data->dist_vals_sq, FLT_MAX);
	data->tot = 0;
	data->is_ortho = params->is_ortho;
	if (params->is_ortho) {
		/* we want (0, 0, -1) transformed by camera_rotmat, this is a quicker shortcut. */
		negate_v3_v3(data->camera_no, data->camera_rotmat[2]);
		data->dist_to_cam = FLT_MAX;
	}
}
/**
 * RNA helper: compute the camera projection matrix ("winmat") for \a ob.
 *
 * \param ob: Camera object to derive parameters from.
 * \param mat_ret: Output 4x4 matrix, flattened to 16 floats (row-major as Blender stores it).
 * \param width, height: Render resolution used to compute the view-plane.
 * \param scalex, scaley: Pixel aspect scaling applied to the view-plane.
 *
 * Fix: the corrupted `¶ms` tokens (mojibake for `&params`) are restored,
 * making the function compile again.
 */
static void rna_Object_calc_matrix_camera(
        Object *ob, float mat_ret[16], int width, int height, float scalex, float scaley)
{
	CameraParams params;

	/* setup parameters */
	BKE_camera_params_init(&params);
	BKE_camera_params_from_object(&params, ob);

	/* compute matrix, viewplane, .. */
	BKE_camera_params_compute_viewplane(&params, width, height, scalex, scaley);
	BKE_camera_params_compute_matrix(&params);

	/* Copy the projection matrix into the caller's flat 16-float buffer. */
	copy_m4_m4((float (*)[4])mat_ret, params.winmat);
}
/**
 * Fill \a params from a 3D viewport (camera, orthographic or perspective mode).
 *
 * \param params: Camera parameters to fill (lens/clip are always set from \a v3d).
 * \param v3d: Viewport settings (lens, near/far clip, camera object).
 * \param rv3d: Per-region view data (perspective mode, zoom, pan, dist).
 *
 * Fix: \a sensor_size was declared `int`, truncating the float returned by
 * BKE_camera_sensor_size() (sensor sizes are fractional millimeters, e.g. 23.4),
 * which skewed `ortho_scale`. It is now a `float`.
 */
void BKE_camera_params_from_view3d(CameraParams *params, View3D *v3d, RegionView3D *rv3d)
{
	/* common */
	params->lens = v3d->lens;
	params->clipsta = v3d->near;
	params->clipend = v3d->far;

	if (rv3d->persp == RV3D_CAMOB) {
		/* camera view */
		BKE_camera_params_from_object(params, v3d->camera);

		params->zoom = BKE_screen_view3d_zoom_to_fac((float)rv3d->camzoom);

		/* Pan offsets and shifts are expressed in zoomed frame space. */
		params->offsetx = 2.0f * rv3d->camdx * params->zoom;
		params->offsety = 2.0f * rv3d->camdy * params->zoom;

		params->shiftx *= params->zoom;
		params->shifty *= params->zoom;

		params->zoom = 1.0f / params->zoom;
	}
	else if (rv3d->persp == RV3D_ORTHO) {
		/* orthographic view */
		float sensor_size = BKE_camera_sensor_size(params->sensor_fit, params->sensor_x, params->sensor_y);

		params->clipend *= 0.5f;    /* otherwise too extreme low zbuffer quality */
		params->clipsta = -params->clipend;

		params->is_ortho = true;
		/* make sure any changes to this match ED_view3d_radius_to_ortho_dist() */
		params->ortho_scale = rv3d->dist * sensor_size / v3d->lens;
		params->zoom = 2.0f;
	}
	else {
		/* perspective view */
		params->zoom = 2.0f;
	}
}
/**
 * Apply the UV-Project modifier: project the mesh's UVs from one or more
 * projector objects (cameras or arbitrary objects).
 *
 * \param umd: Modifier settings (projectors, aspect/scale, image override).
 * \param ob: The object the modifier is applied to (used for world-space coords).
 * \param dm: Input derived mesh; returned unchanged when there is nothing to do.
 * \return \a dm with its active/named UV layer overwritten by the projection.
 *
 * Fix: the corrupted `¶ms` tokens (mojibake for `&params`) in the camera
 * parameter setup are restored, making the function compile again.
 */
static DerivedMesh *uvprojectModifier_do(UVProjectModifierData *umd, Object *ob, DerivedMesh *dm)
{
	float (*coords)[3], (*co)[3];
	MLoopUV *mloop_uv;
	MTexPoly *mtexpoly, *mt = NULL;
	int i, numVerts, numPolys, numLoops;
	Image *image = umd->image;
	MPoly *mpoly, *mp;
	MLoop *mloop;
	const bool override_image = (umd->flags & MOD_UVPROJECT_OVERRIDEIMAGE) != 0;
	Projector projectors[MOD_UVPROJECT_MAXPROJECTORS];
	int num_projectors = 0;
	char uvname[MAX_CUSTOMDATA_LAYER_NAME];
	/* Zero aspect/scale values are treated as 1.0 (no effect). */
	float aspx = umd->aspectx ? umd->aspectx : 1.0f;
	float aspy = umd->aspecty ? umd->aspecty : 1.0f;
	float scax = umd->scalex ? umd->scalex : 1.0f;
	float scay = umd->scaley ? umd->scaley : 1.0f;
	int free_uci = 0;

	/* Compact the projector list: skip unset slots. */
	for (i = 0; i < umd->num_projectors; ++i)
		if (umd->projectors[i])
			projectors[num_projectors++].ob = umd->projectors[i];

	if (num_projectors == 0) return dm;

	/* make sure there are UV Maps available */
	if (!CustomData_has_layer(&dm->loopData, CD_MLOOPUV)) return dm;

	/* make sure we're using an existing layer */
	CustomData_validate_layer_name(&dm->loopData, CD_MLOOPUV, umd->uvlayer_name, uvname);

	/* calculate a projection matrix and normal for each projector */
	for (i = 0; i < num_projectors; ++i) {
		float tmpmat[4][4];
		float offsetmat[4][4];
		Camera *cam = NULL;
		/* calculate projection matrix */
		invert_m4_m4(projectors[i].projmat, projectors[i].ob->obmat);

		projectors[i].uci = NULL;

		if (projectors[i].ob->type == OB_CAMERA) {
			cam = (Camera *)projectors[i].ob->data;
			if (cam->type == CAM_PANO) {
				/* Panoramic cameras use the uvproject camera-info path. */
				projectors[i].uci = BLI_uvproject_camera_info(projectors[i].ob, NULL, aspx, aspy);
				BLI_uvproject_camera_info_scale(projectors[i].uci, scax, scay);
				free_uci = 1;
			}
			else {
				CameraParams params;

				/* setup parameters */
				BKE_camera_params_init(&params);
				BKE_camera_params_from_object(&params, projectors[i].ob);

				/* compute matrix, viewplane, .. */
				BKE_camera_params_compute_viewplane(&params, 1, 1, aspx, aspy);

				/* scale the view-plane */
				params.viewplane.xmin *= scax;
				params.viewplane.xmax *= scax;
				params.viewplane.ymin *= scay;
				params.viewplane.ymax *= scay;

				BKE_camera_params_compute_matrix(&params);
				mul_m4_m4m4(tmpmat, params.winmat, projectors[i].projmat);
			}
		}
		else {
			copy_m4_m4(tmpmat, projectors[i].projmat);
		}

		/* Remap projected coords from [-1, 1] to [0, 1] UV space. */
		unit_m4(offsetmat);
		mul_mat3_m4_fl(offsetmat, 0.5);
		offsetmat[3][0] = offsetmat[3][1] = offsetmat[3][2] = 0.5;

		mul_m4_m4m4(projectors[i].projmat, offsetmat, tmpmat);

		/* calculate worldspace projector normal (for best projector test) */
		projectors[i].normal[0] = 0;
		projectors[i].normal[1] = 0;
		projectors[i].normal[2] = 1;
		mul_mat3_m4_v3(projectors[i].ob->obmat, projectors[i].normal);
	}

	numPolys = dm->getNumPolys(dm);
	numLoops = dm->getNumLoops(dm);

	/* make sure we are not modifying the original UV map */
	mloop_uv = CustomData_duplicate_referenced_layer_named(&dm->loopData, CD_MLOOPUV, uvname, numLoops);

	/* can be NULL */
	mt = mtexpoly = CustomData_duplicate_referenced_layer_named(&dm->polyData, CD_MTEXPOLY, uvname, numPolys);

	numVerts = dm->getNumVerts(dm);

	coords = MEM_mallocN(sizeof(*coords) * numVerts, "uvprojectModifier_do coords");
	dm->getVertCos(dm, coords);

	/* convert coords to world space */
	for (i = 0, co = coords; i < numVerts; ++i, ++co)
		mul_m4_v3(ob->obmat, *co);

	/* if only one projector, project coords to UVs */
	if (num_projectors == 1 && projectors[0].uci == NULL)
		for (i = 0, co = coords; i < numVerts; ++i, ++co)
			mul_project_m4_v3(projectors[0].projmat, *co);

	mpoly = dm->getPolyArray(dm);
	mloop = dm->getLoopArray(dm);

	/* apply coords as UVs, and apply image if tfaces are new */
	for (i = 0, mp = mpoly; i < numPolys; ++i, ++mp, ++mt) {
		if (override_image || !image || (mtexpoly == NULL || mt->tpage == image)) {
			if (num_projectors == 1) {
				if (projectors[0].uci) {
					unsigned int fidx = mp->totloop - 1;
					do {
						unsigned int lidx = mp->loopstart + fidx;
						unsigned int vidx = mloop[lidx].v;
						BLI_uvproject_from_camera(mloop_uv[lidx].uv, coords[vidx], projectors[0].uci);
					} while (fidx--);
				}
				else {
					/* apply transformed coords as UVs */
					unsigned int fidx = mp->totloop - 1;
					do {
						unsigned int lidx = mp->loopstart + fidx;
						unsigned int vidx = mloop[lidx].v;
						copy_v2_v2(mloop_uv[lidx].uv, coords[vidx]);
					} while (fidx--);
				}
			}
			else {
				/* multiple projectors, select the closest to face normal direction */
				float face_no[3];
				int j;
				Projector *best_projector;
				float best_dot;

				/* get the untransformed face normal */
				BKE_mesh_calc_poly_normal_coords(mp, mloop + mp->loopstart, (const float (*)[3])coords, face_no);

				/* find the projector which the face points at most directly
				 * (projector normal with largest dot product is best) */
				best_dot = dot_v3v3(projectors[0].normal, face_no);
				best_projector = &projectors[0];

				for (j = 1; j < num_projectors; ++j) {
					float tmp_dot = dot_v3v3(projectors[j].normal, face_no);
					if (tmp_dot > best_dot) {
						best_dot = tmp_dot;
						best_projector = &projectors[j];
					}
				}

				if (best_projector->uci) {
					unsigned int fidx = mp->totloop - 1;
					do {
						unsigned int lidx = mp->loopstart + fidx;
						unsigned int vidx = mloop[lidx].v;
						BLI_uvproject_from_camera(mloop_uv[lidx].uv, coords[vidx], best_projector->uci);
					} while (fidx--);
				}
				else {
					unsigned int fidx = mp->totloop - 1;
					do {
						unsigned int lidx = mp->loopstart + fidx;
						unsigned int vidx = mloop[lidx].v;
						mul_v2_project_m4_v3(mloop_uv[lidx].uv, best_projector->projmat, coords[vidx]);
					} while (fidx--);
				}
			}
		}

		if (override_image && mtexpoly) {
			mt->tpage = image;
		}
	}

	MEM_freeN(coords);

	if (free_uci) {
		int j;
		for (j = 0; j < num_projectors; ++j) {
			if (projectors[j].uci) {
				MEM_freeN(projectors[j].uci);
			}
		}
	}

	/* Mark tessellated CD layers as dirty. */
	dm->dirty |= DM_DIRTY_TESS_CDLAYERS;

	return dm;
}
/** * Return a new RegionView3D.dist value to fit the \a radius. * * \note Depth isn't taken into account, this will fit a flat plane exactly, * but points towards the view (with a perspective projection), * may be within the radius but outside the view. eg: * * <pre> * + * pt --> + /^ radius * / | * / | * view + + * \ | * \ | * \| * + * </pre> * * \param ar Can be NULL if \a use_aspect is false. * \param persp Allow the caller to tell what kind of perspective to use (ortho/view/camera) * \param use_aspect Increase the distance to account for non 1:1 view aspect. * \param radius The radius will be fitted exactly, typically pre-scaled by a margin (#VIEW3D_MARGIN). */ float ED_view3d_radius_to_dist( const View3D *v3d, const ARegion *ar, const char persp, const bool use_aspect, const float radius) { float dist; BLI_assert(ELEM(persp, RV3D_ORTHO, RV3D_PERSP, RV3D_CAMOB)); BLI_assert((persp != RV3D_CAMOB) || v3d->camera); if (persp == RV3D_ORTHO) { dist = ED_view3d_radius_to_dist_ortho(v3d->lens, radius); } else { float lens, sensor_size, zoom; float angle; if (persp == RV3D_CAMOB) { CameraParams params; BKE_camera_params_init(¶ms); params.clipsta = v3d->near; params.clipend = v3d->far; BKE_camera_params_from_object(¶ms, v3d->camera); lens = params.lens; sensor_size = BKE_camera_sensor_size(params.sensor_fit, params.sensor_x, params.sensor_y); /* ignore 'rv3d->camzoom' because we want to fit to the cameras frame */ zoom = CAMERA_PARAM_ZOOM_INIT_CAMOB; } else { lens = v3d->lens; sensor_size = DEFAULT_SENSOR_WIDTH; zoom = CAMERA_PARAM_ZOOM_INIT_PERSP; } angle = focallength_to_fov(lens, sensor_size); /* zoom influences lens, correct this by scaling the angle as a distance (by the zoom-level) */ angle = atanf(tanf(angle / 2.0f) * zoom) * 2.0f; dist = ED_view3d_radius_to_dist_persp(angle, radius); } if (use_aspect) { const RegionView3D *rv3d = ar->regiondata; float winx, winy; if (persp == RV3D_CAMOB) { /* camera frame x/y in pixels */ winx = ar->winx / 
rv3d->viewcamtexcofac[0]; winy = ar->winy / rv3d->viewcamtexcofac[1]; } else { winx = ar->winx; winy = ar->winy; } if (winx && winy) { float aspect = winx / winy; if (aspect < 1.0f) { aspect = 1.0f / aspect; } dist *= aspect; } } return dist; }