/* RNA update callback for MaskParent: resolve the parented movie-clip track
 * and store its current position (in mask space) as the parenting origin. */
static void rna_Mask_update_parent(Main *bmain, Scene *scene, PointerRNA *ptr)
{
  MaskParent *parent = ptr->data;

  if (parent->id) {
    if (GS(parent->id->name) == ID_MC) {
      MovieClip *clip = (MovieClip *)parent->id;
      MovieTracking *tracking = &clip->tracking;
      MovieTrackingObject *object = BKE_tracking_object_get_named(tracking, parent->parent);

      if (object) {
        MovieTrackingTrack *track = BKE_tracking_track_get_named(tracking, object, parent->sub_parent);

        if (track) {
          int clip_framenr = BKE_movieclip_remap_scene_to_clip_frame(clip, scene->r.cfra);
          MovieTrackingMarker *marker = BKE_tracking_marker_get(track, clip_framenr);
          float marker_pos_ofs[2], parmask_pos[2];
          MovieClipUser user = {0};

          BKE_movieclip_user_set_frame(&user, scene->r.cfra);

          add_v2_v2v2(marker_pos_ofs, marker->pos, track->offset);

          BKE_mask_coord_from_movieclip(clip, &user, parmask_pos, marker_pos_ofs);

          copy_v2_v2(parent->parent_orig, parmask_pos);
        }
      }
    }
  }

  rna_Mask_update_data(bmain, scene, ptr);
}
void MovieDistortionOperation::initExecution()
{
  this->m_inputOperation = this->getInputSocketReader(0);
  if (this->m_movieClip) {
    MovieClipUser clipUser = {0};
    int calibration_width, calibration_height;

    BKE_movieclip_user_set_frame(&clipUser, this->m_framenumber);
    BKE_movieclip_get_size(this->m_movieClip, &clipUser, &calibration_width, &calibration_height);

    for (unsigned int i = 0; i < s_cache.size(); i++) {
      DistortionCache *c = (DistortionCache *)s_cache[i];

      if (c->isCacheFor(this->m_movieClip, this->m_width, this->m_height,
                        calibration_width, calibration_height, this->m_distortion))
      {
        this->m_cache = c;
        this->m_cache->updateLastUsage();
        return;
      }
    }

    DistortionCache *newC = new DistortionCache(this->m_movieClip, this->m_width, this->m_height,
                                                calibration_width, calibration_height, this->m_distortion);
    s_cache.push_back(newC);
    this->m_cache = newC;
  }
  else {
    this->m_cache = NULL;
  }
}
static void exec(void *data, bNode *node, bNodeStack **in, bNodeStack **out)
{
  if (in[0]->data) {
    if (node->id) {
      MovieClip *clip = (MovieClip *)node->id;
      CompBuf *cbuf = typecheck_compbuf(in[0]->data, CB_RGBA);
      CompBuf *stackbuf = alloc_compbuf(cbuf->x, cbuf->y, CB_RGBA, 0);
      ImBuf *ibuf;

      ibuf = IMB_allocImBuf(cbuf->x, cbuf->y, 32, 0);

      if (ibuf) {
        RenderData *rd = data;
        ImBuf *obuf;
        MovieTracking *tracking = &clip->tracking;
        int width, height;
        float overscan = 0.0f;
        MovieClipUser user = {0};

        BKE_movieclip_user_set_frame(&user, rd->cfra);

        ibuf->rect_float = cbuf->rect;

        BKE_movieclip_get_size(clip, &user, &width, &height);

        if (!node->storage)
          node->storage = BKE_tracking_distortion_create();

        if (node->custom1 == 0)
          obuf = BKE_tracking_distortion_exec(node->storage, tracking, ibuf, width, height, overscan, 1);
        else
          obuf = BKE_tracking_distortion_exec(node->storage, tracking, ibuf, width, height, overscan, 0);

        stackbuf->rect = obuf->rect_float;
        stackbuf->malloc = TRUE;

        obuf->mall &= ~IB_rectfloat;
        obuf->rect_float = NULL;

        IMB_freeImBuf(ibuf);
        IMB_freeImBuf(obuf);
      }

      /* pass on output and free */
      out[0]->data = stackbuf;

      if (cbuf != in[0]->data)
        free_compbuf(cbuf);
    }
    else {
      CompBuf *cbuf = in[0]->data;
      CompBuf *stackbuf = pass_on_compbuf(cbuf);

      out[0]->data = stackbuf;
    }
  }
}
void TrackPositionOperation::initExecution()
{
  MovieTracking *tracking = NULL;
  MovieClipUser user = {0};
  MovieTrackingObject *object;

  zero_v2(this->m_markerPos);
  zero_v2(this->m_relativePos);

  if (!this->m_movieClip)
    return;

  tracking = &this->m_movieClip->tracking;

  BKE_movieclip_user_set_frame(&user, this->m_framenumber);
  BKE_movieclip_get_size(this->m_movieClip, &user, &this->m_width, &this->m_height);

  object = BKE_tracking_object_get_named(tracking, this->m_trackingObjectName);
  if (object) {
    MovieTrackingTrack *track;

    track = BKE_tracking_track_get_named(tracking, object, this->m_trackName);
    if (track) {
      MovieTrackingMarker *marker;
      int clip_framenr = BKE_movieclip_remap_scene_to_clip_frame(this->m_movieClip, this->m_framenumber);

      marker = BKE_tracking_marker_get(track, clip_framenr);

      copy_v2_v2(this->m_markerPos, marker->pos);

      if (this->m_position == CMP_TRACKPOS_RELATIVE_START) {
        int i;

        for (i = 0; i < track->markersnr; i++) {
          marker = &track->markers[i];

          if ((marker->flag & MARKER_DISABLED) == 0) {
            copy_v2_v2(this->m_relativePos, marker->pos);
            break;
          }
        }
      }
      else if (this->m_position == CMP_TRACKPOS_RELATIVE_FRAME) {
        int relative_clip_framenr = BKE_movieclip_remap_scene_to_clip_frame(this->m_movieClip, this->m_relativeFrame);

        marker = BKE_tracking_marker_get(track, relative_clip_framenr);
        copy_v2_v2(this->m_relativePos, marker->pos);
      }
    }
  }
}
void KeyingScreenOperation::determineResolution(unsigned int resolution[2], unsigned int preferredResolution[2])
{
  resolution[0] = 0;
  resolution[1] = 0;

  if (this->m_movieClip) {
    MovieClipUser user = {0};
    int width, height;
    int clip_frame = BKE_movieclip_remap_scene_to_clip_frame(this->m_movieClip, this->m_framenumber);

    BKE_movieclip_user_set_frame(&user, clip_frame);
    BKE_movieclip_get_size(this->m_movieClip, &user, &width, &height);

    resolution[0] = width;
    resolution[1] = height;
  }
}
void TrackPositionOperation::executePixel(float *outputValue, float x, float y, PixelSampler sampler)
{
  MovieClipUser user = {0};
  MovieTracking *tracking = &movieClip->tracking;
  MovieTrackingObject *object = BKE_tracking_object_get_named(tracking, this->trackingObject);
  MovieTrackingTrack *track;
  MovieTrackingMarker *marker;
  int width, height;

  outputValue[0] = 0.0f;

  if (!object)
    return;

  track = BKE_tracking_track_get_named(tracking, object, this->trackName);
  if (!track)
    return;

  BKE_movieclip_user_set_frame(&user, this->framenumber);
  BKE_movieclip_get_size(this->movieClip, &user, &width, &height);

  marker = BKE_tracking_marker_get(track, this->framenumber);

  outputValue[0] = marker->pos[this->axis];

  if (this->relative) {
    int i;

    for (i = 0; i < track->markersnr; i++) {
      marker = &track->markers[i];

      if ((marker->flag & MARKER_DISABLED) == 0) {
        outputValue[0] -= marker->pos[this->axis];
        break;
      }
    }
  }

  if (this->axis == 0)
    outputValue[0] *= width;
  else
    outputValue[0] *= height;
}
void PlaneTrackCommonOperation::determineResolution(unsigned int resolution[2], unsigned int preferredResolution[2])
{
  NodeOperation::determineResolution(resolution, preferredResolution);

  resolution[0] = 0;
  resolution[1] = 0;

  if (this->m_movieClip) {
    int width, height;
    MovieClipUser user = {0};

    BKE_movieclip_user_set_frame(&user, this->m_framenumber);
    BKE_movieclip_get_size(this->m_movieClip, &user, &width, &height);

    resolution[0] = width;
    resolution[1] = height;
  }
}
/* Fetch the image buffer for the given clip index and clip frame,
 * always at full (non-proxy) render size. */
static ImBuf *accessor_get_preprocessed_ibuf(TrackingImageAccessor *accessor, int clip_index, int frame)
{
  MovieClip *clip;
  MovieClipUser user;
  ImBuf *ibuf;
  int scene_frame;

  BLI_assert(clip_index < accessor->num_clips);

  clip = accessor->clips[clip_index];
  scene_frame = BKE_movieclip_remap_clip_to_scene_frame(clip, frame);
  BKE_movieclip_user_set_frame(&user, scene_frame);
  user.render_size = MCLIP_PROXY_RENDER_SIZE_FULL;
  user.render_flag = 0;
  ibuf = BKE_movieclip_get_ibuf(clip, &user);

  return ibuf;
}
static void node_composit_exec_keyingscreen(void *data, bNode *node, bNodeStack **UNUSED(in), bNodeStack **out)
{
  NodeKeyingScreenData *keyingscreen_data = node->storage;
  RenderData *rd = data;
  CompBuf *screenbuf = NULL;

  if (node->id) {
    MovieClip *clip = (MovieClip *)node->id;
    MovieClipUser user = {0};
    int width, height;

    BKE_movieclip_user_set_frame(&user, rd->cfra);
    BKE_movieclip_get_size(clip, &user, &width, &height);

    screenbuf = alloc_compbuf(width, height, CB_RGBA, TRUE);
    compute_gradient_screen(rd, keyingscreen_data, clip, screenbuf);
  }

  out[0]->data = screenbuf;
}
void MovieClipBaseOperation::initExecution()
{
  if (this->m_movieClip) {
    BKE_movieclip_user_set_frame(this->m_movieClipUser, this->m_framenumber);

    ImBuf *ibuf;

    if (this->m_cacheFrame)
      ibuf = BKE_movieclip_get_ibuf(this->m_movieClip, this->m_movieClipUser);
    else
      ibuf = BKE_movieclip_get_ibuf_flag(this->m_movieClip, this->m_movieClipUser,
                                         this->m_movieClip->flag, MOVIECLIP_CACHE_SKIP);

    if (ibuf) {
      this->m_movieClipBuffer = ibuf;

      if (ibuf->rect_float == NULL || ibuf->userflags & IB_RECT_INVALID) {
        IMB_float_from_rect(ibuf);
        ibuf->userflags &= ~IB_RECT_INVALID;
      }
    }
  }
}
void ED_clip_update_frame(const Main *mainp, int cfra)
{
  wmWindowManager *wm;
  wmWindow *win;

  /* image window, compo node users */
  for (wm = mainp->wm.first; wm; wm = wm->id.next) { /* only 1 wm */
    for (win = wm->windows.first; win; win = win->next) {
      ScrArea *sa;

      for (sa = win->screen->areabase.first; sa; sa = sa->next) {
        if (sa->spacetype == SPACE_CLIP) {
          SpaceClip *sc = sa->spacedata.first;

          sc->scopes.ok = FALSE;

          BKE_movieclip_user_set_frame(&sc->user, cfra);
        }
      }
    }
  }
}
static void node_composit_exec_movieclip(void *data, bNode *node, bNodeStack **UNUSED(in), bNodeStack **out)
{
  if (node->id) {
    RenderData *rd = data;
    MovieClip *clip = (MovieClip *)node->id;
    MovieClipUser *user = (MovieClipUser *)node->storage;
    CompBuf *stackbuf = NULL;

    BKE_movieclip_user_set_frame(user, rd->cfra);

    stackbuf = node_composit_get_movieclip(rd, clip, user);

    if (stackbuf) {
      MovieTrackingStabilization *stab = &clip->tracking.stabilization;

      /* put image on stack */
      out[0]->data = stackbuf;

      if (stab->flag & TRACKING_2D_STABILIZATION) {
        float loc[2], scale, angle;
        int clip_framenr = BKE_movieclip_remap_scene_to_clip_frame(clip, rd->cfra);

        BKE_tracking_stabilization_data_get(&clip->tracking, clip_framenr, stackbuf->x, stackbuf->y,
                                            loc, &scale, &angle);

        out[1]->vec[0] = loc[0];
        out[2]->vec[0] = loc[1];
        out[3]->vec[0] = scale;
        out[4]->vec[0] = angle;
      }

      /* generate preview */
      generate_preview(data, node, stackbuf);
    }
  }
}
/* reads full rect, converts indices */
uint *ED_view3d_select_id_read(int xmin, int ymin, int xmax, int ymax, uint *r_buf_len)
{
  if (UNLIKELY((xmin > xmax) || (ymin > ymax))) {
    return NULL;
  }

  const rcti rect = {
      .xmin = xmin,
      .xmax = xmax + 1,
      .ymin = ymin,
      .ymax = ymax + 1,
  };

  uint buf_len;
  uint *buf = ED_view3d_select_id_read_rect(&rect, &buf_len);

  if (r_buf_len) {
    *r_buf_len = buf_len;
  }

  return buf;
}

/* ************************************************************* */

static void view3d_stereo_bgpic_setup(Scene *scene, View3D *v3d, Image *ima, ImageUser *iuser)
{
  if (BKE_image_is_stereo(ima)) {
    iuser->flag |= IMA_SHOW_STEREO;

    if ((scene->r.scemode & R_MULTIVIEW) == 0) {
      iuser->multiview_eye = STEREO_LEFT_ID;
    }
    else if (v3d->stereo3d_camera != STEREO_3D_ID) {
      /* show only left or right camera */
      iuser->multiview_eye = v3d->stereo3d_camera;
    }

    BKE_image_multiview_index(ima, iuser);
  }
  else {
    iuser->flag &= ~IMA_SHOW_STEREO;
  }
}

static void view3d_draw_bgpic(Scene *scene,
                              Depsgraph *depsgraph,
                              ARegion *ar,
                              View3D *v3d,
                              const bool do_foreground,
                              const bool do_camera_frame)
{
  RegionView3D *rv3d = ar->regiondata;
  int fg_flag = do_foreground ? CAM_BGIMG_FLAG_FOREGROUND : 0;

  if (v3d->camera == NULL || v3d->camera->type != OB_CAMERA) {
    return;
  }
  Camera *cam = v3d->camera->data;

  for (CameraBGImage *bgpic = cam->bg_images.first; bgpic; bgpic = bgpic->next) {
    if ((bgpic->flag & CAM_BGIMG_FLAG_FOREGROUND) != fg_flag) {
      continue;
    }

    {
      float image_aspect[2];
      float x1, y1, x2, y2, centx, centy;

      void *lock;

      Image *ima = NULL;

      /* disable individual images */
      if ((bgpic->flag & CAM_BGIMG_FLAG_DISABLED)) {
        continue;
      }

      ImBuf *ibuf = NULL;
      ImBuf *freeibuf = NULL;
      ImBuf *releaseibuf = NULL;
      if (bgpic->source == CAM_BGIMG_SOURCE_IMAGE) {
        ima = bgpic->ima;
        if (ima == NULL) {
          continue;
        }

        ImageUser iuser = bgpic->iuser;
        iuser.scene = scene; /* Needed for render results. */
        BKE_image_user_frame_calc(&iuser, (int)DEG_get_ctime(depsgraph));
        if (ima->source == IMA_SRC_SEQUENCE && !(iuser.flag & IMA_USER_FRAME_IN_RANGE)) {
          ibuf = NULL; /* frame is out of range, dont show */
        }
        else {
          view3d_stereo_bgpic_setup(scene, v3d, ima, &iuser);
          ibuf = BKE_image_acquire_ibuf(ima, &iuser, &lock);
          releaseibuf = ibuf;
        }

        image_aspect[0] = ima->aspx;
        image_aspect[1] = ima->aspy;
      }
      else if (bgpic->source == CAM_BGIMG_SOURCE_MOVIE) {
        /* TODO: skip drawing when out of frame range (as image sequences do above) */
        MovieClip *clip = NULL;

        if (bgpic->flag & CAM_BGIMG_FLAG_CAMERACLIP) {
          if (scene->camera) {
            clip = BKE_object_movieclip_get(scene, scene->camera, true);
          }
        }
        else {
          clip = bgpic->clip;
        }

        if (clip == NULL) {
          continue;
        }

        BKE_movieclip_user_set_frame(&bgpic->cuser, (int)DEG_get_ctime(depsgraph));
        ibuf = BKE_movieclip_get_ibuf(clip, &bgpic->cuser);

        image_aspect[0] = clip->aspx;
        image_aspect[1] = clip->aspy;

        /* working with ibuf from image and clip has got different workflow now.
         * ibuf acquired from clip is referenced by cache system and should
         * be dereferenced after usage. */
        freeibuf = ibuf;
      }
      else {
        /* perhaps when loading future files... */
        BLI_assert(0);
        copy_v2_fl(image_aspect, 1.0f);
      }

      if (ibuf == NULL) {
        continue;
      }

      if ((ibuf->rect == NULL && ibuf->rect_float == NULL) || ibuf->channels != 4) {
        /* invalid image format */
        if (freeibuf) {
          IMB_freeImBuf(freeibuf);
        }
        if (releaseibuf) {
          BKE_image_release_ibuf(ima, releaseibuf, lock);
        }
        continue;
      }

      if (ibuf->rect == NULL) {
        IMB_rect_from_float(ibuf);
      }

      BLI_assert(rv3d->persp == RV3D_CAMOB);
      {
        if (do_camera_frame) {
          rctf vb;
          ED_view3d_calc_camera_border(scene, depsgraph, ar, v3d, rv3d, &vb, false);
          x1 = vb.xmin;
          y1 = vb.ymin;
          x2 = vb.xmax;
          y2 = vb.ymax;
        }
        else {
          x1 = ar->winrct.xmin;
          y1 = ar->winrct.ymin;
          x2 = ar->winrct.xmax;
          y2 = ar->winrct.ymax;
        }

        /* apply offset last - camera offset is different to offset in blender units */
        /* so this has some sane way of working - this matches camera's shift _exactly_ */
        {
          const float max_dim = max_ff(x2 - x1, y2 - y1);
          const float xof_scale = bgpic->offset[0] * max_dim;
          const float yof_scale = bgpic->offset[1] * max_dim;

          x1 += xof_scale;
          y1 += yof_scale;
          x2 += xof_scale;
          y2 += yof_scale;
        }

        centx = (x1 + x2) * 0.5f;
        centy = (y1 + y2) * 0.5f;

        /* aspect correction */
        if (bgpic->flag & CAM_BGIMG_FLAG_CAMERA_ASPECT) {
          /* apply aspect from clip */
          const float w_src = ibuf->x * image_aspect[0];
          const float h_src = ibuf->y * image_aspect[1];

          /* destination aspect is already applied from the camera frame */
          const float w_dst = x1 - x2;
          const float h_dst = y1 - y2;

          const float asp_src = w_src / h_src;
          const float asp_dst = w_dst / h_dst;

          if (fabsf(asp_src - asp_dst) >= FLT_EPSILON) {
            if ((asp_src > asp_dst) == ((bgpic->flag & CAM_BGIMG_FLAG_CAMERA_CROP) != 0)) {
              /* fit X */
              const float div = asp_src / asp_dst;
              x1 = ((x1 - centx) * div) + centx;
              x2 = ((x2 - centx) * div) + centx;
            }
            else {
              /* fit Y */
              const float div = asp_dst / asp_src;
              y1 = ((y1 - centy) * div) + centy;
              y2 = ((y2 - centy) * div) + centy;
            }
          }
        }
      }

      /* complete clip? */
      rctf clip_rect;
      BLI_rctf_init(&clip_rect, x1, x2, y1, y2);
      if (bgpic->rotation) {
        BLI_rctf_rotate_expand(&clip_rect, &clip_rect, bgpic->rotation);
      }

      if (clip_rect.xmax < 0 || clip_rect.ymax < 0 || clip_rect.xmin > ar->winx ||
          clip_rect.ymin > ar->winy) {
        if (freeibuf) {
          IMB_freeImBuf(freeibuf);
        }
        if (releaseibuf) {
          BKE_image_release_ibuf(ima, releaseibuf, lock);
        }
        continue;
      }

      float zoomx = (x2 - x1) / ibuf->x;
      float zoomy = (y2 - y1) / ibuf->y;

      /* For some reason; zoom-levels down refuses to use GL_ALPHA_SCALE. */
      if (zoomx < 1.0f || zoomy < 1.0f) {
        float tzoom = min_ff(zoomx, zoomy);
        int mip = 0;

        if ((ibuf->userflags & IB_MIPMAP_INVALID) != 0) {
          IMB_remakemipmap(ibuf, 0);
          ibuf->userflags &= ~IB_MIPMAP_INVALID;
        }
        else if (ibuf->mipmap[0] == NULL) {
          IMB_makemipmap(ibuf, 0);
        }

        while (tzoom < 1.0f && mip < 8 && ibuf->mipmap[mip]) {
          tzoom *= 2.0f;
          zoomx *= 2.0f;
          zoomy *= 2.0f;
          mip++;
        }
        if (mip > 0) {
          ibuf = ibuf->mipmap[mip - 1];
        }
      }

      GPU_depth_test(!do_foreground);
      glDepthMask(GL_FALSE);

      GPU_blend(true);
      GPU_blend_set_func_separate(
          GPU_SRC_ALPHA, GPU_ONE_MINUS_SRC_ALPHA, GPU_ONE, GPU_ONE_MINUS_SRC_ALPHA);

      GPU_matrix_push_projection();
      GPU_matrix_push();
      ED_region_pixelspace(ar);

      GPU_matrix_translate_2f(centx, centy);
      GPU_matrix_scale_1f(bgpic->scale);
      GPU_matrix_rotate_2d(RAD2DEGF(-bgpic->rotation));

      if (bgpic->flag & CAM_BGIMG_FLAG_FLIP_X) {
        zoomx *= -1.0f;
        x1 = x2;
      }
      if (bgpic->flag & CAM_BGIMG_FLAG_FLIP_Y) {
        zoomy *= -1.0f;
        y1 = y2;
      }

      float col[4] = {1.0f, 1.0f, 1.0f, bgpic->alpha};
      IMMDrawPixelsTexState state = immDrawPixelsTexSetup(GPU_SHADER_2D_IMAGE_COLOR);
      immDrawPixelsTex(&state,
                       x1 - centx,
                       y1 - centy,
                       ibuf->x,
                       ibuf->y,
                       GL_RGBA,
                       GL_UNSIGNED_BYTE,
                       GL_LINEAR,
                       ibuf->rect,
                       zoomx,
                       zoomy,
                       col);

      GPU_matrix_pop_projection();
      GPU_matrix_pop();

      GPU_blend(false);
      glDepthMask(GL_TRUE);
      GPU_depth_test(true);

      if (freeibuf) {
        IMB_freeImBuf(freeibuf);
      }
      if (releaseibuf) {
        BKE_image_release_ibuf(ima, releaseibuf, lock);
      }
    }
  }
}
static void compute_gradient_screen(RenderData *rd, NodeKeyingScreenData *keyingscreen_data,
                                    MovieClip *clip, CompBuf *screenbuf)
{
  MovieClipUser user = {0};
  MovieTracking *tracking = &clip->tracking;
  MovieTrackingTrack *track;
  VoronoiTriangulationPoint *triangulated_points;
  VoronoiSite *sites;
  ImBuf *ibuf;
  ListBase *tracksbase;
  ListBase edges = {NULL, NULL};
  int sites_total, triangulated_points_total, triangles_total;
  int (*triangles)[3];
  int i, x, y;
  float *rect = screenbuf->rect;

  if (keyingscreen_data->tracking_object[0]) {
    MovieTrackingObject *object = BKE_tracking_object_get_named(tracking, keyingscreen_data->tracking_object);

    if (!object)
      return;

    tracksbase = BKE_tracking_object_get_tracks(tracking, object);
  }
  else
    tracksbase = BKE_tracking_get_active_tracks(tracking);

  sites_total = BLI_countlist(tracksbase);

  if (!sites_total)
    return;

  BKE_movieclip_user_set_frame(&user, rd->cfra);
  ibuf = BKE_movieclip_get_ibuf(clip, &user);

  sites = MEM_callocN(sizeof(VoronoiSite) * sites_total, "keyingscreen voronoi sites");
  track = tracksbase->first;
  i = 0;
  while (track) {
    VoronoiSite *site = &sites[i];
    MovieTrackingMarker *marker = BKE_tracking_marker_get(track, rd->cfra);
    ImBuf *pattern_ibuf = BKE_tracking_get_pattern_imbuf(ibuf, track, marker, TRUE, FALSE);
    int j;

    zero_v3(site->color);

    if (pattern_ibuf) {
      for (j = 0; j < pattern_ibuf->x * pattern_ibuf->y; j++) {
        if (pattern_ibuf->rect_float) {
          add_v3_v3(site->color, &pattern_ibuf->rect_float[4 * j]);
        }
        else {
          unsigned char *rrgb = (unsigned char *)pattern_ibuf->rect;

          site->color[0] += srgb_to_linearrgb((float)rrgb[4 * j + 0] / 255.0f);
          site->color[1] += srgb_to_linearrgb((float)rrgb[4 * j + 1] / 255.0f);
          site->color[2] += srgb_to_linearrgb((float)rrgb[4 * j + 2] / 255.0f);
        }
      }

      mul_v3_fl(site->color, 1.0f / (pattern_ibuf->x * pattern_ibuf->y));
      IMB_freeImBuf(pattern_ibuf);
    }

    site->co[0] = marker->pos[0] * screenbuf->x;
    site->co[1] = marker->pos[1] * screenbuf->y;

    track = track->next;
    i++;
  }

  IMB_freeImBuf(ibuf);

  BLI_voronoi_compute(sites, sites_total, screenbuf->x, screenbuf->y, &edges);

  BLI_voronoi_triangulate(sites, sites_total, &edges, screenbuf->x, screenbuf->y,
                          &triangulated_points, &triangulated_points_total,
                          &triangles, &triangles_total);

  for (y = 0; y < screenbuf->y; y++) {
    for (x = 0; x < screenbuf->x; x++) {
      int index = 4 * (y * screenbuf->x + x);

      rect[index + 0] = rect[index + 1] = rect[index + 2] = 0.0f;
      rect[index + 3] = 1.0f;

      for (i = 0; i < triangles_total; i++) {
        int *triangle = triangles[i];
        VoronoiTriangulationPoint *a = &triangulated_points[triangle[0]],
                                  *b = &triangulated_points[triangle[1]],
                                  *c = &triangulated_points[triangle[2]];
        float co[2] = {x, y}, w[3];

        if (barycentric_coords_v2(a->co, b->co, c->co, co, w)) {
          if (barycentric_inside_triangle_v2(w)) {
            rect[index + 0] += a->color[0] * w[0] + b->color[0] * w[1] + c->color[0] * w[2];
            rect[index + 1] += a->color[1] * w[0] + b->color[1] * w[1] + c->color[1] * w[2];
            rect[index + 2] += a->color[2] * w[0] + b->color[2] * w[1] + c->color[2] * w[2];
          }
        }
      }
    }
  }

  MEM_freeN(triangulated_points);
  MEM_freeN(triangles);
  MEM_freeN(sites);
  BLI_freelistN(&edges);
}
KeyingScreenOperation::TriangulationData *KeyingScreenOperation::buildVoronoiTriangulation()
{
  MovieClipUser user = {0};
  TriangulationData *triangulation;
  MovieTracking *tracking = &this->m_movieClip->tracking;
  MovieTrackingTrack *track;
  VoronoiSite *sites, *site;
  ImBuf *ibuf;
  ListBase *tracksbase;
  ListBase edges = {NULL, NULL};
  int sites_total;
  int i;
  int width = this->getWidth();
  int height = this->getHeight();
  int clip_frame = BKE_movieclip_remap_scene_to_clip_frame(this->m_movieClip, this->m_framenumber);

  if (this->m_trackingObject[0]) {
    MovieTrackingObject *object = BKE_tracking_object_get_named(tracking, this->m_trackingObject);

    if (!object)
      return NULL;

    tracksbase = BKE_tracking_object_get_tracks(tracking, object);
  }
  else
    tracksbase = BKE_tracking_get_active_tracks(tracking);

  /* count sites */
  for (track = (MovieTrackingTrack *) tracksbase->first, sites_total = 0; track; track = track->next) {
    MovieTrackingMarker *marker = BKE_tracking_marker_get(track, clip_frame);
    float pos[2];

    if (marker->flag & MARKER_DISABLED)
      continue;

    add_v2_v2v2(pos, marker->pos, track->offset);

    if (!IN_RANGE_INCL(pos[0], 0.0f, 1.0f) ||
        !IN_RANGE_INCL(pos[1], 0.0f, 1.0f))
    {
      continue;
    }

    sites_total++;
  }

  if (!sites_total)
    return NULL;

  BKE_movieclip_user_set_frame(&user, clip_frame);
  ibuf = BKE_movieclip_get_ibuf(this->m_movieClip, &user);

  if (!ibuf)
    return NULL;

  triangulation = (TriangulationData *) MEM_callocN(sizeof(TriangulationData),
                                                    "keying screen triangulation data");

  sites = (VoronoiSite *) MEM_callocN(sizeof(VoronoiSite) * sites_total,
                                      "keyingscreen voronoi sites");
  track = (MovieTrackingTrack *) tracksbase->first;
  for (track = (MovieTrackingTrack *) tracksbase->first, site = sites; track; track = track->next) {
    MovieTrackingMarker *marker = BKE_tracking_marker_get(track, clip_frame);
    ImBuf *pattern_ibuf;
    int j;
    float pos[2];

    if (marker->flag & MARKER_DISABLED)
      continue;

    add_v2_v2v2(pos, marker->pos, track->offset);

    if (!IN_RANGE_INCL(pos[0], 0.0f, 1.0f) ||
        !IN_RANGE_INCL(pos[1], 0.0f, 1.0f))
    {
      continue;
    }

    pattern_ibuf = BKE_tracking_get_pattern_imbuf(ibuf, track, marker, TRUE, FALSE);

    zero_v3(site->color);

    if (pattern_ibuf) {
      for (j = 0; j < pattern_ibuf->x * pattern_ibuf->y; j++) {
        if (pattern_ibuf->rect_float) {
          add_v3_v3(site->color, &pattern_ibuf->rect_float[4 * j]);
        }
        else {
          unsigned char *rrgb = (unsigned char *)pattern_ibuf->rect;

          site->color[0] += srgb_to_linearrgb((float)rrgb[4 * j + 0] / 255.0f);
          site->color[1] += srgb_to_linearrgb((float)rrgb[4 * j + 1] / 255.0f);
          site->color[2] += srgb_to_linearrgb((float)rrgb[4 * j + 2] / 255.0f);
        }
      }

      mul_v3_fl(site->color, 1.0f / (pattern_ibuf->x * pattern_ibuf->y));
      IMB_freeImBuf(pattern_ibuf);
    }

    site->co[0] = pos[0] * width;
    site->co[1] = pos[1] * height;

    site++;
  }

  IMB_freeImBuf(ibuf);

  BLI_voronoi_compute(sites, sites_total, width, height, &edges);

  BLI_voronoi_triangulate(sites, sites_total, &edges, width, height,
                          &triangulation->triangulated_points,
                          &triangulation->triangulated_points_total,
                          &triangulation->triangles,
                          &triangulation->triangles_total);

  MEM_freeN(sites);
  BLI_freelistN(&edges);

  if (triangulation->triangles_total) {
    rctf *rect;
    rect = triangulation->triangles_AABB =
        (rctf *) MEM_callocN(sizeof(rctf) * triangulation->triangles_total,
                             "voronoi triangulation AABB");

    for (i = 0; i < triangulation->triangles_total; i++, rect++) {
      int *triangle = triangulation->triangles[i];
      VoronoiTriangulationPoint *a = &triangulation->triangulated_points[triangle[0]],
                                *b = &triangulation->triangulated_points[triangle[1]],
                                *c = &triangulation->triangulated_points[triangle[2]];
      float min[2], max[2];

      INIT_MINMAX2(min, max);

      minmax_v2v2_v2(min, max, a->co);
      minmax_v2v2_v2(min, max, b->co);
      minmax_v2v2_v2(min, max, c->co);

      rect->xmin = min[0];
      rect->ymin = min[1];
      rect->xmax = max[0];
      rect->ymax = max[1];
    }
  }

  return triangulation;
}