/**
 * Convert the Time node into a constant SetValueOperation.
 *
 * The output factor ramps linearly from 0 at frame custom1 to 1 at frame
 * custom2 (clamped to those values outside the range), is then remapped
 * through the node's curve mapping, and finally clamped to [0, 1].
 */
void TimeNode::convertToOperations(NodeConverter &converter, const CompositorContext &context) const
{
  SetValueOperation *operation = new SetValueOperation();
  bNode *node = this->getbNode();

  /* stack order output: fac */
  float fac = 0.0f;
  const int framenumber = context.getFramenumber();

  if (framenumber < node->custom1) {
    fac = 0.0f;
  }
  else if (framenumber > node->custom2) {
    fac = 1.0f;
  }
  else if (node->custom1 < node->custom2) {
    /* Use the cached frame number; previously this re-queried the context. */
    fac = (framenumber - node->custom1) / (float)(node->custom2 - node->custom1);
  }

  /* Cast the node storage once instead of once per call. */
  CurveMapping *mapping = (CurveMapping *)node->storage;
  curvemapping_initialize(mapping);
  fac = curvemapping_evaluateF(mapping, 0, fac);

  operation->setValue(clamp_f(fac, 0.0f, 1.0f));
  converter.addOperation(operation);

  converter.mapOutputSocket(getOutputSocket(0), operation->getOutputSocket());
}
/**
 * Build the 2D stabilization chain for the current frame:
 * scale -> rotate -> translate, each transform driven by an attribute read
 * from the movie clip's tracking data, finished by a sampler override.
 */
void Stabilize2dNode::convertToOperations(NodeConverter &converter, const CompositorContext &context) const
{
  bNode *editorNode = this->getbNode();
  NodeInput *imageInput = this->getInputSocket(0);
  MovieClip *clip = (MovieClip *)editorNode->id;
  const int framenumber = context.getFramenumber();

  ScaleOperation *scaleOperation = new ScaleOperation();
  scaleOperation->setSampler((PixelSampler)editorNode->custom1);
  RotateOperation *rotateOperation = new RotateOperation();
  rotateOperation->setDoDegree2RadConversion(false);
  TranslateOperation *translateOperation = new TranslateOperation();
  SetSamplerOperation *psoperation = new SetSamplerOperation();
  psoperation->setSampler((PixelSampler)editorNode->custom1);

  /* One clip-attribute reader per stabilization channel. */
  MovieClipAttributeOperation *scaleAttribute = new MovieClipAttributeOperation();
  scaleAttribute->setAttribute(MCA_SCALE);
  scaleAttribute->setFramenumber(framenumber);
  scaleAttribute->setMovieClip(clip);

  MovieClipAttributeOperation *angleAttribute = new MovieClipAttributeOperation();
  angleAttribute->setAttribute(MCA_ANGLE);
  angleAttribute->setFramenumber(framenumber);
  angleAttribute->setMovieClip(clip);

  MovieClipAttributeOperation *xAttribute = new MovieClipAttributeOperation();
  xAttribute->setAttribute(MCA_X);
  xAttribute->setFramenumber(framenumber);
  xAttribute->setMovieClip(clip);

  MovieClipAttributeOperation *yAttribute = new MovieClipAttributeOperation();
  yAttribute->setAttribute(MCA_Y);
  yAttribute->setFramenumber(framenumber);
  yAttribute->setMovieClip(clip);

  converter.addOperation(scaleAttribute);
  converter.addOperation(angleAttribute);
  converter.addOperation(xAttribute);
  converter.addOperation(yAttribute);
  converter.addOperation(scaleOperation);
  converter.addOperation(translateOperation);
  converter.addOperation(rotateOperation);
  converter.addOperation(psoperation);

  /* image -> scale; the same attribute feeds both scale factor inputs. */
  converter.mapInputSocket(imageInput, scaleOperation->getInputSocket(0));
  converter.addLink(scaleAttribute->getOutputSocket(), scaleOperation->getInputSocket(1));
  converter.addLink(scaleAttribute->getOutputSocket(), scaleOperation->getInputSocket(2));

  /* scale -> rotate */
  converter.addLink(scaleOperation->getOutputSocket(), rotateOperation->getInputSocket(0));
  converter.addLink(angleAttribute->getOutputSocket(), rotateOperation->getInputSocket(1));

  /* rotate -> translate */
  converter.addLink(rotateOperation->getOutputSocket(), translateOperation->getInputSocket(0));
  converter.addLink(xAttribute->getOutputSocket(), translateOperation->getInputSocket(1));
  converter.addLink(yAttribute->getOutputSocket(), translateOperation->getInputSocket(2));

  /* translate -> sampler override -> node output */
  converter.addLink(translateOperation->getOutputSocket(), psoperation->getInputSocket(0));
  converter.mapOutputSocket(getOutputSocket(), psoperation->getOutputSocket());
}
/**
 * Start the compositor worker threads.
 *
 * Creates the CPU work queue and one worker thread per registered CPU
 * device. When OpenCL support is compiled in and the context reports active
 * OpenCL devices, a second queue/thread pool is created for the GPU devices
 * and `g_openclActive` is set accordingly.
 *
 * Only does anything in the queue-based threading model
 * (COM_CURRENT_THREADING_MODEL == COM_TM_QUEUE).
 */
void WorkScheduler::start(CompositorContext &context)
{
#if COM_CURRENT_THREADING_MODEL == COM_TM_QUEUE
  unsigned int index;
  /* CPU pool: all worker threads consume work from g_cpuqueue. */
  g_cpuqueue = BLI_thread_queue_init();
  BLI_init_threads(&g_cputhreads, thread_execute_cpu, g_cpudevices.size());
  for (index = 0; index < g_cpudevices.size(); index++) {
    Device *device = g_cpudevices[index];
    BLI_insert_thread(&g_cputhreads, device);
  }
#ifdef COM_OPENCL_ENABLED
  /* GPU pool: only started when usable OpenCL devices were detected. */
  if (context.getHasActiveOpenCLDevices()) {
    g_gpuqueue = BLI_thread_queue_init();
    BLI_init_threads(&g_gputhreads, thread_execute_gpu, g_gpudevices.size());
    for (index = 0; index < g_gpudevices.size(); index++) {
      Device *device = g_gpudevices[index];
      BLI_insert_thread(&g_gputhreads, device);
    }
    g_openclActive = true;
  }
  else {
    g_openclActive = false;
  }
#endif
#endif
}
/**
 * Build operations for the Bokeh Blur node.
 *
 * A variable-size blur is used when the node requests it AND the size socket
 * is linked; otherwise a regular bokeh blur is created, taking a fixed size
 * from the unlinked size socket's editor value.
 */
void BokehBlurNode::convertToOperations(NodeConverter &converter, const CompositorContext &context) const
{
  bNode *b_node = this->getbNode();

  NodeInput *sizeInput = this->getInputSocket(2);
  const bool sizeLinked = sizeInput->isLinked();
  const bool extend_bounds = (b_node->custom1 & CMP_NODEFLAG_BLUR_EXTEND_BOUNDS) != 0;
  const bool useVariableSize = (b_node->custom1 & CMP_NODEFLAG_BLUR_VARIABLE_SIZE) && sizeLinked;

  if (useVariableSize) {
    VariableSizeBokehBlurOperation *blur = new VariableSizeBokehBlurOperation();
    blur->setQuality(context.getQuality());
    blur->setThreshold(0.0f);
    blur->setMaxBlur(b_node->custom4);
    blur->setDoScaleSize(true);

    converter.addOperation(blur);
    converter.mapInputSocket(getInputSocket(0), blur->getInputSocket(0));
    converter.mapInputSocket(getInputSocket(1), blur->getInputSocket(1));
    converter.mapInputSocket(getInputSocket(2), blur->getInputSocket(2));
    converter.mapOutputSocket(getOutputSocket(0), blur->getOutputSocket());
  }
  else {
    BokehBlurOperation *blur = new BokehBlurOperation();
    blur->setQuality(context.getQuality());
    blur->setExtendBounds(extend_bounds);

    converter.addOperation(blur);
    converter.mapInputSocket(getInputSocket(0), blur->getInputSocket(0));
    converter.mapInputSocket(getInputSocket(1), blur->getInputSocket(1));
    /* NOTE: node inputs 2 and 3 are intentionally crossed here because the
     * bokeh blur operation declares its sockets in the opposite order.
     * Fix for T43771. */
    converter.mapInputSocket(getInputSocket(2), blur->getInputSocket(3));
    converter.mapInputSocket(getInputSocket(3), blur->getInputSocket(2));
    converter.mapOutputSocket(getOutputSocket(0), blur->getOutputSocket());

    if (!sizeLinked) {
      blur->setSize(sizeInput->getEditorValueFloat());
    }
  }
}
void RenderLayersNode::testSocketLink(NodeConverter &converter, const CompositorContext &context, NodeOutput *output, RenderLayersProg *operation, Scene *scene, int layerId, bool is_preview) const { operation->setScene(scene); operation->setLayerId(layerId); operation->setRenderData(context.getRenderData()); operation->setViewName(context.getViewName()); converter.mapOutputSocket(output, operation->getOutputSocket()); converter.addOperation(operation); if (is_preview) /* only for image socket */ converter.addPreview(operation->getOutputSocket()); }
/**
 * Convert the Composite output node into a CompositorOperation wired to the
 * image, alpha and depth inputs.
 */
void CompositorNode::convertToOperations(NodeConverter &converter, const CompositorContext &context) const
{
  bNode *editorNode = this->getbNode();
  const bool is_active = (editorNode->flag & NODE_DO_OUTPUT_RECALC) || context.isRendering();
  const bool ignore_alpha = (editorNode->custom2 & CMP_NODE_OUTPUT_IGNORE_ALPHA) != 0;

  NodeInput *imageSocket = this->getInputSocket(0);
  NodeInput *alphaSocket = this->getInputSocket(1);
  NodeInput *depthSocket = this->getInputSocket(2);

  CompositorOperation *output = new CompositorOperation();
  output->setScene(context.getScene());
  output->setSceneName(context.getScene()->id.name);
  output->setRenderData(context.getRenderData());
  output->setViewName(context.getViewName());
  output->setbNodeTree(context.getbNodeTree());
  /* alpha socket gives either 1 or a custom alpha value if "use alpha" is enabled */
  output->setUseAlphaInput(ignore_alpha || alphaSocket->isLinked());
  output->setActive(is_active);

  converter.addOperation(output);
  converter.mapInputSocket(imageSocket, output->getInputSocket(0));

  /* only use the alpha link if "use alpha" is enabled */
  if (ignore_alpha) {
    converter.addInputValue(output->getInputSocket(1), 1.0f);
  }
  else {
    converter.mapInputSocket(alphaSocket, output->getInputSocket(1));
  }
  converter.mapInputSocket(depthSocket, output->getInputSocket(2));

  converter.addNodeInputPreview(imageSocket);
}
/**
 * Convert the Bilateral Blur node into a single BilateralBlurOperation
 * configured from the node's storage settings.
 */
void BilateralBlurNode::convertToOperations(NodeConverter &converter, const CompositorContext &context) const
{
  /* Node settings live in the editor node's storage struct. */
  NodeBilateralBlurData *settings = (NodeBilateralBlurData *)this->getbNode()->storage;

  BilateralBlurOperation *blur = new BilateralBlurOperation();
  blur->setQuality(context.getQuality());
  blur->setData(settings);
  converter.addOperation(blur);

  converter.mapInputSocket(getInputSocket(0), blur->getInputSocket(0));
  converter.mapInputSocket(getInputSocket(1), blur->getInputSocket(1));
  converter.mapOutputSocket(getOutputSocket(0), blur->getOutputSocket(0));
}
/**
 * Convert the Split Viewer node: blend the two inputs at the configured
 * split position, then feed the result into a viewer operation.
 */
void SplitViewerNode::convertToOperations(NodeConverter &converter, const CompositorContext &context) const
{
  bNode *editorNode = this->getbNode();
  /* Only register as the active viewer when the node is flagged for output
   * and a recalc (or a render) is due. */
  const bool do_output = (editorNode->flag & NODE_DO_OUTPUT_RECALC || context.isRendering()) &&
                         (editorNode->flag & NODE_DO_OUTPUT);

  NodeInput *firstImage = this->getInputSocket(0);
  NodeInput *secondImage = this->getInputSocket(1);
  Image *image = (Image *)editorNode->id;
  ImageUser *imageUser = (ImageUser *)editorNode->storage;

  SplitOperation *split = new SplitOperation();
  split->setSplitPercentage(editorNode->custom1);
  split->setXSplit(!editorNode->custom2);
  converter.addOperation(split);
  converter.mapInputSocket(firstImage, split->getInputSocket(0));
  converter.mapInputSocket(secondImage, split->getInputSocket(1));

  ViewerOperation *viewer = new ViewerOperation();
  viewer->setImage(image);
  viewer->setImageUser(imageUser);
  viewer->setViewSettings(context.getViewSettings());
  viewer->setDisplaySettings(context.getDisplaySettings());
  /* defaults - the viewer node has these options but not exposed for split view
   * we could use the split to define an area of interest on one axis at least */
  viewer->setChunkOrder(COM_ORDER_OF_CHUNKS_DEFAULT);
  viewer->setCenterX(0.5f);
  viewer->setCenterY(0.5f);
  converter.addOperation(viewer);

  converter.addLink(split->getOutputSocket(), viewer->getInputSocket(0));
  converter.addPreview(split->getOutputSocket());
  if (do_output) {
    converter.registerViewer(viewer);
  }
}
/**
 * Convert the Movie Distortion node into a MovieDistortionOperation for the
 * current frame. custom1 selects between the operation's two distortion
 * modes (presumably distort vs. undistort — confirm in the operation).
 */
void MovieDistortionNode::convertToOperations(NodeConverter &converter, const CompositorContext &context) const
{
  bNode *editorNode = this->getbNode();
  MovieClip *clip = (MovieClip *)editorNode->id;

  MovieDistortionOperation *distortion = new MovieDistortionOperation(editorNode->custom1 == 1);
  distortion->setMovieClip(clip);
  distortion->setFramenumber(context.getFramenumber());
  converter.addOperation(distortion);

  converter.mapInputSocket(this->getInputSocket(0), distortion->getInputSocket(0));
  converter.mapOutputSocket(this->getOutputSocket(0), distortion->getOutputSocket(0));
}
/**
 * Convert the Keying Screen node: a single KeyingScreenOperation reading the
 * selected tracking object of the node's movie clip at the current frame.
 */
void KeyingScreenNode::convertToOperations(NodeConverter &converter, const CompositorContext &context) const
{
  bNode *editorNode = this->getbNode();
  MovieClip *clip = (MovieClip *)editorNode->id;
  NodeKeyingScreenData *screenData = (NodeKeyingScreenData *)editorNode->storage;

  /* always connect the output image */
  KeyingScreenOperation *screen = new KeyingScreenOperation();
  screen->setMovieClip(clip);
  screen->setTrackingObject(screenData->tracking_object);
  screen->setFramenumber(context.getFramenumber());
  converter.addOperation(screen);

  converter.mapOutputSocket(this->getOutputSocket(0), screen->getOutputSocket());
}
/**
 * Convert the Box Mask node.
 *
 * When the mask input is linked the box is drawn over it; otherwise a zero
 * value image scaled up to the render resolution is used as the base.
 */
void BoxMaskNode::convertToOperations(NodeConverter &converter, const CompositorContext &context) const
{
  NodeInput *maskInput = this->getInputSocket(0);
  NodeOutput *maskOutput = this->getOutputSocket(0);

  BoxMaskOperation *mask = new BoxMaskOperation();
  mask->setData((NodeBoxMask *)this->getbNode()->storage);
  mask->setMaskType(this->getbNode()->custom1);
  converter.addOperation(mask);

  if (maskInput->isLinked()) {
    converter.mapInputSocket(maskInput, mask->getInputSocket(0));
    converter.mapOutputSocket(maskOutput, mask->getOutputSocket());
  }
  else {
    /* Value operation to produce the original transparent image ... */
    SetValueOperation *zeroValue = new SetValueOperation();
    zeroValue->setValue(0.0f);
    converter.addOperation(zeroValue);

    /* ... scaled up to the render resolution. */
    const RenderData *rd = context.getRenderData();
    ScaleFixedSizeOperation *scale = new ScaleFixedSizeOperation();
    scale->setIsAspect(false);
    scale->setIsCrop(false);
    scale->setOffset(0.0f, 0.0f);
    scale->setNewWidth(rd->xsch * rd->size / 100.0f);
    scale->setNewHeight(rd->ysch * rd->size / 100.0f);
    scale->getInputSocket(0)->setResizeMode(COM_SC_NO_RESIZE);
    converter.addOperation(scale);

    converter.addLink(zeroValue->getOutputSocket(0), scale->getInputSocket(0));
    converter.addLink(scale->getOutputSocket(0), mask->getInputSocket(0));
    converter.mapOutputSocket(maskOutput, mask->getOutputSocket(0));
  }

  converter.mapInputSocket(getInputSocket(1), mask->getInputSocket(1));
}
void TrackPositionNode::convertToOperations(NodeConverter &converter, const CompositorContext &context) const { bNode *editorNode = this->getbNode(); MovieClip *clip = (MovieClip *) editorNode->id; NodeTrackPosData *trackpos_data = (NodeTrackPosData *) editorNode->storage; NodeOutput *outputX = this->getOutputSocket(0); NodeOutput *outputY = this->getOutputSocket(1); int frame_number; if (editorNode->custom1 == CMP_TRACKPOS_ABSOLUTE_FRAME) { frame_number = editorNode->custom2; } else { frame_number = context.getFramenumber(); } TrackPositionOperation *operationX = new TrackPositionOperation(); operationX->setMovieClip(clip); operationX->setTrackingObject(trackpos_data->tracking_object); operationX->setTrackName(trackpos_data->track_name); operationX->setFramenumber(frame_number); operationX->setAxis(0); operationX->setPosition(editorNode->custom1); operationX->setRelativeFrame(editorNode->custom2); converter.addOperation(operationX); TrackPositionOperation *operationY = new TrackPositionOperation(); operationY->setMovieClip(clip); operationY->setTrackingObject(trackpos_data->tracking_object); operationY->setTrackName(trackpos_data->track_name); operationY->setFramenumber(frame_number); operationY->setAxis(1); operationY->setPosition(editorNode->custom1); operationY->setRelativeFrame(editorNode->custom2); converter.addOperation(operationY); converter.mapOutputSocket(outputX, operationX->getOutputSocket()); converter.mapOutputSocket(outputY, operationY->getOutputSocket()); }
/**
 * Convert the Translate node.
 *
 * In relative mode the X/Y inputs are scaled by the render resolution.
 * When wrapping is enabled the image is routed through a memory buffer and
 * a wrap operation before translation.
 */
void TranslateNode::convertToOperations(NodeConverter &converter, const CompositorContext &context) const
{
  bNode *bnode = this->getbNode();
  NodeTranslateData *data = (NodeTranslateData *)bnode->storage;

  NodeInput *imageInput = this->getInputSocket(0);
  NodeInput *xInput = this->getInputSocket(1);
  NodeInput *yInput = this->getInputSocket(2);
  NodeOutput *imageOutput = this->getOutputSocket(0);

  TranslateOperation *translate = new TranslateOperation();
  if (data->relative) {
    /* Relative mode: factors derived from the render resolution. */
    const RenderData *rd = context.getRenderData();
    const float fx = rd->xsch * rd->size / 100.0f;
    const float fy = rd->ysch * rd->size / 100.0f;
    translate->setFactorXY(fx, fy);
  }
  converter.addOperation(translate);

  converter.mapInputSocket(xInput, translate->getInputSocket(1));
  converter.mapInputSocket(yInput, translate->getInputSocket(2));
  converter.mapOutputSocket(imageOutput, translate->getOutputSocket(0));

  if (data->wrap_axis) {
    /* Wrapping reads outside the image bounds, so the input is written to a
     * memory buffer first and the wrap operation reads from that proxy. */
    WriteBufferOperation *writeBuffer = new WriteBufferOperation(COM_DT_COLOR);
    WrapOperation *wrap = new WrapOperation(COM_DT_COLOR);
    wrap->setMemoryProxy(writeBuffer->getMemoryProxy());
    wrap->setWrapping(data->wrap_axis);

    converter.addOperation(writeBuffer);
    converter.addOperation(wrap);

    converter.mapInputSocket(imageInput, writeBuffer->getInputSocket(0));
    converter.addLink(wrap->getOutputSocket(), translate->getInputSocket(0));
  }
  else {
    converter.mapInputSocket(imageInput, translate->getInputSocket(0));
  }
}
/**
 * Build the operation chain for the Blur node.
 *
 * Four variants exist, chosen from the node settings:
 *  - fast gaussian approximation,
 *  - variable size blur driven by the (clamped, pre-blurred) size input,
 *  - separable gaussian (X pass followed by Y pass),
 *  - gaussian bokeh blur (single pass).
 * Optionally the whole chain is wrapped in gamma correct/uncorrect.
 */
void BlurNode::convertToOperations(NodeConverter &converter, const CompositorContext &context) const
{
  bNode *editorNode = this->getbNode();
  NodeBlurData *data = (NodeBlurData *)editorNode->storage;
  NodeInput *sizeSocket = this->getInputSocket(1);
  const bool sizeLinked = sizeSocket->isLinked();
  const float editorSize = sizeSocket->getEditorValueFloat();
  CompositorQuality quality = context.getQuality();

  /* First/last operation of the blur chain; the image input and node output
   * (or the gamma wrapper) are connected to these at the end. */
  NodeOperation *chainInput = NULL;
  NodeOperation *chainOutput = NULL;

  if (data->filtertype == R_FILTER_FAST_GAUSS) {
    FastGaussianBlurOperation *fastBlur = new FastGaussianBlurOperation();
    fastBlur->setData(data);
    converter.addOperation(fastBlur);
    converter.mapInputSocket(getInputSocket(1), fastBlur->getInputSocket(1));
    chainInput = fastBlur;
    chainOutput = fastBlur;
  }
  else if (editorNode->custom1 & CMP_NODEFLAG_BLUR_VARIABLE_SIZE) {
    /* Clamp the size input by adding zero with the math operation's clamp
     * flag enabled. */
    MathAddOperation *clampSize = new MathAddOperation();
    SetValueOperation *zero = new SetValueOperation();
    zero->setValue(0.0f);
    clampSize->setUseClamp(true);
    converter.addOperation(clampSize);
    converter.addOperation(zero);
    converter.mapInputSocket(getInputSocket(1), clampSize->getInputSocket(0));
    converter.addLink(zero->getOutputSocket(), clampSize->getInputSocket(1));

    /* Blur the size channel itself before using it as the reference. */
    GaussianAlphaXBlurOperation *sizeBlurX = new GaussianAlphaXBlurOperation();
    sizeBlurX->setData(data);
    sizeBlurX->setQuality(quality);
    sizeBlurX->setSize(1.0f);
    sizeBlurX->setFalloff(PROP_SMOOTH);
    sizeBlurX->setSubtract(false);
    converter.addOperation(sizeBlurX);
    converter.addLink(clampSize->getOutputSocket(), sizeBlurX->getInputSocket(0));

    GaussianAlphaYBlurOperation *sizeBlurY = new GaussianAlphaYBlurOperation();
    sizeBlurY->setData(data);
    sizeBlurY->setQuality(quality);
    sizeBlurY->setSize(1.0f);
    sizeBlurY->setFalloff(PROP_SMOOTH);
    sizeBlurY->setSubtract(false);
    converter.addOperation(sizeBlurY);
    converter.addLink(sizeBlurX->getOutputSocket(), sizeBlurY->getInputSocket(0));

    GaussianBlurReferenceOperation *referenceBlur = new GaussianBlurReferenceOperation();
    referenceBlur->setData(data);
    referenceBlur->setQuality(quality);
    converter.addOperation(referenceBlur);
    converter.addLink(sizeBlurY->getOutputSocket(), referenceBlur->getInputSocket(1));

    chainInput = referenceBlur;
    chainOutput = referenceBlur;
  }
  else if (!data->bokeh) {
    /* Separable gaussian: X pass followed by Y pass. */
    GaussianXBlurOperation *blurX = new GaussianXBlurOperation();
    blurX->setData(data);
    blurX->setQuality(quality);
    converter.addOperation(blurX);
    converter.mapInputSocket(getInputSocket(1), blurX->getInputSocket(1));

    GaussianYBlurOperation *blurY = new GaussianYBlurOperation();
    blurY->setData(data);
    blurY->setQuality(quality);
    converter.addOperation(blurY);
    converter.mapInputSocket(getInputSocket(1), blurY->getInputSocket(1));

    converter.addLink(blurX->getOutputSocket(), blurY->getInputSocket(0));

    if (!sizeLinked) {
      blurX->setSize(editorSize);
      blurY->setSize(editorSize);
    }

    chainInput = blurX;
    chainOutput = blurY;
  }
  else {
    GaussianBokehBlurOperation *bokehBlur = new GaussianBokehBlurOperation();
    bokehBlur->setData(data);
    bokehBlur->setQuality(quality);
    converter.addOperation(bokehBlur);
    converter.mapInputSocket(getInputSocket(1), bokehBlur->getInputSocket(1));

    if (!sizeLinked) {
      bokehBlur->setSize(editorSize);
    }

    chainInput = bokehBlur;
    chainOutput = bokehBlur;
  }

  if (data->gamma) {
    /* Blur in gamma-corrected space and undo the correction afterwards. */
    GammaCorrectOperation *gammaCorrect = new GammaCorrectOperation();
    GammaUncorrectOperation *gammaUncorrect = new GammaUncorrectOperation();
    converter.addOperation(gammaCorrect);
    converter.addOperation(gammaUncorrect);

    converter.mapInputSocket(getInputSocket(0), gammaCorrect->getInputSocket(0));
    converter.addLink(gammaCorrect->getOutputSocket(), chainInput->getInputSocket(0));
    converter.addLink(chainOutput->getOutputSocket(), gammaUncorrect->getInputSocket(0));
    converter.mapOutputSocket(getOutputSocket(), gammaUncorrect->getOutputSocket());

    converter.addPreview(gammaUncorrect->getOutputSocket());
  }
  else {
    converter.mapInputSocket(getInputSocket(0), chainInput->getInputSocket(0));
    converter.mapOutputSocket(getOutputSocket(), chainOutput->getOutputSocket());

    converter.addPreview(chainOutput->getOutputSocket());
  }
}
/**
 * Convert the Image node into operations, one per output socket.
 *
 * For multilayer (EXR) images, every output socket is matched against the
 * render passes of the selected layer by socket identifier, and each matched
 * pass gets an operation of the appropriate data type.
 *
 * NOTE(review): this definition appears truncated in this chunk — the
 * closing of the multilayer branch is missing (along with, presumably, the
 * release of `ibuf` and the non-multilayer fallback that would use
 * `outputImage` and `outputStraightAlpha`). Code below is kept byte-identical
 * to the source; only comments were added. TODO: confirm against the full file.
 */
void ImageNode::convertToOperations(NodeConverter &converter, const CompositorContext &context) const
{
  /// Image output
  NodeOutput *outputImage = this->getOutputSocket(0);
  bNode *editorNode = this->getbNode();
  Image *image = (Image *)editorNode->id;
  ImageUser *imageuser = (ImageUser *)editorNode->storage;
  int framenumber = context.getFramenumber();
  int numberOfOutputs = this->getNumberOfOutputSockets();
  bool outputStraightAlpha = (editorNode->custom1 & CMP_NODE_IMAGE_USE_STRAIGHT_OUTPUT) != 0;
  BKE_image_user_frame_calc(imageuser, context.getFramenumber(), 0);
  /* force a load, we assume iuser index will be set OK anyway */
  if (image && image->type == IMA_TYPE_MULTILAYER) {
    bool is_multilayer_ok = false;
    ImBuf *ibuf = BKE_image_acquire_ibuf(image, imageuser, NULL);
    if (image->rr) {
      RenderLayer *rl = (RenderLayer *)BLI_findlink(&image->rr->layers, imageuser->layer);
      if (rl) {
        NodeOutput *socket;
        int index;
        is_multilayer_ok = true;
        for (index = 0; index < numberOfOutputs; index++) {
          NodeOperation *operation = NULL;
          socket = this->getOutputSocket(index);
          bNodeSocket *bnodeSocket = socket->getbNodeSocket();
          /* Passes in the file can differ from passes stored in sockets (#36755).
           * Look up the correct file pass using the socket identifier instead. */
          RenderPass *rpass = (RenderPass *)BLI_findstring(&rl->passes, bnodeSocket->identifier, offsetof(RenderPass, internal_name));
          int view = 0;
#if 0
          NodeImageLayer *storage = (NodeImageLayer *)bnodeSocket->storage;*/
          int passindex = storage->pass_index;*/
          RenderPass *rpass = (RenderPass *)BLI_findlink(&rl->passes, passindex);
#endif
          /* returns the image view to use for the current active view */
          if (BLI_listbase_count_ex(&image->rr->views, 2) > 1) {
            const int view_image = imageuser->view;
            const bool is_allview = (view_image == 0); /* if view selected == All (0) */
            if (is_allview) {
              /* heuristic to match image name with scene names
               * check if the view name exists in the image */
              view = BLI_findstringindex(&image->rr->views, context.getViewName(), offsetof(RenderView, name));
              if (view == -1) view = 0;
            }
            else {
              view = view_image - 1;
            }
          }
          if (rpass) {
            switch (rpass->channels) {
              case 1:
                operation = doMultilayerCheck(converter, rl, image, imageuser, framenumber, index, rpass->passtype, view, COM_DT_VALUE);
                break;
              /* using image operations for both 3 and 4 channels (RGB and RGBA respectively) */
              /* XXX any way to detect actual vector images? */
              case 3:
                operation = doMultilayerCheck(converter, rl, image, imageuser, framenumber, index, rpass->passtype, view, COM_DT_VECTOR);
                break;
              case 4:
                operation = doMultilayerCheck(converter, rl, image, imageuser, framenumber, index, rpass->passtype, view, COM_DT_COLOR);
                break;
              default:
                /* dummy operation is added below */
                break;
            }
            if (index == 0 && operation) {
              converter.addPreview(operation->getOutputSocket());
            }
            /* Combined pass: the following socket receives the alpha channel,
             * split out of the RGBA result. */
            if (rpass->passtype == SCE_PASS_COMBINED) {
              BLI_assert(operation != NULL);
              BLI_assert(index < numberOfOutputs - 1);
              NodeOutput *outputSocket = this->getOutputSocket(index + 1);
              SeparateChannelOperation *separate_operation;
              separate_operation = new SeparateChannelOperation();
              separate_operation->setChannel(3);
              converter.addOperation(separate_operation);
              converter.addLink(operation->getOutputSocket(), separate_operation->getInputSocket(0));
              converter.mapOutputSocket(outputSocket, separate_operation->getOutputSocket());
              index++;
            }
          }
          /* incase we can't load the layer */
          if (operation == NULL)
            converter.setInvalidOutput(getOutputSocket(index));
        }
      }
    }
/**
 * Convert the File Output node.
 *
 * Multilayer EXR format writes all inputs into one file; any other format
 * creates one single-layer output operation per linked input, with the file
 * path combined from the node base path and the per-socket path.
 */
void OutputFileNode::convertToOperations(NodeConverter &converter, const CompositorContext &context) const
{
  NodeImageMultiFile *storage = (NodeImageMultiFile *)this->getbNode()->storage;

  if (!context.isRendering()) {
    /* only output files when rendering a sequence -
     * otherwise, it overwrites the output files just
     * scrubbing through the timeline when the compositor updates */
    return;
  }

  const int numInputs = getNumberOfInputSockets();
  bool previewAdded = false;

  if (storage->format.imtype == R_IMF_IMTYPE_MULTILAYER) {
    /* single output operation for the multilayer file */
    OutputOpenExrMultiLayerOperation *output = new OutputOpenExrMultiLayerOperation(
        context.getRenderData(), context.getbNodeTree(), storage->base_path, storage->format.exr_codec);
    converter.addOperation(output);

    for (int i = 0; i < numInputs; ++i) {
      NodeInput *input = getInputSocket(i);
      NodeImageMultiFileSocket *sockdata = (NodeImageMultiFileSocket *)input->getbNodeSocket()->storage;

      /* note: layer becomes an empty placeholder if the input is not linked */
      output->add_layer(sockdata->layer, input->getDataType(), input->isLinked());
      converter.mapInputSocket(input, output->getInputSocket(i));

      if (!previewAdded) {
        converter.addNodeInputPreview(input);
        previewAdded = true;
      }
    }
  }
  else {
    /* single layer format: one operation per linked input */
    for (int i = 0; i < numInputs; ++i) {
      NodeInput *input = getInputSocket(i);
      if (!input->isLinked()) {
        continue;
      }

      NodeImageMultiFileSocket *sockdata = (NodeImageMultiFileSocket *)input->getbNodeSocket()->storage;
      ImageFormatData *format = (sockdata->use_node_format ? &storage->format : &sockdata->format);

      /* combine file path for the input */
      char path[FILE_MAX];
      BLI_join_dirfile(path, FILE_MAX, storage->base_path, sockdata->path);

      OutputSingleLayerOperation *output = new OutputSingleLayerOperation(
          context.getRenderData(), context.getbNodeTree(), input->getDataType(), format, path,
          context.getViewSettings(), context.getDisplaySettings());
      converter.addOperation(output);
      converter.mapInputSocket(input, output->getInputSocket(0));

      if (!previewAdded) {
        converter.addNodeInputPreview(input);
        previewAdded = true;
      }
    }
  }
}
/**
 * Convert the Movie Clip node.
 *
 * Outputs the clip image and its alpha via operations, plus the 2D
 * stabilization parameters (offset X/Y, scale, angle) as constant output
 * values for the current frame.
 */
void MovieClipNode::convertToOperations(NodeConverter &converter, const CompositorContext &context) const
{
  NodeOutput *outputMovieClip = this->getOutputSocket(0);
  NodeOutput *alphaMovieClip = this->getOutputSocket(1);
  NodeOutput *offsetXMovieClip = this->getOutputSocket(2);
  NodeOutput *offsetYMovieClip = this->getOutputSocket(3);
  NodeOutput *scaleMovieClip = this->getOutputSocket(4);
  NodeOutput *angleMovieClip = this->getOutputSocket(5);

  bNode *editorNode = this->getbNode();
  MovieClip *movieClip = (MovieClip *)editorNode->id;
  MovieClipUser *movieClipUser = (MovieClipUser *)editorNode->storage;
  /* Cache frames only in interactive (non-render) use. */
  bool cacheFrame = !context.isRendering();

  ImBuf *ibuf = NULL;
  if (movieClip) {
    if (cacheFrame)
      ibuf = BKE_movieclip_get_ibuf(movieClip, movieClipUser);
    else
      ibuf = BKE_movieclip_get_ibuf_flag(movieClip, movieClipUser, movieClip->flag, MOVIECLIP_CACHE_SKIP);
  }

  // always connect the output image
  MovieClipOperation *operation = new MovieClipOperation();
  operation->setMovieClip(movieClip);
  operation->setMovieClipUser(movieClipUser);
  operation->setFramenumber(context.getFramenumber());
  operation->setCacheFrame(cacheFrame);
  converter.addOperation(operation);
  converter.mapOutputSocket(outputMovieClip, operation->getOutputSocket());
  converter.addPreview(operation->getOutputSocket());

  MovieClipAlphaOperation *alphaOperation = new MovieClipAlphaOperation();
  alphaOperation->setMovieClip(movieClip);
  alphaOperation->setMovieClipUser(movieClipUser);
  alphaOperation->setFramenumber(context.getFramenumber());
  alphaOperation->setCacheFrame(cacheFrame);
  converter.addOperation(alphaOperation);
  converter.mapOutputSocket(alphaMovieClip, alphaOperation->getOutputSocket());

  /* Stabilization defaults; only overwritten when a clip with an image
   * buffer is available and 2D stabilization is enabled. */
  float loc[2] = {0.0f, 0.0f};
  float scale = 1.0f;
  float angle = 0.0f;
  /* FIX: previously `&movieClip->tracking.stabilization` was computed
   * unconditionally, which is undefined behavior when no clip is assigned.
   * Dereference the clip only inside the NULL check (ibuf is only non-NULL
   * when movieClip is non-NULL, so this does not change valid-clip behavior). */
  if (movieClip && ibuf) {
    MovieTrackingStabilization *stab = &movieClip->tracking.stabilization;
    if (stab->flag & TRACKING_2D_STABILIZATION) {
      int clip_framenr = BKE_movieclip_remap_scene_to_clip_frame(movieClip, context.getFramenumber());
      BKE_tracking_stabilization_data_get(&movieClip->tracking, clip_framenr, ibuf->x, ibuf->y, loc, &scale, &angle);
    }
  }
  converter.addOutputValue(offsetXMovieClip, loc[0]);
  converter.addOutputValue(offsetYMovieClip, loc[1]);
  converter.addOutputValue(scaleMovieClip, scale);
  converter.addOutputValue(angleMovieClip, angle);

  if (ibuf) {
    IMB_freeImBuf(ibuf);
  }
}
/**
 * Convert the Dilate/Erode node. custom1 selects the mode:
 * thresholded distance, plain distance, feather (alpha blur), or step.
 * In every mode, a positive custom2 dilates and a negative one erodes.
 */
void DilateErodeNode::convertToOperations(NodeConverter &converter, const CompositorContext &context) const
{
  bNode *editorNode = this->getbNode();

  if (editorNode->custom1 == CMP_NODE_DILATEERODE_DISTANCE_THRESH) {
    /* Thresholded distance mode; small insets get an extra anti-alias pass. */
    DilateErodeThresholdOperation *threshold = new DilateErodeThresholdOperation();
    threshold->setDistance(editorNode->custom2);
    threshold->setInset(editorNode->custom3);
    converter.addOperation(threshold);
    converter.mapInputSocket(getInputSocket(0), threshold->getInputSocket(0));

    if (editorNode->custom3 < 2.0f) {
      AntiAliasOperation *antiAlias = new AntiAliasOperation();
      converter.addOperation(antiAlias);
      converter.addLink(threshold->getOutputSocket(), antiAlias->getInputSocket(0));
      converter.mapOutputSocket(getOutputSocket(0), antiAlias->getOutputSocket(0));
    }
    else {
      converter.mapOutputSocket(getOutputSocket(0), threshold->getOutputSocket(0));
    }
  }
  else if (editorNode->custom1 == CMP_NODE_DILATEERODE_DISTANCE) {
    if (editorNode->custom2 > 0) {
      DilateDistanceOperation *dilate = new DilateDistanceOperation();
      dilate->setDistance(editorNode->custom2);
      converter.addOperation(dilate);
      converter.mapInputSocket(getInputSocket(0), dilate->getInputSocket(0));
      converter.mapOutputSocket(getOutputSocket(0), dilate->getOutputSocket(0));
    }
    else {
      ErodeDistanceOperation *erode = new ErodeDistanceOperation();
      erode->setDistance(-editorNode->custom2);
      converter.addOperation(erode);
      converter.mapInputSocket(getInputSocket(0), erode->getInputSocket(0));
      converter.mapOutputSocket(getOutputSocket(0), erode->getOutputSocket(0));
    }
  }
  else if (editorNode->custom1 == CMP_NODE_DILATEERODE_DISTANCE_FEATHER) {
    /* Feather mode: a separable alpha blur -
     * this uses a modified gaussian blur function otherwise its far too slow */
    CompositorQuality quality = context.getQuality();

    GaussianAlphaXBlurOperation *blurX = new GaussianAlphaXBlurOperation();
    blurX->setData(&m_alpha_blur);
    blurX->setQuality(quality);
    blurX->setFalloff(PROP_SMOOTH);
    converter.addOperation(blurX);
    converter.mapInputSocket(getInputSocket(0), blurX->getInputSocket(0));
    /* there is no size input socket yet, so input 1 stays unmapped */

    GaussianAlphaYBlurOperation *blurY = new GaussianAlphaYBlurOperation();
    blurY->setData(&m_alpha_blur);
    blurY->setQuality(quality);
    blurY->setFalloff(PROP_SMOOTH);
    converter.addOperation(blurY);
    converter.addLink(blurX->getOutputSocket(), blurY->getInputSocket(0));

    converter.mapOutputSocket(getOutputSocket(0), blurY->getOutputSocket());
    converter.addPreview(blurY->getOutputSocket());

    /* TODO: hook up a size input like the blur node has
     * (see the gaussian blur node for the original usage). */
    blurX->setSize(1.0f);
    blurY->setSize(1.0f);

    /* negative distance subtracts (erodes) instead of adding */
    blurX->setSubtract(editorNode->custom2 < 0);
    blurY->setSubtract(editorNode->custom2 < 0);

    if (editorNode->storage) {
      NodeDilateErode *data_storage = (NodeDilateErode *)editorNode->storage;
      blurX->setFalloff(data_storage->falloff);
      blurY->setFalloff(data_storage->falloff);
    }
  }
  else {
    /* Step mode: iterative dilate/erode. */
    if (editorNode->custom2 > 0) {
      DilateStepOperation *dilate = new DilateStepOperation();
      dilate->setIterations(editorNode->custom2);
      converter.addOperation(dilate);
      converter.mapInputSocket(getInputSocket(0), dilate->getInputSocket(0));
      converter.mapOutputSocket(getOutputSocket(0), dilate->getOutputSocket(0));
    }
    else {
      ErodeStepOperation *erode = new ErodeStepOperation();
      erode->setIterations(-editorNode->custom2);
      converter.addOperation(erode);
      converter.mapInputSocket(getInputSocket(0), erode->getInputSocket(0));
      converter.mapOutputSocket(getOutputSocket(0), erode->getOutputSocket(0));
    }
  }
}
/* Build the operation graph for the Z Combine node.
 *
 * Two strategies: a direct per-pixel combine (full-sample rendering, or when
 * node->custom2 is set), and a masked combine that compares the two Z inputs,
 * anti-aliases the resulting mask and blends the colors with it. */
void ZCombineNode::convertToOperations(NodeConverter &converter,
                                       const CompositorContext &context) const
{
  bNode *editor_node = this->getbNode();

  if ((context.getRenderData()->scemode & R_FULL_SAMPLE) || editor_node->custom2) {
    /* Direct combine; custom1 selects the alpha-aware variant. */
    ZCombineOperation *combine_op = editor_node->custom1 ? new ZCombineAlphaOperation() :
                                                           new ZCombineOperation();
    converter.addOperation(combine_op);

    converter.mapInputSocket(getInputSocket(0), combine_op->getInputSocket(0));
    converter.mapInputSocket(getInputSocket(1), combine_op->getInputSocket(1));
    converter.mapInputSocket(getInputSocket(2), combine_op->getInputSocket(2));
    converter.mapInputSocket(getInputSocket(3), combine_op->getInputSocket(3));
    converter.mapOutputSocket(getOutputSocket(0), combine_op->getOutputSocket());
  }
  else {
    /* XXX custom1 is "use_alpha", what on earth is this supposed to do here?!? */
    /* Not full anti-alias: use masking for the Z combine, then anti-alias the
     * mask itself. */

    /* Step 1: create the mask by comparing the two Z inputs. */
    NodeOperation *mask_op;
    if (editor_node->custom1) {
      mask_op = new MathGreaterThanOperation();
    }
    else {
      mask_op = new MathLessThanOperation();
    }
    converter.addOperation(mask_op);
    converter.mapInputSocket(getInputSocket(1), mask_op->getInputSocket(0));
    converter.mapInputSocket(getInputSocket(3), mask_op->getInputSocket(1));

    /* Step 2: anti-alias the mask. A bit of an expensive operation, but it
     * does the trick. */
    AntiAliasOperation *anti_alias_op = new AntiAliasOperation();
    converter.addOperation(anti_alias_op);
    converter.addLink(mask_op->getOutputSocket(), anti_alias_op->getInputSocket(0));

    /* Step 3: use the mask to blend between the two input colors. */
    ZCombineMaskOperation *masked_combine_op = editor_node->custom1 ?
                                                   new ZCombineMaskAlphaOperation() :
                                                   new ZCombineMaskOperation();
    converter.addOperation(masked_combine_op);
    converter.addLink(anti_alias_op->getOutputSocket(), masked_combine_op->getInputSocket(0));
    converter.mapInputSocket(getInputSocket(0), masked_combine_op->getInputSocket(1));
    converter.mapInputSocket(getInputSocket(2), masked_combine_op->getInputSocket(2));
    converter.mapOutputSocket(getOutputSocket(0), masked_combine_op->getOutputSocket());
  }

  /* Both paths expose the minimum of the two Z inputs on the second output. */
  MathMinimumOperation *min_z_op = new MathMinimumOperation();
  converter.addOperation(min_z_op);
  converter.mapInputSocket(getInputSocket(1), min_z_op->getInputSocket(0));
  converter.mapInputSocket(getInputSocket(3), min_z_op->getInputSocket(1));
  converter.mapOutputSocket(getOutputSocket(1), min_z_op->getOutputSocket());
}
/* Build the operation graph for the Image node (multilayer handling shown here).
 *
 * For a multilayer image, each output socket is matched to a render pass in the
 * file by socket identifier, and an image operation of the appropriate data
 * type (value/vector/color) is created per pass via doMultilayerCheck().
 *
 * NOTE(review): this definition is truncated in the visible chunk — the braces
 * below do not all close here; the non-multilayer path and the use of
 * outputImage / outputStraightAlpha / ibuf / is_multilayer_ok live in the
 * elided remainder. */
void ImageNode::convertToOperations(NodeConverter &converter,
                                    const CompositorContext &context) const
{
  /// Image output
  NodeOutput *outputImage = this->getOutputSocket(0);
  bNode *editorNode = this->getbNode();
  Image *image = (Image *)editorNode->id;
  ImageUser *imageuser = (ImageUser *)editorNode->storage;
  int framenumber = context.getFramenumber();
  int numberOfOutputs = this->getNumberOfOutputSockets();
  bool outputStraightAlpha = (editorNode->custom1 & CMP_NODE_IMAGE_USE_STRAIGHT_OUTPUT) != 0;
  BKE_image_user_frame_calc(imageuser, context.getFramenumber(), 0);
  /* force a load, we assume iuser index will be set OK anyway */
  if (image && image->type == IMA_TYPE_MULTILAYER) {
    bool is_multilayer_ok = false;
    ImBuf *ibuf = BKE_image_acquire_ibuf(image, imageuser, NULL);
    if (image->rr) {
      RenderLayer *rl = (RenderLayer *)BLI_findlink(&image->rr->layers, imageuser->layer);
      if (rl) {
        NodeOutput *socket;
        int index;
        is_multilayer_ok = true;
        /* One operation per output socket, matched to a file pass below. */
        for (index = 0; index < numberOfOutputs; index++) {
          NodeOperation *operation = NULL;
          socket = this->getOutputSocket(index);
          bNodeSocket *bnodeSocket = socket->getbNodeSocket();
          /* Passes in the file can differ from passes stored in sockets (#36755).
           * Look up the correct file pass using the socket identifier instead. */
#if 0
          NodeImageLayer *storage = (NodeImageLayer *)bnodeSocket->storage;*/
          int passindex = storage->pass_index;*/
          RenderPass *rpass = (RenderPass *)BLI_findlink(&rl->passes, passindex);
#endif
          /* Linear scan of the layer's passes for a name matching the socket
           * identifier; passindex tracks the position of the match. */
          int passindex;
          RenderPass *rpass;
          for (rpass = (RenderPass *)rl->passes.first, passindex = 0; rpass;
               rpass = rpass->next, ++passindex)
            if (STREQ(rpass->name, bnodeSocket->identifier))
              break;
          if (rpass) {
            imageuser->pass = passindex;
            /* Channel count of the pass decides the operation's data type. */
            switch (rpass->channels) {
              case 1:
                operation = doMultilayerCheck(
                    converter, rl, image, imageuser, framenumber, index, passindex, COM_DT_VALUE);
                break;
              /* using image operations for both 3 and 4 channels (RGB and RGBA respectively) */
              /* XXX any way to detect actual vector images? */
              case 3:
                operation = doMultilayerCheck(
                    converter, rl, image, imageuser, framenumber, index, passindex, COM_DT_VECTOR);
                break;
              case 4:
                operation = doMultilayerCheck(
                    converter, rl, image, imageuser, framenumber, index, passindex, COM_DT_COLOR);
                break;
              default:
                /* dummy operation is added below */
                break;
            }
            /* Preview only on the first output's operation. */
            if (index == 0 && operation) {
              converter.addPreview(operation->getOutputSocket());
            }
          }
          /* incase we can't load the layer */
          if (operation == NULL)
            converter.setInvalidOutput(getOutputSocket(index));
        }
      }
    }
/* Build the operation graph for the Track Position node.
 *
 * Outputs X and Y track positions plus a "speed" vector that packs the
 * previous- and next-frame motion of the track for both axes into the four
 * channels of a single output. */
void TrackPositionNode::convertToOperations(NodeConverter &converter,
                                            const CompositorContext &context) const
{
  bNode *editor_node = this->getbNode();
  MovieClip *clip = (MovieClip *)editor_node->id;
  NodeTrackPosData *trackpos_data = (NodeTrackPosData *)editor_node->storage;

  NodeOutput *output_x = this->getOutputSocket(0);
  NodeOutput *output_y = this->getOutputSocket(1);
  NodeOutput *output_speed = this->getOutputSocket(2);

  /* In absolute-frame mode, custom2 holds the frame to sample; otherwise
   * track at the current compositor frame. */
  const int frame_number = (editor_node->custom1 == CMP_TRACKPOS_ABSOLUTE_FRAME) ?
                               editor_node->custom2 :
                               context.getFramenumber();

  /* One position operation per axis (0 = X, 1 = Y), each wired to its own
   * output socket. */
  for (int axis = 0; axis < 2; axis++) {
    TrackPositionOperation *position_op = new TrackPositionOperation();
    position_op->setMovieClip(clip);
    position_op->setTrackingObject(trackpos_data->tracking_object);
    position_op->setTrackName(trackpos_data->track_name);
    position_op->setFramenumber(frame_number);
    position_op->setAxis(axis);
    position_op->setPosition(editor_node->custom1);
    position_op->setRelativeFrame(editor_node->custom2);
    converter.addOperation(position_op);
    converter.mapOutputSocket(axis == 0 ? output_x : output_y, position_op->getOutputSocket());
  }

  /* Motion samples one frame back (-1) and one frame forward (+1), per axis. */
  TrackPositionOperation *motion_pre_x = create_motion_operation(
      converter, clip, trackpos_data, 0, frame_number, -1);
  TrackPositionOperation *motion_pre_y = create_motion_operation(
      converter, clip, trackpos_data, 1, frame_number, -1);
  TrackPositionOperation *motion_post_x = create_motion_operation(
      converter, clip, trackpos_data, 0, frame_number, 1);
  TrackPositionOperation *motion_post_y = create_motion_operation(
      converter, clip, trackpos_data, 1, frame_number, 1);

  /* Pack the four motion samples into one vector for the speed output. */
  CombineChannelsOperation *combine_op = new CombineChannelsOperation();
  converter.addOperation(combine_op);
  converter.addLink(motion_pre_x->getOutputSocket(), combine_op->getInputSocket(0));
  converter.addLink(motion_pre_y->getOutputSocket(), combine_op->getInputSocket(1));
  converter.addLink(motion_post_x->getOutputSocket(), combine_op->getInputSocket(2));
  converter.addLink(motion_post_y->getOutputSocket(), combine_op->getInputSocket(3));
  converter.mapOutputSocket(output_speed, combine_op->getOutputSocket());
}