/* Creates the SelectEvery filter: keeps, from every 'cycle' input frames,
 * only the frames at the given offsets within the cycle. */
static void VS_CC selectEveryCreate(const VSMap *in, VSMap *out, void *userData, VSCore *core, const VSAPI *vsapi) {
    SelectEveryData d;
    SelectEveryData *data;
    int i;

    d.cycle = int64ToIntS(vsapi->propGetInt(in, "cycle", 0, 0));

    /* A cycle of 0 or 1 would select every frame (or divide by zero later). */
    if (d.cycle <= 1)
        RETERROR("SelectEvery: invalid cycle size");

    d.num = vsapi->propNumElements(in, "offsets");

    /* BUG FIX: propNumElements() returns -1 when the key is absent. The
     * original code passed that straight into malloc(sizeof * -1) and then
     * indexed the result — undefined behavior. Reject missing/empty arrays. */
    if (d.num <= 0)
        RETERROR("SelectEvery: no offsets specified");

    d.offsets = malloc(sizeof(d.offsets[0]) * d.num);

    for (i = 0; i < d.num; i++) {
        d.offsets[i] = int64ToIntS(vsapi->propGetInt(in, "offsets", i, 0));

        /* Each offset must address a frame inside the cycle. */
        if (d.offsets[i] < 0 || d.offsets[i] >= d.cycle) {
            free(d.offsets);
            RETERROR("SelectEvery: invalid offset specified");
        }
    }

    d.node = vsapi->propGetNode(in, "clip", 0, 0);

    data = malloc(sizeof(d));
    *data = d;

    vsapi->createFilter(in, out, "SelectEvery", selectEveryInit, selectEveryGetframe, selectEveryFree, fmParallel, nfNoCache, data, core);
}
/* Creates the Loop filter: repeats the input clip 'times' times
 * (times defaults to 0 when absent, handled downstream). */
static void VS_CC loopCreate(const VSMap *in, VSMap *out, void *userData, VSCore *core, const VSAPI *vsapi) {
    LoopData l;
    int error;

    l.node = vsapi->propGetNode(in, "clip", 0, 0);
    l.vi = vsapi->getVideoInfo(l.node);
    l.times = int64ToIntS(vsapi->propGetInt(in, "times", 0, &error));

    /* A clip of unknown length cannot be repeated. */
    if (!l.vi->numFrames) {
        vsapi->freeNode(l.node);
        RETERROR("Loop: cannot loop clips with unknown length");
    }

    /* Looping once is an identity operation; pass the input straight through. */
    if (l.times == 1) {
        vsapi->propSetNode(out, "clip", l.node, 0);
        vsapi->freeNode(l.node);
        return;
    }

    LoopData *data = malloc(sizeof(l));
    *data = l;

    vsapi->createFilter(in, out, "Loop", loopInit, loopGetframe, singleClipFree, fmParallel, nfNoCache, data, core);
}
// Creates the MinSRP filter. Validates that the input is a constant-format
// 4:4:4 or gray clip, then fills in per-plane mode/str arrays (missing tail
// entries repeat the previous entry; fully absent arrays use the defaults
// mode=3, str=1.0).
static void VS_CC minsrpCreate(const VSMap *in, VSMap *out, void *userData, VSCore *core, const VSAPI *vsapi) {
    MinSRPData d;
    MinSRPData *data = nullptr;

    d.node = vsapi->propGetNode(in, "clip", 0, 0);
    d.vi = vsapi->getVideoInfo(d.node);

    // BUG FIX: check for a constant format *before* touching d.vi->format.
    // With a variable-format clip, format is null and the original code
    // dereferenced it (format->numPlanes) before ever reaching this check.
    if (!isConstantFormat(d.vi)) {
        vsapi->setError(out, "MinSRP: only input with constant format supported");
        vsapi->freeNode(d.node);
        return;
    }

    // Hoisted out of the per-plane loop below — the format does not change
    // between iterations, so there is no reason to re-test it three times.
    if (d.vi->format->subSamplingH || d.vi->format->subSamplingW) {
        vsapi->setError(out, "MinSRP: 4:4:4 or gray input required!");
        vsapi->freeNode(d.node);
        return;
    }

    const int m = vsapi->propNumElements(in, "mode");
    const int n = vsapi->propNumElements(in, "str");

    if (m > d.vi->format->numPlanes) {
        vsapi->setError(out, "MinSRP: number of modes specified must be equal to or fewer than the number of input planes");
        vsapi->freeNode(d.node);
        return;
    }

    if (n > d.vi->format->numPlanes) {
        vsapi->setError(out, "MinSRP: number of the specified elements in str array must be equal to or fewer than the number of input planes");
        vsapi->freeNode(d.node);
        return;
    }

    for (int i = 0; i < 3; ++i) {
        if (m == -1)
            d.mode[i] = 3; // array absent entirely: default mode for every plane
        else if (i < m) {
            d.mode[i] = int64ToIntS(vsapi->propGetInt(in, "mode", i, nullptr));
            if (d.mode[i] < 0 || d.mode[i] > 3) {
                vsapi->setError(out, "MinSRP: invalid mode specified, only modes 0-3 supported");
                vsapi->freeNode(d.node);
                return;
            }
        } else
            d.mode[i] = d.mode[i - 1]; // repeat last given entry for remaining planes

        if (n == -1)
            d.str[i] = 1.; // array absent entirely: default strength
        else if (i < n)
            d.str[i] = vsapi->propGetFloat(in, "str", i, nullptr);
        else
            d.str[i] = d.str[i - 1];
    }

    data = new MinSRPData;
    *data = d;

    vsapi->createFilter(in, out, "MinSRP", minsrpInit, minsrpGetFrame, minsrpFree, fmParallel, 0, data, core);
}
// Init callback for the internal Cache filter: wraps the input clip in a
// CacheInstance, applies an optional explicit size, and registers the cache.
static void VS_CC cacheInit(VSMap *in, VSMap *out, void **instanceData, VSNode *node, VSCore *core, const VSAPI *vsapi) {
    VSNodeRef *clip = vsapi->propGetNode(in, "clip", 0, nullptr);
    int error;
    const bool fixedSize = !!vsapi->propGetInt(in, "fixed", 0, &error);

    CacheInstance *instance = new CacheInstance(clip, node, core, fixedSize);

    // An explicit, positive "size" overrides the cache's default frame limit.
    const int maxFrames = int64ToIntS(vsapi->propGetInt(in, "size", 0, &error));
    if (!error && maxFrames > 0)
        instance->cache.setMaxFrames(maxFrames);

    *instanceData = instance;
    vsapi->setVideoInfo(vsapi->getVideoInfo(clip), 1, node);
    instance->addCache();
}
// Creates the RemoveGrain filter. Takes up to one mode per plane; missing
// tail entries repeat the previous one. Modes 0-24 are supported.
void VS_CC removeGrainCreate(const VSMap *in, VSMap *out, void *userData, VSCore *core, const VSAPI *vsapi) {
    RemoveGrainData d;

    d.node = vsapi->propGetNode(in, "clip", 0, nullptr);
    d.vi = vsapi->getVideoInfo(d.node);

    // BUG FIX: the error message promises constant format, but the original
    // test only rejected a variable *format* (null d.vi->format); clips with
    // variable dimensions (width/height == 0) slipped through. Reject those too.
    if (!d.vi->format || d.vi->width == 0 || d.vi->height == 0) {
        vsapi->freeNode(d.node);
        vsapi->setError(out, "RemoveGrain: Only constant format input supported");
        return;
    }

    int n = d.vi->format->numPlanes;
    int m = vsapi->propNumElements(in, "mode");

    if (n < m) {
        vsapi->freeNode(d.node);
        vsapi->setError(out, "RemoveGrain: Number of modes specified must be equal or fewer than the number of input planes");
        return;
    }

    for (int i = 0; i < 3; i++) {
        if (i < m) {
            d.mode[i] = int64ToIntS(vsapi->propGetInt(in, "mode", i, nullptr));
            if (d.mode[i] < 0 || d.mode[i] > 24) {
                vsapi->freeNode(d.node);
                vsapi->setError(out, "RemoveGrain: Invalid mode specified, only modes 0-24 supported");
                return;
            }
        } else {
            // Fewer modes than planes: repeat the last specified mode.
            d.mode[i] = d.mode[i - 1];
        }
    }

    RemoveGrainData *data = new RemoveGrainData(d);

    vsapi->createFilter(in, out, "RemoveGrain", removeGrainInit, removeGrainGetFrame, removeGrainFree, fmParallel, 0, data, core);
}
// Creates the mvtools Super filter: produces a "super" clip holding the
// padded source frame plus its hierarchy of downscaled levels, which the
// other mv.* filters consume. This variant requires 32-bit float input.
static void VS_CC mvsuperCreate(const VSMap *in, VSMap *out, void *userData, VSCore *core, const VSAPI *vsapi) {
    MVSuperData d;
    MVSuperData *data;

    int err;

    // Optional parameters with their defaults (err is set when the key is absent).
    d.nHPad = int64ToIntS(vsapi->propGetInt(in, "hpad", 0, &err));
    if (err)
        d.nHPad = 8;

    d.nVPad = int64ToIntS(vsapi->propGetInt(in, "vpad", 0, &err));
    if (err)
        d.nVPad = 8;

    d.nPel = int64ToIntS(vsapi->propGetInt(in, "pel", 0, &err));
    if (err)
        d.nPel = 2;

    // levels left at propGetInt's 0 on absence; 0 / out-of-range values are
    // replaced by the computed maximum (nLevelsMax) below.
    d.nLevels = int64ToIntS(vsapi->propGetInt(in, "levels", 0, &err));

    d.chroma = !!vsapi->propGetInt(in, "chroma", 0, &err);
    if (err)
        d.chroma = 1;

    d.sharp = int64ToIntS(vsapi->propGetInt(in, "sharp", 0, &err));
    if (err)
        d.sharp = 2;

    d.rfilter = int64ToIntS(vsapi->propGetInt(in, "rfilter", 0, &err));
    if (err)
        d.rfilter = 2;

    // Validate scalar parameters before acquiring any node refs, so the
    // plain returns below cannot leak.
    if ((d.nPel != 1) && (d.nPel != 2) && (d.nPel != 4)) {
        vsapi->setError(out, "Super: pel must be 1, 2, or 4.");
        return;
    }

    if (d.sharp < 0 || d.sharp > 2) {
        vsapi->setError(out, "Super: sharp must be between 0 and 2 (inclusive).");
        return;
    }

    if (d.rfilter < 0 || d.rfilter > 4) {
        vsapi->setError(out, "Super: rfilter must be between 0 and 4 (inclusive).");
        return;
    }

    d.node = vsapi->propGetNode(in, "clip", 0, 0);
    d.vi = *vsapi->getVideoInfo(d.node);
    d.nWidth = d.vi.width;
    d.nHeight = d.vi.height;

    // This build only accepts single-precision float clips with constant format.
    if (!isConstantFormat(&d.vi) || d.vi.format->bitsPerSample < 32 || d.vi.format->sampleType != stFloat) {
        vsapi->setError(out, "Super: input clip must be single precision fp, with constant dimensions.");
        vsapi->freeNode(d.node);
        return;
    }

    // Gray has no chroma to process; RGB is always processed "with chroma".
    if (d.vi.format->colorFamily == cmGray)
        d.chroma = 0;

    if (d.vi.format->colorFamily == cmRGB)
        d.chroma = 1;

    d.nModeYUV = d.chroma ? YUVPLANES : YPLANE;

    d.xRatioUV = 1 << d.vi.format->subSamplingW;
    d.yRatioUV = 1 << d.vi.format->subSamplingH;

    // Count how many pyramid levels fit before a plane would shrink below
    // the minimum usable size (2 luma pixels per chroma sample in each axis).
    int32_t nLevelsMax = 0;
    while (PlaneHeightLuma(d.vi.height, nLevelsMax, d.yRatioUV, d.nVPad) >= d.yRatioUV * 2 &&
           PlaneWidthLuma(d.vi.width, nLevelsMax, d.xRatioUV, d.nHPad) >= d.xRatioUV * 2) {
        ++nLevelsMax;
    }

    if (d.nLevels <= 0 || d.nLevels > nLevelsMax)
        d.nLevels = nLevelsMax;

    // Optional externally supplied sub-pixel-refined clip.
    d.pelclip = vsapi->propGetNode(in, "pelclip", 0, &err);
    const VSVideoInfo *pelvi = d.pelclip ? vsapi->getVideoInfo(d.pelclip) : nullptr;

    if (d.pelclip && (!isConstantFormat(pelvi) || pelvi->format != d.vi.format)) {
        vsapi->setError(out, "Super: pelclip must have the same format as the input clip, and it must have constant dimensions.");
        vsapi->freeNode(d.node);
        vsapi->freeNode(d.pelclip);
        return;
    }

    d.usePelClip = false;
    if (d.pelclip && (d.nPel >= 2)) {
        // The pelclip may be either exactly nPel times the source size
        // (unpadded) or nPel times the padded size.
        if ((pelvi->width == d.vi.width * d.nPel) &&
            (pelvi->height == d.vi.height * d.nPel)) {
            d.usePelClip = true;
            d.isPelClipPadded = false;
        } else if ((pelvi->width == (d.vi.width + d.nHPad * 2) * d.nPel) &&
                   (pelvi->height == (d.vi.height + d.nVPad * 2) * d.nPel)) {
            d.usePelClip = true;
            d.isPelClipPadded = true;
        } else {
            vsapi->setError(out, "Super: pelclip's dimensions must be multiples of the input clip's dimensions.");
            vsapi->freeNode(d.pelclip);
            vsapi->freeNode(d.node);
            return;
        }
    }

    // The output "super" frame is the padded width; the height packs all
    // levels stacked vertically (derived from the total plane offset).
    d.nSuperWidth = d.nWidth + 2 * d.nHPad;
    d.nSuperHeight = PlaneSuperOffset(false, d.nHeight, d.nLevels, d.nPel, d.nVPad, d.nSuperWidth, d.yRatioUV) / d.nSuperWidth;

    // Keep dimensions even where subsampling demands it.
    if (d.yRatioUV == 2 && d.nSuperHeight & 1)
        ++d.nSuperHeight;

    if (d.xRatioUV == 2 && d.nSuperWidth & 1)
        ++d.nSuperWidth;

    d.vi.width = d.nSuperWidth;
    d.vi.height = d.nSuperHeight;

    data = new MVSuperData;
    *data = d;

    vsapi->createFilter(in, out, "Super", mvsuperInit, mvsuperGetFrame, mvsuperFree, fmParallel, 0, data, core);
}
// Shared creator for the Text-family filters (Text, ClipInfo, CoreInfo,
// FrameNum, FrameProps); userData selects which variant is built.
static void VS_CC textCreate(const VSMap *in, VSMap *out, void *userData, VSCore *core, const VSAPI *vsapi) {
    TextData d;
    TextData *data;
    int err;

    d.node = vsapi->propGetNode(in, "clip", 0, &err);
    if (err) {
        // Can only happen for CoreInfo.
        // No clip given: synthesize one via std.BlankClip so the filter
        // still has something to draw onto.
        VSMap *args = vsapi->createMap();
        VSPlugin *stdPlugin = vsapi->getPluginById("com.vapoursynth.std", core);
        VSMap *ret = vsapi->invoke(stdPlugin, "BlankClip", args);
        vsapi->freeMap(args);
        const char *error = vsapi->getError(ret);
        if (error) {
            std::string msg = "CoreInfo: No input clip was given and invoking BlankClip failed. The error message from BlankClip is:\n";
            msg += error;
            vsapi->setError(out, msg.c_str());
            vsapi->freeMap(ret);
            return;
        }
        d.node = vsapi->propGetNode(ret, "clip", 0, nullptr);
        vsapi->freeMap(ret);
    }
    d.vi = vsapi->getVideoInfo(d.node);

    if (isCompatFormat(d.vi)) {
        vsapi->setError(out, "Text: Compat formats not supported");
        vsapi->freeNode(d.node);
        return;
    }

    // Only checkable here when the format is constant; variable-format clips
    // are re-checked per frame in textGetFrame.
    if (d.vi->format && ((d.vi->format->sampleType == stInteger && d.vi->format->bitsPerSample > 16) ||
                         (d.vi->format->sampleType == stFloat && d.vi->format->bitsPerSample != 32))) {
        vsapi->setError(out, "Text: Only 8-16 bit integer and 32 bit float formats supported");
        vsapi->freeNode(d.node);
        return;
    }

    d.alignment = int64ToIntS(vsapi->propGetInt(in, "alignment", 0, &err));
    if (err) {
        d.alignment = 7; // top left
    }

    if (d.alignment < 1 || d.alignment > 9) {
        vsapi->setError(out, "Text: alignment must be between 1 and 9 (think numpad)");
        vsapi->freeNode(d.node);
        return;
    }

    // Which filter variant to build was smuggled in through userData.
    d.filter = reinterpret_cast<intptr_t>(userData);

    switch (d.filter) {
        case FILTER_TEXT:
            d.text = vsapi->propGetData(in, "text", 0, nullptr);
            d.instanceName = "Text";
            break;
        case FILTER_CLIPINFO:
            d.instanceName = "ClipInfo";
            break;
        case FILTER_COREINFO: {
            d.instanceName = "CoreInfo";
            break;
        }
        case FILTER_FRAMENUM:
            d.instanceName = "FrameNum";
            break;
        case FILTER_FRAMEPROPS:
            // Optional list of property names to display; empty means "all"
            // (propNumElements returns -1 for an absent key, skipping the loop).
            int numProps = vsapi->propNumElements(in, "props");

            for (int i = 0; i < numProps; i++) {
                d.props.push_back(vsapi->propGetData(in, "props", i, nullptr));
            }

            d.instanceName = "FrameProps";
            break;
    }

    data = new TextData(d);

    vsapi->createFilter(in, out, d.instanceName.c_str(), textInit, textGetFrame, textFree, fmParallel, 0, data, core);
}
// Frame callback shared by the Text-family filters: copies the source frame
// and scrawls the variant-specific text (frame number, frame properties,
// core info, clip info, or the user-supplied string) onto the copy.
static const VSFrameRef *VS_CC textGetFrame(int n, int activationReason, void **instanceData, void **frameData, VSFrameContext *frameCtx, VSCore *core, const VSAPI *vsapi) {
    TextData *d = static_cast<TextData *>(*instanceData);

    if (activationReason == arInitial) {
        vsapi->requestFrameFilter(n, d->node, frameCtx);
    } else if (activationReason == arAllFramesReady) {
        const VSFrameRef *src = vsapi->getFrameFilter(n, d->node, frameCtx);
        const VSFormat *frame_format = vsapi->getFrameFormat(src);
        // Per-frame format check: needed because variable-format clips pass
        // the creation-time check with a null d->vi->format.
        if ((frame_format->sampleType == stInteger && frame_format->bitsPerSample > 16) ||
            (frame_format->sampleType == stFloat && frame_format->bitsPerSample != 32)) {
            vsapi->freeFrame(src);
            vsapi->setFilterError((d->instanceName + ": Only 8..16 bit integer and 32 bit float formats supported").c_str(), frameCtx);
            return nullptr;
        }

        VSFrameRef *dst = vsapi->copyFrame(src, core);

        if (d->filter == FILTER_FRAMENUM) {
            scrawl_text(std::to_string(n), d->alignment, dst, vsapi);
        } else if (d->filter == FILTER_FRAMEPROPS) {
            const VSMap *props = vsapi->getFramePropsRO(dst);
            int numKeys = vsapi->propNumKeys(props);
            int i;
            std::string text = "Frame properties:\n";

            // An explicit props list restricts the output; otherwise dump
            // every property attached to the frame.
            if (!d->props.empty()) {
                for (const auto &iter : d->props) {
                    append_prop(text, iter, props, vsapi);
                }
            } else {
                for (i = 0; i < numKeys; i++) {
                    const char *key = vsapi->propGetKey(props, i);
                    append_prop(text, key, props, vsapi);
                }
            }
            scrawl_text(text, d->alignment, dst, vsapi);
        } else if (d->filter == FILTER_COREINFO) {
            const VSCoreInfo *ci = vsapi->getCoreInfo(core);

            std::string text;
            text.append(ci->versionString).append("\n");
            text.append("Threads: ").append(std::to_string(ci->numThreads)).append("\n");
            text.append("Maximum framebuffer cache size: ").append(std::to_string(ci->maxFramebufferSize)).append(" bytes\n");
            text.append("Used framebuffer cache size: ").append(std::to_string(ci->usedFramebufferSize)).append(" bytes");

            scrawl_text(text, d->alignment, dst, vsapi);
        } else if (d->filter == FILTER_CLIPINFO) {
            // NOTE(review): properties are read from src here while the other
            // branches read from dst; src and dst carry the same props after
            // copyFrame, so the difference appears to be cosmetic — confirm.
            const VSMap *props = vsapi->getFramePropsRO(src);
            std::string text = "Clip info:\n";

            // d->vi->width == 0 signals variable dimensions.
            if (d->vi->width) {
                text += "Width: " + std::to_string(vsapi->getFrameWidth(dst, 0)) + " px\n";
                text += "Height: " + std::to_string(vsapi->getFrameHeight(dst, 0)) + " px\n";
            } else {
                text += "Width: " + std::to_string(vsapi->getFrameWidth(dst, 0)) + " px (may vary)\n";
                text += "Height: " + std::to_string(vsapi->getFrameHeight(dst, 0)) + " px (may vary)\n";
            }

            int snerr, sderr;
            int sn = int64ToIntS(vsapi->propGetInt(props, "_SARNum", 0, &snerr));
            int sd = int64ToIntS(vsapi->propGetInt(props, "_SARDen", 0, &sderr));

            if (snerr || sderr)
                text += "Aspect ratio: Unknown\n";
            else
                text += "Sample aspect ratio: " + std::to_string(sn) + ":" + std::to_string(sd) + "\n";

            text += "Length: " + std::to_string(d->vi->numFrames) + " frames\n";

            text += "Format name: " + std::string(frame_format->name) + (d->vi->format ? "\n" : " (may vary)\n");

            text += "Color family: " + colorFamilyToString(frame_format->colorFamily) + "\n";
            text += "Sample type: " + std::string(frame_format->sampleType == stInteger ? "Integer" : "Float") + "\n";
            text += "Bits per sample: " + std::to_string(frame_format->bitsPerSample) + "\n";
            text += "Subsampling Height/Width: " + std::to_string(1 << frame_format->subSamplingH) + "x/" + std::to_string(1 << frame_format->subSamplingW) + "x\n";

            // Standard color metadata props; -1 is the "unknown" sentinel the
            // *ToString helpers are expected to handle.
            int err;
            int matrix = int64ToIntS(vsapi->propGetInt(props, "_Matrix", 0, &err));
            if (err)
                matrix = -1;
            int primaries = int64ToIntS(vsapi->propGetInt(props, "_Primaries", 0, &err));
            if (err)
                primaries = -1;
            int transfer = int64ToIntS(vsapi->propGetInt(props, "_Transfer", 0, &err));
            if (err)
                transfer = -1;
            int range = int64ToIntS(vsapi->propGetInt(props, "_ColorRange", 0, &err));
            if (err)
                range = -1;
            int location = int64ToIntS(vsapi->propGetInt(props, "_ChromaLocation", 0, &err));
            if (err)
                location = -1;
            int field = int64ToIntS(vsapi->propGetInt(props, "_FieldBased", 0, &err));
            if (err)
                field = -1;
            const char *picttype = vsapi->propGetData(props, "_PictType", 0, &err);

            text += "Matrix: " + matrixToString(matrix) + "\n";
            text += "Primaries: " + primariesToString(primaries) + "\n";
            text += "Transfer: " + transferToString(transfer) + "\n";
            text += "Range: " + rangeToString(range) + "\n";
            text += "Chroma Location: " + chromaLocationToString(location) + "\n";
            text += "Field handling: " + fieldBasedToString(field) + "\n";
            text += "Picture type: " + std::string(picttype ? picttype : "Unknown") + "\n";

            if (d->vi->fpsNum && d->vi->fpsDen) {
                text += "Fps: " + std::to_string(d->vi->fpsNum) + "/" + std::to_string(d->vi->fpsDen) + " (" + std::to_string(static_cast<double>(d->vi->fpsNum) / d->vi->fpsDen) + ")\n";
            } else {
                text += "Fps: Unknown\n";
            }

            int fnerr, fderr;
            int fn = int64ToIntS(vsapi->propGetInt(props, "_DurationNum", 0, &fnerr));
            int fd = int64ToIntS(vsapi->propGetInt(props, "_DurationDen", 0, &fderr));
            if (fnerr || fderr) {
                text += "Frame duration: Unknown\n";
            } else {
                text += "Frame duration: " + std::to_string(fn) + "/" + std::to_string(fd) + " (" + std::to_string(static_cast<double>(fn) / fd) + ")\n";
            }
            scrawl_text(text, d->alignment, dst, vsapi);
        } else {
            // FILTER_TEXT: draw the user-supplied string.
            scrawl_text(d->text, d->alignment, dst, vsapi);
        }

        vsapi->freeFrame(src);
        return dst;
    }

    return nullptr;
}
// Creates the mvtools Recalculate filter: refines an existing motion-vector
// clip (from mv.Analyse) at a finer block size, using the same super clip.
static void VS_CC mvrecalculateCreate(const VSMap *in, VSMap *out, void *userData, VSCore *core, const VSAPI *vsapi) {
    MVRecalculateData d;
    MVRecalculateData *data;

    int err;

    // Optional parameters with defaults; several defaults depend on
    // truemotion, so read order matters below.
    d.thSAD = int64ToIntS(vsapi->propGetInt(in, "thsad", 0, &err));
    if (err)
        d.thSAD = 200;

    d.smooth = int64ToIntS(vsapi->propGetInt(in, "smooth", 0, &err));
    if (err)
        d.smooth = 1;

    d.blksize = int64ToIntS(vsapi->propGetInt(in, "blksize", 0, &err));
    if (err)
        d.blksize = 8;

    d.blksizev = int64ToIntS(vsapi->propGetInt(in, "blksizev", 0, &err));
    if (err)
        d.blksizev = d.blksize;

    d.search = int64ToIntS(vsapi->propGetInt(in, "search", 0, &err));
    if (err)
        d.search = 4;

    d.searchparam = int64ToIntS(vsapi->propGetInt(in, "searchparam", 0, &err));
    if (err)
        d.searchparam = 2;

    d.chroma = !!vsapi->propGetInt(in, "chroma", 0, &err);
    if (err)
        d.chroma = 1;

    // truemotion switches a group of defaults toward smoother vector fields.
    d.truemotion = !!vsapi->propGetInt(in, "truemotion", 0, &err);
    if (err)
        d.truemotion = 1;

    d.nLambda = int64ToIntS(vsapi->propGetInt(in, "lambda", 0, &err));
    if (err)
        d.nLambda = d.truemotion ? (1000 * d.blksize * d.blksizev / 64) : 0;

    d.pnew = int64ToIntS(vsapi->propGetInt(in, "pnew", 0, &err));
    if (err)
        d.pnew = d.truemotion ? 50 : 0; // relative to 256

    d.overlap = int64ToIntS(vsapi->propGetInt(in, "overlap", 0, &err));

    d.overlapv = int64ToIntS(vsapi->propGetInt(in, "overlapv", 0, &err));
    if (err)
        d.overlapv = d.overlap;

    d.dctmode = int64ToIntS(vsapi->propGetInt(in, "dct", 0, &err));

    d.divideExtra = int64ToIntS(vsapi->propGetInt(in, "divide", 0, &err));

    d.isse = !!vsapi->propGetInt(in, "isse", 0, &err);
    if (err)
        d.isse = 1;

    d.meander = !!vsapi->propGetInt(in, "meander", 0, &err);
    if (err)
        d.meander = 1;

    d.fields = !!vsapi->propGetInt(in, "fields", 0, &err);

    // tffexists records whether the user supplied tff at all.
    d.tff = !!vsapi->propGetInt(in, "tff", 0, &err);
    d.tffexists = err;

    // Validate scalar parameters (no node refs acquired yet, so plain
    // returns cannot leak).
    if (d.search < 0 || d.search > 7) {
        vsapi->setError(out, "Recalculate: search must be between 0 and 7 (inclusive).");
        return;
    }

    if (d.dctmode < 0 || d.dctmode > 10) {
        vsapi->setError(out, "Recalculate: dct must be between 0 and 10 (inclusive).");
        return;
    }

    // SATD-based dct modes only have implementations for these block shapes.
    if (d.dctmode >= 5 && !((d.blksize == 4 && d.blksizev == 4) ||
                            (d.blksize == 8 && d.blksizev == 4) ||
                            (d.blksize == 8 && d.blksizev == 8) ||
                            (d.blksize == 16 && d.blksizev == 8) ||
                            (d.blksize == 16 && d.blksizev == 16))) {
        vsapi->setError(out, "Recalculate: dct 5..10 can only work with 4x4, 8x4, 8x8, 16x8, and 16x16 blocks.");
        return;
    }

    if (d.divideExtra < 0 || d.divideExtra > 2) {
        vsapi->setError(out, "Recalculate: divide must be between 0 and 2 (inclusive).");
        return;
    }

    d.analysisData.nBlkSizeX = d.blksize;
    d.analysisData.nBlkSizeY = d.blksizev;
    if ((d.analysisData.nBlkSizeX != 4 || d.analysisData.nBlkSizeY != 4) &&
        (d.analysisData.nBlkSizeX != 8 || d.analysisData.nBlkSizeY != 4) &&
        (d.analysisData.nBlkSizeX != 8 || d.analysisData.nBlkSizeY != 8) &&
        (d.analysisData.nBlkSizeX != 16 || d.analysisData.nBlkSizeY != 2) &&
        (d.analysisData.nBlkSizeX != 16 || d.analysisData.nBlkSizeY != 8) &&
        (d.analysisData.nBlkSizeX != 16 || d.analysisData.nBlkSizeY != 16) &&
        (d.analysisData.nBlkSizeX != 32 || d.analysisData.nBlkSizeY != 32) &&
        (d.analysisData.nBlkSizeX != 32 || d.analysisData.nBlkSizeY != 16)) {
        vsapi->setError(out, "Recalculate: the block size must be 4x4, 8x4, 8x8, 16x2, 16x8, 16x16, 32x16, or 32x32.");
        return;
    }

    if (d.overlap < 0 || d.overlap > d.blksize / 2 ||
        d.overlapv < 0 || d.overlapv > d.blksizev / 2) {
        vsapi->setError(out, "Recalculate: overlap must be at most half of blksize, overlapv must be at most half of blksizev, and they both need to be at least 0.");
        return;
    }

    if (d.divideExtra && (d.blksize < 8 && d.blksizev < 8)) {
        vsapi->setError(out, "Recalculate: blksize and blksizev must be at least 8 when divide=True.");
        return;
    }

    d.analysisData.nOverlapX = d.overlap;
    d.analysisData.nOverlapY = d.overlapv;

    // Map the numeric "search" parameter (validated 0..7 above) to a type.
    SearchType searchTypes[] = { ONETIME, NSTEP, LOGARITHMIC, EXHAUSTIVE, HEX2SEARCH, UMHSEARCH, HSEARCH, VSEARCH };
    d.searchType = searchTypes[d.search];

    // Each search type has its own minimum for the search parameter.
    if (d.searchType == NSTEP)
        d.nSearchParam = (d.searchparam < 0) ? 0 : d.searchparam;
    else
        d.nSearchParam = (d.searchparam < 1) ? 1 : d.searchparam;


    // XXX maybe get rid of these two
    // Bleh, they're checked by client filters. Though it's kind of pointless.
    d.analysisData.nMagicKey = MOTION_MAGIC_KEY;
    d.analysisData.nVersion = MVANALYSIS_DATA_VERSION; // MVAnalysisData and outfile format version: last update v1.8.1

    d.headerSize = VSMAX(4 + sizeof(d.analysisData), 256); // include itself, but usually equal to 256 :-)

    d.node = vsapi->propGetNode(in, "super", 0, 0);
    d.supervi = vsapi->getVideoInfo(d.node);

    if (d.overlap % (1 << d.supervi->format->subSamplingW) ||
        d.overlapv % (1 << d.supervi->format->subSamplingH)) {
        vsapi->setError(out, "Recalculate: The requested overlap is incompatible with the super clip's subsampling.");
        vsapi->freeNode(d.node);
        return;
    }

    if (d.divideExtra && (d.overlap % (2 << d.supervi->format->subSamplingW) ||
                          d.overlapv % (2 << d.supervi->format->subSamplingH))) { // subsampling times 2
        vsapi->setError(out, "Recalculate: overlap and overlapv must be multiples of 2 or 4 when divide=True, depending on the super clip's subsampling.");
        vsapi->freeNode(d.node);
        return;
    }

    // The super clip's geometry is passed via frame properties on frame 0
    // ("evil" because it forces a frame request at graph-construction time).
    char errorMsg[1024];
    const VSFrameRef *evil = vsapi->getFrame(0, d.node, errorMsg, 1024);
    if (!evil) {
        vsapi->setError(out, std::string("Recalculate: failed to retrieve first frame from super clip. Error message: ").append(errorMsg).c_str());
        vsapi->freeNode(d.node);
        return;
    }

    const VSMap *props = vsapi->getFramePropsRO(evil);
    int evil_err[6];
    int nHeight = int64ToIntS(vsapi->propGetInt(props, "Super_height", 0, &evil_err[0]));
    d.nSuperHPad = int64ToIntS(vsapi->propGetInt(props, "Super_hpad", 0, &evil_err[1]));
    d.nSuperVPad = int64ToIntS(vsapi->propGetInt(props, "Super_vpad", 0, &evil_err[2]));
    d.nSuperPel = int64ToIntS(vsapi->propGetInt(props, "Super_pel", 0, &evil_err[3]));
    d.nSuperModeYUV = int64ToIntS(vsapi->propGetInt(props, "Super_modeyuv", 0, &evil_err[4]));
    d.nSuperLevels = int64ToIntS(vsapi->propGetInt(props, "Super_levels", 0, &evil_err[5]));
    vsapi->freeFrame(evil);

    for (int i = 0; i < 6; i++)
        if (evil_err[i]) {
            vsapi->setError(out, "Recalculate: required properties not found in first frame of super clip. Maybe clip didn't come from mv.Super? Was the first frame trimmed away?");
            vsapi->freeNode(d.node);
            return;
        }

    if (d.supervi->format->colorFamily == cmGray)
        d.chroma = 0;

    d.nModeYUV = d.chroma ? YUVPLANES : YPLANE;

    // The super clip must contain at least the planes we intend to process.
    if ((d.nModeYUV & d.nSuperModeYUV) != d.nModeYUV) { //x
        vsapi->setError(out, "Recalculate: super clip does not contain needed colour data.");
        vsapi->freeNode(d.node);
        return;
    }

    d.vectors = vsapi->propGetNode(in, "vectors", 0, NULL);
    d.vi = vsapi->getVideoInfo(d.vectors);

    // The analysis parameters of the vector clip also travel in its frame 0.
    evil = vsapi->getFrame(0, d.vectors, errorMsg, 1024);
    if (!evil) {
        vsapi->setError(out, std::string("Recalculate: failed to retrieve first frame from vectors clip. Error message: ").append(errorMsg).c_str());
        vsapi->freeNode(d.node);
        vsapi->freeNode(d.vectors);
        return;
    }

    // XXX This really should be passed as a frame property.
    const MVAnalysisData *pAnalyseFilter = reinterpret_cast<const MVAnalysisData *>(vsapi->getReadPtr(evil, 0) + sizeof(int));

    d.analysisData.yRatioUV = pAnalyseFilter->GetYRatioUV();
    d.analysisData.xRatioUV = pAnalyseFilter->GetXRatioUV();
    d.analysisData.nWidth = pAnalyseFilter->GetWidth();
    d.analysisData.nHeight = pAnalyseFilter->GetHeight();
    d.analysisData.nDeltaFrame = pAnalyseFilter->GetDeltaFrame();
    d.analysisData.isBackward = pAnalyseFilter->IsBackward();
    vsapi->freeFrame(evil);

    d.analysisData.bitsPerSample = d.supervi->format->bitsPerSample;

    // Scale thSAD from the 8-bit reference range to the clip's bit depth...
    int pixelMax = (1 << d.supervi->format->bitsPerSample) - 1;
    d.thSAD = int((double)d.thSAD * pixelMax / 255.0 + 0.5);

    // normalize threshold to block size
    // ...then from the 8x8 reference block to the actual block area, plus a
    // chroma contribution when chroma is processed.
    int referenceBlockSize = 8 * 8;
    d.thSAD = d.thSAD * (d.analysisData.nBlkSizeX * d.analysisData.nBlkSizeY) / referenceBlockSize;
    if (d.chroma)
        d.thSAD += d.thSAD / (d.analysisData.xRatioUV * d.analysisData.yRatioUV) * 2;

    d.analysisData.nMotionFlags = 0;
    d.analysisData.nMotionFlags |= d.isse ? MOTION_USE_ISSE : 0;
    d.analysisData.nMotionFlags |= d.analysisData.isBackward ? MOTION_IS_BACKWARD : 0;
    d.analysisData.nMotionFlags |= d.chroma ? MOTION_USE_CHROMA_MOTION : 0;


    if (d.isse) {
        d.analysisData.nCPUFlags = cpu_detect();
    }

    if (d.supervi->format->bitsPerSample > 8)
        d.isse = 0; // needed here because MVPlane can't have isse=1 with more than 8 bits

    d.analysisData.nPel = d.nSuperPel; //x

    // The super clip's padded width must agree with the analysis dimensions.
    int nSuperWidth = d.supervi->width;
    if (nHeight != d.analysisData.nHeight || nSuperWidth - 2 * d.nSuperHPad != d.analysisData.nWidth) {
        vsapi->setError(out, "Recalculate: wrong frame size.");
        vsapi->freeNode(d.node);
        vsapi->freeNode(d.vectors);
        return;
    }

    d.analysisData.nHPadding = d.nSuperHPad; //v2.0    //x
    d.analysisData.nVPadding = d.nSuperVPad;

    // Block grid dimensions for the refined (smaller) block size.
    int nBlkX = (d.analysisData.nWidth - d.analysisData.nOverlapX) / (d.analysisData.nBlkSizeX - d.analysisData.nOverlapX); //x
    int nBlkY = (d.analysisData.nHeight - d.analysisData.nOverlapY) / (d.analysisData.nBlkSizeY - d.analysisData.nOverlapY);

    d.analysisData.nBlkX = nBlkX;
    d.analysisData.nBlkY = nBlkY;

    d.analysisData.nLvCount = 1;

    if (d.divideExtra) { //v1.8.1
        // divide=True emits an extra level with blocks split in four.
        memcpy(&d.analysisDataDivided, &d.analysisData, sizeof(d.analysisData));
        d.analysisDataDivided.nBlkX = d.analysisData.nBlkX * 2;
        d.analysisDataDivided.nBlkY = d.analysisData.nBlkY * 2;
        d.analysisDataDivided.nBlkSizeX = d.analysisData.nBlkSizeX / 2;
        d.analysisDataDivided.nBlkSizeY = d.analysisData.nBlkSizeY / 2;
        d.analysisDataDivided.nOverlapX = d.analysisData.nOverlapX / 2;
        d.analysisDataDivided.nOverlapY = d.analysisData.nOverlapY / 2;
        d.analysisDataDivided.nLvCount = d.analysisData.nLvCount + 1;
    }

    try {
        d.mvClip = new MVClipDicks(d.vectors, 8 * 8 * 255, 255, vsapi);
    } catch (MVException &e) {
        vsapi->setError(out, std::string("Recalculate: ").append(e.what()).c_str());
        vsapi->freeNode(d.node);
        vsapi->freeNode(d.vectors);
        return;
    }


    data = (MVRecalculateData *)malloc(sizeof(d));
    *data = d;

    vsapi->createFilter(in, out, "Recalculate", mvrecalculateInit, mvrecalculateGetFrame, mvrecalculateFree, fmParallel, 0, data, core);
}
/* Creates the Resize filter: scales and/or converts the input clip to the
 * requested width, height, and format (all optional; absent values default
 * to the input clip's own). */
static void VS_CC resizeCreate(const VSMap *in, VSMap *out, void *userData, VSCore *core, const VSAPI *vsapi) {
    ResizeData d;
    ResizeData *data;
    int id;
    int dstwidth;
    int dstheight;
    int pf;
    int err;

    d.context = 0;
    d.dstrange = 0;
    d.lsrcformat = 0;
    d.lsrch = 0;
    d.lsrcw = 0;
    d.node = 0;
    /* Which resampling kernel to use is smuggled in through userData. */
    d.flags = (intptr_t)userData;

    d.node = vsapi->propGetNode(in, "clip", 0, 0);
    d.vi = *vsapi->getVideoInfo(d.node);

    dstwidth = int64ToIntS(vsapi->propGetInt(in, "width", 0, &err));
    if (err)
        dstwidth = d.vi.width;
    dstheight = int64ToIntS(vsapi->propGetInt(in, "height", 0, &err));
    if (err)
        dstheight = d.vi.height;

    id = int64ToIntS(vsapi->propGetInt(in, "format", 0, &err));
    if (err) {
        /* BUG FIX: previously the fallback was `if (err && d.vi.format)`,
         * which left id uninitialized (and then used it) when no format was
         * given AND the input clip had a variable format. */
        if (!d.vi.format) {
            vsapi->freeNode(d.node);
            RETERROR("Resize: no format specified and the input format is unknown");
        }
        id = d.vi.format->id;
    }

    /* Non-positive width/height mean "keep the input dimension". */
    if (dstwidth > 0)
        d.vi.width = dstwidth;
    if (dstheight > 0)
        d.vi.height = dstheight;

    pf = formatIdToPixelFormat(id);
    if (pf == PIX_FMT_NONE) {
        vsapi->freeNode(d.node);
        RETERROR("Resize: unsupported output format");
    }

    d.vi.format = vsapi->getFormatPreset(id, core);

    /* Output dimensions must be compatible with the target subsampling. */
    if ((d.vi.width % (1 << d.vi.format->subSamplingW)) || (d.vi.height % (1 << d.vi.format->subSamplingH))) {
        vsapi->freeNode(d.node);
        RETERROR("Resize: mod requirements of the target colorspace not fulfilled");
    }

    if (!isConstantFormat(&d.vi)) {
        vsapi->freeNode(d.node);
        RETERROR("Resize: output format not constant, set width, height and format");
    }

    data = malloc(sizeof(d));
    *data = d;

    vsapi->createFilter(in, out, "Resize", resizeInit, resizeGetframe, resizeFree, fmParallelRequests, 0, data, core);
}
/* Creates the mvtools FlowBlur filter: simulates motion blur by blending
 * along the motion vectors from a forward and a backward vector clip. */
static void VS_CC mvflowblurCreate(const VSMap *in, VSMap *out, void *userData, VSCore *core, const VSAPI *vsapi) {
    (void)userData;

    MVFlowBlurData d;
    MVFlowBlurData *data;

    int err;

    /* Optional parameters with their defaults. */
    d.blur = (float)vsapi->propGetFloat(in, "blur", 0, &err);
    if (err)
        d.blur = 50.0f;

    d.prec = int64ToIntS(vsapi->propGetInt(in, "prec", 0, &err));
    if (err)
        d.prec = 1;

    d.thscd1 = int64ToIntS(vsapi->propGetInt(in, "thscd1", 0, &err));
    if (err)
        d.thscd1 = MV_DEFAULT_SCD1;

    d.thscd2 = int64ToIntS(vsapi->propGetInt(in, "thscd2", 0, &err));
    if (err)
        d.thscd2 = MV_DEFAULT_SCD2;

    d.opt = !!vsapi->propGetInt(in, "opt", 0, &err);
    if (err)
        d.opt = 1;

    if (d.blur < 0.0f || d.blur > 200.0f) {
        vsapi->setError(out, "FlowBlur: blur must be between 0 and 200 % (inclusive).");
        return;
    }

    if (d.prec < 1) {
        vsapi->setError(out, "FlowBlur: prec must be at least 1.");
        return;
    }

    /* Blur time is used internally on a 0..256 scale. */
    d.blur256 = (int)(d.blur * 256.0f / 200.0f);

    d.super = vsapi->propGetNode(in, "super", 0, NULL);

    /* The super clip's geometry travels as frame properties on frame 0. */
#define ERROR_SIZE 1024
    char errorMsg[ERROR_SIZE] = "FlowBlur: failed to retrieve first frame from super clip. Error message: ";
    size_t errorLen = strlen(errorMsg);
    const VSFrameRef *evil = vsapi->getFrame(0, d.super, errorMsg + errorLen, ERROR_SIZE - errorLen);
#undef ERROR_SIZE
    if (!evil) {
        vsapi->setError(out, errorMsg);
        vsapi->freeNode(d.super);
        return;
    }

    const VSMap *props = vsapi->getFramePropsRO(evil);
    int evil_err[3];
    int nHeightS = int64ToIntS(vsapi->propGetInt(props, "Super_height", 0, &evil_err[0]));
    d.nSuperHPad = int64ToIntS(vsapi->propGetInt(props, "Super_hpad", 0, &evil_err[1]));
    int nSuperPel = int64ToIntS(vsapi->propGetInt(props, "Super_pel", 0, &evil_err[2]));
    vsapi->freeFrame(evil);

    /* BUG FIX: all three evil_err entries are filled above, but the original
     * loop only checked the first two (i < 2), so a missing Super_pel was
     * silently ignored and nSuperPel fell back to 0, producing a misleading
     * "wrong frame size" error later. */
    for (int i = 0; i < 3; i++)
        if (evil_err[i]) {
            vsapi->setError(out, "FlowBlur: required properties not found in first frame of super clip. Maybe clip didn't come from mv.Super? Was the first frame trimmed away?");
            vsapi->freeNode(d.super);
            return;
        }

    d.mvbw = vsapi->propGetNode(in, "mvbw", 0, NULL);
    d.mvfw = vsapi->propGetNode(in, "mvfw", 0, NULL);

    /* These helpers append to `error` on failure; check once afterwards. */
#define ERROR_SIZE 512
    char error[ERROR_SIZE + 1] = { 0 };
    const char *filter_name = "FlowBlur";
    adataFromVectorClip(&d.mvbw_data, d.mvbw, filter_name, "mvbw", vsapi, error, ERROR_SIZE);
    adataFromVectorClip(&d.mvfw_data, d.mvfw, filter_name, "mvfw", vsapi, error, ERROR_SIZE);
    scaleThSCD(&d.thscd1, &d.thscd2, &d.mvbw_data, filter_name, error, ERROR_SIZE);
    adataCheckSimilarity(&d.mvbw_data, &d.mvfw_data, filter_name, "mvbw", "mvfw", error, ERROR_SIZE);
#undef ERROR_SIZE

    if (error[0]) {
        vsapi->setError(out, error);
        vsapi->freeNode(d.super);
        vsapi->freeNode(d.mvfw);
        vsapi->freeNode(d.mvbw);
        return;
    }

    if (d.mvbw_data.nDeltaFrame <= 0 || d.mvfw_data.nDeltaFrame <= 0) {
        vsapi->setError(out, "FlowBlur: cannot use motion vectors with absolute frame references.");
        vsapi->freeNode(d.super);
        vsapi->freeNode(d.mvfw);
        vsapi->freeNode(d.mvbw);
        return;
    }

    // XXX Alternatively, use both clips' delta as offsets in GetFrame.
    if (d.mvfw_data.nDeltaFrame != d.mvbw_data.nDeltaFrame) {
        vsapi->setError(out, "FlowBlur: mvbw and mvfw must be generated with the same delta.");
        vsapi->freeNode(d.super);
        vsapi->freeNode(d.mvfw);
        vsapi->freeNode(d.mvbw);
        return;
    }

    // Make sure the motion vector clips are correct.
    if (!d.mvbw_data.isBackward || d.mvfw_data.isBackward) {
        if (!d.mvbw_data.isBackward)
            vsapi->setError(out, "FlowBlur: mvbw must be generated with isb=True.");
        else
            vsapi->setError(out, "FlowBlur: mvfw must be generated with isb=False.");
        vsapi->freeNode(d.super);
        vsapi->freeNode(d.mvfw);
        vsapi->freeNode(d.mvbw);
        return;
    }

    if (d.mvbw_data.nPel == 1)
        d.finest = vsapi->cloneNodeRef(d.super); // v2.0.9.1
    else {
        /* pel > 1: build a "finest" sub-pixel clip by invoking mv.Finest on
         * the super clip, then wrap it in std.Cache. */
        VSPlugin *mvtoolsPlugin = vsapi->getPluginById("com.nodame.mvtools", core);
        VSPlugin *stdPlugin = vsapi->getPluginById("com.vapoursynth.std", core);

        VSMap *args = vsapi->createMap();
        vsapi->propSetNode(args, "super", d.super, paReplace);
        vsapi->propSetInt(args, "opt", d.opt, paReplace);
        VSMap *ret = vsapi->invoke(mvtoolsPlugin, "Finest", args);
        if (vsapi->getError(ret)) {
#define ERROR_SIZE 512
            char error_msg[ERROR_SIZE + 1] = { 0 };
            snprintf(error_msg, ERROR_SIZE, "FlowBlur: %s", vsapi->getError(ret));
#undef ERROR_SIZE
            vsapi->setError(out, error_msg);

            vsapi->freeNode(d.super);
            vsapi->freeNode(d.mvfw);
            vsapi->freeNode(d.mvbw);
            vsapi->freeMap(args);
            vsapi->freeMap(ret);
            return;
        }
        d.finest = vsapi->propGetNode(ret, "clip", 0, NULL);
        vsapi->freeMap(ret);

        vsapi->clearMap(args);
        vsapi->propSetNode(args, "clip", d.finest, paReplace);
        vsapi->freeNode(d.finest);
        ret = vsapi->invoke(stdPlugin, "Cache", args);
        vsapi->freeMap(args);
        if (vsapi->getError(ret)) {
#define ERROR_SIZE 512
            char error_msg[ERROR_SIZE + 1] = { 0 };
            snprintf(error_msg, ERROR_SIZE, "FlowBlur: %s", vsapi->getError(ret));
#undef ERROR_SIZE
            vsapi->setError(out, error_msg);

            vsapi->freeNode(d.super);
            vsapi->freeNode(d.mvfw);
            vsapi->freeNode(d.mvbw);
            vsapi->freeMap(ret);
            return;
        }
        d.finest = vsapi->propGetNode(ret, "clip", 0, NULL);
        vsapi->freeMap(ret);
    }

    d.node = vsapi->propGetNode(in, "clip", 0, 0);
    d.vi = vsapi->getVideoInfo(d.node);

    /* The vector clip's dimensions must match the super clip's geometry. */
    const VSVideoInfo *supervi = vsapi->getVideoInfo(d.super);
    int nSuperWidth = supervi->width;

    if (d.mvbw_data.nHeight != nHeightS || d.mvbw_data.nWidth != nSuperWidth - d.nSuperHPad * 2 || d.mvbw_data.nPel != nSuperPel) {
        vsapi->setError(out, "FlowBlur: wrong source or super clip frame size.");
        vsapi->freeNode(d.finest);
        vsapi->freeNode(d.super);
        vsapi->freeNode(d.mvfw);
        vsapi->freeNode(d.mvbw);
        vsapi->freeNode(d.node);
        return;
    }

    if (!isConstantFormat(d.vi) || d.vi->format->bitsPerSample > 16 || d.vi->format->sampleType != stInteger ||
        d.vi->format->subSamplingW > 1 || d.vi->format->subSamplingH > 1 ||
        (d.vi->format->colorFamily != cmYUV && d.vi->format->colorFamily != cmGray)) {
        vsapi->setError(out, "FlowBlur: input clip must be GRAY, 420, 422, 440, or 444, up to 16 bits, with constant dimensions.");
        vsapi->freeNode(d.super);
        vsapi->freeNode(d.finest);
        vsapi->freeNode(d.mvfw);
        vsapi->freeNode(d.mvbw);
        vsapi->freeNode(d.node);
        return;
    }

    /* Chroma plane geometry derived from the luma values and subsampling. */
    d.nHeightUV = d.mvbw_data.nHeight / d.mvbw_data.yRatioUV;
    d.nWidthUV = d.mvbw_data.nWidth / d.mvbw_data.xRatioUV;
    d.nHPaddingUV = d.mvbw_data.nHPadding / d.mvbw_data.xRatioUV;
    //d.nVPaddingUV = d.mvbw_data.nHPadding / d.mvbw_data.yRatioUV; // original looks wrong
    d.nVPaddingUV = d.mvbw_data.nVPadding / d.mvbw_data.yRatioUV;

    d.VPitchY = d.mvbw_data.nWidth;
    d.VPitchUV = d.nWidthUV;

    simpleInit(&d.upsizer, d.mvbw_data.nWidth, d.mvbw_data.nHeight, d.mvbw_data.nBlkX, d.mvbw_data.nBlkY, d.opt);
    if (d.vi->format->colorFamily != cmGray)
        simpleInit(&d.upsizerUV, d.nWidthUV, d.nHeightUV, d.mvbw_data.nBlkX, d.mvbw_data.nBlkY, d.opt);

    data = (MVFlowBlurData *)malloc(sizeof(d));
    *data = d;

    vsapi->createFilter(in, out, "FlowBlur", mvflowblurInit, mvflowblurGetFrame, mvflowblurFree, fmParallel, 0, data, core);
}
/// Create-function for the MVTools "Mask" filter.
///
/// Reads the user parameters from `in`, validates them, analyses the vector
/// clip, precomputes the normalisation factors and block-grid dimensions used
/// by mvmaskGetFrame, and registers the filter instance via createFilter.
/// On any validation failure it sets an error on `out` and releases every
/// node acquired so far.
///
/// Fix: the "kind" range error message was missing the word "be"
/// ("kind must 0, ..." -> "kind must be 0, ...").
static void VS_CC mvmaskCreate(const VSMap *in, VSMap *out, void *userData, VSCore *core, const VSAPI *vsapi) {
    (void)userData;

    MVMaskData d;
    MVMaskData *data;

    int err;

    // Optional scalar parameters with their defaults.
    d.ml = (float)vsapi->propGetFloat(in, "ml", 0, &err);
    if (err)
        d.ml = 100.0f;

    d.fGamma = (float)vsapi->propGetFloat(in, "gamma", 0, &err);
    if (err)
        d.fGamma = 1.0f;

    // No explicit default needed: propGetInt returns 0 when "kind" is absent.
    d.kind = int64ToIntS(vsapi->propGetInt(in, "kind", 0, &err));

    double time = vsapi->propGetFloat(in, "time", 0, &err);
    if (err)
        time = 100.0;

    // "ysc" likewise defaults to 0 via propGetInt's absent-key behaviour.
    d.nSceneChangeValue = int64ToIntS(vsapi->propGetInt(in, "ysc", 0, &err));

    // Scene-change detection thresholds; scaled later by scaleThSCD.
    d.thscd1 = vsapi->propGetInt(in, "thscd1", 0, &err);
    if (err)
        d.thscd1 = MV_DEFAULT_SCD1;

    d.thscd2 = int64ToIntS(vsapi->propGetInt(in, "thscd2", 0, &err));
    if (err)
        d.thscd2 = MV_DEFAULT_SCD2;

    d.opt = !!vsapi->propGetInt(in, "opt", 0, &err);
    if (err)
        d.opt = 1;

    // Parameter validation. No nodes have been acquired yet, so a plain
    // return after setError leaks nothing.
    if (d.fGamma < 0.0f) {
        vsapi->setError(out, "Mask: gamma must not be negative.");
        return;
    }

    if (d.kind < 0 || d.kind > 5) {
        vsapi->setError(out, "Mask: kind must be 0, 1, 2, 3, 4, or 5.");
        return;
    }

    if (time < 0.0 || time > 100.0) {
        vsapi->setError(out, "Mask: time must be between 0.0 and 100.0 (inclusive).");
        return;
    }

    if (d.nSceneChangeValue < 0 || d.nSceneChangeValue > 255) {
        vsapi->setError(out, "Mask: ysc must be between 0 and 255 (inclusive).");
        return;
    }

    d.vectors = vsapi->propGetNode(in, "vectors", 0, NULL);

    // Analyse the vector clip; both helpers report problems through `error`.
#define ERROR_SIZE 512
    char error[ERROR_SIZE + 1] = { 0 };
    const char *filter_name = "Mask";

    adataFromVectorClip(&d.vectors_data, d.vectors, filter_name, "vectors", vsapi, error, ERROR_SIZE);

    scaleThSCD(&d.thscd1, &d.thscd2, &d.vectors_data, filter_name, error, ERROR_SIZE);
#undef ERROR_SIZE

    if (error[0]) {
        vsapi->setError(out, error);
        vsapi->freeNode(d.vectors);
        return;
    }

    d.fMaskNormFactor = 1.0f / d.ml; // Fizick
    d.fMaskNormFactor2 = d.fMaskNormFactor * d.fMaskNormFactor;

    d.fHalfGamma = d.fGamma * 0.5f;

    // Dimensions of the full block grid, overlap included.
    d.nWidthB = d.vectors_data.nBlkX * (d.vectors_data.nBlkSizeX - d.vectors_data.nOverlapX) + d.vectors_data.nOverlapX;
    d.nHeightB = d.vectors_data.nBlkY * (d.vectors_data.nBlkSizeY - d.vectors_data.nOverlapY) + d.vectors_data.nOverlapY;

    // Chroma-plane equivalents derived from the vector clip's subsampling.
    d.nHeightUV = d.vectors_data.nHeight / d.vectors_data.yRatioUV;
    d.nWidthUV = d.vectors_data.nWidth / d.vectors_data.xRatioUV;
    d.nHeightBUV = d.nHeightB / d.vectors_data.yRatioUV;
    d.nWidthBUV = d.nWidthB / d.vectors_data.xRatioUV;

    d.node = vsapi->propGetNode(in, "clip", 0, NULL);
    d.vi = *vsapi->getVideoInfo(d.node);

    if (!isConstantFormat(&d.vi) || d.vi.format->bitsPerSample > 8 || d.vi.format->subSamplingW > 1 || d.vi.format->subSamplingH > 1 || (d.vi.format->colorFamily != cmYUV && d.vi.format->colorFamily != cmGray)) {
        vsapi->setError(out, "Mask: input clip must be GRAY8, YUV420P8, YUV422P8, YUV440P8, or YUV444P8, with constant dimensions.");
        vsapi->freeNode(d.node);
        vsapi->freeNode(d.vectors);
        return;
    }

    // The mask is always produced as YUV444P8, so promote gray input's
    // output format; both upsizers are therefore always needed.
    if (d.vi.format->colorFamily == cmGray)
        d.vi.format = vsapi->getFormatPreset(pfYUV444P8, core);

    simpleInit(&d.upsizer, d.nWidthB, d.nHeightB, d.vectors_data.nBlkX, d.vectors_data.nBlkY, d.opt);
    simpleInit(&d.upsizerUV, d.nWidthBUV, d.nHeightBUV, d.vectors_data.nBlkX, d.vectors_data.nBlkY, d.opt);

    // Map time's 0..100 percentage onto a 0..256 fixed-point factor.
    d.time256 = (int)(time * 256 / 100);

    data = (MVMaskData *)malloc(sizeof(d));
    *data = d;

    vsapi->createFilter(in, out, "Mask", mvmaskInit, mvmaskGetFrame, mvmaskFree, fmParallel, 0, data, core);
}
/// Create-function for the Lut2 filter.
///
/// Validates the two input clips (constant format, integer samples, equal
/// dimensions/subsampling, combined index width <= 20 bits), parses the
/// plane list and exactly one of lut/lutf/function, determines the output
/// format, and dispatches to a lut2CreateHelper instantiation keyed on the
/// bytes-per-sample of both inputs and the output.
/// Errors are reported through the RETERROR macro; `d` is a unique_ptr, so
/// early returns do not leak the filter data, but the acquired VSFuncRef
/// must be (and is) freed explicitly before each RETERROR once obtained.
static void VS_CC lut2Create(const VSMap *in, VSMap *out, void *userData, VSCore *core, const VSAPI *vsapi) {
    std::unique_ptr<Lut2Data> d(new Lut2Data(vsapi));

    d->node[0] = vsapi->propGetNode(in, "clipa", 0, 0);
    d->node[1] = vsapi->propGetNode(in, "clipb", 0, 0);
    d->vi[0] = vsapi->getVideoInfo(d->node[0]);
    d->vi[1] = vsapi->getVideoInfo(d->node[1]);

    if (!isConstantFormat(d->vi[0]) || !isConstantFormat(d->vi[1]))
        RETERROR("Lut2: only clips with constant format and dimensions supported");

    if (isCompatFormat(d->vi[0]) || isCompatFormat(d->vi[1]))
        RETERROR("Lut2: compat formats are not supported");

    // Combined bit depth is capped at 20 because the lut is indexed by the
    // concatenation of both input samples (table of 2^(bitsA+bitsB) entries).
    if (d->vi[0]->format->sampleType != stInteger || d->vi[1]->format->sampleType != stInteger || (d->vi[0]->format->bitsPerSample + d->vi[1]->format->bitsPerSample) > 20 || d->vi[0]->format->subSamplingH != d->vi[1]->format->subSamplingH || d->vi[0]->format->subSamplingW != d->vi[1]->format->subSamplingW || d->vi[0]->width != d->vi[1]->width || d->vi[0]->height != d->vi[1]->height)
        RETERROR("Lut2: only clips with integer samples, same dimensions, same subsampling and up to a total of 20 indexing bits supported");

    int n = d->vi[0]->format->numPlanes;
    int num_planes = vsapi->propNumElements(in, "planes");

    // No "planes" argument (num_planes <= 0) means process every plane;
    // otherwise start from all-false and enable the listed ones.
    for (int i = 0; i < 3; i++)
        d->process[i] = (num_planes <= 0);

    for (int i = 0; i < num_planes; i++) {
        int o = int64ToIntS(vsapi->propGetInt(in, "planes", i, 0));

        if (o < 0 || o >= n)
            RETERROR("Lut2: plane index out of range");

        if (d->process[o])
            RETERROR("Lut2: plane specified twice");

        d->process[o] = true;
    }

    int err;
    VSFuncRef *func = vsapi->propGetFunc(in, "function", 0, &err);
    int lut_elem = vsapi->propNumElements(in, "lut");
    int lutf_elem = vsapi->propNumElements(in, "lutf");
    bool floatout = !!vsapi->propGetInt(in, "floatout", 0, &err);

    // Exactly one source for the table must be given: integer lut, float
    // lutf, or a callback function. propNumElements returns -1 when absent.
    int num_set = (lut_elem >= 0) + (lutf_elem >= 0) + !!func;

    if (!num_set) {
        vsapi->freeFunc(func);
        RETERROR("Lut2: none of lut, lutf and function are set");
    }

    if (num_set > 1) {
        vsapi->freeFunc(func);
        RETERROR("Lut2: more than one of lut, lutf and function are set");
    }

    // lut implies integer output, lutf implies float output.
    if (lut_elem >= 0 && floatout) {
        vsapi->freeFunc(func);
        RETERROR("Lut2: lut set but float output specified");
    }

    if (lutf_elem >= 0 && !floatout) {
        vsapi->freeFunc(func);
        RETERROR("Lut2: lutf set but float output not specified");
    }

    // Required table size: one entry per combined input value.
    n = 1 << (d->vi[0]->format->bitsPerSample + d->vi[1]->format->bitsPerSample);

    int lut_length = std::max(lut_elem, lutf_elem);

    if (lut_length >= 0 && lut_length != n) {
        vsapi->freeFunc(func);
        RETERROR(("Lut2: bad lut length. Expected " + std::to_string(n) + " elements, got " + std::to_string(lut_length) + " instead").c_str());
    }

    // Output bit depth: explicit "bits", else 32 for float, else input depth.
    int bitsout = int64ToIntS(vsapi->propGetInt(in, "bits", 0, &err));
    if (err)
        bitsout = floatout ? sizeof(float) * 8 : d->vi[0]->format->bitsPerSample;

    if ((floatout && bitsout != 32) || (!floatout && (bitsout < 8 || bitsout > 16))) {
        vsapi->freeFunc(func);
        RETERROR("Lut2: only 8-16 bit integer and 32 bit float output supported");
    }

    d->vi_out = *d->vi[0];
    d->vi_out.format = vsapi->registerFormat(d->vi[0]->format->colorFamily, floatout ? stFloat : stInteger, bitsout, d->vi[0]->format->subSamplingW, d->vi[0]->format->subSamplingH, core);

    // Static dispatch on <srcA, srcB, dst> sample types. The earlier checks
    // guarantee each input is 1 or 2 bytes per sample (8-12 bits each given
    // the 20-bit cap) and the output is 8-16 bit integer or 32 bit float,
    // so exactly one branch below fires.
    if (d->vi[0]->format->bytesPerSample == 1) {
        if (d->vi[1]->format->bytesPerSample == 1) {
            if (d->vi_out.format->bytesPerSample == 1 && d->vi_out.format->sampleType == stInteger)
                lut2CreateHelper<uint8_t, uint8_t, uint8_t>(in, out, func, d, core, vsapi);
            else if (d->vi_out.format->bytesPerSample == 2 && d->vi_out.format->sampleType == stInteger)
                lut2CreateHelper<uint8_t, uint8_t, uint16_t>(in, out, func, d, core, vsapi);
            else if (d->vi_out.format->bitsPerSample == 32 && d->vi_out.format->sampleType == stFloat)
                lut2CreateHelper<uint8_t, uint8_t, float>(in, out, func, d, core, vsapi);
        } else if (d->vi[1]->format->bytesPerSample == 2) {
            if (d->vi_out.format->bytesPerSample == 1 && d->vi_out.format->sampleType == stInteger)
                lut2CreateHelper<uint8_t, uint16_t, uint8_t>(in, out, func, d, core, vsapi);
            else if (d->vi_out.format->bytesPerSample == 2 && d->vi_out.format->sampleType == stInteger)
                lut2CreateHelper<uint8_t, uint16_t, uint16_t>(in, out, func, d, core, vsapi);
            else if (d->vi_out.format->bitsPerSample == 32 && d->vi_out.format->sampleType == stFloat)
                lut2CreateHelper<uint8_t, uint16_t, float>(in, out, func, d, core, vsapi);
        }
    } else if (d->vi[0]->format->bytesPerSample == 2) {
        if (d->vi[1]->format->bytesPerSample == 1) {
            if (d->vi_out.format->bytesPerSample == 1 && d->vi_out.format->sampleType == stInteger)
                lut2CreateHelper<uint16_t, uint8_t, uint8_t>(in, out, func, d, core, vsapi);
            else if (d->vi_out.format->bytesPerSample == 2 && d->vi_out.format->sampleType == stInteger)
                lut2CreateHelper<uint16_t, uint8_t, uint16_t>(in, out, func, d, core, vsapi);
            else if (d->vi_out.format->bitsPerSample == 32 && d->vi_out.format->sampleType == stFloat)
                lut2CreateHelper<uint16_t, uint8_t, float>(in, out, func, d, core, vsapi);
        } else if (d->vi[1]->format->bytesPerSample == 2) {
            if (d->vi_out.format->bytesPerSample == 1 && d->vi_out.format->sampleType == stInteger)
                lut2CreateHelper<uint16_t, uint16_t, uint8_t>(in, out, func, d, core, vsapi);
            else if (d->vi_out.format->bytesPerSample == 2 && d->vi_out.format->sampleType == stInteger)
                lut2CreateHelper<uint16_t, uint16_t, uint16_t>(in, out, func, d, core, vsapi);
            else if (d->vi_out.format->bitsPerSample == 32 && d->vi_out.format->sampleType == stFloat)
                lut2CreateHelper<uint16_t, uint16_t, float>(in, out, func, d, core, vsapi);
        }
    }
}
/// Create-function for the MVTools "Super" filter.
///
/// Parses padding / pel / pyramid parameters, validates the input clip and
/// the optional externally-interpolated pelclip, computes the maximum usable
/// number of pyramid levels and the dimensions of the "super" frame that
/// stacks all levels, then registers the filter. On failure every node
/// acquired so far is freed before returning.
static void VS_CC mvsuperCreate(const VSMap *in, VSMap *out, void *userData, VSCore *core, const VSAPI *vsapi) {
    (void)userData;

    MVSuperData d;
    MVSuperData *data;

    int err;

    // Optional parameters with defaults.
    d.nHPad = int64ToIntS(vsapi->propGetInt(in, "hpad", 0, &err));
    if (err)
        d.nHPad = 8;

    d.nVPad = int64ToIntS(vsapi->propGetInt(in, "vpad", 0, &err));
    if (err)
        d.nVPad = 8;

    d.nPel = int64ToIntS(vsapi->propGetInt(in, "pel", 0, &err));
    if (err)
        d.nPel = 2;

    // "levels" defaults to 0 here; 0 (or any out-of-range value) is replaced
    // by the computed nLevelsMax further down.
    d.nLevels = int64ToIntS(vsapi->propGetInt(in, "levels", 0, &err));

    d.chroma = !!vsapi->propGetInt(in, "chroma", 0, &err);
    if (err)
        d.chroma = 1;

    d.sharp = int64ToIntS(vsapi->propGetInt(in, "sharp", 0, &err)); // pel2 interpolation type
    if (err)
        d.sharp = SharpWiener;

    d.rfilter = int64ToIntS(vsapi->propGetInt(in, "rfilter", 0, &err));
    if (err)
        d.rfilter = RfilterBilinear;

    d.opt = !!vsapi->propGetInt(in, "opt", 0, &err);
    if (err)
        d.opt = 1;

    // Validation before any node is acquired, so plain returns leak nothing.
    if ((d.nPel != 1) && (d.nPel != 2) && (d.nPel != 4)) {
        vsapi->setError(out, "Super: pel must be 1, 2, or 4.");
        return;
    }

    if (d.sharp < SharpBilinear || d.sharp > SharpWiener) {
        vsapi->setError(out, "Super: sharp must be between 0 and 2 (inclusive).");
        return;
    }

    if (d.rfilter < RfilterSimple || d.rfilter > RfilterCubic) {
        vsapi->setError(out, "Super: rfilter must be between 0 and 4 (inclusive).");
        return;
    }

    d.node = vsapi->propGetNode(in, "clip", 0, 0);
    d.vi = *vsapi->getVideoInfo(d.node);

    d.nWidth = d.vi.width;
    d.nHeight = d.vi.height;

    if (!isConstantFormat(&d.vi) || d.vi.format->bitsPerSample > 16 || d.vi.format->sampleType != stInteger || d.vi.format->subSamplingW > 1 || d.vi.format->subSamplingH > 1 || (d.vi.format->colorFamily != cmYUV && d.vi.format->colorFamily != cmGray)) {
        vsapi->setError(out, "Super: input clip must be GRAY, 420, 422, 440, or 444, up to 16 bits, with constant dimensions.");
        vsapi->freeNode(d.node);
        return;
    }

    // Gray input has no chroma planes to process.
    if (d.vi.format->colorFamily == cmGray)
        d.chroma = 0;

    d.nModeYUV = d.chroma ? YUVPLANES : YPLANE;

    d.xRatioUV = 1 << d.vi.format->subSamplingW;
    d.yRatioUV = 1 << d.vi.format->subSamplingH;

    // Count how many pyramid levels fit before a plane gets too small.
    int nLevelsMax = 0;
    while (PlaneHeightLuma(d.vi.height, nLevelsMax, d.yRatioUV, d.nVPad) >= d.yRatioUV * 2 && PlaneWidthLuma(d.vi.width, nLevelsMax, d.xRatioUV, d.nHPad) >= d.xRatioUV * 2) // at last two pixels width and height of chroma
    {
        nLevelsMax++;
    }

    if (d.nLevels <= 0 || d.nLevels > nLevelsMax)
        d.nLevels = nLevelsMax;

    d.pelclip = vsapi->propGetNode(in, "pelclip", 0, &err);
    const VSVideoInfo *pelvi = d.pelclip ? vsapi->getVideoInfo(d.pelclip) : NULL;

    // Same VSFormat pointer comparison works because d.vi is a copy of the
    // input clip's info (format pointers are shared presets).
    if (d.pelclip && (!isConstantFormat(pelvi) || pelvi->format != d.vi.format)) {
        vsapi->setError(out, "Super: pelclip must have the same format as the input clip, and it must have constant dimensions.");
        vsapi->freeNode(d.node);
        vsapi->freeNode(d.pelclip);
        return;
    }

    // pelclip may be either unpadded (nPel times the source size) or already
    // padded (nPel times the padded size); remember which.
    d.usePelClip = 0;
    if (d.pelclip && (d.nPel >= 2)) {
        if ((pelvi->width == d.vi.width * d.nPel) && (pelvi->height == d.vi.height * d.nPel)) {
            d.usePelClip = 1;
            d.isPelClipPadded = 0;
        } else if ((pelvi->width == (d.vi.width + d.nHPad * 2) * d.nPel) && (pelvi->height == (d.vi.height + d.nVPad * 2) * d.nPel)) {
            d.usePelClip = 1;
            d.isPelClipPadded = 1;
        } else {
            vsapi->setError(out, "Super: pelclip's dimensions must be multiples of the input clip's dimensions.");
            vsapi->freeNode(d.pelclip);
            vsapi->freeNode(d.node);
            return;
        }
    }

    // The super frame stacks all pyramid levels vertically; its height is
    // the total offset past the last level divided by the row width.
    d.nSuperWidth = d.nWidth + 2 * d.nHPad;
    d.nSuperHeight = PlaneSuperOffset(0, d.nHeight, d.nLevels, d.nPel, d.nVPad, d.nSuperWidth, d.yRatioUV) / d.nSuperWidth;

    if (d.yRatioUV == 2 && d.nSuperHeight & 1)
        d.nSuperHeight++; // even

    if (d.xRatioUV == 2 && d.nSuperWidth & 1)
        d.nSuperWidth++;

    d.vi.width = d.nSuperWidth;
    d.vi.height = d.nSuperHeight;

    data = (MVSuperData *)malloc(sizeof(d));
    *data = d;

    vsapi->createFilter(in, out, "Super", mvsuperInit, mvsuperGetFrame, mvsuperFree, fmParallel, 0, data, core);
}
static void VS_CC trimCreate(const VSMap *in, VSMap *out, void *userData, VSCore *core, const VSAPI *vsapi) { TrimData d; TrimData *data; int firstset; int lastset; int lengthset; int err; d.first = 0; d.last = -1; d.length = -1; d.first = int64ToIntS(vsapi->propGetInt(in, "first", 0, &err)); firstset = !err; d.last = int64ToIntS(vsapi->propGetInt(in, "last", 0, &err)); lastset = !err; d.length = int64ToIntS(vsapi->propGetInt(in, "length", 0, &err)); lengthset = !err; if (lastset && lengthset) RETERROR("Trim: both last frame and length specified"); if (lastset && d.last < d.first) RETERROR("Trim: invalid last frame specified"); if (lengthset && d.length < 1) RETERROR("Trim: invalid length specified"); if (d.first < 0) RETERROR("Trim: invalid first frame specified"); d.node = vsapi->propGetNode(in, "clip", 0, 0); d.vi = *vsapi->getVideoInfo(d.node); if ((lastset && d.vi.numFrames && d.last >= d.vi.numFrames) || (lengthset && d.vi.numFrames && (d.first + d.length) > d.vi.numFrames) || (d.vi.numFrames && d.vi.numFrames <= d.first)) { vsapi->freeNode(d.node); RETERROR("Trim: last frame beyond clip end"); } if (lastset) { d.trimlen = d.last - d.first + 1; } else if (lengthset) { d.trimlen = d.length; } else if (d.vi.numFrames) { d.trimlen = d.vi.numFrames - d.first; } else { d.trimlen = 0; } // obvious nop() so just pass through the input clip if ((!firstset && !lastset && !lengthset) || (d.trimlen && d.trimlen == d.vi.numFrames)) { vsapi->propSetNode(out, "clip", d.node, 0); vsapi->freeNode(d.node); return; } data = malloc(sizeof(d)); *data = d; vsapi->createFilter(in, out, "Trim", trimInit, trimGetframe, singleClipFree, fmParallel, nfNoCache, data, core); }
// Parse and validate all VBM3D arguments from `in` into the data members.
// Returns 0 on success; on any invalid argument it sets an error message on
// `out` and returns 1. The checks are ordered: clips first, then the profile
// (which seeds the defaults via get_default_para), then each numeric knob.
int VBM3D_Data_Base::arguments_process(const VSMap *in, VSMap *out) {
    int error;
    int m;

    // input - clip
    node = vsapi->propGetNode(in, "input", 0, nullptr);
    vi = vsapi->getVideoInfo(node);

    if (!isConstantFormat(vi)) {
        setError(out, "Invalid input clip, only constant format input supported");
        return 1;
    }

    if ((vi->format->sampleType == stInteger && vi->format->bitsPerSample > 16)
        || (vi->format->sampleType == stFloat && vi->format->bitsPerSample != 32)) {
        setError(out, "Invalid input clip, only 8-16 bit integer or 32 bit float formats supported");
        return 1;
    }

    // ref - clip
    // When absent, the input clip doubles as its own reference (rdef records
    // which case applies, presumably so cleanup frees rnode only once).
    rnode = vsapi->propGetNode(in, "ref", 0, &error);

    if (error) {
        rdef = false;
        rnode = node;
        rvi = vi;
    } else {
        rdef = true;
        rvi = vsapi->getVideoInfo(rnode);

        if (!isConstantFormat(rvi)) {
            setError(out, "Invalid clip \"ref\", only constant format input supported");
            return 1;
        }
        if (rvi->format != vi->format) {
            setError(out, "input clip and clip \"ref\" must be of the same format");
            return 1;
        }
        if (rvi->width != vi->width || rvi->height != vi->height) {
            setError(out, "input clip and clip \"ref\" must be of the same width and height");
            return 1;
        }
        if (rvi->numFrames != vi->numFrames) {
            setError(out, "input clip and clip \"ref\" must have the same number of frames");
            return 1;
        }
    }

    // profile - data
    auto profile = vsapi->propGetData(in, "profile", 0, &error);

    if (error) {
        para.profile = para_default.profile;
    } else {
        para.profile = profile;
    }

    if (para.profile != "fast" && para.profile != "lc" && para.profile != "np"
        && para.profile != "high" && para.profile != "vn") {
        setError(out, "Unrecognized \"profile\" specified, should be \"fast\", \"lc\", \"np\", \"high\" or \"vn\"");
        return 1;
    }

    // Load the profile's default parameter set; every knob below only
    // overrides its default when explicitly supplied.
    get_default_para(para.profile);

    // sigma - float[]
    // Up to 3 per-plane values; missing trailing entries repeat the last one.
    m = vsapi->propNumElements(in, "sigma");

    if (m > 0) {
        int i;

        if (m > 3) m = 3;

        for (i = 0; i < m; ++i) {
            para.sigma[i] = vsapi->propGetFloat(in, "sigma", i, nullptr);

            if (para.sigma[i] < 0) {
                setError(out, "Invalid \"sigma\" assigned, must be a non-negative floating point number");
                return 1;
            }
        }

        for (; i < 3; ++i) {
            para.sigma[i] = para.sigma[i - 1];
        }
    } else {
        para.sigma = para_default.sigma;
    }

    // radius - int
    para.radius = int64ToIntS(vsapi->propGetInt(in, "radius", 0, &error));

    if (error) {
        para.radius = para_default.radius;
    } else if (para.radius < 1 || para.radius > 16) {
        setError(out, "Invalid \"radius\" assigned, must be an integer in [1, 16]");
        return 1;
    }

    // block_size - int
    para.BlockSize = int64ToIntS(vsapi->propGetInt(in, "block_size", 0, &error));

    if (error) {
        para.BlockSize = para_default.BlockSize;
    } else if (para.BlockSize < 1 || para.BlockSize > 64) {
        setError(out, "Invalid \"block_size\" assigned, must be an integer in [1, 64]");
        return 1;
    } else if (para.BlockSize > vi->width || para.BlockSize > vi->height) {
        setError(out, "Invalid \"block_size\" assigned, must not exceed width or height of the frame");
        return 1;
    }

    // block_step - int
    para.BlockStep = int64ToIntS(vsapi->propGetInt(in, "block_step", 0, &error));

    if (error) {
        para.BlockStep = para_default.BlockStep;
    } else if (para.BlockStep < 1 || para.BlockStep > para.BlockSize) {
        setError(out, "Invalid \"block_step\" assigned, must be an integer in [1, block_size]");
        return 1;
    }

    // group_size - int
    para.GroupSize = int64ToIntS(vsapi->propGetInt(in, "group_size", 0, &error));

    if (error) {
        para.GroupSize = para_default.GroupSize;
    } else if (para.GroupSize < 1 || para.GroupSize > 256) {
        setError(out, "Invalid \"group_size\" assigned, must be an integer in [1, 256]");
        return 1;
    }

    // bm_range - int
    para.BMrange = int64ToIntS(vsapi->propGetInt(in, "bm_range", 0, &error));

    if (error) {
        para.BMrange = para_default.BMrange;
    } else if (para.BMrange < 1) {
        setError(out, "Invalid \"bm_range\" assigned, must be a positive integer");
        return 1;
    }

    // bm_step - int
    para.BMstep = int64ToIntS(vsapi->propGetInt(in, "bm_step", 0, &error));

    if (error) {
        para.BMstep = para_default.BMstep;
    } else if (para.BMstep < 1 || para.BMstep > para.BMrange) {
        setError(out, "Invalid \"bm_step\" assigned, must be an integer in [1, bm_range]");
        return 1;
    }

    // ps_num - int
    para.PSnum = int64ToIntS(vsapi->propGetInt(in, "ps_num", 0, &error));

    if (error) {
        para.PSnum = para_default.PSnum;
    } else if (para.PSnum < 1 || para.PSnum > para.GroupSize) {
        setError(out, "Invalid \"ps_num\" assigned, must be an integer in [1, group_size]");
        return 1;
    }

    // ps_range - int
    para.PSrange = int64ToIntS(vsapi->propGetInt(in, "ps_range", 0, &error));

    if (error) {
        para.PSrange = para_default.PSrange;
    } else if (para.PSrange < 1) {
        setError(out, "Invalid \"ps_range\" assigned, must be a positive integer");
        return 1;
    }

    // ps_step - int
    para.PSstep = int64ToIntS(vsapi->propGetInt(in, "ps_step", 0, &error));

    if (error) {
        para.PSstep = para_default.PSstep;
    } else if (para.PSstep < 1 || para.PSstep > para.PSrange) {
        setError(out, "Invalid \"ps_step\" assigned, must be an integer in [1, ps_range]");
        return 1;
    }

    // th_mse - float
    para.thMSE = vsapi->propGetFloat(in, "th_mse", 0, &error);

    if (error) {
        para.thMSE_Default();
    } else if (para.thMSE <= 0) {
        setError(out, "Invalid \"th_mse\" assigned, must be a positive floating point number");
        return 1;
    }

    // matrix - int
    // RGB/YCoCg input forces a fixed matrix regardless of the argument;
    // otherwise absent/Unspecified falls back to a resolution-based default.
    matrix = static_cast<ColorMatrix>(vsapi->propGetInt(in, "matrix", 0, &error));

    if (vi->format->colorFamily == cmRGB) {
        matrix = ColorMatrix::OPP;
    } else if (vi->format->colorFamily == cmYCoCg) {
        matrix = ColorMatrix::YCgCo;
    } else if (error || matrix == ColorMatrix::Unspecified) {
        matrix = ColorMatrix_Default(vi->width, vi->height);
    } else if (matrix != ColorMatrix::GBR && matrix != ColorMatrix::bt709
        && matrix != ColorMatrix::fcc && matrix != ColorMatrix::bt470bg
        && matrix != ColorMatrix::smpte170m && matrix != ColorMatrix::smpte240m
        && matrix != ColorMatrix::YCgCo && matrix != ColorMatrix::bt2020nc
        && matrix != ColorMatrix::bt2020c && matrix != ColorMatrix::OPP) {
        setError(out, "Unsupported \"matrix\" specified");
        return 1;
    }

    // process
    // For non-RGB input, a zero sigma disables processing of that plane.
    for (int i = 0; i < VSMaxPlaneCount; i++) {
        if (vi->format->colorFamily != cmRGB && para.sigma[i] == 0) {
            process[i] = 0;
        }
    }

    // Chroma processing requires non-subsampled input (both clips).
    if (process[1] || process[2]) {
        if (vi->format->subSamplingH || vi->format->subSamplingW) {
            setError(out, "input clip: sub-sampled format is not supported when chroma is processed, convert it to YUV444 or RGB first. "
                "For the best quality, RGB colorspace is recommended as input.");
            return 1;
        }
        if (rvi->format->subSamplingH || rvi->format->subSamplingW) {
            setError(out, "clip \"ref\": sub-sampled format is not supported when chroma is processed, convert it to YUV444 or RGB first. "
                "For the best quality, RGB colorspace is recommended as input.");
            return 1;
        }
    }

    return 0;
}
/// Create-function for the JIT Expr filter.
///
/// Accepts up to three input clips (only clips[0] is required by this code
/// path; absent slots yield NULL nodes), validates that all given clips share
/// plane count, dimensions and subsampling, resolves the optional output
/// format override, parses up to three per-plane expressions (missing
/// trailing expressions repeat the previous one), and pre-parses them into
/// op lists plus a worst-case evaluation stack. All validation failures are
/// funneled through exceptions so node cleanup happens in one catch block.
static void VS_CC exprCreate(const VSMap *in, VSMap *out, void *userData, VSCore *core, const VSAPI *vsapi) {
    JitExprData d;
    JitExprData *data;
    int err;

    try {
        for (int i = 0; i < 3; i++)
            d.node[i] = vsapi->propGetNode(in, "clips", i, &err);

        const VSVideoInfo *vi[3];
        for (int i = 0; i < 3; i++)
            if (d.node[i])
                vi[i] = vsapi->getVideoInfo(d.node[i]);
            else
                vi[i] = NULL;

        for (int i = 0; i < 3; i++) {
            if (vi[i]) {
                if (!isConstantFormat(vi[i]))
                    throw std::runtime_error("Only constant format input allowed");
                if (vi[0]->format->numPlanes != vi[i]->format->numPlanes
                    || vi[0]->format->subSamplingW != vi[i]->format->subSamplingW
                    || vi[0]->format->subSamplingH != vi[i]->format->subSamplingH
                    || vi[0]->width != vi[i]->width
                    || vi[0]->height != vi[i]->height)
                    throw std::runtime_error("All inputs must have the same number of planes and the same dimensions, subsampling included");
                // Note: && binds tighter than ||, so the second clause reads
                // (bitsPerSample != 32 && sampleType == stFloat) as intended.
                if ((vi[i]->format->bitsPerSample > 16 && vi[i]->format->sampleType == stInteger) || vi[i]->format->bitsPerSample != 32 && vi[i]->format->sampleType == stFloat)
                    throw std::runtime_error("Input clips must be 8-16 bit integer or 32 bit float format");
            }
        }

        d.vi = *vi[0];

        // Optional "format" override: keeps the input's color family and
        // subsampling, adopts the preset's sample type and bit depth.
        int format = int64ToIntS(vsapi->propGetInt(in, "format", 0, &err));
        if (!err) {
            const VSFormat *f = vsapi->getFormatPreset(format, core);
            if (f) {
                if (d.vi.format->colorFamily == cmCompat)
                    throw std::runtime_error("No compat formats allowed");
                if (d.vi.format->numPlanes != f->numPlanes)
                    throw std::runtime_error("The number of planes in the inputs and output must match");
                d.vi.format = vsapi->registerFormat(d.vi.format->colorFamily, f->sampleType, f->bitsPerSample, d.vi.format->subSamplingW, d.vi.format->subSamplingH, core);
            }
        }

        int nexpr = vsapi->propNumElements(in, "expr");
        if (nexpr > d.vi.format->numPlanes)
            throw std::runtime_error("More expressions given than there are planes");

        // Fewer expressions than planes: repeat the last supplied one.
        std::string expr[3];
        for (int i = 0; i < nexpr; i++)
            expr[i] = vsapi->propGetData(in, "expr", i, 0);
        if (nexpr == 1) {
            expr[1] = expr[0];
            expr[2] = expr[0];
        } else if (nexpr == 2) {
            expr[2] = expr[1];
        }

        // Per-plane mode: process when an expression is given; otherwise copy
        // the source plane if the output format still matches, else leave the
        // plane undefined.
        for (int i = 0; i < 3; i++) {
            if (!expr[i].empty()) {
                d.plane[i] = poProcess;
            } else {
                if (d.vi.format->bitsPerSample == vi[0]->format->bitsPerSample && d.vi.format->sampleType == vi[0]->format->sampleType)
                    d.plane[i] = poCopy;
                else
                    d.plane[i] = poUndefined;
            }
        }

        // Parse every active expression up front and size the evaluation
        // stack for the deepest one.
        const SOperation sop[3] = { getLoadOp(vi[0]), getLoadOp(vi[1]), getLoadOp(vi[2]) };
        int maxStackSize = 0;
        for (int i = 0; i < d.vi.format->numPlanes; i++)
            maxStackSize = std::max(parseExpression(expr[i], d.ops[i], sop, getStoreOp(&d.vi)), maxStackSize);

#ifdef VS_TARGET_CPU_X86
        // 32-byte alignment for the SIMD code path.
        d.stack = vs_aligned_malloc<void>(maxStackSize * 32, 32);
#else
        d.stack.resize(maxStackSize);
#endif
    } catch (std::runtime_error &e) {
        // Single cleanup point: freeNode tolerates NULL for absent clips.
        for (int i = 0; i < 3; i++)
            vsapi->freeNode(d.node[i]);
        std::string s = "Expr: ";
        s += e.what();
        vsapi->setError(out, s.c_str());
        return;
    }

    data = new JitExprData();
    *data = d;

    vsapi->createFilter(in, out, "Expr", exprInit, exprGetFrame, exprFree, fmParallelRequests, 0, data, core);
}
static void VS_CC mvflowfpsCreate(const VSMap *in, VSMap *out, void *userData, VSCore *core, const VSAPI *vsapi) { MVFlowFPSData d; MVFlowFPSData *data; int err; d.num = vsapi->propGetInt(in, "num", 0, &err); if (err) d.num = 25; d.den = vsapi->propGetInt(in, "den", 0, &err); if (err) d.den = 1; d.maskmode = int64ToIntS(vsapi->propGetInt(in, "mask", 0, &err)); if (err) d.maskmode = 2; d.ml = vsapi->propGetFloat(in, "ml", 0, &err); if (err) d.ml = 100.0; d.blend = !!vsapi->propGetInt(in, "blend", 0, &err); if (err) d.blend = 1; d.thscd1 = int64ToIntS(vsapi->propGetInt(in, "thscd1", 0, &err)); if (err) d.thscd1 = MV_DEFAULT_SCD1; d.thscd2 = int64ToIntS(vsapi->propGetInt(in, "thscd2", 0, &err)); if (err) d.thscd2 = MV_DEFAULT_SCD2; d.isse = !!vsapi->propGetInt(in, "isse", 0, &err); if (err) d.isse = 1; if (d.maskmode < 0 || d.maskmode > 2) { vsapi->setError(out, "FlowFPS: mask must be 0, 1, or 2."); return; } if (d.ml <= 0.0) { vsapi->setError(out, "FlowFPS: ml must be greater than 0."); return; } d.super = vsapi->propGetNode(in, "super", 0, NULL); char errorMsg[1024]; const VSFrameRef *evil = vsapi->getFrame(0, d.super, errorMsg, 1024); if (!evil) { vsapi->setError(out, std::string("FlowFPS: failed to retrieve first frame from super clip. Error message: ").append(errorMsg).c_str()); vsapi->freeNode(d.super); return; } const VSMap *props = vsapi->getFramePropsRO(evil); int evil_err[2]; int nHeightS = int64ToIntS(vsapi->propGetInt(props, "Super_height", 0, &evil_err[0])); d.nSuperHPad = int64ToIntS(vsapi->propGetInt(props, "Super_hpad", 0, &evil_err[1])); vsapi->freeFrame(evil); for (int i = 0; i < 2; i++) if (evil_err[i]) { vsapi->setError(out, "FlowFPS: required properties not found in first frame of super clip. Maybe clip didn't come from mv.Super? Was the first frame trimmed away?"); vsapi->freeNode(d.super); return; } d.mvbw = vsapi->propGetNode(in, "mvbw", 0, NULL); d.mvfw = vsapi->propGetNode(in, "mvfw", 0, NULL); // XXX F**k all this trying. 
try { d.mvClipB = new MVClipDicks(d.mvbw, d.thscd1, d.thscd2, vsapi); } catch (MVException &e) { vsapi->setError(out, std::string("FlowFPS: ").append(e.what()).c_str()); vsapi->freeNode(d.super); vsapi->freeNode(d.mvbw); vsapi->freeNode(d.mvfw); return; } try { d.mvClipF = new MVClipDicks(d.mvfw, d.thscd1, d.thscd2, vsapi); } catch (MVException &e) { vsapi->setError(out, std::string("FlowFPS: ").append(e.what()).c_str()); vsapi->freeNode(d.super); vsapi->freeNode(d.mvfw); vsapi->freeNode(d.mvbw); delete d.mvClipB; return; } // XXX Alternatively, use both clips' delta as offsets in GetFrame. if (d.mvClipF->GetDeltaFrame() != d.mvClipB->GetDeltaFrame()) { vsapi->setError(out, "FlowFPS: mvbw and mvfw must be generated with the same delta."); vsapi->freeNode(d.super); vsapi->freeNode(d.mvfw); vsapi->freeNode(d.mvbw); delete d.mvClipB; delete d.mvClipF; return; } // Make sure the motion vector clips are correct. if (!d.mvClipB->IsBackward() || d.mvClipF->IsBackward()) { if (!d.mvClipB->IsBackward()) vsapi->setError(out, "FlowFPS: mvbw must be generated with isb=True."); else vsapi->setError(out, "FlowFPS: mvfw must be generated with isb=False."); vsapi->freeNode(d.super); vsapi->freeNode(d.mvfw); vsapi->freeNode(d.mvbw); delete d.mvClipB; delete d.mvClipF; return; } try { d.bleh = new MVFilter(d.mvfw, "FlowFPS", vsapi); } catch (MVException &e) { vsapi->setError(out, std::string("FlowFPS: ").append(e.what()).c_str()); vsapi->freeNode(d.super); vsapi->freeNode(d.mvfw); vsapi->freeNode(d.mvbw); delete d.mvClipB; delete d.mvClipF; return; } try { // So it checks the similarity of mvfw and mvfw? ????? // Copied straight from 2.5.11.3... 
d.bleh->CheckSimilarity(d.mvClipF, "mvfw"); d.bleh->CheckSimilarity(d.mvClipB, "mvbw"); } catch (MVException &e) { vsapi->setError(out, std::string("FlowFPS: ").append(e.what()).c_str()); delete d.bleh; delete d.mvClipB; delete d.mvClipF; vsapi->freeNode(d.super); vsapi->freeNode(d.mvfw); vsapi->freeNode(d.mvbw); return; } if (d.bleh->nPel == 1) d.finest = vsapi->cloneNodeRef(d.super); // v2.0.9.1 else { VSPlugin *mvtoolsPlugin = vsapi->getPluginById("com.nodame.mvtools", core); VSPlugin *stdPlugin = vsapi->getPluginById("com.vapoursynth.std", core); VSMap *args = vsapi->createMap(); vsapi->propSetNode(args, "super", d.super, paReplace); vsapi->propSetInt(args, "isse", d.isse, paReplace); VSMap *ret = vsapi->invoke(mvtoolsPlugin, "Finest", args); if (vsapi->getError(ret)) { vsapi->setError(out, std::string("FlowFPS: ").append(vsapi->getError(ret)).c_str()); delete d.bleh; delete d.mvClipB; delete d.mvClipF; vsapi->freeNode(d.super); vsapi->freeNode(d.mvfw); vsapi->freeNode(d.mvbw); vsapi->freeMap(args); vsapi->freeMap(ret); return; } d.finest = vsapi->propGetNode(ret, "clip", 0, NULL); vsapi->freeMap(ret); vsapi->clearMap(args); vsapi->propSetNode(args, "clip", d.finest, paReplace); vsapi->freeNode(d.finest); ret = vsapi->invoke(stdPlugin, "Cache", args); vsapi->freeMap(args); if (vsapi->getError(ret)) { // prefix the error messages vsapi->setError(out, std::string("FlowFPS: ").append(vsapi->getError(ret)).c_str()); delete d.bleh; delete d.mvClipB; delete d.mvClipF; vsapi->freeNode(d.super); vsapi->freeNode(d.mvfw); vsapi->freeNode(d.mvbw); vsapi->freeMap(ret); return; } d.finest = vsapi->propGetNode(ret, "clip", 0, NULL); vsapi->freeMap(ret); } d.node = vsapi->propGetNode(in, "clip", 0, 0); d.vi = *vsapi->getVideoInfo(d.node); if (d.vi.fpsNum == 0 || d.vi.fpsDen == 0) { vsapi->setError(out, "FlowFPS: The input clip must have a frame rate. 
Invoke AssumeFPS if necessary."); vsapi->freeNode(d.finest); vsapi->freeNode(d.super); vsapi->freeNode(d.mvfw); vsapi->freeNode(d.mvbw); vsapi->freeNode(d.node); delete d.bleh; delete d.mvClipB; delete d.mvClipF; return; } int64_t numeratorOld = d.vi.fpsNum; int64_t denominatorOld = d.vi.fpsDen; int64_t numerator, denominator; if (d.num != 0 && d.den != 0) { numerator = d.num; denominator = d.den; } else { numerator = numeratorOld * 2; // double fps by default denominator = denominatorOld; } // safe for big numbers since v2.1 d.fa = denominator * numeratorOld; d.fb = numerator * denominatorOld; int64_t fgcd = gcd(d.fa, d.fb); // general common divisor d.fa /= fgcd; d.fb /= fgcd; setFPS(&d.vi, numerator, denominator); if (d.vi.numFrames) d.vi.numFrames = (int)(1 + (d.vi.numFrames - 1) * d.fb / d.fa); if (d.bleh->nWidth != d.vi.width || d.bleh->nHeight != d.vi.height) { vsapi->setError(out, "FlowFPS: inconsistent source and vector frame size."); vsapi->freeNode(d.finest); vsapi->freeNode(d.super); vsapi->freeNode(d.mvfw); vsapi->freeNode(d.mvbw); vsapi->freeNode(d.node); delete d.bleh; delete d.mvClipB; delete d.mvClipF; return; } const VSVideoInfo *supervi = vsapi->getVideoInfo(d.super); int nSuperWidth = supervi->width; if (d.bleh->nHeight != nHeightS || d.bleh->nWidth != nSuperWidth - d.nSuperHPad * 2) { vsapi->setError(out, "FlowFPS: wrong source or super clip frame size."); vsapi->freeNode(d.finest); vsapi->freeNode(d.super); vsapi->freeNode(d.mvfw); vsapi->freeNode(d.mvbw); vsapi->freeNode(d.node); delete d.bleh; delete d.mvClipB; delete d.mvClipF; return; } if (!((d.bleh->nWidth + d.bleh->nHPadding*2) == supervi->width && (d.bleh->nHeight + d.bleh->nVPadding*2) <= supervi->height)) { vsapi->setError(out, "FlowFPS: inconsistent clips frame size! 
Incomprehensible error messages are the best, right?"); vsapi->freeNode(d.finest); vsapi->freeNode(d.super); vsapi->freeNode(d.mvfw); vsapi->freeNode(d.mvbw); vsapi->freeNode(d.node); delete d.bleh; delete d.mvClipB; delete d.mvClipF; return; } if (!isConstantFormat(&d.vi) || d.vi.format->bitsPerSample > 16 || d.vi.format->sampleType != stInteger || d.vi.format->subSamplingW > 1 || d.vi.format->subSamplingH > 1 || (d.vi.format->colorFamily != cmYUV && d.vi.format->colorFamily != cmGray)) { vsapi->setError(out, "FlowFPS: input clip must be GRAY, 420, 422, 440, or 444, up to 16 bits, with constant dimensions."); vsapi->freeNode(d.super); vsapi->freeNode(d.finest); vsapi->freeNode(d.mvfw); vsapi->freeNode(d.mvbw); vsapi->freeNode(d.node); delete d.bleh; delete d.mvClipB; delete d.mvClipF; return; } if (d.vi.format->bitsPerSample > 8) d.isse = 0; d.nBlkXP = (d.bleh->nBlkX * (d.bleh->nBlkSizeX - d.bleh->nOverlapX) + d.bleh->nOverlapX < d.bleh->nWidth) ? d.bleh->nBlkX + 1 : d.bleh->nBlkX; d.nBlkYP = (d.bleh->nBlkY * (d.bleh->nBlkSizeY - d.bleh->nOverlapY) + d.bleh->nOverlapY < d.bleh->nHeight) ? 
d.bleh->nBlkY + 1 : d.bleh->nBlkY; d.nWidthP = d.nBlkXP * (d.bleh->nBlkSizeX - d.bleh->nOverlapX) + d.bleh->nOverlapX; d.nHeightP = d.nBlkYP * (d.bleh->nBlkSizeY - d.bleh->nOverlapY) + d.bleh->nOverlapY; d.nWidthPUV = d.nWidthP / d.bleh->xRatioUV; d.nHeightPUV = d.nHeightP / d.bleh->yRatioUV; d.nHeightUV = d.bleh->nHeight / d.bleh->yRatioUV; d.nWidthUV = d.bleh->nWidth / d.bleh->xRatioUV; d.nHPaddingUV = d.bleh->nHPadding / d.bleh->xRatioUV; d.nVPaddingUV = d.bleh->nVPadding / d.bleh->yRatioUV; d.VPitchY = (d.nWidthP + 15) & (~15); d.VPitchUV = (d.nWidthPUV + 15) & (~15); d.VXFullYB = new uint8_t [d.nHeightP * d.VPitchY]; d.VYFullYB = new uint8_t [d.nHeightP * d.VPitchY]; d.VXFullYF = new uint8_t [d.nHeightP * d.VPitchY]; d.VYFullYF = new uint8_t [d.nHeightP * d.VPitchY]; d.VXSmallYB = new uint8_t [d.nBlkXP * d.nBlkYP]; d.VYSmallYB = new uint8_t [d.nBlkXP * d.nBlkYP]; d.VXSmallYF = new uint8_t [d.nBlkXP * d.nBlkYP]; d.VYSmallYF = new uint8_t [d.nBlkXP * d.nBlkYP]; if (d.maskmode == 2) { d.VXFullYBB = new uint8_t [d.nHeightP * d.VPitchY]; d.VYFullYBB = new uint8_t [d.nHeightP * d.VPitchY]; d.VXFullYFF = new uint8_t [d.nHeightP * d.VPitchY]; d.VYFullYFF = new uint8_t [d.nHeightP * d.VPitchY]; d.VXSmallYBB = new uint8_t [d.nBlkXP * d.nBlkYP]; d.VYSmallYBB = new uint8_t [d.nBlkXP * d.nBlkYP]; d.VXSmallYFF = new uint8_t [d.nBlkXP * d.nBlkYP]; d.VYSmallYFF = new uint8_t [d.nBlkXP * d.nBlkYP]; } d.MaskSmallB = new uint8_t [d.nBlkXP * d.nBlkYP]; d.MaskFullYB = new uint8_t [d.nHeightP * d.VPitchY]; d.MaskSmallF = new uint8_t [d.nBlkXP * d.nBlkYP]; d.MaskFullYF = new uint8_t [d.nHeightP * d.VPitchY]; d.upsizer = new SimpleResize(d.nWidthP, d.nHeightP, d.nBlkXP, d.nBlkYP); if (d.vi.format->colorFamily != cmGray) { d.VXFullUVB = new uint8_t [d.nHeightPUV * d.VPitchUV]; d.VYFullUVB = new uint8_t [d.nHeightPUV * d.VPitchUV]; d.VXFullUVF = new uint8_t [d.nHeightPUV * d.VPitchUV]; d.VYFullUVF = new uint8_t [d.nHeightPUV * d.VPitchUV]; d.VXSmallUVB = new uint8_t [d.nBlkXP * 
d.nBlkYP]; d.VYSmallUVB = new uint8_t [d.nBlkXP * d.nBlkYP]; d.VXSmallUVF = new uint8_t [d.nBlkXP * d.nBlkYP]; d.VYSmallUVF = new uint8_t [d.nBlkXP * d.nBlkYP]; if (d.maskmode == 2) { d.VXFullUVBB = new uint8_t [d.nHeightPUV * d.VPitchUV]; d.VYFullUVBB = new uint8_t [d.nHeightPUV * d.VPitchUV]; d.VXFullUVFF = new uint8_t [d.nHeightPUV * d.VPitchUV]; d.VYFullUVFF = new uint8_t [d.nHeightPUV * d.VPitchUV]; d.VXSmallUVBB = new uint8_t [d.nBlkXP * d.nBlkYP]; d.VYSmallUVBB = new uint8_t [d.nBlkXP * d.nBlkYP]; d.VXSmallUVFF = new uint8_t [d.nBlkXP * d.nBlkYP]; d.VYSmallUVFF = new uint8_t [d.nBlkXP * d.nBlkYP]; } d.MaskFullUVB = new uint8_t [d.nHeightPUV * d.VPitchUV]; d.MaskFullUVF = new uint8_t [d.nHeightPUV * d.VPitchUV]; d.upsizerUV = new SimpleResize(d.nWidthPUV, d.nHeightPUV, d.nBlkXP, d.nBlkYP); } d.LUTVB = new int[256]; d.LUTVF = new int[256]; d.nleftLast = -1000; d.nrightLast = -1000; data = (MVFlowFPSData *)malloc(sizeof(d)); *data = d; // Can't use fmParallel because of nleftLast/nrightLast. vsapi->createFilter(in, out, "FlowFPS", mvflowfpsInit, mvflowfpsGetFrame, mvflowfpsFree, fmParallelRequests, 0, data, core); // AssumeFPS sets the _DurationNum and _DurationDen properties. VSNodeRef *node = vsapi->propGetNode(out, "clip", 0, NULL); VSMap *args = vsapi->createMap(); vsapi->propSetNode(args, "clip", node, paReplace); vsapi->freeNode(node); vsapi->propSetInt(args, "fpsnum", d.vi.fpsNum, paReplace); vsapi->propSetInt(args, "fpsden", d.vi.fpsDen, paReplace); VSPlugin *stdPlugin = vsapi->getPluginById("com.vapoursynth.std", core); VSMap *ret = vsapi->invoke(stdPlugin, "AssumeFPS", args); const char *error = vsapi->getError(ret); if (error) { vsapi->setError(out, std::string("FlowFPS: Failed to invoke AssumeFPS. 
Error message: ").append(error).c_str()); vsapi->freeMap(args); vsapi->freeMap(ret); return; } node = vsapi->propGetNode(ret, "clip", 0, NULL); vsapi->freeMap(ret); vsapi->clearMap(args); vsapi->propSetNode(args, "clip", node, paReplace); vsapi->freeNode(node); ret = vsapi->invoke(stdPlugin, "Cache", args); vsapi->freeMap(args); error = vsapi->getError(ret); if (error) { vsapi->setError(out, std::string("FlowFPS: Failed to invoke Cache. Error message: ").append(error).c_str()); vsapi->freeMap(ret); return; } node = vsapi->propGetNode(ret, "clip", 0, NULL); vsapi->freeMap(ret); vsapi->propSetNode(out, "clip", node, paReplace); vsapi->freeNode(node); }
// Filter constructor for mv.Recalculate: refines an existing vector clip
// (produced by mv.Analyse) at a possibly different block size, using the
// super clip for pixel data. Reads user parameters from 'in', validates them,
// pulls the Super_* metadata from the super clip's first frame, and registers
// the filter instance via createFilter. On any validation failure it sets an
// error on 'out', releases every node acquired so far, and returns.
static void VS_CC mvrecalculateCreate(const VSMap *in, VSMap *out, void *userData, VSCore *core, const VSAPI *vsapi) {
    (void)userData;

    MVRecalculateData d;
    MVRecalculateData *data;

    int err;

    // --- Optional parameters; each falls back to its documented default when absent. ---
    d.thSAD = int64ToIntS(vsapi->propGetInt(in, "thsad", 0, &err));
    if (err)
        d.thSAD = 200;

    d.smooth = int64ToIntS(vsapi->propGetInt(in, "smooth", 0, &err));
    if (err)
        d.smooth = 1;

    d.analysisData.nBlkSizeX = int64ToIntS(vsapi->propGetInt(in, "blksize", 0, &err));
    if (err)
        d.analysisData.nBlkSizeX = 8;

    // blksizev defaults to blksize (square blocks).
    d.analysisData.nBlkSizeY = int64ToIntS(vsapi->propGetInt(in, "blksizev", 0, &err));
    if (err)
        d.analysisData.nBlkSizeY = d.analysisData.nBlkSizeX;

    d.searchType = (SearchType)int64ToIntS(vsapi->propGetInt(in, "search", 0, &err));
    if (err)
        d.searchType = SearchHex2;

    d.searchparam = int64ToIntS(vsapi->propGetInt(in, "searchparam", 0, &err));
    if (err)
        d.searchparam = 2;

    d.chroma = !!vsapi->propGetInt(in, "chroma", 0, &err);
    if (err)
        d.chroma = 1;

    // truemotion selects a preset that also drives the lambda/pnew defaults below.
    d.truemotion = !!vsapi->propGetInt(in, "truemotion", 0, &err);
    if (err)
        d.truemotion = 1;

    // Default lambda scales with block area (8x8 = 64 pixels is the reference).
    d.nLambda = int64ToIntS(vsapi->propGetInt(in, "lambda", 0, &err));
    if (err)
        d.nLambda = d.truemotion ? (1000 * d.analysisData.nBlkSizeX * d.analysisData.nBlkSizeY / 64) : 0;

    d.pnew = int64ToIntS(vsapi->propGetInt(in, "pnew", 0, &err));
    if (err)
        d.pnew = d.truemotion ? 50 : 0; // relative to 256

    // overlap/overlapv default to 0 (propGetInt returns 0 when the key is absent).
    d.analysisData.nOverlapX = int64ToIntS(vsapi->propGetInt(in, "overlap", 0, &err));

    d.analysisData.nOverlapY = int64ToIntS(vsapi->propGetInt(in, "overlapv", 0, &err));
    if (err)
        d.analysisData.nOverlapY = d.analysisData.nOverlapX;

    d.dctmode = int64ToIntS(vsapi->propGetInt(in, "dct", 0, &err));

    d.divideExtra = int64ToIntS(vsapi->propGetInt(in, "divide", 0, &err));

    d.opt = !!vsapi->propGetInt(in, "opt", 0, &err);
    if (err)
        d.opt = 1;

    d.meander = !!vsapi->propGetInt(in, "meander", 0, &err);
    if (err)
        d.meander = 1;

    d.fields = !!vsapi->propGetInt(in, "fields", 0, &err);

    // tff is tri-state: remember whether the user supplied it at all.
    d.tff = !!vsapi->propGetInt(in, "tff", 0, &err);
    d.tff_exists = !err;

    // --- Parameter validation (no nodes acquired yet, so plain returns are fine). ---
    if (d.searchType < 0 || d.searchType > 7) {
        vsapi->setError(out, "Recalculate: search must be between 0 and 7 (inclusive).");
        return;
    }

    if (d.dctmode < 0 || d.dctmode > 10) {
        vsapi->setError(out, "Recalculate: dct must be between 0 and 10 (inclusive).");
        return;
    }

    if (d.dctmode >= 5 && d.analysisData.nBlkSizeX == 16 && d.analysisData.nBlkSizeY == 2) {
        vsapi->setError(out, "Recalculate: dct 5..10 cannot work with 16x2 blocks.");
        return;
    }

    if (d.divideExtra < 0 || d.divideExtra > 2) {
        vsapi->setError(out, "Recalculate: divide must be between 0 and 2 (inclusive).");
        return;
    }

    // Only the block geometries with SAD implementations are accepted.
    if ((d.analysisData.nBlkSizeX != 4 || d.analysisData.nBlkSizeY != 4) &&
        (d.analysisData.nBlkSizeX != 8 || d.analysisData.nBlkSizeY != 4) &&
        (d.analysisData.nBlkSizeX != 8 || d.analysisData.nBlkSizeY != 8) &&
        (d.analysisData.nBlkSizeX != 16 || d.analysisData.nBlkSizeY != 2) &&
        (d.analysisData.nBlkSizeX != 16 || d.analysisData.nBlkSizeY != 8) &&
        (d.analysisData.nBlkSizeX != 16 || d.analysisData.nBlkSizeY != 16) &&
        (d.analysisData.nBlkSizeX != 32 || d.analysisData.nBlkSizeY != 16) &&
        (d.analysisData.nBlkSizeX != 32 || d.analysisData.nBlkSizeY != 32) &&
        (d.analysisData.nBlkSizeX != 64 || d.analysisData.nBlkSizeY != 32) &&
        (d.analysisData.nBlkSizeX != 64 || d.analysisData.nBlkSizeY != 64) &&
        (d.analysisData.nBlkSizeX != 128 || d.analysisData.nBlkSizeY != 64) &&
        (d.analysisData.nBlkSizeX != 128 || d.analysisData.nBlkSizeY != 128)) {
        vsapi->setError(out, "Recalculate: the block size must be 4x4, 8x4, 8x8, 16x2, 16x8, 16x16, 32x16, 32x32, 64x32, 64x64, 128x64, or 128x128.");
        return;
    }

    if (d.pnew < 0 || d.pnew > 256) {
        vsapi->setError(out, "Recalculate: pnew must be between 0 and 256 (inclusive).");
        return;
    }

    if (d.analysisData.nOverlapX < 0 || d.analysisData.nOverlapX > d.analysisData.nBlkSizeX / 2 ||
        d.analysisData.nOverlapY < 0 || d.analysisData.nOverlapY > d.analysisData.nBlkSizeY / 2) {
        vsapi->setError(out, "Recalculate: overlap must be at most half of blksize, overlapv must be at most half of blksizev, and they both need to be at least 0.");
        return;
    }

    // divide halves the block size, so it needs at least 8x8 to start with.
    if (d.divideExtra && (d.analysisData.nBlkSizeX < 8 || d.analysisData.nBlkSizeY < 8)) {
        vsapi->setError(out, "Recalculate: blksize and blksizev must be at least 8 when divide=True.");
        return;
    }

    // Clamp the search parameter to the minimum its search type can use.
    if (d.searchType == SearchNstep)
        d.nSearchParam = (d.searchparam < 0) ? 0 : d.searchparam;
    else
        d.nSearchParam = (d.searchparam < 1) ? 1 : d.searchparam;

    // --- Acquire the super clip; from here on, error paths must free d.node. ---
    d.node = vsapi->propGetNode(in, "super", 0, 0);
    d.vi = vsapi->getVideoInfo(d.node);

    if (d.analysisData.nOverlapX % (1 << d.vi->format->subSamplingW) ||
        d.analysisData.nOverlapY % (1 << d.vi->format->subSamplingH)) {
        vsapi->setError(out, "Recalculate: The requested overlap is incompatible with the super clip's subsampling.");
        vsapi->freeNode(d.node);
        return;
    }

    if (d.divideExtra && (d.analysisData.nOverlapX % (2 << d.vi->format->subSamplingW) ||
                          d.analysisData.nOverlapY % (2 << d.vi->format->subSamplingH))) { // subsampling times 2
        vsapi->setError(out, "Recalculate: overlap and overlapv must be multiples of 2 or 4 when divide=True, depending on the super clip's subsampling.");
        vsapi->freeNode(d.node);
        return;
    }

    // Fetch frame 0 of the super clip to read the Super_* frame properties
    // that mv.Super attaches. getFrame appends its own error text to errorMsg
    // on failure, after the prefix written here.
#define ERROR_SIZE 1024
    char errorMsg[ERROR_SIZE] = "Recalculate: failed to retrieve first frame from super clip. Error message: ";
    size_t errorLen = strlen(errorMsg);
    const VSFrameRef *evil = vsapi->getFrame(0, d.node, errorMsg + errorLen, ERROR_SIZE - errorLen);
#undef ERROR_SIZE

    if (!evil) {
        vsapi->setError(out, errorMsg);
        vsapi->freeNode(d.node);
        return;
    }

    const VSMap *props = vsapi->getFramePropsRO(evil);
    int evil_err[6];
    int nHeight = int64ToIntS(vsapi->propGetInt(props, "Super_height", 0, &evil_err[0]));
    d.nSuperHPad = int64ToIntS(vsapi->propGetInt(props, "Super_hpad", 0, &evil_err[1]));
    d.nSuperVPad = int64ToIntS(vsapi->propGetInt(props, "Super_vpad", 0, &evil_err[2]));
    d.nSuperPel = int64ToIntS(vsapi->propGetInt(props, "Super_pel", 0, &evil_err[3]));
    d.nSuperModeYUV = int64ToIntS(vsapi->propGetInt(props, "Super_modeyuv", 0, &evil_err[4]));
    d.nSuperLevels = int64ToIntS(vsapi->propGetInt(props, "Super_levels", 0, &evil_err[5]));
    vsapi->freeFrame(evil);

    // All six properties are mandatory; any miss means the clip isn't a super clip.
    for (int i = 0; i < 6; i++)
        if (evil_err[i]) {
            vsapi->setError(out, "Recalculate: required properties not found in first frame of super clip. Maybe clip didn't come from mv.Super? Was the first frame trimmed away?");
            vsapi->freeNode(d.node);
            return;
        }

    // Gray input has no chroma to search.
    if (d.vi->format->colorFamily == cmGray)
        d.chroma = 0;

    d.nModeYUV = d.chroma ? YUVPLANES : YPLANE;

    // The super clip must contain at least the planes we intend to use.
    if ((d.nModeYUV & d.nSuperModeYUV) != d.nModeYUV) { //x
        vsapi->setError(out, "Recalculate: super clip does not contain needed colour data.");
        vsapi->freeNode(d.node);
        return;
    }

    d.vectors = vsapi->propGetNode(in, "vectors", 0, NULL);

    // Pull the analysis parameters embedded in the vector clip; adataFromVectorClip
    // reports problems by writing into 'error'.
#define ERROR_SIZE 512
    char error[ERROR_SIZE + 1] = { 0 };
    const char *filter_name = "Recalculate";

    adataFromVectorClip(&d.vectors_data, d.vectors, filter_name, "vectors", vsapi, error, ERROR_SIZE);
#undef ERROR_SIZE

    if (error[0]) {
        vsapi->setError(out, error);
        vsapi->freeNode(d.node);
        vsapi->freeNode(d.vectors);
        return;
    }

    // Carry over the geometry/direction recorded by the original analysis.
    d.analysisData.yRatioUV = d.vectors_data.yRatioUV;
    d.analysisData.xRatioUV = d.vectors_data.xRatioUV;

    d.analysisData.nWidth = d.vectors_data.nWidth;
    d.analysisData.nHeight = d.vectors_data.nHeight;

    d.analysisData.nDeltaFrame = d.vectors_data.nDeltaFrame;
    d.analysisData.isBackward = d.vectors_data.isBackward;

    d.analysisData.bitsPerSample = d.vi->format->bitsPerSample;

    // thsad/lambda are specified on an 8 bit scale; rescale to the clip's bit depth.
    int pixelMax = (1 << d.vi->format->bitsPerSample) - 1;
    d.thSAD = (int)((double)d.thSAD * pixelMax / 255.0 + 0.5);
    d.nLambda = (int)((double)d.nLambda * pixelMax / 255.0 + 0.5);

    // normalize threshold to block size
    int referenceBlockSize = 8 * 8;
    d.thSAD = d.thSAD * (d.analysisData.nBlkSizeX * d.analysisData.nBlkSizeY) / referenceBlockSize;
    if (d.chroma)
        d.thSAD += d.thSAD / (d.analysisData.xRatioUV * d.analysisData.yRatioUV) * 2;

    d.analysisData.nMotionFlags = 0;
    d.analysisData.nMotionFlags |= d.opt ? MOTION_USE_SIMD : 0;
    d.analysisData.nMotionFlags |= d.analysisData.isBackward ? MOTION_IS_BACKWARD : 0;
    d.analysisData.nMotionFlags |= d.chroma ? MOTION_USE_CHROMA_MOTION : 0;

    if (d.opt) {
        d.analysisData.nCPUFlags = g_cpuinfo;
    }

    d.analysisData.nPel = d.nSuperPel; //x

    // Cross-check the super clip's dimensions against the vector clip's record.
    int nSuperWidth = d.vi->width;
    if (nHeight != d.analysisData.nHeight || nSuperWidth - 2 * d.nSuperHPad != d.analysisData.nWidth) {
        vsapi->setError(out, "Recalculate: wrong frame size.");
        vsapi->freeNode(d.node);
        vsapi->freeNode(d.vectors);
        return;
    }

    d.analysisData.nHPadding = d.nSuperHPad; //v2.0    //x
    d.analysisData.nVPadding = d.nSuperVPad;

    // Block grid for the (possibly new) block size / overlap.
    int nBlkX = (d.analysisData.nWidth - d.analysisData.nOverlapX) / (d.analysisData.nBlkSizeX - d.analysisData.nOverlapX); //x
    int nBlkY = (d.analysisData.nHeight - d.analysisData.nOverlapY) / (d.analysisData.nBlkSizeY - d.analysisData.nOverlapY);

    d.analysisData.nBlkX = nBlkX;
    d.analysisData.nBlkY = nBlkY;

    d.analysisData.nLvCount = 1;

    // divide mode also emits a second vector set at half block size. //v1.8.1
    if (d.divideExtra) {
        memcpy(&d.analysisDataDivided, &d.analysisData, sizeof(d.analysisData));
        d.analysisDataDivided.nBlkX = d.analysisData.nBlkX * 2;
        d.analysisDataDivided.nBlkY = d.analysisData.nBlkY * 2;
        d.analysisDataDivided.nBlkSizeX = d.analysisData.nBlkSizeX / 2;
        d.analysisDataDivided.nBlkSizeY = d.analysisData.nBlkSizeY / 2;
        d.analysisDataDivided.nOverlapX = d.analysisData.nOverlapX / 2;
        d.analysisDataDivided.nOverlapY = d.analysisData.nOverlapY / 2;
        d.analysisDataDivided.nLvCount = d.analysisData.nLvCount + 1;
    }

    // Ownership of 'data' passes to the filter; mvrecalculateFree releases it.
    data = (MVRecalculateData *)malloc(sizeof(d));
    *data = d;

    vsapi->createFilter(in, out, "Recalculate", mvrecalculateInit, mvrecalculateGetFrame, mvrecalculateFree, fmParallel, 0, data, core);
}
// Shared filter constructor for Text, ClipInfo, CoreInfo, FrameNum, and
// FrameProps. Which variant is being built is encoded in userData (a
// FILTER_* constant). Validates the input clip and alignment, pre-renders
// any static text, then registers the filter instance.
static void VS_CC textCreate(const VSMap *in, VSMap *out, void *userData, VSCore *core, const VSAPI *vsapi) {
    TextData d;
    int err;

    d.node = vsapi->propGetNode(in, "clip", 0, &err);
    if (err) {
        // The clip argument is optional only for CoreInfo; synthesize a
        // clip with std.BlankClip so the rest of the setup is uniform.
        VSPlugin *stdPlugin = vsapi->getPluginById("com.vapoursynth.std", core);
        VSMap *blankArgs = vsapi->createMap();
        VSMap *blankRet = vsapi->invoke(stdPlugin, "BlankClip", blankArgs);
        vsapi->freeMap(blankArgs);

        const char *invokeError = vsapi->getError(blankRet);
        if (invokeError) {
            std::string msg = "CoreInfo: No input clip was given and invoking BlankClip failed. The error message from BlankClip is:\n";
            msg.append(invokeError);
            vsapi->setError(out, msg.c_str());
            vsapi->freeMap(blankRet);
            return;
        }

        d.node = vsapi->propGetNode(blankRet, "clip", 0, 0);
        vsapi->freeMap(blankRet);
    }

    d.vi = vsapi->getVideoInfo(d.node);

    if (isCompatFormat(d.vi)) {
        vsapi->setError(out, "Text: Compat formats not supported");
        vsapi->freeNode(d.node);
        return;
    }

    // Reject bit depths the text renderer can't draw on (format may be
    // null for variable-format clips, which are allowed through).
    const bool badInteger = d.vi->format && d.vi->format->sampleType == stInteger && d.vi->format->bitsPerSample > 16;
    const bool badFloat = d.vi->format && d.vi->format->sampleType == stFloat && d.vi->format->bitsPerSample != 32;
    if (badInteger || badFloat) {
        vsapi->setError(out, "Text: Only 8-16 bit integer and 32 bit float formats supported");
        vsapi->freeNode(d.node);
        return;
    }

    d.alignment = int64ToIntS(vsapi->propGetInt(in, "alignment", 0, &err));
    if (err)
        d.alignment = 7; // numpad layout: 7 is the top-left corner

    if (d.alignment < 1 || d.alignment > 9) {
        vsapi->setError(out, "Text: alignment must be between 1 and 9 (think numpad)");
        vsapi->freeNode(d.node);
        return;
    }

    d.filter = (intptr_t)userData;

    if (d.filter == FILTER_TEXT) {
        // Static user-supplied text.
        d.text = vsapi->propGetData(in, "text", 0, 0);
        d.instanceName = "Text";
    } else if (d.filter == FILTER_CLIPINFO) {
        // Pre-render the clip description once at construction time.
        d.text += "Clip info:\n";

        if (d.vi->width) {
            d.text += "Width: " + std::to_string(d.vi->width) + " px\n";
            d.text += "Height: " + std::to_string(d.vi->height) + " px\n";
        } else {
            d.text += "Width: may vary\n";
            d.text += "Height: may vary\n";
        }

        if (d.vi->numFrames)
            d.text += "Length: " + std::to_string(d.vi->numFrames) + " frames\n";
        else
            d.text += "Length: unknown\n";

        if (d.vi->format) {
            const VSFormat *fmt = d.vi->format;

            const char *familyName;
            switch (fmt->colorFamily) {
                case cmGray:   familyName = "Gray"; break;
                case cmRGB:    familyName = "RGB"; break;
                case cmYUV:    familyName = "YUV"; break;
                case cmYCoCg:  familyName = "YCoCg"; break;
                case cmCompat: familyName = "Compat"; break;
                default:       familyName = "impossible"; break;
            }

            const char *sampleTypeName;
            switch (fmt->sampleType) {
                case stInteger: sampleTypeName = "integer"; break;
                case stFloat:   sampleTypeName = "float"; break;
                default:        sampleTypeName = "impossible"; break;
            }

            d.text += "Format name: " + std::string(fmt->name) + "\n";
            d.text += "Format id: " + std::to_string(fmt->id) + "\n";
            d.text += "Color family: " + std::string(familyName) + "\n";
            d.text += "Sample type: " + std::string(sampleTypeName) + "\n";
            d.text += "Bits per sample: " + std::to_string(fmt->bitsPerSample) + "\n";
            d.text += "Bytes per sample: " + std::to_string(fmt->bytesPerSample) + "\n";
            d.text += "Horizontal subsampling: " + std::to_string(fmt->subSamplingW) + "\n";
            d.text += "Vertical subsampling: " + std::to_string(fmt->subSamplingH) + "\n";
            d.text += "Number of planes: " + std::to_string(fmt->numPlanes) + "\n";
        } else {
            d.text += "Format: may vary\n";
        }

        d.text += "FpsNum: " + std::to_string(d.vi->fpsNum) + "\n";
        d.text += "FpsDen: " + std::to_string(d.vi->fpsDen);
        d.instanceName = "ClipInfo";
    } else if (d.filter == FILTER_COREINFO) {
        d.instanceName = "CoreInfo";
    } else if (d.filter == FILTER_FRAMENUM) {
        d.instanceName = "FrameNum";
    } else if (d.filter == FILTER_FRAMEPROPS) {
        // Collect the requested property names; propNumElements returns -1
        // when "props" is absent, so the loop simply doesn't run then.
        const int propCount = vsapi->propNumElements(in, "props");
        for (int idx = 0; idx < propCount; idx++)
            d.props.push_back(vsapi->propGetData(in, "props", idx, 0));
        d.instanceName = "FrameProps";
    }

    // Ownership of the heap copy passes to the filter; textFree deletes it.
    TextData *data = new TextData();
    *data = d;

    vsapi->createFilter(in, out, d.instanceName.c_str(), textInit, textGetFrame, textFree, fmParallel, 0, data, core);
}