FFPP::FFPP(avxsynth::PClip AChild, const char *PP, avxsynth::IScriptEnvironment *Env) : avxsynth::GenericVideoFilter(AChild) {
	// Postprocessing filter wrapping libpostproc.
	// PP is a libpostproc filter string (e.g. "deblock"); YV12 input is
	// processed directly, YUY2 input is round-tripped through planar 4:2:2.
	// Throws (via Env->ThrowError) on any setup failure.
	if (!strcmp(PP, ""))
		Env->ThrowError("FFPP: PP argument is empty");

	PPContext = NULL;
	PPMode = NULL;
	SWSTo422P = NULL;
	SWSFrom422P = NULL;
	memset(&InputPicture, 0, sizeof(InputPicture));
	memset(&OutputPicture, 0, sizeof(OutputPicture));

	// due to a parsing bug in libpostproc it can read beyond the end of a string
	// adding a ',' prevents the bug from manifesting
	// libav head 2011-08-26
	std::string s = PP;
	s.append(",");
	PPMode = pp_get_mode_by_name_and_quality(s.c_str(), PP_QUALITY_MAX);
	if (!PPMode)
		Env->ThrowError("FFPP: Invalid postprocessing settings"); // FIX: typo ("postprocesing") in the error message

	int64_t Flags = AvisynthToSWSCPUFlags(Env->GetCPUFlags());

	if (vi.IsYV12()) {
		Flags |= PP_FORMAT_420;
	} else if (vi.IsYUY2()) {
		Flags |= PP_FORMAT_422;
		// libpostproc works on planar data only, so packed YUY2 is converted
		// to/from planar YUV422P around the postprocessing call.
		SWSTo422P = FFGetSwsContext(vi.width, vi.height, PIX_FMT_YUYV422, vi.width, vi.height, PIX_FMT_YUV422P, Flags | SWS_BICUBIC, FFGetSwsAssumedColorSpace(vi.width, vi.height));
		SWSFrom422P = FFGetSwsContext(vi.width, vi.height, PIX_FMT_YUV422P, vi.width, vi.height, PIX_FMT_YUYV422, Flags | SWS_BICUBIC, FFGetSwsAssumedColorSpace(vi.width, vi.height));
		// FIX: fail loudly here instead of crashing later on a NULL swscale context.
		if (!SWSTo422P || !SWSFrom422P)
			Env->ThrowError("FFPP: Failed to create swscale conversion contexts");
		// FIX: avpicture_alloc signals failure with a negative return value;
		// previously an allocation failure went unnoticed.
		if (avpicture_alloc(&InputPicture, PIX_FMT_YUV422P, vi.width, vi.height) < 0 ||
		    avpicture_alloc(&OutputPicture, PIX_FMT_YUV422P, vi.width, vi.height) < 0)
			Env->ThrowError("FFPP: Failed to allocate temporary pictures");
	} else {
		Env->ThrowError("FFPP: Only YV12 and YUY2 video supported");
	}

	/* Flags as passed to pp_get_context will potentially no longer be the same int value,
	 * but it will still have the correct binary representation (which is the important part). */
	PPContext = pp_get_context(vi.width, vi.height, (int)Flags);
	if (!PPContext)
		Env->ThrowError("FFPP: Failed to create context");
}
SWScale::SWScale(avxsynth::PClip Child, int ResizeToWidth, int ResizeToHeight, const char *ResizerName, const char *ConvertToFormatName, avxsynth::IScriptEnvironment *Env) : avxsynth::GenericVideoFilter(Child) {
	// Resize/colorspace-conversion filter built on libswscale.
	// Non-positive target dimensions mean "keep the source size"; the target
	// colorspace name is resolved by CSNameToPIXFMT (with the source format
	// as the fallback). Throws via Env->ThrowError on any invalid argument.
	Context = NULL;
	OrigWidth = vi.width;
	OrigHeight = vi.height;
	// Avisynth stores RGB bottom-up, so the frame must be flipped whenever
	// exactly one side of the conversion is YUV. Record the input side now;
	// it is XORed with the output side below so same-family conversions cancel.
	FlipOutput = vi.IsYUV();

	// Map the Avisynth input colorspace onto the matching libav pixel format.
	PixelFormat SrcFormat = PIX_FMT_NONE;
	if (vi.IsYV12())
		SrcFormat = PIX_FMT_YUV420P;
	if (vi.IsYUY2())
		SrcFormat = PIX_FMT_YUYV422;
	if (vi.IsRGB24())
		SrcFormat = PIX_FMT_BGR24;
	if (vi.IsRGB32())
		SrcFormat = PIX_FMT_RGB32;

	if (ResizeToHeight <= 0)
		ResizeToHeight = OrigHeight;
	else
		vi.height = ResizeToHeight;

	if (ResizeToWidth <= 0)
		ResizeToWidth = OrigWidth;
	else
		vi.width = ResizeToWidth;

	PixelFormat DstFormat = CSNameToPIXFMT(ConvertToFormatName, SrcFormat);
	if (DstFormat == PIX_FMT_NONE)
		Env->ThrowError("SWScale: Invalid colorspace specified (%s)", ConvertToFormatName);

	// Advertise the new colorspace to Avisynth for the four formats this
	// filter can emit; any other PixelFormat leaves vi.pixel_type unchanged.
	if (DstFormat == PIX_FMT_YUV420P)
		vi.pixel_type = avxsynth::VideoInfo::CS_I420;
	else if (DstFormat == PIX_FMT_YUYV422)
		vi.pixel_type = avxsynth::VideoInfo::CS_YUY2;
	else if (DstFormat == PIX_FMT_BGR24)
		vi.pixel_type = avxsynth::VideoInfo::CS_BGR24;
	else if (DstFormat == PIX_FMT_RGB32)
		vi.pixel_type = avxsynth::VideoInfo::CS_BGR32;

	// Second half of the flip decision: toggles again if the output is YUV.
	FlipOutput ^= vi.IsYUV();

	int SWSResizer = ResizerNameToSWSResizer(ResizerName);
	if (SWSResizer == 0)
		Env->ThrowError("SWScale: Invalid resizer specified (%s)", ResizerName);

	// Chroma-subsampled outputs require even dimensions.
	if (DstFormat == PIX_FMT_YUV420P && (vi.height & 1))
		Env->ThrowError("SWScale: mod 2 output height required");
	if ((DstFormat == PIX_FMT_YUV420P || DstFormat == PIX_FMT_YUYV422) && (vi.width & 1))
		Env->ThrowError("SWScale: mod 2 output width required");

	Context = FFGetSwsContext(OrigWidth, OrigHeight, SrcFormat, vi.width, vi.height, DstFormat,
		AvisynthToSWSCPUFlags(Env->GetCPUFlags()) | SWSResizer, FFGetSwsAssumedColorSpace(OrigWidth, OrigHeight));
	if (Context == NULL)
		Env->ThrowError("SWScale: Context creation failed");
}