Example #1
AVSValue ExpFunctionCall::Call(IScriptEnvironment* env) 
{
  AVSValue result;
  IScriptEnvironment2 *env2 = static_cast<IScriptEnvironment2*>(env);

  std::vector<AVSValue> args(arg_expr_count+1, AVSValue());
  for (int a=0; a<arg_expr_count; ++a)
    args[a+1] = arg_exprs[a]->Evaluate(env);

  // first try without implicit "last"
  try
  { // Invoke can always throw by calling a constructor of a filter that throws
    if (env2->Invoke(&result, name, AVSValue(args.data()+1, arg_expr_count), arg_expr_names+1))
      return result;
  } catch(const IScriptEnvironment::NotFound&){}

  // if that fails, try with implicit "last" (except when OOP notation was used)
  if (!oop_notation) 
  {
    try
    {
      if (env2->GetVar("last", args.data()) && env2->Invoke(&result, name, AVSValue(args.data(), arg_expr_count+1), arg_expr_names))
        return result;
    } catch(const IScriptEnvironment::NotFound&){}
  }

  env->ThrowError(env->FunctionExists(name) ?
    "Script error: Invalid arguments to function '%s'." :
    "Script error: There is no function named '%s'.", name);

  assert(0);  // we should never get here
  return 0;
}
Example #2
AVSValue __cdecl MArchitecture::getArchitecture(AVSValue args, void* user_data, IScriptEnvironment* env) {
	// Return the architecture this build targets as a string
#if defined(_M_X64) || defined(__amd64__)
	return AVSValue("x64");
#else
	return AVSValue("x86");
#endif
}
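For context, a creator like the one above only becomes callable from scripts after it is registered with the environment. Below is a minimal sketch of the usual AviSynth 2.5-style registration; the exported script name "GetArchitecture", the empty parameter string and the description text are illustrative assumptions, and getArchitecture is assumed to be a public static member so that it matches the ApplyFunc signature.

// Sketch only: hook the callback above into the plugin entry point.
extern "C" __declspec(dllexport) const char* __stdcall
AvisynthPluginInit2(IScriptEnvironment* env) {
	// "" means the script-level function takes no arguments;
	// the last argument is the user_data pointer handed back on every call.
	env->AddFunction("GetArchitecture", "", MArchitecture::getArchitecture, 0);
	return "MArchitecture sample plugin";
}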
Example #3
// Get the value of a frame.
AVSValue ConditionalReader::GetFrameValue(int framenumber) {
    framenumber = std::max(std::min(framenumber, vi.num_frames-1), 0);

    switch (mode) {
    case MODE_INT:
        return AVSValue(intVal[framenumber]);
    case MODE_FLOAT:
        return AVSValue(floatVal[framenumber]);
    case MODE_BOOL:
        return AVSValue(boolVal[framenumber]);
    }
    return AVSValue(0);
}
Example #4
                // utility functions
                // Opens AVS file and sets some member variables.
                void open(const char* avsfile) {
                    DBGLOG("avsutil::impl::cavs_type::open(\"" << avsfile << "\")");

                    try {
                        // pack the filename as the argument of AviSynth filter
                        AVSValue filename = avsfile;
                        AVSValue args = AVSValue(&filename, 1);

                        // load AviSynth script
                        AVSValue imported = mv_se->Invoke("Import", args, 0);

                        // get the clip and video information
                        mv_clip = imported.AsClip();

                        // store filename
                        mv_filepath = avsfile;
                    }
                    catch (AvisynthError& avserr) {
                        mv_is_fine = false;
                        mv_errmsg = avserr.msg;
                    }
                    catch (std::exception& ex) {
                        mv_is_fine = false;
                        mv_errmsg = ex.what();
                    }
                }
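The Invoke("Import", ...) call above is the standard way for a host application to load a script. A self-contained minimal sketch of that round trip follows; the script name "test.avs" is a placeholder, and error handling is reduced to the AvisynthError catch the example already uses.

#include "avisynth.h"

int main() {
    IScriptEnvironment* env = CreateScriptEnvironment(AVISYNTH_INTERFACE_VERSION);
    try {
        AVSValue filename("test.avs");                 // placeholder script path
        AVSValue imported = env->Invoke("Import", AVSValue(&filename, 1));
        PClip clip = imported.AsClip();
        const VideoInfo& vi = clip->GetVideoInfo();
        PVideoFrame frame = clip->GetFrame(0, env);    // fetch the first frame
        const BYTE* p = frame->GetReadPtr();
        // ... walk p using frame->GetPitch(), vi.width and vi.height ...
    }
    catch (AvisynthError& err) {
        // err.msg carries the error text, as in the example above
    }
    delete env;  // 2.6+ hosts may prefer env->DeleteScriptEnvironment()
    return 0;
}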
Example #5
AVSValue ExpVariableReference::Evaluate(IScriptEnvironment* env) 
{
  AVSValue result;
  IScriptEnvironment2 *env2 = static_cast<IScriptEnvironment2*>(env);

  // first look for a genuine variable
  // Don't add a cache to this one, it's a Var
  if (env2->GetVar(name, &result)) {
    return result;
  }
  else {
    // Swap order to match ::Call below -- Gavino Jan 2010

    // next look for an argless function
    if (!env2->Invoke(&result, name, AVSValue(0,0)))
    {
      // finally look for a single-arg function taking implicit "last"
      AVSValue last;
      if (!env2->GetVar("last", &last) || !env2->Invoke(&result, name, last))
      {
        env->ThrowError("I don't know what '%s' means.", name);
        return 0;
      }
    }
  }
  // Add cache to Bracketless call of argless function
  if (result.IsClip()) { // Tritical Jan 2006
    return env->Invoke("Cache", result);
  }
  return result;
}
Example #6
		AVSValue __cdecl DestroyPipe(AVSValue args, void* user_data, IScriptEnvironment* env) {
			HANDLE hPipe = (HANDLE)args[0].AsInt();
			DisconnectNamedPipe(hPipe);
			FlushFileBuffers(hPipe);
			CloseHandle(hPipe);
			return AVSValue();
		}
Example #7
AVSValue __cdecl ImageReader::Create(AVSValue args, void*, IScriptEnvironment* env) 
{
  const char * path = args[0].AsString("c:\\%06d.ebmp");

  ImageReader *IR = new ImageReader(path, args[1].AsInt(0), args[2].AsInt(1000), args[3].AsFloat(24.0f), 
                                    args[4].AsBool(false), args[5].AsBool(false), args[6].AsString("rgb24"), env);
  // If we are returning a stream of 2 or more copies of the same image
  // then use FreezeFrame and the Cache to minimise any reloading.
  if (IR->framecopies > 1) {
    AVSValue cache_args[1] = { IR };
    AVSValue cache = env->Invoke("Cache", AVSValue(cache_args, 1));
    AVSValue ff_args[4] = { cache, 0, IR->framecopies-1, 0 };
    return env->Invoke("FreezeFrame", AVSValue(ff_args, 4)).AsClip();
  }

  return IR;
}
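Create() above is the factory half of the pattern; the other half is an AddFunction call whose parameter string spells out the script-level signature (c = clip, i = int, f = float, b = bool, s = string, [name] marks a named optional argument). The string and registration below are illustrative assumptions rather than the plugin's actual code:

// Sketch: typically placed in AvisynthPluginInit2/3 (parameter string assumed).
env->AddFunction("ImageReader",
                 "[file]s[start]i[end]i[fps]f[use_DevIL]b[info]b[pixel_type]s",
                 ImageReader::Create, 0);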
Example #8
AVSValue LoadPlugin(AVSValue args, void* user_data, IScriptEnvironment* env) {
  bool quiet = (user_data != 0);
  args = args[0];
  const char* result=0;
  for (int i=0; i<args.ArraySize(); ++i) {
    HMODULE plugin;
    const char* plugin_name = args[i].AsString();
    if (MyLoadLibrary(plugin_name, &plugin, quiet, env)) {
      typedef const char* (__stdcall *AvisynthPluginInitFunc)(IScriptEnvironment* env);
      AvisynthPluginInitFunc AvisynthPluginInit = (AvisynthPluginInitFunc)dlsym(plugin, "AvisynthPluginInit2");
      if (!AvisynthPluginInit) {
        AvisynthPluginInit = (AvisynthPluginInitFunc)dlsym(plugin, "_AvisynthPluginInit2@4");
/*
        if (!AvisynthPluginInit) {  // Attempt C-plugin
          AvisynthPluginInit = (AvisynthPluginInitFunc)dlsym(plugin, "avisynth_c_plugin_init");
          if (AvisynthPluginInit) {
            dlclose(plugin);
            return env->Invoke("LoadCPlugin", args);
          }
        }
*/
        if (!AvisynthPluginInit) {  // Older version
          dlclose(plugin);
          if (quiet) {
            // remove the last handle from the list
            HMODULE* loaded_plugins = (HMODULE*)env->GetVar("$Plugins$").AsString();
            int j=0;
            while (loaded_plugins[j+1]) j++;
            loaded_plugins[j] = 0;
          } else {
            env->ThrowError("Plugin %s is not an AviSynth 2.5 plugin.",plugin_name);
          }
        } else {
          result = AvisynthPluginInit(env);
        }
      } else {
        result = AvisynthPluginInit(env);
      }
    }
  }
  if (loadplugin_prefix) free((void*)loadplugin_prefix);  // Tritical May 2005
  loadplugin_prefix = 0;
  return result ? AVSValue(result) : AVSValue();
}
Example #9
AVSValue ConditionalReader::ConvertType(const char* content, int line, IScriptEnvironment* env)
{
    if (mode == MODE_UNKNOWN)
        ThrowLine("ConditionalReader: Type has not been defined. Line %d", line, env);

    int fields;
    switch (mode) {
    case MODE_INT:
        int ival;
        fields = sscanf(content, "%d", &ival);
        if (fields != 1)
            ThrowLine("ConditionalReader: Could not find an expected integer at line %d!", line, env);

        return AVSValue(ival);

    case MODE_FLOAT:
        float fval;
        fields = sscanf(content, "%e", &fval);
        if (fields != 1)
            ThrowLine("ConditionalReader: Could not find an expected float at line %d!", line, env);
        return AVSValue(fval);

    case MODE_BOOL:
        char bval [8];
        bval[0] = '\0';
        fields = sscanf(content, "%7s", bval);
        if (!strcasecmp((const char*)bval, "true")) {
            return AVSValue(true);
        }
        else if (!strcasecmp((const char*)bval, "t")) {
            return AVSValue(true);
        }
        else if (!strcasecmp((const char*)bval, "yes")) {
            return AVSValue(true);
        }
        else if (!strcasecmp((const char*)bval, "false")) {
            return AVSValue(false);
        }
        else if (!strcasecmp((const char*)bval, "f")) {
            return AVSValue(false);
        }
        else if (!strcasecmp((const char*)bval, "no")) {
            return AVSValue(false);
        }
        ThrowLine("ConditionalReader: Boolean value was not true or false in line %d", line, env);
    }
    return AVSValue(0);
}
Example #10
/// @brief Constructor
/// @param filename
///
AvisynthAudioProvider::AvisynthAudioProvider(wxString filename)
: filename(filename)
{
	try {
		AVSValue script;
		wxMutexLocker lock(avs_wrapper.GetMutex());

		wxFileName fn(filename);
		if (!fn.FileExists())
			throw agi::FileNotFoundError(STD_STR(filename));

		IScriptEnvironment *env = avs_wrapper.GetEnv();

		// Include
		if (filename.EndsWith(".avs")) {
			char *fname = env->SaveString(fn.GetShortPath().mb_str(csConvLocal));
			script = env->Invoke("Import", fname);
		}

		// Use DirectShowSource
		else {
			const char * argnames[3] = { 0, "video", "audio" };
			AVSValue args[3] = { env->SaveString(fn.GetShortPath().mb_str(csConvLocal)), false, true };

			// Load DirectShowSource.dll from app dir if it exists
			wxFileName dsspath(StandardPaths::DecodePath("?data/DirectShowSource.dll"));
			if (dsspath.FileExists()) {
				env->Invoke("LoadPlugin",env->SaveString(dsspath.GetShortPath().mb_str(csConvLocal)));
			}

			// Load audio with DSS if it exists
			if (env->FunctionExists("DirectShowSource")) {
				script = env->Invoke("DirectShowSource", AVSValue(args,3),argnames);
			}
			// Otherwise fail
			else {
				throw agi::AudioProviderOpenError("No suitable audio source filter found. Try placing DirectShowSource.dll in the Aegisub application directory.", 0);
			}
		}

		LoadFromClip(script);
	}
	catch (AvisynthError &err) {
		std::string errmsg(err.msg);
		if (errmsg.find("filter graph manager won't talk to me") != errmsg.npos)
			throw agi::AudioDataNotFoundError("Avisynth error: " + errmsg, 0);
		else
			throw agi::AudioProviderOpenError("Avisynth error: " + errmsg, 0);
	}
}
Example #11
AVSValueStruct CAsifScriptEnvironment::Invoke(const char* command) {
    AVSValueStruct ir;
    ir.arraysize = 0;
    ir.errors = false;
    ir.returnvalue.ival = 0;
    ir.type = 0;

    std::vector<const char *> tempargnames(args.size());

    for (size_t i = 0; i < tempargnames.size(); i++)
        tempargnames[i] = (argnames[i].empty() ? nullptr : argnames[i].c_str());

    try {
        AVSValue ret = envse->Invoke(command, AVSValue(args.data(), args.size()), tempargnames.data());

        if (ret.IsClip()) {
            ir.returnvalue.cval = new CAsifClip(ret.AsClip(), envse);
            ir.type = 1;
        } else if (ret.IsBool()) {
            ir.returnvalue.bval = ret.AsBool();
            ir.type = 2;
        } else if (ret.IsInt()) {
            ir.returnvalue.ival = ret.AsInt();
            ir.type = 3;
        } else if (ret.IsFloat()) {
            ir.returnvalue.fval = (float)ret.AsFloat();
            ir.type = 4;
        } else if (ret.IsString()) {
            ir.returnvalue.sval = ret.AsString();
            ir.type = 5;
        } else if (ret.IsArray()) {
            //	ir.returnvalue.aval=ret.
            ir.arraysize = ret.ArraySize();
            ir.type = 6;
        }

    } catch (AvisynthError &e) {
        ir.type = 100;
        ir.returnvalue.sval = e.msg;
        ir.errors = 1;
    }

    ResetArgs();

    return ir;
}
Example #12
AVSValue __cdecl
create_temmod(AVSValue args, void* user_data, IScriptEnvironment* env)
{
    double thy = args[1].AsFloat(8.0);
    if (thy < 0) {
        env->ThrowError("TEMmod: threshY must be higher than zero.");
    }

    int chroma = args[5].AsInt(1);
    if (chroma < 0 || chroma > 2) {
        env->ThrowError("TEMmod: chroma must be set to 0, 1, or 2.");
    }
    double thc = args[2].AsFloat(8.0);
    if (chroma == 1 && thc < 0) {
        env->ThrowError("TEMmod: threshC must be higher than zero.");
    }

    int type = args[3].AsInt(4);
    if (type < 1 || type > 5) {
        env->ThrowError("TEMmod: type must be between 1 and 5.");
    }

    int link = args[4].AsInt(1);
    if (chroma > 0 && (link < 0 || link > 2)) {
        env->ThrowError("TEMmod: link must be set to 0, 1 or 2.");
    }

    bool invert = args[6].AsBool(false);

    PClip clip = args[0].AsClip();
    if (args[7].AsBool(false)) {
        try {
            AVSValue blur[2] = {clip, 1.0};
            clip = env->Invoke("Blur", AVSValue(blur, 2)).AsClip();
        } catch (IScriptEnvironment::NotFound) {
            env->ThrowError("TEMmod: failed to invoke Blur().");
        }
    }

    float scale = static_cast<float>(args[8].AsFloat(0.0));
    if (scale < 0.0f) {
        env->ThrowError("TEMmod: scale must be higher than zero.");
    }

    return new TEMmod(clip, thy, thc, type, chroma, link, invert, scale, env);
}
Example #13
//================================ TavisynthAudio ================================
TavisynthAudio::TavisynthAudio(const CMediaType &mt,TsampleFormat &fmt,IffdshowBase *deci,const char *scriptName):
    script(NULL),
    env(NULL),clip(NULL)
{
    if (ok) {
        env=CreateScriptEnvironment(AVISYNTH_INTERFACE_VERSION);
        if (env) {
            Textradata extradata(mt);
            script=::Tavisynth::getScriptAudio(1+extradata.size+1,(const char*)extradata.data,extradata.size);
            if (!script) {
                ok=false;
                return;
            }
            AVSValue eval_args[]= {script,scriptName};
            try {
                AVSValue val=env->Invoke("Eval",AVSValue(eval_args,2));
                if (val.IsClip()) {
                    clip=new PClip(val,env);
                    //*clip=val.AsClip();
                    vi=&(*clip)->GetVideoInfo();
                    switch (vi->SampleType()) {
                        case SAMPLE_INT16:
                            fmt.sf=TsampleFormat::SF_PCM16;
                            break;
                        case SAMPLE_INT24:
                            fmt.sf=TsampleFormat::SF_PCM24;
                            break;
                        case SAMPLE_INT32:
                            fmt.sf=TsampleFormat::SF_PCM32;
                            break;
                        case SAMPLE_FLOAT:
                            fmt.sf=TsampleFormat::SF_FLOAT32;
                            break;
                    }
                    ok=true;
                }
            } catch (AvisynthError &err) {
                if (deci) {
                    deci->dbgError(text<char_t>(err.msg));
                }
                ok=false;
            }
        }
    }
}
Example #14
static AVSValue __cdecl
create_iscombed(AVSValue args, void*, ise_t* env)
{
    enum { CLIP, CTHRESH, MTHRESH, MI, BLOCKX, BLOCKY, METRIC, OPT };
    CombMask* cm = nullptr;

    try {
        AVSValue cf = env->GetVar("current_frame");
        validate(!cf.IsInt(),
                 "This filter can only be used within ConditionalFilter.");
        int n = cf.AsInt();

        PClip clip = args[CLIP].AsClip();
        int metric = args[METRIC].AsInt(0);
        int cth = args[CTHRESH].AsInt(metric == 0 ? 6 : 10);
        int mth = args[MTHRESH].AsInt(9);
        int mi = args[MI].AsInt(80);
        int blockx = args[BLOCKX].AsInt(16);
        int blocky = args[BLOCKY].AsInt(16);
        bool is_avsplus = env->FunctionExists("SetFilterMTMode");
        arch_t arch = get_arch(args[OPT].AsInt(-1), is_avsplus);

        validate(mi < 0 || mi > 128, "MI must be between 0 and 128.");
        validate(blockx != 8 && blockx != 16 && blockx != 32,
                 "blockx must be set to 8, 16 or 32.");
        validate(blocky != 8 && blocky != 16 && blocky != 32,
                 "blocky must be set to 8, 16 or 32.");

        cm = new CombMask(clip, cth, mth, false, arch, false, metric, is_avsplus);

        bool is_combed = (get_check_combed(arch))(
            cm->GetFrame(n, env), mi, blockx, blocky, is_avsplus, env);

        delete cm;

        return AVSValue(is_combed);

    } catch (std::runtime_error& e) {
        if (cm) delete cm;
        env->ThrowError("IsCombed: %s", e.what());
    }
    return 0;
}
Example #15
/// @brief Read from environment
/// @param _clip
///
void AvisynthAudioProvider::LoadFromClip(AVSValue _clip) {
	AVSValue script;

	// Check if it has audio
	VideoInfo vi = _clip.AsClip()->GetVideoInfo();
	if (!vi.HasAudio()) throw agi::AudioDataNotFoundError("No audio found.", 0);

	IScriptEnvironment *env = avs_wrapper.GetEnv();

	// Convert to one channel
	char buffer[1024];
	strcpy(buffer,lagi_wxString(OPT_GET("Audio/Downmixer")->GetString()).mb_str(csConvLocal));
	script = env->Invoke(buffer, _clip);

	// Convert to 16 bits per sample
	script = env->Invoke("ConvertAudioTo16bit", script);
	vi = script.AsClip()->GetVideoInfo();

	// Convert sample rate
	int setsample = OPT_GET("Provider/Audio/AVS/Sample Rate")->GetInt();
	if (vi.SamplesPerSecond() < 32000) setsample = 44100;
	if (setsample != 0) {
		AVSValue args[2] = { script, setsample };
		script = env->Invoke("ResampleAudio", AVSValue(args,2));
	}

	// Set clip
	PClip tempclip = script.AsClip();
	vi = tempclip->GetVideoInfo();

	// Read properties
	channels = vi.AudioChannels();
	num_samples = vi.num_audio_samples;
	sample_rate = vi.SamplesPerSecond();
	bytes_per_sample = vi.BytesPerAudioSample();
	float_samples = false;

	clip = tempclip;
}
Example #16
void CAsifScriptEnvironment::SetVar(const char* varname, AVSValueStruct varvalue) {
    AVSValue var;

    if (varvalue.type == 0) {
        var = AVSValue();
    } else if (varvalue.type == 1) {
        var = AVSValue(varvalue.returnvalue.cval->video);
    } else if (varvalue.type == 2) {
        var = AVSValue(varvalue.returnvalue.bval);
    } else if (varvalue.type == 3) {
        var = AVSValue(varvalue.returnvalue.ival);
    } else if (varvalue.type == 4) {
        var = AVSValue(varvalue.returnvalue.fval);
    } else if (varvalue.type == 5) {
        var = AVSValue(varvalue.returnvalue.sval);
    } else if (varvalue.type == 6) {
        var = AVSValue(0);
    }

    envse->SetGlobalVar(varname, var);
}
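Reading such a variable back is the other half of the round trip. The helper below is a hypothetical sketch: the classic IScriptEnvironment::GetVar throws IScriptEnvironment::NotFound when the name was never set, whereas the IScriptEnvironment2 overload used in Examples #1 and #5 reports that through its bool return value instead.

// Hypothetical helper: fetch an int variable stored earlier via SetGlobalVar.
bool TryGetIntVar(IScriptEnvironment* env, const char* name, int* out) {
    try {
        AVSValue v = env->GetVar(name);
        if (!v.IsInt())
            return false;          // defined, but holds a different type
        *out = v.AsInt();
        return true;
    }
    catch (IScriptEnvironment::NotFound&) {
        return false;              // the variable was never set
    }
}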
Example #17
bool Tavisynth::getVersion(const Tconfig *config,ffstring &vers,ffstring &license)
{
    bool res=false;
    Tavisynth *dl=new Tavisynth;
    if (dl->ok) {
        IScriptEnvironment *env=dl->CreateScriptEnvironment(AVISYNTH_INTERFACE_VERSION);
        try {
            char script[]="VersionString";
            AVSValue eval_args[]= {script,"ffdshow_version_avisynth_script"};
            AVSValue val=env->Invoke("Eval",AVSValue(eval_args,2));
            vers=val.AsString();
            license=_l("(C) 2000-2003 Ben Rudiak-Gold and all subsequent developers");
            res=true;
        } catch (AvisynthError &err) {
            vers=text<char_t>(err.msg);
        }
        delete env;
    } else {
        vers=_l("not found");
        license.clear();
    }
    delete dl;
    return res;
}
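The two-element array passed to Eval above is worth calling out: the first element is the script text to evaluate and the second is a pseudo-filename used only in error reporting. A compact sketch of the same pattern, assuming an env pointer is already available and using the core Version() filter as a harmless expression:

AVSValue eval_args[2] = { "Version()", "my_inline_script" };  // expression + name used in error messages
AVSValue val = env->Invoke("Eval", AVSValue(eval_args, 2));
if (val.IsClip()) {
    PClip clip = val.AsClip();
    // ... use the clip ...
}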
Example #18
ConditionalReader::ConditionalReader(PClip _child, const char* filename, const char _varname[], bool _show, IScriptEnvironment* env) :
    GenericVideoFilter(_child), show(_show), variableName(_varname)
{
    FILE * f;
    char *line;
    int lines;

    if ((f = fopen(filename, "rb")) == NULL)
        env->ThrowError("ConditionalReader: Could not open file '%s'.", filename);

    lines = 0;
    mode = MODE_UNKNOWN;

    while ((line = readline(f)) != NULL) {
        char *ptr;
        int fields;

        lines++;

        /* We skip spaces */
        ptr = skipspaces(line);

        /* Skip comment lines or empty lines */
        if(iscomment(ptr) || *ptr == '\0') {
            free(line);
            continue;
        }

        if (mode == MODE_UNKNOWN) {
            // We have not received a mode - we expect a type.
            char keyword [1024];
            char type [1024];
            fields = sscanf(ptr,"%1023s %1023s", keyword, type);
            if (fields) {
                if (!strcasecmp((const char*)keyword, "type")) {
                    if (!strcasecmp((const char*)type, "int")) {
                        mode = MODE_INT;
                        intVal = new int[vi.num_frames];
                    } else if (!strcasecmp((const char*)type, "float")) {
                        mode = MODE_FLOAT;
                        floatVal = new float[vi.num_frames];
                    } else if (!strcasecmp((const char*)type, "bool")) {
                        mode = MODE_BOOL;
                        boolVal = new bool[vi.num_frames];
                    } else {
                        ThrowLine("ConditionalReader: Unknown 'type' specified in line %d", lines, env);
                    }// end if compare type
                }// end if compare keyword
            }// end if fields

        } else { // We have a defined mode and allocated the values.

            char keyword [1024];
            char type [1024];
            fields = sscanf(ptr,"%1023s %1023s", keyword, type);

            if (!strcasecmp((const char*)keyword, "default")) {
                AVSValue def = ConvertType((const char*)type, lines, env);
                SetRange(0, vi.num_frames-1, def);
                free(line);
                continue;
            } // end if "default"

            if (ptr[0] == 'R' || ptr[0] == 'r') {  // Range
                ptr++;
                ptr = skipspaces(ptr);
                int start;
                int stop;
                char value [64];
                fields = sscanf(ptr, "%d %d %63s", &start, &stop, value);

                if (fields != 3)
                    ThrowLine("ConditionalReader: Could not read range in line %d", lines, env);
                if (start > stop)
                    ThrowLine("ConditionalReader: The start frame is after the end frame in line %d", lines, env);

                AVSValue set = ConvertType((const char*)value, lines, env);
                SetRange(start, stop, set);
            } else if (ptr[0] == 'I' || ptr[0] == 'i') {  // Interpolate
                if (mode == MODE_BOOL)
                    ThrowLine("ConditionalReader: Cannot interpolate booleans in line %d", lines, env);

                ptr++;
                ptr = skipspaces(ptr);
                int start;
                int stop;
                char start_value [64];
                char stop_value [64];
                fields = sscanf(ptr, "%d %d %63s %63s", &start, &stop, start_value, stop_value);

                if (fields != 4)
                    ThrowLine("ConditionalReader: Could not read interpolation range in line %d", lines, env);
                if (start > stop)
                    ThrowLine("ConditionalReader: The start frame is after the end frame in line %d", lines, env);

                AVSValue set_start = ConvertType((const char*)start_value, lines, env);
                AVSValue set_stop = ConvertType((const char*)stop_value, lines, env);

                int range = stop-start;
                double diff = set_stop.AsFloat() - set_start.AsFloat();
                for (int i = 0; i<=range; i++) {
                    double where = (double)(i)/(double)range;
                    double n = where * diff + set_start.AsFloat();
                    SetFrame(i+start, (mode == MODE_FLOAT)
                             ? AVSValue(n)
                             : AVSValue((int) n));
                }
            } else {
                char value [64];
                int cframe;
                fields = sscanf(ptr, "%d %63s", &cframe, value);
                if (fields == 2) {
                    AVSValue set = ConvertType((const char*)value, lines, env);
                    SetFrame(cframe, set);
                } else {
                    AVXLOG_INFO("ConditionalReader: Ignored line %d.\n", lines);
                }
            }

        } // End we have defined type
        free(line);
    }// end while still some file left to read.

    /* We are done with the file */
    fclose(f);

    if (mode == MODE_UNKNOWN)
        env->ThrowError("ConditionalReader: Mode was not defined!");

}
Example #19
AVSValue __cdecl CreateRadiusForm(AVSValue args, void *user_data, IScriptEnvironment *env)
{
   UNUSED(user_data); UNUSED(env);
   return AVSValue((new String(rf(-args[0].AsInt(1), -args[0].AsInt(1), args[0].AsInt(1), args[0].AsInt(1), args[1].AsBool(true))))->c_str()); /* grrrrr -> memory leak */
}
Example #20
AVSValue __cdecl CreateStringConverter(AVSValue args, void *user_data, IScriptEnvironment *env)
{
   UNUSED(user_data); UNUSED(env);
   return AVSValue((new String(sc(args[0].AsString("x"))))->c_str()); /* grrrrr -> memory leak */
}
Example #21
AVSValue __cdecl CreateGenericForm(AVSValue args, void *user_data, IScriptEnvironment *env)
{
   UNUSED(user_data); UNUSED(env);
   return AVSValue((new String(bf(args[0].AsInt(-1), args[1].AsInt(-1), args[2].AsInt(1), args[3].AsInt(1), args[4].AsBool(true))))->c_str()); /* grrrrr -> memory leak */
}
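The "/* grrrrr -> memory leak */" comments above flag a real problem: the String objects are allocated with new and never freed; only their internal buffer is handed to AVSValue. One common way out is env->SaveString(), which copies the text into memory owned by the script environment. A sketch of that variant for the first of the three functions (rf() and its arguments are taken verbatim from the example above; the String type is assumed to behave as there):

AVSValue __cdecl CreateRadiusForm(AVSValue args, void *user_data, IScriptEnvironment *env)
{
   UNUSED(user_data);
   // SaveString copies the buffer into environment-owned storage, so no
   // heap-allocated String has to outlive this call.
   String s = rf(-args[0].AsInt(1), -args[0].AsInt(1), args[0].AsInt(1), args[0].AsInt(1), args[1].AsBool(true));
   return AVSValue(env->SaveString(s.c_str()));
}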
Example #22
MVFlowInter::MVFlowInter(PClip _child, PClip super, PClip _mvbw, PClip _mvfw,  int _time256, double _ml,
                           bool _blend, int nSCD1, int nSCD2, bool _isse, bool _planar, IScriptEnvironment* env) :
GenericVideoFilter(_child),
MVFilter(_mvfw, "MFlowInter", env),
mvClipB(_mvbw, nSCD1, nSCD2, env),
mvClipF(_mvfw, nSCD1, nSCD2, env)
{
   time256 = _time256;
   ml = _ml;
   isse = _isse;
   planar = _planar;
   blend = _blend;

	if (!mvClipB.IsBackward())
			env->ThrowError("MFlowInter: wrong backward vectors");
	if (mvClipF.IsBackward())
			env->ThrowError("MFlowInter: wrong forward vectors");

   CheckSimilarity(mvClipB, "mvbw", env);
   CheckSimilarity(mvClipF, "mvfw", env);

        SuperParams64Bits params;
        memcpy(&params, &super->GetVideoInfo().num_audio_samples, 8);
        int nHeightS = params.nHeight;
        int nSuperHPad = params.nHPad;
        int nSuperVPad = params.nVPad;
        int nSuperPel = params.nPel;
        int nSuperModeYUV = params.nModeYUV;
        int nSuperLevels = params.nLevels;
        int nSuperWidth = super->GetVideoInfo().width; // really super
        int nSuperHeight = super->GetVideoInfo().height;

        if (nHeight != nHeightS || nWidth != nSuperWidth-nSuperHPad*2)
                env->ThrowError("MFlowInter : wrong super frame clip");

    if (nPel==1)
        finest = super; // v2.0.9.1
    else
    {
    finest = new MVFinest(super, isse, env);
    AVSValue cache_args[1] = { finest };
    finest = env->Invoke("InternalCache", AVSValue(cache_args,1)).AsClip(); // add cache for speed
    }

//   if (nWidth  != vi.width || (nWidth + nHPadding*2)*nPel != finest->GetVideoInfo().width ||
//       nHeight  != vi.height || (nHeight + nVPadding*2)*nPel != finest->GetVideoInfo().height )
//			env->ThrowError("MVFlowInter: wrong source or finest frame size");

	 // may be padded for full frame cover
	 nBlkXP = (nBlkX*(nBlkSizeX - nOverlapX) + nOverlapX < nWidth) ? nBlkX+1 : nBlkX;
	 nBlkYP = (nBlkY*(nBlkSizeY - nOverlapY) + nOverlapY < nHeight) ? nBlkY+1 : nBlkY;
	 nWidthP = nBlkXP*(nBlkSizeX - nOverlapX) + nOverlapX;
	 nHeightP = nBlkYP*(nBlkSizeY - nOverlapY) + nOverlapY;
	 // for YV12
	 nWidthPUV = nWidthP/2;
	 nHeightPUV = nHeightP/yRatioUV;
	 nHeightUV = nHeight/yRatioUV;
	 nWidthUV = nWidth/2;

	 nHPaddingUV = nHPadding/2;
	 nVPaddingUV = nVPadding/yRatioUV;

	 VPitchY = (nWidthP + 15) & (~15);
	 VPitchUV = (nWidthPUV + 15) & (~15);

 	 VXFullYB = new BYTE [nHeightP*VPitchY];
	 VXFullUVB = new BYTE [nHeightPUV*VPitchUV];
 	 VYFullYB = new BYTE [nHeightP*VPitchY];
	 VYFullUVB = new BYTE [nHeightPUV*VPitchUV];

	 VXFullYF = new BYTE [nHeightP*VPitchY];
	 VXFullUVF = new BYTE [nHeightPUV*VPitchUV];
 	 VYFullYF = new BYTE [nHeightP*VPitchY];
	 VYFullUVF = new BYTE [nHeightPUV*VPitchUV];

  	 VXSmallYB = new BYTE [nBlkXP*nBlkYP];
  	 VYSmallYB = new BYTE [nBlkXP*nBlkYP];
	 VXSmallUVB = new BYTE [nBlkXP*nBlkYP];
	 VYSmallUVB = new BYTE [nBlkXP*nBlkYP];

  	 VXSmallYF = new BYTE [nBlkXP*nBlkYP];
  	 VYSmallYF = new BYTE [nBlkXP*nBlkYP];
	 VXSmallUVF = new BYTE [nBlkXP*nBlkYP];
	 VYSmallUVF = new BYTE [nBlkXP*nBlkYP];

 	 VXFullYBB = new BYTE [nHeightP*VPitchY];
	 VXFullUVBB = new BYTE [nHeightPUV*VPitchUV];
 	 VYFullYBB = new BYTE [nHeightP*VPitchY];
	 VYFullUVBB = new BYTE [nHeightPUV*VPitchUV];

	 VXFullYFF = new BYTE [nHeightP*VPitchY];
	 VXFullUVFF = new BYTE [nHeightPUV*VPitchUV];
 	 VYFullYFF = new BYTE [nHeightP*VPitchY];
	 VYFullUVFF = new BYTE [nHeightPUV*VPitchUV];

  	 VXSmallYBB = new BYTE [nBlkXP*nBlkYP];
  	 VYSmallYBB = new BYTE [nBlkXP*nBlkYP];
	 VXSmallUVBB = new BYTE [nBlkXP*nBlkYP];
	 VYSmallUVBB = new BYTE [nBlkXP*nBlkYP];

  	 VXSmallYFF = new BYTE [nBlkXP*nBlkYP];
  	 VYSmallYFF = new BYTE [nBlkXP*nBlkYP];
	 VXSmallUVFF = new BYTE [nBlkXP*nBlkYP];
	 VYSmallUVFF = new BYTE [nBlkXP*nBlkYP];

	 MaskSmallB = new BYTE [nBlkXP*nBlkYP];
	 MaskFullYB = new BYTE [nHeightP*VPitchY];
	 MaskFullUVB = new BYTE [nHeightPUV*VPitchUV];

	 MaskSmallF = new BYTE [nBlkXP*nBlkYP];
	 MaskFullYF = new BYTE [nHeightP*VPitchY];
	 MaskFullUVF = new BYTE [nHeightPUV*VPitchUV];

	 SADMaskSmallB = new BYTE [nBlkXP*nBlkYP];
	 SADMaskSmallF = new BYTE [nBlkXP*nBlkYP];


	 int CPUF_Resize = env->GetCPUFlags();
	 if (!isse) CPUF_Resize = (CPUF_Resize & ~CPUF_INTEGER_SSE) & ~CPUF_SSE2;

	 upsizer = new SimpleResize(nWidthP, nHeightP, nBlkXP, nBlkYP, CPUF_Resize);
	 upsizerUV = new SimpleResize(nWidthPUV, nHeightPUV, nBlkXP, nBlkYP, CPUF_Resize);

	 LUTVB = new int[256];
	 LUTVF = new int[256];
	Create_LUTV(time256, LUTVB, LUTVF);

	if ( (pixelType & VideoInfo::CS_YUY2) == VideoInfo::CS_YUY2 && !planar)
   {
		DstPlanes =  new YUY2Planes(nWidth, nHeight);
   }

}
Example #23
static AVSValue __cdecl CreateFFIndex(AVSValue Args, void* UserData, IScriptEnvironment* Env) {
	FFMS_Init((int)AvisynthToFFCPUFlags(Env->GetCPUFlags()),  Args[7].AsBool(false));

	char ErrorMsg[1024];
	FFMS_ErrorInfo E;
	E.Buffer = ErrorMsg;
	E.BufferSize = sizeof(ErrorMsg);


	if (!Args[0].Defined())
    	Env->ThrowError("FFIndex: No source specified");

	const char *Source = Args[0].AsString();
	const char *CacheFile = Args[1].AsString("");
	int IndexMask = Args[2].AsInt(-1);
	int DumpMask = Args[3].AsInt(0);
	const char *AudioFile = Args[4].AsString("%sourcefile%.%trackzn%.w64");
	int ErrorHandling = Args[5].AsInt(FFMS_IEH_IGNORE);
	bool OverWrite = Args[6].AsBool(false);
	const char *DemuxerStr = Args[8].AsString("default");

	std::string DefaultCache(Source);
	DefaultCache.append(".ffindex");
	if (!strcmp(CacheFile, ""))
		CacheFile = DefaultCache.c_str();

	if (!strcmp(AudioFile, ""))
		Env->ThrowError("FFIndex: Specifying an empty audio filename is not allowed");

	int Demuxer;
	if (!strcmp(DemuxerStr, "default"))
		Demuxer = FFMS_SOURCE_DEFAULT;
	else if (!strcmp(DemuxerStr, "lavf"))
		Demuxer = FFMS_SOURCE_LAVF;
	else if (!strcmp(DemuxerStr, "matroska"))
		Demuxer = FFMS_SOURCE_MATROSKA;
	else if (!strcmp(DemuxerStr, "haalimpeg"))
		Demuxer = FFMS_SOURCE_HAALIMPEG;
	else if (!strcmp(DemuxerStr, "haaliogg"))
		Demuxer = FFMS_SOURCE_HAALIOGG;
	else
		Env->ThrowError("FFIndex: Invalid demuxer requested");

	FFMS_Index *Index = FFMS_ReadIndex(CacheFile, &E);
	if (OverWrite || !Index || (Index && FFMS_IndexBelongsToFile(Index, Source, 0) != FFMS_ERROR_SUCCESS)) {
		FFMS_Indexer *Indexer = FFMS_CreateIndexerWithDemuxer(Source, Demuxer, &E);
		if (!Indexer)
			Env->ThrowError("FFIndex: %s", E.Buffer);
		if (!(Index = FFMS_DoIndexing(Indexer, IndexMask, DumpMask, FFMS_DefaultAudioFilename, (void *)AudioFile, ErrorHandling, NULL, NULL, &E)))
			Env->ThrowError("FFIndex: %s", E.Buffer);
		if (FFMS_WriteIndex(CacheFile, Index, &E)) {
			FFMS_DestroyIndex(Index);
			Env->ThrowError("FFIndex: %s", E.Buffer);
		}
		FFMS_DestroyIndex(Index);
		if (!OverWrite)
			return AVSValue(1);
		else
			return AVSValue(2);
	} else {
		FFMS_DestroyIndex(Index);
		return AVSValue(0);
	}
}
Example #24
Overlay::Overlay(PClip _child, AVSValue args, IScriptEnvironment *env) :
GenericVideoFilter(_child) {

  full_range = args[ARG_FULL_RANGE].AsBool(false);  // Maintain CCIR601 range when converting to/from RGB.

  // Make copy of the VideoInfo
  inputVi = (VideoInfo*)malloc(sizeof(VideoInfo));
  memcpy(inputVi, &vi, sizeof(VideoInfo));

  mask = 0;
  opacity = (int)(256.0*args[ARG_OPACITY].AsDblDef(1.0)+0.5);
  offset_x = args[ARG_X].AsInt(0);
  offset_y = args[ARG_Y].AsInt(0);

  overlay = args[ARG_OVERLAY].AsClip();
  overlayVi = overlay->GetVideoInfo();
  overlayConv = SelectInputCS(&overlayVi, env);

  if (!overlayConv) {
    AVSValue new_args[3] = { overlay, false, (full_range) ? "PC.601" : "rec601" };
    try {
      overlay = env->Invoke("ConvertToYV24", AVSValue(new_args, 3)).AsClip();
    } catch (...)  {}

    overlayVi = overlay->GetVideoInfo();
    overlayConv = SelectInputCS(&overlayVi, env);

    if (!overlayConv) {  // ok - now we've tried everything ;)
      env->ThrowError("Overlay: Overlay image colorspace not supported.");
    }
  }

  greymask = args[ARG_GREYMASK].AsBool(true);  // Grey mask, default true
  ignore_conditional = args[ARG_IGNORE_CONDITIONAL].AsBool(false);  // Don't ignore conditionals by default

  if (args[ARG_MASK].Defined()) {  // Mask defined
    mask = args[ARG_MASK].AsClip();
    maskVi = mask->GetVideoInfo();
    if (maskVi.width!=overlayVi.width) {
      env->ThrowError("Overlay: Mask and overlay must have the same image size! (Width is not the same)");
    }
    if (maskVi.height!=overlayVi.height) {
      env->ThrowError("Overlay: Mask and overlay must have the same image size! (Height is not the same)");
    }

    maskConv = SelectInputCS(&maskVi, env);
    if (!maskConv) {
      AVSValue new_args[3] = { mask, false, (full_range) ? "PC.601" : "rec601" };

      try {
        mask = env->Invoke((greymask) ? "ConvertToY8" : "ConvertToYV24", AVSValue(new_args, 3)).AsClip();
      } catch (...)  {}
      maskVi = mask->GetVideoInfo();
      maskConv = SelectInputCS(&maskVi, env);
      if (!maskConv) {
        env->ThrowError("Overlay: Mask image colorspace not supported.");
      }
    }

    maskImg = new Image444(maskVi.width, maskVi.height);

    if (greymask) {
      maskImg->free_chroma();
      maskImg->SetPtr(maskImg->GetPtr(PLANAR_Y), PLANAR_U);
      maskImg->SetPtr(maskImg->GetPtr(PLANAR_Y), PLANAR_V);
    }

  }

  inputCS = vi.pixel_type;
  inputConv = SelectInputCS(inputVi, env);

  if (!inputConv) {
    AVSValue new_args[3] = { child, false, (full_range) ? "PC.601" : "rec601" };
    try {
      child = env->Invoke("ConvertToYV24", AVSValue(new_args, 3)).AsClip();
    } catch (...)  {}

    vi = child->GetVideoInfo();
    memcpy(inputVi, &vi, sizeof(VideoInfo));
    inputConv = SelectInputCS(inputVi, env);
    if (!inputConv) {
      env->ThrowError("Overlay: Colorspace not supported.");
    }
  }

  outputConv = SelectOutputCS(args[ARG_OUTPUT].AsString(0),env);

  if (vi.IsYV24() && inputCS == vi.pixel_type)  // Fast path
    img = NULL;
  else
    img = new Image444(vi.width, vi.height);

  overlayImg = new Image444(overlayVi.width, overlayVi.height);

  func = SelectFunction(args[ARG_MODE].AsString("Blend"), env);

}
Example #25
PClip toGrayScale(IScriptEnvironment* env, PClip clip) {
	AVSValue args[1] = { clip };
	return env->Invoke("Grayscale", AVSValue(args, 1)).AsClip();
}
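The same tiny-wrapper pattern extends to filters that take several arguments, optionally by name. A sketch using the core BilinearResize filter (the helper name is made up); zero entries in the names array stand for positional arguments, mirroring the argnames usage in Examples #10 and #28.

PClip resizeClip(IScriptEnvironment* env, PClip clip, int width, int height) {
	// 0 = positional; a string here would bind the value to a named parameter.
	const char* argnames[3] = { 0, 0, 0 };
	AVSValue args[3] = { clip, width, height };
	return env->Invoke("BilinearResize", AVSValue(args, 3), argnames).AsClip();
}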
Example #26
ConditionalReader::ConditionalReader(PClip _child, const char* filename, const char _varname[], bool _show, IScriptEnvironment* env)
 : GenericVideoFilter(_child), show(_show), variableName(_varname), mode(MODE_UNKNOWN), offset(0), stringcache(0)
{
  FILE * f;
  char *line = 0;
  int lines;

  if ((f = fopen(filename, "rb")) == NULL)
    env->ThrowError("ConditionalReader: Could not open file '%s'.", filename);

  lines = 0;

  try {
    while ((line = readline(f)) != NULL) {
      char *ptr;
      int fields;

      lines++;

      /* We skip spaces */
      ptr = skipspaces(line);

      /* Skip comment lines or empty lines */
      if(iscomment(ptr) || *ptr == '\0') {
        free(line);
        line = 0;
        continue;
      }

      if (mode == MODE_UNKNOWN) {
        // We have not received a mode - we expect a type.
        char* keyword = ptr;

        ptr = findspace(ptr);
        if (*ptr) {
          *ptr++ = '\0';
          if (!lstrcmpi(keyword, "type")) {
            /* We skip spaces */
            char* type = skipspaces(ptr);

            ptr = findspace(type);
            *ptr = '\0';

            if (!lstrcmpi(type, "int")) {
              mode = MODE_INT;
              intVal = new int[vi.num_frames];
            } else if (!lstrcmpi(type, "float")) {
              mode = MODE_FLOAT;
              floatVal = new float[vi.num_frames];
            } else if (!lstrcmpi(type, "bool")) {
              mode = MODE_BOOL;
              boolVal = new bool[vi.num_frames];
            } else if (!lstrcmpi(type, "string")) {
              mode = MODE_STRING;
              stringVal = new const char*[vi.num_frames];
            } else {
              ThrowLine("ConditionalReader: Unknown 'Type' specified in line %d", lines, env);
            }// end if compare type
            SetRange(0, vi.num_frames-1, AVSValue());
          }// end if compare keyword
        }// end if fields

      } else { // We have a defined mode and allocated the values.

        char* keyword = ptr;
        char* type = findspace(keyword);

        if (*type) *type++ = '\0';

        if (!lstrcmpi(keyword, "default")) {
          AVSValue def = ConvertType(type, lines, env);
          SetRange(0, vi.num_frames-1, def);

        } else if (!lstrcmpi(keyword, "offset")) {
          fields = sscanf(type, "%d", &offset);
          if (fields != 1) 
            ThrowLine("ConditionalReader: Could not read Offset in line %d", lines, env);

        } else if (keyword[0] == 'R' || keyword[0] == 'r') {  // Range
          int start;
          int stop;

          type = skipspaces(type);
          fields = sscanf(type, "%d", &start);

          type = findspace(type);
          type = skipspaces(type);
          fields += sscanf(type, "%d", &stop);

          type = findspace(type);
          if (!*type || fields != 2)
            ThrowLine("ConditionalReader: Could not read Range in line %d", lines, env);

          if (start > stop)
            ThrowLine("ConditionalReader: The Range start frame is after the end frame in line %d", lines, env);

          AVSValue set = ConvertType(type+1, lines, env);
          SetRange(start, stop, set);

        } else if (keyword[0] == 'I' || keyword[0] == 'i') {  // Interpolate
          if (mode == MODE_BOOL)
            ThrowLine("ConditionalReader: Cannot Interpolate booleans in line %d", lines, env);

          if (mode == MODE_STRING)
            ThrowLine("ConditionalReader: Cannot Interpolate strings in line %d", lines, env);

          type = skipspaces(type);
          int start;
          int stop;
          char start_value[64];
          char stop_value[64];
          fields = sscanf(type, "%d %d %63s %63s", &start, &stop, start_value, stop_value);

          if (fields != 4) 
            ThrowLine("ConditionalReader: Could not read Interpolation range in line %d", lines, env);
          if (start > stop)
            ThrowLine("ConditionalReader: The Interpolation start frame is after the end frame in line %d", lines, env);

          start_value[63] = '\0';
          AVSValue set_start = ConvertType(start_value, lines, env);

          stop_value[63] = '\0';
          AVSValue set_stop = ConvertType(stop_value, lines, env);

          const int range = stop-start;
          const double diff = (set_stop.AsFloat() - set_start.AsFloat()) / range;
          for (int i = 0; i<=range; i++) {
            const double n = i * diff + set_start.AsFloat();
            SetFrame(i+start, (mode == MODE_FLOAT)
                    ? AVSValue(n)
                    : AVSValue((int)(n+0.5)));
          }
        } else {
          int cframe;
          fields = sscanf(keyword, "%d", &cframe);
          if (*type && fields == 1) {
            AVSValue set = ConvertType(type, lines, env);
            SetFrame(cframe, set);
          } else {
            ThrowLine("ConditionalReader: Do not understand line %d", lines, env);
          }
        }
      
      } // End we have defined type
      free(line);
      line = 0;
    }// end while still some file left to read.
  }
  catch (...) {
    if (line) free(line);
    fclose(f);
    CleanUp();
    throw;
  }

  /* We are done with the file */
  fclose(f);

  if (mode == MODE_UNKNOWN)
    env->ThrowError("ConditionalReader: Type was not defined!");

}
Example #27
AVSValue ConditionalReader::ConvertType(const char* content, int line, IScriptEnvironment* env)
{
  if (mode == MODE_UNKNOWN)
    ThrowLine("ConditionalReader: Type has not been defined. Line %d", line, env);

  int fields;
  switch (mode) {
    case MODE_INT:
      int ival;
      fields = sscanf(content, "%d", &ival);
      if (fields != 1)
        ThrowLine("ConditionalReader: Could not find an expected integer at line %d!", line, env);

      return AVSValue(ival);

    case MODE_FLOAT:
      float fval;
      fields = sscanf(content, "%e", &fval);
      if (fields != 1)
        ThrowLine("ConditionalReader: Could not find an expected float at line %d!", line, env);

      return AVSValue(fval);

    case MODE_BOOL:
      char bval[8];
      bval[0] = '\0';
      fields = sscanf(content, "%7s", bval);
      bval[7] = '\0';
      if (!lstrcmpi(bval, "true")) {
        return AVSValue(true);
      }
      else if (!lstrcmpi(bval, "t")) {
        return AVSValue(true);
      }
      else if (!lstrcmpi(bval, "yes")) {
        return AVSValue(true);
      }
      else if (!lstrcmp(bval, "1")) {
        return AVSValue(true);
      }
      else if (!lstrcmpi(bval, "false")) {
        return AVSValue(false);
      }
      else if (!lstrcmpi(bval, "f")) {
        return AVSValue(false);
      }
      else if (!lstrcmpi(bval, "no")) {
        return AVSValue(false);
      } 
      else if (!lstrcmp(bval, "0")) {
        return AVSValue(false);
      } 
      ThrowLine("ConditionalReader: Boolean value was not true or false in line %d", line, env);

    case MODE_STRING:
      StringCache *str;

      // Look for an existing duplicate
      for (str = stringcache; str; str = str->next ) {
        if (!lstrcmp(str->string, content)) break;
      }
      // Could not find one, add it
      if (!str) {
        str = new StringCache;
        str->string = _strdup(content);
        str->next   = stringcache;
        stringcache = str;
      }
      return AVSValue(str->string);
  }
  return AVSValue();
}
Example #28
AVSValue AvisynthVideoProvider::Open(agi::fs::path const& filename) {
	IScriptEnvironment *env = avs.GetEnv();
	char *videoFilename = env->SaveString(agi::fs::ShortName(filename).c_str());

	// Avisynth file, just import it
	if (agi::fs::HasExtension(filename, "avs")) {
		LOG_I("avisynth/video") << "Opening .avs file with Import";
		decoder_name = "Avisynth/Import";
		return env->Invoke("Import", videoFilename);
	}

	// Open avi file with AviSource
	if (agi::fs::HasExtension(filename, "avi")) {
		LOG_I("avisynth/video") << "Opening .avi file with AviSource";
		try {
			const char *argnames[2] = { 0, "audio" };
			AVSValue args[2] = { videoFilename, false };
			decoder_name = "Avisynth/AviSource";
			return env->Invoke("AviSource", AVSValue(args,2), argnames);
		}
		// On Failure, fallback to DSS
		catch (AvisynthError &err) {
			LOG_E("avisynth/video") << err.msg;
			LOG_I("avisynth/video") << "Failed to open .avi file with AviSource, trying DirectShowSource";
		}
	}

	// Open d2v with mpeg2dec3
	if (agi::fs::HasExtension(filename, "d2v") && env->FunctionExists("Mpeg2Dec3_Mpeg2Source")) {
		LOG_I("avisynth/video") << "Opening .d2v file with Mpeg2Dec3_Mpeg2Source";
		auto script = env->Invoke("Mpeg2Dec3_Mpeg2Source", videoFilename);
		decoder_name = "Avisynth/Mpeg2Dec3_Mpeg2Source";

		//if avisynth is 2.5.7 beta 2 or newer old mpeg2decs will crash without this
		if (env->FunctionExists("SetPlanarLegacyAlignment")) {
			AVSValue args[2] = { script, true };
			script = env->Invoke("SetPlanarLegacyAlignment", AVSValue(args,2));
		}
		return script;
	}

	// If that fails, try opening it with DGDecode
	if (agi::fs::HasExtension(filename, "d2v") && env->FunctionExists("DGDecode_Mpeg2Source")) {
		LOG_I("avisynth/video") << "Opening .d2v file with DGDecode_Mpeg2Source";
		decoder_name = "DGDecode_Mpeg2Source";
		return env->Invoke("Avisynth/Mpeg2Source", videoFilename);

		//note that DGDecode will also have issues like if the version is too
		// ancient but no sane person would use that anyway
	}

	if (agi::fs::HasExtension(filename, "d2v") && env->FunctionExists("Mpeg2Source")) {
		LOG_I("avisynth/video") << "Opening .d2v file with other Mpeg2Source";
		AVSValue script = env->Invoke("Mpeg2Source", videoFilename);
		decoder_name = "Avisynth/Mpeg2Source";

		//if avisynth is 2.5.7 beta 2 or newer old mpeg2decs will crash without this
		if (env->FunctionExists("SetPlanarLegacyAlignment"))
			script = env->Invoke("SetPlanarLegacyAlignment", script);

		return script;
	}

	// Try loading DirectShowSource2
	if (!env->FunctionExists("dss2")) {
		auto dss2path(config::path->Decode("?data/avss.dll"));
		if (agi::fs::FileExists(dss2path))
			env->Invoke("LoadPlugin", env->SaveString(agi::fs::ShortName(dss2path).c_str()));
	}

	// If DSS2 loaded properly, try using it
	if (env->FunctionExists("dss2")) {
		LOG_I("avisynth/video") << "Opening file with DSS2";
		decoder_name = "Avisynth/DSS2";
		return env->Invoke("DSS2", videoFilename);
	}

	// Try DirectShowSource
	// Load DirectShowSource.dll from app dir if it exists
	auto dsspath(config::path->Decode("?data/DirectShowSource.dll"));
	if (agi::fs::FileExists(dsspath))
		env->Invoke("LoadPlugin", env->SaveString(agi::fs::ShortName(dsspath).c_str()));

	// Then try using DSS
	if (env->FunctionExists("DirectShowSource")) {
		const char *argnames[3] = { 0, "video", "audio" };
		AVSValue args[3] = { videoFilename, true, false };
		decoder_name = "Avisynth/DirectShowSource";
		warning = "Warning! The file is being opened using Avisynth's DirectShowSource, which has unreliable seeking. Frame numbers might not match the real number. PROCEED AT YOUR OWN RISK!";
		LOG_I("avisynth/video") << "Opening file with DirectShowSource";
		return env->Invoke("DirectShowSource", AVSValue(args,3), argnames);
	}

	// Failed to find a suitable function
	LOG_E("avisynth/video") << "DSS function not found";
	throw VideoNotSupported("No function suitable for opening the video found");
}
Example #29
MVFlowBlur::MVFlowBlur(PClip _child, PClip super, PClip _mvbw, PClip _mvfw,  int _blur256, int _prec,
                           int nSCD1, int nSCD2, bool _isse, bool _planar, IScriptEnvironment* env) :
GenericVideoFilter(_child),
MVFilter(_mvfw, "MFlowBlur", env, 1, 0),
mvClipB(_mvbw, nSCD1, nSCD2, env, 1, 0),
mvClipF(_mvfw, nSCD1, nSCD2, env, 1, 0)
{
   blur256 = _blur256;
   prec = _prec;
   isse = _isse;
   planar = _planar;

   CheckSimilarity(mvClipB, "mvbw", env);
   CheckSimilarity(mvClipF, "mvfw", env);
	SuperParams64Bits params;
	memcpy(&params, &super->GetVideoInfo().num_audio_samples, 8);
	int nHeightS = params.nHeight;
	int nSuperHPad = params.nHPad;
	int nSuperVPad = params.nVPad;
	int nSuperPel = params.nPel;
	int nSuperModeYUV = params.nModeYUV;
	int nSuperLevels = params.nLevels;
	int nSuperWidth = super->GetVideoInfo().width; // really super
	int nSuperHeight = super->GetVideoInfo().height;

	if (   nHeight != nHeightS
	    || nWidth  != nSuperWidth - nSuperHPad * 2
	    || nPel    != nSuperPel)
	{
		env->ThrowError("MFlowBlur : wrong super frame clip");
	}

	if (nPel==1)
		finest = super; // v2.0.9.1
	else
	{
		finest = new MVFinest(super, isse, env);
		AVSValue cache_args[1] = { finest };
		finest = env->Invoke("InternalCache", AVSValue(cache_args,1)).AsClip(); // add cache for speed
	}

//	if (   nWidth  != vi.width  || (nWidth  + nHPadding*2)*nPel != finest->GetVideoInfo().width
//	    || nHeight != vi.height || (nHeight + nVPadding*2)*nPel != finest->GetVideoInfo().height)
//		env->ThrowError("MVFlowBlur: wrong source of finest frame size");


	nHeightUV = nHeight/yRatioUV;
	nWidthUV = nWidth/2;// for YV12
	nHPaddingUV = nHPadding/2;
	nVPaddingUV = nVPadding/yRatioUV;

	VPitchY = nWidth;
	VPitchUV= nWidthUV;

	VXFullYB = new BYTE [nHeight*VPitchY];
	VXFullUVB = new BYTE [nHeightUV*VPitchUV];
	VYFullYB = new BYTE [nHeight*VPitchY];
	VYFullUVB = new BYTE [nHeightUV*VPitchUV];

	VXFullYF = new BYTE [nHeight*VPitchY];
	VXFullUVF = new BYTE [nHeightUV*VPitchUV];
	VYFullYF = new BYTE [nHeight*VPitchY];
	VYFullUVF = new BYTE [nHeightUV*VPitchUV];

	VXSmallYB = new BYTE [nBlkX*nBlkY];
	VYSmallYB = new BYTE [nBlkX*nBlkY];
	VXSmallUVB = new BYTE [nBlkX*nBlkY];
	VYSmallUVB = new BYTE [nBlkX*nBlkY];

	VXSmallYF = new BYTE [nBlkX*nBlkY];
	VYSmallYF = new BYTE [nBlkX*nBlkY];
	VXSmallUVF = new BYTE [nBlkX*nBlkY];
	VYSmallUVF = new BYTE [nBlkX*nBlkY];

	MaskSmallB = new BYTE [nBlkX*nBlkY];
	MaskFullYB = new BYTE [nHeight*VPitchY];
	MaskFullUVB = new BYTE [nHeightUV*VPitchUV];

	MaskSmallF = new BYTE [nBlkX*nBlkY];
	MaskFullYF = new BYTE [nHeight*VPitchY];
	MaskFullUVF = new BYTE [nHeightUV*VPitchUV];

	int CPUF_Resize = env->GetCPUFlags();
	if (!isse) CPUF_Resize = (CPUF_Resize & ~CPUF_INTEGER_SSE) & ~CPUF_SSE2;

	upsizer = new SimpleResize(nWidth, nHeight, nBlkX, nBlkY, CPUF_Resize);
	upsizerUV = new SimpleResize(nWidthUV, nHeightUV, nBlkX, nBlkY, CPUF_Resize);

	if ( (pixelType & VideoInfo::CS_YUY2) == VideoInfo::CS_YUY2 && !planar)
	{
		DstPlanes =  new YUY2Planes(nWidth, nHeight);
	}
}
Example #30
AvisynthVideoProvider::AvisynthVideoProvider(agi::fs::path const& filename, std::string const& colormatrix)
{
	agi::acs::CheckFileRead(filename);

	std::lock_guard<std::mutex> lock(avs.GetMutex());

#ifdef _WIN32
	if (agi::fs::HasExtension(filename, "avi")) {
		// Try to read the keyframes before actually opening the file as trying
		// to open the file while it's already open can cause problems with
		// badly written VFW decoders
		AVIFileInit();

		PAVIFILE pfile;
		long hr = AVIFileOpen(&pfile, filename.c_str(), OF_SHARE_DENY_WRITE, 0);
		if (hr) {
			warning = "Unable to open AVI file for reading keyframes:\n";
			switch (hr) {
				case AVIERR_BADFORMAT:
					warning += "The file is corrupted, incomplete or has an otherwise bad format.";
					break;
				case AVIERR_MEMORY:
					warning += "The file could not be opened because of insufficient memory.";
					break;
				case AVIERR_FILEREAD:
					warning += "An error occurred reading the file. There might be a problem with the storage media.";
					break;
				case AVIERR_FILEOPEN:
					warning += "The file could not be opened. It might be in use by another application, or you do not have permission to access it.";
					break;
				case REGDB_E_CLASSNOTREG:
					warning += "There is no handler installed for the file extension. This might indicate a fundamental problem in your Video for Windows installation, and can be caused by extremely stripped Windows installations.";
					break;
				default:
					warning += "Unknown error.";
					break;
			}
			goto file_exit;
		}

		PAVISTREAM ppavi;
		if (hr = AVIFileGetStream(pfile, &ppavi, streamtypeVIDEO, 0)) {
			warning = "Unable to open AVI video stream for reading keyframes:\n";
			switch (hr) {
				case AVIERR_NODATA:
					warning += "The file does not contain a usable video stream.";
					break;
				case AVIERR_MEMORY:
					warning += "Not enough memory.";
					break;
				default:
					warning += "Unknown error.";
					break;
			}
			goto file_release;
		}

		AVISTREAMINFO avis;
		if (FAILED(AVIStreamInfo(ppavi,&avis,sizeof(avis)))) {
			warning = "Unable to read keyframes from AVI file:\nCould not get stream information.";
			goto stream_release;
		}

		for (size_t i = 0; i < avis.dwLength; i++) {
			if (AVIStreamIsKeyFrame(ppavi, i))
				keyframes.push_back(i);
		}

		// If every frame is a keyframe then just discard the keyframe data as it's useless
		if (keyframes.size() == (size_t)avis.dwLength)
			keyframes.clear();

		// Clean up
stream_release:
		AVIStreamRelease(ppavi);
file_release:
		AVIFileRelease(pfile);
file_exit:
		AVIFileExit();
	}
#endif

	try {
		auto script = Open(filename);

		// Check if video was loaded properly
		if (!script.IsClip() || !script.AsClip()->GetVideoInfo().HasVideo())
			throw VideoNotSupported("No usable video found");

		vi = script.AsClip()->GetVideoInfo();
		if (!vi.IsRGB()) {
			/// @todo maybe read ColorMatrix hints for d2v files?
			AVSValue args[2] = { script, "Rec601" };
			bool force_bt601 = OPT_GET("Video/Force BT.601")->GetBool() || colormatrix == "TV.601";
			bool bt709 = vi.width > 1024 || vi.height >= 600;
			if (bt709 && (!force_bt601 || colormatrix == "TV.709")) {
				args[1] = "Rec709";
				colorspace = "TV.709";
			}
			else
				colorspace = "TV.601";
			const char *argnames[2] = { 0, "matrix" };
			script = avs.GetEnv()->Invoke("ConvertToRGB32", AVSValue(args, 2), argnames);
		}
		else
			colorspace = "None";

		RGB32Video = avs.GetEnv()->Invoke("Cache", script).AsClip();
		vi = RGB32Video->GetVideoInfo();
		fps = (double)vi.fps_numerator / vi.fps_denominator;
	}
	catch (AvisynthError const& err) {
		throw VideoOpenError("Avisynth error: " + std::string(err.msg));
	}
}