Example #1
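Evaluator for the ++ script operator: both operands are evaluated and, if both are clips, joined with new_Splice in aligned mode (AlignedSplice); any other operand types raise a script error.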
AVSValue ExpDoublePlus::Evaluate(IScriptEnvironment* env)
{
  AVSValue x = a->Evaluate(env);
  AVSValue y = b->Evaluate(env);
  if (x.IsClip() && y.IsClip())
    return new_Splice(x.AsClip(), y.AsClip(), true, env);    // AlignedSplice
  else {
    env->ThrowError("Evaluate: operands of `++' must be clips");
    return 0;
  }
}
Example #2
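A small standalone test that creates two script environments, deletes the first, then invokes BlankClip and fetches frame 0 through the second, reporting any AvisynthError.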
#include <iostream>
#include "avisynth.h"   // declares IScriptEnvironment, CreateScriptEnvironment, AVSValue

using namespace std;

int main() {
    try {
        cout << "Creating script environment 1..." << endl;
        IScriptEnvironment* env1 = CreateScriptEnvironment(3);

        cout << "Creating script environment 2..." << endl;
        IScriptEnvironment* env2 = CreateScriptEnvironment(3);

        cout << "Deleting script environment 1..." << endl;
        delete env1;

        cout << "Invoking BlankClip on env 2..." << endl;
        AVSValue ret = env2->Invoke("BlankClip", AVSValue(), 0);
        PClip clp = ret.AsClip();

        cout << "Reading frame 0 from env2..." << endl;
        PVideoFrame frm = clp->GetFrame(0, env2);
    } catch (AvisynthError &e) {
        cerr << "AvisynthError: " << e.msg << endl;
        return -1;
    } catch (...) {
        cerr << "unknown error" << endl;
        return -1;
    }

    return 0;
}
Example #3
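A helper that wraps a filename in an AVSValue argument array, invokes Import to load the script, stores the resulting clip and path, and records the message of any AvisynthError or std::exception.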
                // utility functions
                // Opens AVS file and sets some member variables.
                void open(const char* avsfile) {
                    DBGLOG("avsutil::impl::cavs_type::open(\"" << avsfile << "\")");

                    try {
                        // pack the filename as the argument of AviSynth filter
                        AVSValue filename = avsfile;
                        AVSValue args = AVSValue(&filename, 1);

                        // load AviSynth script
                        AVSValue imported = mv_se->Invoke("Import", args, 0);

                        // get the clip and video informations
                        mv_clip = imported.AsClip();

                        // store filename
                        mv_filepath = avsfile;
                    }
                    catch (AvisynthError& avserr) {
                        mv_is_fine = false;
                        mv_errmsg = avserr.msg;
                    }
                    catch (std::exception& ex) {
                        mv_is_fine = false;
                        mv_errmsg = ex.what();
                    }
                }
Example #4
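Evaluator for the + script operator: clips are joined with an unaligned splice, ints and floats are added, strings are concatenated via env->Sprintf, and mixed operand types raise a script error.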
AVSValue ExpPlus::Evaluate(IScriptEnvironment* env)
{
  AVSValue x = a->Evaluate(env);
  AVSValue y = b->Evaluate(env);
  if (x.IsClip() && y.IsClip())
    return new_Splice(x.AsClip(), y.AsClip(), false, env);    // UnalignedSplice
  else if (x.IsInt() && y.IsInt())
    return x.AsInt() + y.AsInt();
  else if (x.IsFloat() && y.IsFloat())
    return x.AsFloat() + y.AsFloat();
  else if (x.IsString() && y.IsString())
    return env->Sprintf("%s%s", x.AsString(), y.AsString());
  else {
    env->ThrowError("Evaluate: operands of `+' must both be numbers, strings, or clips");
    return 0;
  }
}
Example #5
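An audio provider's loader: it verifies the clip has audio, downmixes it with the configured filter, converts the samples to 16 bit, optionally resamples, and then reads channel count, sample count, sample rate and sample size from the resulting clip.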
/// @brief Load audio from an AviSynth clip
/// @param _clip Clip to read the audio data from
///
void AvisynthAudioProvider::LoadFromClip(AVSValue _clip) {
	AVSValue script;

	// Check if it has audio
	VideoInfo vi = _clip.AsClip()->GetVideoInfo();
	if (!vi.HasAudio()) throw agi::AudioDataNotFoundError("No audio found.", 0);

	IScriptEnvironment *env = avs_wrapper.GetEnv();

	// Convert to one channel
	char buffer[1024];
	strcpy(buffer,lagi_wxString(OPT_GET("Audio/Downmixer")->GetString()).mb_str(csConvLocal));
	script = env->Invoke(buffer, _clip);

	// Convert to 16 bits per sample
	script = env->Invoke("ConvertAudioTo16bit", script);
	vi = script.AsClip()->GetVideoInfo();

	// Convert sample rate
	int setsample = OPT_GET("Provider/Audio/AVS/Sample Rate")->GetInt();
	if (vi.SamplesPerSecond() < 32000) setsample = 44100;
	if (setsample != 0) {
		AVSValue args[2] = { script, setsample };
		script = env->Invoke("ResampleAudio", AVSValue(args,2));
	}

	// Set clip
	PClip tempclip = script.AsClip();
	vi = tempclip->GetVideoInfo();

	// Read properties
	channels = vi.AudioChannels();
	num_samples = vi.num_audio_samples;
	sample_rate = vi.SamplesPerSecond();
	bytes_per_sample = vi.BytesPerAudioSample();
	float_samples = false;

	clip = tempclip;
}
Example #6
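Evaluator for the == and != operators: bools, ints, floats, clip handles and strings (case-insensitive, via lstrcmpi) are compared; incomparable operand types raise a script error.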
AVSValue ExpEqual::Evaluate(IScriptEnvironment* env) 
{
  AVSValue x = a->Evaluate(env);
  AVSValue y = b->Evaluate(env);
  if (x.IsBool() && y.IsBool()) {
    return x.AsBool() == y.AsBool();
  }
  else if (x.IsInt() && y.IsInt()) {
    return x.AsInt() == y.AsInt();
  }
  else if (x.IsFloat() && y.IsFloat()) {
    return x.AsFloat() == y.AsFloat();
  }
  else if (x.IsClip() && y.IsClip()) {
    return x.AsClip() == y.AsClip();
  }
  else if (x.IsString() && y.IsString()) {
    return !lstrcmpi(x.AsString(), y.AsString());
  }
  else {
    env->ThrowError("Evaluate: operands of `==' and `!=' must be comparable");
    return 0;
  }
}
Example #7
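Opens a script by invoking Import on the script name, checks that the result is a clip, and caches the clip and its VideoInfo; AvisynthError messages are logged and reported as failure.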
int AvxContext::OpenFile() {
	try {
		AVSValue ret = avx_library.env->Invoke("Import", scriptName);
		if (!ret.IsClip()) {
			AVXLOG_ERROR("%s", "Script did not return a clip");
			return -1;
		}
		clip = ret.AsClip();
		vi = clip->GetVideoInfo();
	} catch (AvisynthError &e) {
		AVXLOG_ERROR("AvisynthError: %s", e.msg);
		return -1;
	}

	return 0;
}
Example #8
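A generic Invoke wrapper: the stored arguments and argument names are forwarded to the environment, and the returned AVSValue is mapped into a tagged AVSValueStruct (clip, bool, int, float, string or array); an AvisynthError is reported as type 100.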
AVSValueStruct CAsifScriptEnvironment::Invoke(const char* command) {
    AVSValueStruct ir;
    ir.arraysize = 0;
    ir.errors = false;
    ir.returnvalue.ival = 0;
    ir.type = 0;

    std::vector<const char *> tempargnames(args.size());

    for (size_t i = 0; i < tempargnames.size(); i++)
        tempargnames[i] = (argnames[i].empty() ? nullptr : argnames[i].c_str());

    try {
        AVSValue ret = envse->Invoke(command, AVSValue(args.data(), args.size()), tempargnames.data());

        if (ret.IsClip()) {
            ir.returnvalue.cval = new CAsifClip(ret.AsClip(), envse);
            ir.type = 1;
        } else if (ret.IsBool()) {
            ir.returnvalue.bval = ret.AsBool();
            ir.type = 2;
        } else if (ret.IsInt()) {
            ir.returnvalue.ival = ret.AsInt();
            ir.type = 3;
        } else if (ret.IsFloat()) {
            ir.returnvalue.fval = (float)ret.AsFloat();
            ir.type = 4;
        } else if (ret.IsString()) {
            ir.returnvalue.sval = ret.AsString();
            ir.type = 5;
        } else if (ret.IsArray()) {
            //	ir.returnvalue.aval=ret.
            ir.arraysize = ret.ArraySize();
            ir.type = 6;
        }

    } catch (AvisynthError &e) {
        ir.type = 100;
        ir.returnvalue.sval = e.msg;
        ir.errors = 1;
    }

    ResetArgs();

    return ir;
}
Example #9
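ScriptClip's per-frame evaluation: the implicit last and current_frame variables are saved, set for the frame being evaluated, and restored afterwards; evaluation errors are rendered onto the frame with ApplyMessage, and the returned clip is checked for matching colorspace, width and height.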
PVideoFrame __stdcall ScriptClip::GetFrame(int n, IScriptEnvironment* env) {
  AVSValue prev_last = env->GetVar("last");  // Store previous last
  AVSValue prev_current_frame = GetVar(env, "current_frame");  // Store previous current_frame

  env->SetVar("last",(AVSValue)child);       // Set explicit last
  env->SetVar("current_frame",(AVSValue)n);  // Set frame to be tested by the conditional filters.

  if (show) {
    PVideoFrame dst = child->GetFrame(n,env);
    env->MakeWritable(&dst);
    ApplyMessage(&dst, vi, script.AsString(), vi.width/6, 0xa0a0a0,0,0 , env );
    env->SetVar("last",prev_last);       // Restore implicit last
    env->SetVar("current_frame",prev_current_frame);       // Restore current_frame
    return dst;
  }

  AVSValue result;
  PVideoFrame eval_return;   // Frame to be returned if script should be evaluated AFTER frame has been fetched. Otherwise not used.

  if (eval_after) eval_return = child->GetFrame(n,env);

  try {
    ScriptParser parser(env, script.AsString(), "[ScriptClip]");
    PExpression exp = parser.Parse();
    result = exp->Evaluate(env);
  } catch (AvisynthError error) {    
    const char* error_msg = error.msg;  

    PVideoFrame dst = child->GetFrame(n,env);
    env->MakeWritable(&dst);
    ApplyMessage(&dst, vi, error_msg, vi.width/W_DIVISOR, 0xa0a0a0,0,0 , env );
    env->SetVar("last",prev_last);       // Restore implicit last
    env->SetVar("current_frame",prev_current_frame);       // Restore current_frame
    return dst;
  }

  env->SetVar("last",prev_last);       // Restore implicit last
  env->SetVar("current_frame",prev_current_frame);       // Restore current_frame

  if (eval_after && only_eval) return eval_return;
  if (only_eval) return child->GetFrame(n,env);
  
  const char* error = NULL;
  VideoInfo vi2;
  if (!result.IsClip()) {
    error = "ScriptClip: Function did not return a video clip!";
  } else {
    vi2 = result.AsClip()->GetVideoInfo();
    if (!vi.IsSameColorspace(vi2)) { 
      error = "ScriptClip: Function did not return a video clip of the same colorspace as the source clip!";
    } else if (vi2.width != vi.width) {
      error = "ScriptClip: Function did not return a video clip with the same width as the source clip!";
    } else if (vi2.height != vi.height) {
      error = "ScriptClip: Function did not return a video clip with the same height as the source clip!";
    }
  }

  if (error != NULL) {
    PVideoFrame dst = child->GetFrame(n,env);
    env->MakeWritable(&dst);
    ApplyMessage(&dst, vi, error, vi.width/W_DIVISOR, 0xa0a0a0,0,0 , env );
    return dst;
  }

  n = min(n,vi2.num_frames-1);  // We ignore it if the new clip is not as long as the current one. This can allow the resulting clip to be one frame.

  return result.AsClip()->GetFrame(n,env);
}
Example #10
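A video provider constructor: on Windows it first reads keyframe flags through the VFW AVIFile API, then opens the file via an AviSynth script, converts non-RGB video to RGB32 with a Rec601 or Rec709 matrix, and wraps the result in a Cache filter.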
AvisynthVideoProvider::AvisynthVideoProvider(wxString filename)
: last_fnum(-1)
{
	iframe.flipped = true;
	iframe.invertChannels = true;

	wxMutexLocker lock(avs.GetMutex());

	wxFileName fname(filename);
	if (!fname.FileExists())
		throw agi::FileNotFoundError(from_wx(filename));

	wxString extension = filename.Right(4).Lower();

#ifdef _WIN32
	if (extension == ".avi") {
		// Try to read the keyframes before actually opening the file as trying
		// to open the file while it's already open can cause problems with
		// badly written VFW decoders
		AVIFileInit();

		PAVIFILE pfile;
		long hr = AVIFileOpen(&pfile, filename.wc_str(), OF_SHARE_DENY_WRITE, 0);
		if (hr) {
			warning = "Unable to open AVI file for reading keyframes:\n";
			switch (hr) {
				case AVIERR_BADFORMAT:
					warning += "The file is corrupted, incomplete or has an otherwise bad format.";
					break;
				case AVIERR_MEMORY:
					warning += "The file could not be opened because of insufficient memory.";
					break;
				case AVIERR_FILEREAD:
					warning += "An error occurred reading the file. There might be a problem with the storage media.";
					break;
				case AVIERR_FILEOPEN:
					warning += "The file could not be opened. It might be in use by another application, or you do not have permission to access it.";
					break;
				case REGDB_E_CLASSNOTREG:
					warning += "There is no handler installed for the file extension. This might indicate a fundamental problem in your Video for Windows installation, and can be caused by extremely stripped Windows installations.";
					break;
				default:
					warning += "Unknown error.";
					break;
			}
			goto file_exit;
		}

		PAVISTREAM ppavi;
		if (hr = AVIFileGetStream(pfile, &ppavi, streamtypeVIDEO, 0)) {
			warning = "Unable to open AVI video stream for reading keyframes:\n";
			switch (hr) {
				case AVIERR_NODATA:
					warning += "The file does not contain a usable video stream.";
					break;
				case AVIERR_MEMORY:
					warning += "Not enough memory.";
					break;
				default:
					warning += "Unknown error.";
					break;
			}
			goto file_release;
		}

		AVISTREAMINFO avis;
		if (FAILED(AVIStreamInfo(ppavi,&avis,sizeof(avis)))) {
			warning = "Unable to read keyframes from AVI file:\nCould not get stream information.";
			goto stream_release;
		}

		bool all_keyframes = true;
		for (size_t i = 0; i < avis.dwLength; i++) {
			if (AVIStreamIsKeyFrame(ppavi, i))
				KeyFrames.push_back(i);
			else
				all_keyframes = false;
		}

		// If every frame is a keyframe then just discard the keyframe data as it's useless
		if (all_keyframes) KeyFrames.clear();

		// Clean up
stream_release:
		AVIStreamRelease(ppavi);
file_release:
		AVIFileRelease(pfile);
file_exit:
		AVIFileExit();
	}
#endif

	try {
		AVSValue script = Open(fname, extension);

		// Check if video was loaded properly
		if (!script.IsClip() || !script.AsClip()->GetVideoInfo().HasVideo())
			throw VideoNotSupported("No usable video found");

		vi = script.AsClip()->GetVideoInfo();
		if (!vi.IsRGB()) {
			/// @todo maybe read ColorMatrix hints for d2v files?

			AVSValue args[2] = { script, "Rec601" };
			if (!OPT_GET("Video/Force BT.601")->GetBool() && (vi.width > 1024 || vi.height >= 600)) {
				args[1] = "Rec709";
				colorspace = "TV.709";
			}
			else
				colorspace = "TV.601";
			const char *argnames[2] = { 0, "matrix" };
			script = avs.GetEnv()->Invoke("ConvertToRGB32", AVSValue(args, 2), argnames);
		}
		else
			colorspace = "None";

		RGB32Video = avs.GetEnv()->Invoke("Cache", script).AsClip();
		vi = RGB32Video->GetVideoInfo();
		fps = (double)vi.fps_numerator / vi.fps_denominator;
	}
	catch (AvisynthError const& err) {
		throw VideoOpenError("Avisynth error: " + std::string(err.msg));
	}
}
Example #11
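A C-style wrapper entry point: it loads avisynth.dll, creates a script environment, invokes the requested function, optionally adjusts the MT mode, converts the clip to the requested pixel and sample formats, and fills an AVSDLLVideoInfo structure, reporting errors through a fixed-size message buffer.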
int __stdcall dimzon_avs_init(SafeStruct** ppstr, char *func, char *arg, AVSDLLVideoInfo *vi)
{
	SafeStruct* pstr = NULL;

	if(!*ppstr)
	{
		pstr = ((SafeStruct*)malloc(sizeof(SafeStruct)));
		*ppstr = pstr;
		memset(pstr, 0, sizeof(SafeStruct));

		pstr->dll = LoadLibrary("avisynth.dll");
		if(!pstr->dll)
		{
			strncpy_s(pstr->err, ERRMSG_LEN, "Cannot load avisynth.dll", _TRUNCATE);
			return 1;
		}

		IScriptEnvironment* (*CreateScriptEnvironment)(int version) = (IScriptEnvironment*(*)(int)) GetProcAddress(pstr->dll, "CreateScriptEnvironment");
		if(!CreateScriptEnvironment)
		{
			strncpy_s(pstr->err, ERRMSG_LEN, "Cannot load CreateScriptEnvironment", _TRUNCATE);
			return 2;
		}

		pstr->env = CreateScriptEnvironment(AVISYNTH_INTERFACE_VERSION);

		if (pstr->env == NULL)
		{
			strncpy_s(pstr->err, ERRMSG_LEN, "Required Avisynth 2.5", _TRUNCATE);
			return 3;
		}
	}
	else
	{
		pstr = *ppstr;
	}

	pstr->err[0] = 0;

	// Called only to obtain ppstr
	if (!func || strlen(func) == 0 || !arg)
		return 0;

	try
	{
		AVSValue arg(arg);
		AVSValue res;

		if (vi != NULL && vi->mt_import == MT_DISABLED)
		{
			// If needed, disable MT before the import
			try { pstr->env->Invoke("SetMTMode", 0); }
			catch (IScriptEnvironment::NotFound) { /* AviSynth without MT */ }
		}

		res = pstr->env->Invoke(func, AVSValue(&arg, 1));
		if(!*ppstr) return 1;

		if (!res.IsClip())
		{
			strncpy_s(pstr->err, ERRMSG_LEN, "The script's return was not a video clip.", _TRUNCATE);
			return 4;
		}

		if (vi != NULL && (vi->mt_import == MT_ADDDISTR || vi->mt_import == MT_ADDM1DISTR))
		{
			try
			{
				// If needed, add Distributor() after the import
				AVSValue mt_test = pstr->env->Invoke("GetMTMode", false);
				const int mt_mode = mt_test.IsInt() ? mt_test.AsInt() : 0;
				if (mt_mode > 0 && mt_mode < 5)
				{
					if (mt_mode != 1 && vi->mt_import == MT_ADDM1DISTR)
						pstr->env->Invoke("SetMTMode", 1);

					res = pstr->env->Invoke("Distributor", res);
				}
			}
			catch (IScriptEnvironment::NotFound) { /* AviSynth without MT */ }

			if (!res.IsClip())
			{
				strncpy_s(pstr->err, ERRMSG_LEN, "After adding \"Distributor()\" the script's return was not a video clip.", _TRUNCATE);
				return 4;
			}
		}

		pstr->clp = res.AsClip();
		VideoInfo inf = pstr->clp->GetVideoInfo();

		if (inf.HasVideo())
		{
			string filter = "";
			string err_string = "";

			// Original and requested PixelType
			if (vi != NULL) vi->pixel_type_orig = inf.pixel_type;
			int pixel_type_req = (vi != NULL) ? vi->pixel_type : 0;

			if (pixel_type_req == 0) { /* Output the video as is, with no checks or conversions */ }
			else if (pixel_type_req == inf.CS_BGR32) { if (!inf.IsRGB32()) { filter = "ConvertToRGB32"; err_string = "AviSynthWrapper: Cannot convert video to RGB32!"; }}
			else if (pixel_type_req == inf.CS_BGR24) { if (!inf.IsRGB24()) { filter = "ConvertToRGB24"; err_string = "AviSynthWrapper: Cannot convert video to RGB24!"; }}
			else if (pixel_type_req == inf.CS_YUY2) { if (!inf.IsYUY2()) { filter = "ConvertToYUY2"; err_string = "AviSynthWrapper: Cannot convert video to YUY2!"; }}
			else if (pixel_type_req == inf.CS_YV12) { if (!inf.IsYV12()) { filter = "ConvertToYV12"; err_string = "AviSynthWrapper: Cannot convert video to YV12!"; }}
			else if (pixel_type_req == inf.CS_I420) { if (!inf.IsYV12()) { filter = "ConvertToYV12"; err_string = "AviSynthWrapper: Cannot convert video to YV12!"; }}
			else
			{
				//"2.5 Baked API will see all new planar as YV12"
				// YV411, YV24, YV16 and Y8 are reported by IsYV12() as YV12
				strncpy_s(pstr->err, ERRMSG_LEN, "AviSynthWrapper: Requested PixelType isn't valid or such conversion is not yet implemented!", _TRUNCATE);
				return 5;
			}

			if (filter.length() > 0)
			{
				res = pstr->env->Invoke(filter.c_str(), AVSValue(&res, 1));

				pstr->clp = res.AsClip();
				VideoInfo infh = pstr->clp->GetVideoInfo();

				if (pixel_type_req == inf.CS_BGR32 && !infh.IsRGB32() ||
					pixel_type_req == inf.CS_BGR24 && !infh.IsRGB24() ||
					pixel_type_req == inf.CS_YUY2 && !infh.IsYUY2() ||
					pixel_type_req == inf.CS_YV12 && !infh.IsYV12() ||
					pixel_type_req == inf.CS_I420 && !infh.IsYV12())
				{
					strncpy_s(pstr->err, ERRMSG_LEN, err_string.c_str(), _TRUNCATE);
					return 5;
				}
			}
		}

		if (inf.HasAudio())
		{
			string filter = "";
			string err_string = "";

			// Original and requested SampleType
			if (vi != NULL) vi->sample_type_orig = inf.sample_type;
			int sample_type_req = (vi != NULL) ? vi->sample_type : 0;

			if (sample_type_req == 0) { /* Output the audio as is, with no checks or conversions */ }
			else if (sample_type_req == SAMPLE_FLOAT) { if (inf.sample_type != SAMPLE_FLOAT) { filter = "ConvertAudioToFloat"; err_string = "AviSynthWrapper: Cannot convert audio to FLOAT!"; }}
			else if (sample_type_req == SAMPLE_INT32) { if (inf.sample_type != SAMPLE_INT32) { filter = "ConvertAudioTo32bit"; err_string = "AviSynthWrapper: Cannot convert audio to INT32!"; }}
			else if (sample_type_req == SAMPLE_INT24) { if (inf.sample_type != SAMPLE_INT24) { filter = "ConvertAudioTo24bit"; err_string = "AviSynthWrapper: Cannot convert audio to INT24!"; }}
			else if (sample_type_req == SAMPLE_INT16) { if (inf.sample_type != SAMPLE_INT16) { filter = "ConvertAudioTo16bit"; err_string = "AviSynthWrapper: Cannot convert audio to INT16!"; }}
			else if (sample_type_req == SAMPLE_INT8) { if (inf.sample_type != SAMPLE_INT8) { filter = "ConvertAudioTo8bit"; err_string = "AviSynthWrapper: Cannot convert audio to INT8!"; }}
			else
			{
				strncpy_s(pstr->err, ERRMSG_LEN, "AviSynthWrapper: Requested SampleType isn't valid or such conversion is not yet implemented!", _TRUNCATE);
				return 6;
			}

			if (filter.length() > 0)
			{
				res = pstr->env->Invoke(filter.c_str(), res);

				pstr->clp = res.AsClip();
				VideoInfo infh = pstr->clp->GetVideoInfo();

				if (sample_type_req == SAMPLE_FLOAT && infh.sample_type != SAMPLE_FLOAT ||
					sample_type_req == SAMPLE_INT32 && infh.sample_type != SAMPLE_INT32 ||
					sample_type_req == SAMPLE_INT24 && infh.sample_type != SAMPLE_INT24 ||
					sample_type_req == SAMPLE_INT16 && infh.sample_type != SAMPLE_INT16 ||
					sample_type_req == SAMPLE_INT8 && infh.sample_type != SAMPLE_INT8)
				{
					strncpy_s(pstr->err, ERRMSG_LEN, err_string.c_str(), _TRUNCATE);
					return 6;
				}
			}
		}

		inf = pstr->clp->GetVideoInfo();
		if (vi != NULL) {
			vi->width   = inf.width;
			vi->height  = inf.height;
			vi->raten   = inf.fps_numerator;
			vi->rated   = inf.fps_denominator;
			vi->field_based = (inf.IsFieldBased()) ? 1 : 0;
			vi->first_field = (inf.IsTFF()) ? 1 : (inf.IsBFF()) ? 2 : 0;
			vi->num_frames = inf.num_frames;

			//if (vi->pixel_type == 0) vi->pixel_type = inf.pixel_type;
			//if (vi->sample_type == 0) vi->sample_type = inf.sample_type;
			vi->pixel_type = inf.pixel_type;
			vi->sample_type = inf.sample_type;

			vi->audio_samples_per_second = inf.audio_samples_per_second;
			vi->num_audio_samples        = inf.num_audio_samples;
			vi->nchannels                = inf.nchannels;
		}

		// Do we even need this res at all?!
		if(pstr->res) delete pstr->res;
		pstr->res = new AVSValue(res);

		pstr->err[0] = 0;
		return 0;
	}
	catch(AvisynthError err)
	{
		strncpy_s(pstr->err, ERRMSG_LEN, err.msg, _TRUNCATE);
		return AVS_GERROR;
	}
}
Example #12
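Invokes an arbitrary function with up to ten arguments parsed heuristically from strings (the clip "last", bools, floats, integers or plain strings); scalar results are returned through func_out, and a returned clip updates the stored clip and AVSDLLVideoInfo.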
int __stdcall dimzon_avs_invoke(SafeStruct* pstr, char *func, char **arg, int len, AVSDLLVideoInfo *vi, float* func_out)
{
	try
	{
		*func_out = -FLT_MAX;
		pstr->err[0] = 0;

		const int N = 10;
		int actual_len = 0;

		AVSValue args[N] = { };
		if (len == 0) args[0] = 0;
		else if (len > N) len = N;

		for(int i = 0; i < len; i++)
		{
			if (strlen(arg[i]) > 0)
			{
				string lower = arg[i];
				bool was_letters = false;
				bool was_digits = false;
				bool was_spaces = false;

				// Values that are too long are definitely text
				for (unsigned int n = 0; n < lower.size() && lower.size() <= 10; n++)
				{
					lower[n] = tolower(lower[n]);
					if (!was_letters && isalpha(lower[n])) was_letters = true;
					if (!was_digits && isdigit(lower[n])) was_digits = true;
					if (!was_spaces && isspace(lower[n])) was_spaces = true;
				}

				if (i == 0 && was_letters && !was_digits && !was_spaces && lower.compare("last") == 0)
				{
					//Clip (last)
					if(!pstr->clp) throw AvisynthError("AviSynthWrapper: The \"last\" clip was requested, but it doesn't exist!");
					args[actual_len] = pstr->clp; //pstr->res->AsClip();
					actual_len += 1;

					//pstr->clp; pstr->res->AsClip(); // With processing applied after previous Invoke calls
					//pstr->env->GetVar("last").AsClip(); // The "raw" script output
				}
				else if (was_letters && !was_digits && !was_spaces && lower.compare("true") == 0)
				{
					//Bool (true)
					args[actual_len] = true;
					actual_len += 1;
				}
				else if (was_letters && !was_digits && !was_spaces && lower.compare("false") == 0)
				{
					//Bool (false)
					args[actual_len] = false;
					actual_len += 1;
				}
				else if (!was_letters && was_digits && !was_spaces && lower.find(".") != string::npos)
				{
					//Float (double..)
					args[actual_len] = atof(arg[i]);
					actual_len += 1;
				}
				else if (!was_letters && was_digits && !was_spaces)
				{
					//Integer
					args[actual_len] = atoi(arg[i]);
					actual_len += 1;
				}
				else
				{
					//String
					args[actual_len] = arg[i];
					actual_len += 1;
				}
			}
		}

		AVSValue res = pstr->env->Invoke(func, AVSValue(args, actual_len));

		if (!res.IsClip())
		{
			// Output the result
			if (res.IsBool())
			{ 
				if(!res.AsBool()) *func_out = 0;
				else *func_out = FLT_MAX;
			}
			else if (res.IsInt()) *func_out = (float)res.AsInt();
			else if (res.IsFloat()) *func_out = (float)res.AsFloat();
			else if (res.IsString()) { *func_out = FLT_MAX; strncpy_s(pstr->err, ERRMSG_LEN, res.AsString(), _TRUNCATE); }
		}
		else
		{
			pstr->clp = res.AsClip();
			VideoInfo inf = pstr->clp->GetVideoInfo();

			if (vi != NULL)
			{
				vi->width   = inf.width;
				vi->height  = inf.height;
				vi->raten   = inf.fps_numerator;
				vi->rated   = inf.fps_denominator;
				vi->field_based = (inf.IsFieldBased()) ? 1 : 0;
				vi->first_field = (inf.IsTFF()) ? 1 : (inf.IsBFF()) ? 2 : 0;
				vi->num_frames = inf.num_frames;

				// Or should we leave these unchanged?
				if (vi->pixel_type_orig == 0) vi->pixel_type_orig = inf.pixel_type;
				if (vi->sample_type_orig == 0) vi->sample_type_orig = inf.sample_type;

				vi->pixel_type = inf.pixel_type;
				vi->sample_type = inf.sample_type;

				vi->num_audio_samples        = inf.num_audio_samples;
				vi->audio_samples_per_second = inf.audio_samples_per_second;
				vi->nchannels                = inf.nchannels;
			}

			// Do we even need this res at all?!
			if(pstr->res) delete pstr->res;
			pstr->res = new AVSValue(res);

			pstr->err[0] = 0;
		}

		return 0;
	}
	catch(AvisynthError err)
	{
		strncpy_s(pstr->err, ERRMSG_LEN, err.msg, _TRUNCATE);
		return AVS_GERROR;
	}
	catch(IScriptEnvironment::NotFound)
	{
		strncpy_s(pstr->err, ERRMSG_LEN, "AviSynthWrapper: Wrong function name or invalid parameters was passed to Invoke!", _TRUNCATE);
		return AVS_VARNFOUND;
	}
}