Example #1
int AvxContext::MPlayerCommandVideo(char *command) {
	char format[sizeof("bgr32")];
	bool flipVertical = false;

	if (vi.IsRGB24()) {
		sprintf(format, "bgr24");
		flipVertical = true;
	} else if (vi.IsRGB32()) {
		sprintf(format, "bgr32");
		flipVertical = true;
	} else if (vi.IsYUY2()) {
		sprintf(format, "yuy2");
	} else if (vi.IsYV12()) {
		sprintf(format, "yv12");
	} else {
		AVXLOG_ERROR("%s", "Unsupported colorspace");
		return -1;
	}

	sprintf(command, MPLAYER " %s -demuxer rawvideo -rawvideo w=%d:h=%d:format=%s - 1> /dev/null",
		flipVertical ? "-flip" : "", vi.width, vi.height, format);
	return 0;
}
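Note: the command string built by MPlayerCommandVideo only does something once it is handed to a pipe. Below is a minimal sketch of such a caller; OutputVideo and the 1024-byte command buffer are assumptions for illustration, not part of the code above.

// Hypothetical caller (sketch): builds the MPlayer command and opens a pipe
// so raw frames can be streamed into MPlayer's stdin.
#include <cstdio>

int AvxContext::OutputVideo() {
	char command[1024];
	if (MPlayerCommandVideo(command))
		return -1;

	FILE *pipe = popen(command, "w"); // the trailing "-" makes MPlayer read raw video from stdin
	if (!pipe) {
		AVXLOG_ERROR("%s", "Unable to start MPlayer");
		return -1;
	}
	// ... fetch each frame from the clip and fwrite() its packed pixel data to 'pipe' ...
	pclose(pipe);
	return 0;
}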
Example #2
/////////////
// Get frame
PVideoFrame __stdcall DrawPRS::GetFrame(int n, IScriptEnvironment* env) {
    // Avisynth frame
    PVideoFrame avsFrame = child->GetFrame(n,env);

    try {
        // Check if there is anything to be drawn
        if (file.HasDataAtFrame(n)) {
            // Create the PRSFrame structure
            env->MakeWritable(&avsFrame);
            PRSVideoFrame frame;
            frame.data[0] = (char*) avsFrame->GetWritePtr();
            frame.w = avsFrame->GetRowSize()/4;
            frame.h = avsFrame->GetHeight();
            frame.pitch = avsFrame->GetPitch();
            frame.flipColors = true;
            frame.flipVertical = true;

            // Set colorspace
            VideoInfo vi = child->GetVideoInfo();
            if (vi.IsYV12()) frame.colorSpace = ColorSpace_YV12;
            else if (vi.IsYUY2()) frame.colorSpace = ColorSpace_YUY2;
            else if (vi.IsRGB32()) frame.colorSpace = ColorSpace_RGB32;
            else if (vi.IsRGB24()) frame.colorSpace = ColorSpace_RGB24;

            // Draw into the frame
            file.DrawFrame(n,&frame);
        }
    }

    // Catch exception
    catch (const std::exception &e) {
        env->ThrowError("%s", e.what());
    }

    // Return frame
    return avsFrame;
}
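For context, a filter like DrawPRS is normally exposed to scripts through a plugin entry point. A minimal sketch under the classic 2.5 plugin API follows; the DrawPRS constructor signature (clip plus PRS file name) is an assumption based on the GetFrame code above.

// Hypothetical creator function and plugin entry point for DrawPRS (sketch).
AVSValue __cdecl Create_DrawPRS(AVSValue args, void*, IScriptEnvironment* env) {
    // args[0] = source clip, args[1] = PRS file name (assumed constructor)
    return new DrawPRS(args[0].AsClip(), args[1].AsString(), env);
}

extern "C" __declspec(dllexport) const char* __stdcall
AvisynthPluginInit2(IScriptEnvironment* env) {
    env->AddFunction("DrawPRS", "cs", Create_DrawPRS, 0);
    return "DrawPRS plugin";
}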
Example #3
int __stdcall dimzon_avs_init(SafeStruct** ppstr, char *func, char *arg, AVSDLLVideoInfo *vi)
{
	SafeStruct* pstr = NULL;

	if(!*ppstr)
	{
		pstr = ((SafeStruct*)malloc(sizeof(SafeStruct)));
		*ppstr = pstr;
		memset(pstr, 0, sizeof(SafeStruct));

		pstr->dll = LoadLibrary("avisynth.dll");
		if(!pstr->dll)
		{
			strncpy_s(pstr->err, ERRMSG_LEN, "Cannot load avisynth.dll", _TRUNCATE);
			return 1;
		}

		IScriptEnvironment* (*CreateScriptEnvironment)(int version) = (IScriptEnvironment*(*)(int)) GetProcAddress(pstr->dll, "CreateScriptEnvironment");
		if(!CreateScriptEnvironment)
		{
			strncpy_s(pstr->err, ERRMSG_LEN, "Cannot load CreateScriptEnvironment", _TRUNCATE);
			return 2;
		}

		pstr->env = CreateScriptEnvironment(AVISYNTH_INTERFACE_VERSION);

		if (pstr->env == NULL)
		{
			strncpy_s(pstr->err, ERRMSG_LEN, "Required Avisynth 2.5", _TRUNCATE);
			return 3;
		}
	}
	else
	{
		pstr = *ppstr;
	}

	pstr->err[0] = 0;

	// Called only to obtain ppstr
	if (!func || strlen(func) == 0 || !arg)
		return 0;

	try
	{
		AVSValue script_arg(arg);
		AVSValue res;

		if (vi != NULL && vi->mt_import == MT_DISABLED)
		{
			// If needed, disable MT - before the import
			try { pstr->env->Invoke("SetMTMode", 0); }
			catch (IScriptEnvironment::NotFound) { /* AviSynth without MT */ }
		}

		res = pstr->env->Invoke(func, AVSValue(&script_arg, 1));
		if(!*ppstr) return 1;

		if (!res.IsClip())
		{
			strncpy_s(pstr->err, ERRMSG_LEN, "The script's return was not a video clip.", _TRUNCATE);
			return 4;
		}

		if (vi != NULL && (vi->mt_import == MT_ADDDISTR || vi->mt_import == MT_ADDM1DISTR))
		{
			try
			{
				// If needed, add Distributor() - after the import
				AVSValue mt_test = pstr->env->Invoke("GetMTMode", false);
				const int mt_mode = mt_test.IsInt() ? mt_test.AsInt() : 0;
				if (mt_mode > 0 && mt_mode < 5)
				{
					if (mt_mode != 1 && vi->mt_import == MT_ADDM1DISTR)
						pstr->env->Invoke("SetMTMode", 1);

					res = pstr->env->Invoke("Distributor", res);
				}
			}
			catch (IScriptEnvironment::NotFound) { /* AviSynth without MT */ }

			if (!res.IsClip())
			{
				strncpy_s(pstr->err, ERRMSG_LEN, "After adding \"Distributor()\" the script's return was not a video clip.", _TRUNCATE);
				return 4;
			}
		}

		pstr->clp = res.AsClip();
		VideoInfo inf = pstr->clp->GetVideoInfo();

		if (inf.HasVideo())
		{
			string filter = "";
			string err_string = "";

			// Original and Requested PixelType
			if (vi != NULL) vi->pixel_type_orig = inf.pixel_type;
			int pixel_type_req = (vi != NULL) ? vi->pixel_type : 0;

			if (pixel_type_req == 0) { /* Output the video as is, no checks or conversions */ }
			else if (pixel_type_req == inf.CS_BGR32) { if (!inf.IsRGB32()) { filter = "ConvertToRGB32"; err_string = "AviSynthWrapper: Cannot convert video to RGB32!"; }}
			else if (pixel_type_req == inf.CS_BGR24) { if (!inf.IsRGB24()) { filter = "ConvertToRGB24"; err_string = "AviSynthWrapper: Cannot convert video to RGB24!"; }}
			else if (pixel_type_req == inf.CS_YUY2) { if (!inf.IsYUY2()) { filter = "ConvertToYUY2"; err_string = "AviSynthWrapper: Cannot convert video to YUY2!"; }}
			else if (pixel_type_req == inf.CS_YV12) { if (!inf.IsYV12()) { filter = "ConvertToYV12"; err_string = "AviSynthWrapper: Cannot convert video to YV12!"; }}
			else if (pixel_type_req == inf.CS_I420) { if (!inf.IsYV12()) { filter = "ConvertToYV12"; err_string = "AviSynthWrapper: Cannot convert video to YV12!"; }}
			else
			{
				//"2.5 Baked API will see all new planar as YV12"
				// YV411, YV24, YV16 and Y8 are all reported as YV12 by IsYV12()
				strncpy_s(pstr->err, ERRMSG_LEN, "AviSynthWrapper: Requested PixelType isn't valid or such conversion is not yet implemented!", _TRUNCATE);
				return 5;
			}

			if (filter.length() > 0)
			{
				res = pstr->env->Invoke(filter.c_str(), AVSValue(&res, 1));

				pstr->clp = res.AsClip();
				VideoInfo infh = pstr->clp->GetVideoInfo();

				if ((pixel_type_req == inf.CS_BGR32 && !infh.IsRGB32()) ||
					(pixel_type_req == inf.CS_BGR24 && !infh.IsRGB24()) ||
					(pixel_type_req == inf.CS_YUY2 && !infh.IsYUY2()) ||
					(pixel_type_req == inf.CS_YV12 && !infh.IsYV12()) ||
					(pixel_type_req == inf.CS_I420 && !infh.IsYV12()))
				{
					strncpy_s(pstr->err, ERRMSG_LEN, err_string.c_str(), _TRUNCATE);
					return 5;
				}
			}
		}

		if (inf.HasAudio())
		{
			string filter = "";
			string err_string = "";

			// Original and Requested SampleType
			if (vi != NULL) vi->sample_type_orig = inf.sample_type;
			int sample_type_req = (vi != NULL) ? vi->sample_type : 0;

			if (sample_type_req == 0) { /* Output the audio as is, no checks or conversions */ }
			else if (sample_type_req == SAMPLE_FLOAT) { if (inf.sample_type != SAMPLE_FLOAT) { filter = "ConvertAudioToFloat"; err_string = "AviSynthWrapper: Cannot convert audio to FLOAT!"; }}
			else if (sample_type_req == SAMPLE_INT32) { if (inf.sample_type != SAMPLE_INT32) { filter = "ConvertAudioTo32bit"; err_string = "AviSynthWrapper: Cannot convert audio to INT32!"; }}
			else if (sample_type_req == SAMPLE_INT24) { if (inf.sample_type != SAMPLE_INT24) { filter = "ConvertAudioTo24bit"; err_string = "AviSynthWrapper: Cannot convert audio to INT24!"; }}
			else if (sample_type_req == SAMPLE_INT16) { if (inf.sample_type != SAMPLE_INT16) { filter = "ConvertAudioTo16bit"; err_string = "AviSynthWrapper: Cannot convert audio to INT16!"; }}
			else if (sample_type_req == SAMPLE_INT8) { if (inf.sample_type != SAMPLE_INT8) { filter = "ConvertAudioTo8bit"; err_string = "AviSynthWrapper: Cannot convert audio to INT8!"; }}
			else
			{
				strncpy_s(pstr->err, ERRMSG_LEN, "AviSynthWrapper: Requested SampleType isn't valid or such conversion is not yet implemented!", _TRUNCATE);
				return 6;
			}

			if (filter.length() > 0)
			{
				res = pstr->env->Invoke(filter.c_str(), res);

				pstr->clp = res.AsClip();
				VideoInfo infh = pstr->clp->GetVideoInfo();

				if ((sample_type_req == SAMPLE_FLOAT && infh.sample_type != SAMPLE_FLOAT) ||
					(sample_type_req == SAMPLE_INT32 && infh.sample_type != SAMPLE_INT32) ||
					(sample_type_req == SAMPLE_INT24 && infh.sample_type != SAMPLE_INT24) ||
					(sample_type_req == SAMPLE_INT16 && infh.sample_type != SAMPLE_INT16) ||
					(sample_type_req == SAMPLE_INT8 && infh.sample_type != SAMPLE_INT8))
				{
					strncpy_s(pstr->err, ERRMSG_LEN, err_string.c_str(), _TRUNCATE);
					return 6;
				}
			}
		}

		inf = pstr->clp->GetVideoInfo();
		if (vi != NULL) {
			vi->width   = inf.width;
			vi->height  = inf.height;
			vi->raten   = inf.fps_numerator;
			vi->rated   = inf.fps_denominator;
			vi->field_based = (inf.IsFieldBased()) ? 1 : 0;
			vi->first_field = (inf.IsTFF()) ? 1 : (inf.IsBFF()) ? 2 : 0;
			vi->num_frames = inf.num_frames;

			//if (vi->pixel_type == 0) vi->pixel_type = inf.pixel_type;
			//if (vi->sample_type == 0) vi->sample_type = inf.sample_type;
			vi->pixel_type = inf.pixel_type;
			vi->sample_type = inf.sample_type;

			vi->audio_samples_per_second = inf.audio_samples_per_second;
			vi->num_audio_samples        = inf.num_audio_samples;
			vi->nchannels                = inf.nchannels;
		}

		// Do we even need this res at all?!
		if(pstr->res) delete pstr->res;
		pstr->res = new AVSValue(res);

		pstr->err[0] = 0;
		return 0;
	}
	catch (const AvisynthError &err)
	{
		strncpy_s(pstr->err, ERRMSG_LEN, err.msg, _TRUNCATE);
		return AVS_GERROR;
	}
}
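A sketch of how a host application might call this entry point, assuming the wrapper header declares SafeStruct, AVSDLLVideoInfo and the MT_* constants used above; the script path and the cleanup step are illustrative only.

// Hypothetical caller (sketch): imports a script through dimzon_avs_init and
// prints the negotiated video parameters reported back in AVSDLLVideoInfo.
#include <cstdio>

void open_script_example() {
	SafeStruct* handle = NULL;
	AVSDLLVideoInfo info = {};        // pixel_type/sample_type 0 => leave the clip as is
	info.mt_import = MT_DISABLED;     // have the wrapper call SetMTMode(0) before the import

	int rc = dimzon_avs_init(&handle, "Import", "C:\\clip.avs", &info);
	if (rc != 0) {
		fprintf(stderr, "AviSynth error %d: %s\n", rc, handle ? handle->err : "(no handle)");
		return;
	}
	printf("%dx%d, %d frames at %d/%d fps\n",
	       info.width, info.height, info.num_frames, info.raten, info.rated);
	// ... read frames/audio here, then release 'handle' with the matching cleanup call ...
}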