void FFMS_VideoSource::SetOutputFormat(const AVPixelFormat *TargetFormats, int Width, int Height, int Resizer) {
	TargetWidth = Width;
	TargetHeight = Height;
	TargetResizer = Resizer;
	TargetPixelFormats.clear();
	while (*TargetFormats != FFMS_PIX_FMT(NONE))
		TargetPixelFormats.push_back(*TargetFormats++);
	OutputFormat = FFMS_PIX_FMT(NONE);

	ReAdjustOutputFormat();
	OutputFrame(DecodeFrame);
}
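// A minimal usage sketch (hypothetical caller code, not part of this file):
// the TargetFormats array passed to SetOutputFormat must be terminated with
// AV_PIX_FMT_NONE, e.g.:
//   const AVPixelFormat Formats[] = { AV_PIX_FMT_YUV420P, AV_PIX_FMT_NONE };
//   Source->SetOutputFormat(Formats, Width, Height, FFMS_RESIZER_BICUBIC);

// handle_jpeg: maps the deprecated full-range "J" pixel formats to their
// standard equivalents and reports AVCOL_RANGE_JPEG so the range information
// is not lost; all other formats are passed through unchanged.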
static AVColorRange handle_jpeg(AVPixelFormat *format) {
	switch (*format) {
		case FFMS_PIX_FMT(YUVJ420P): *format = FFMS_PIX_FMT(YUV420P); return AVCOL_RANGE_JPEG;
		case FFMS_PIX_FMT(YUVJ422P): *format = FFMS_PIX_FMT(YUV422P); return AVCOL_RANGE_JPEG;
		case FFMS_PIX_FMT(YUVJ444P): *format = FFMS_PIX_FMT(YUV444P); return AVCOL_RANGE_JPEG;
		case FFMS_PIX_FMT(YUVJ440P): *format = FFMS_PIX_FMT(YUV440P); return AVCOL_RANGE_JPEG;
		default:                                                      return AVCOL_RANGE_UNSPECIFIED;
	}
}
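// Drops any caller-supplied input format override and re-detects the input
// properties from the codec context.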
void FFMS_VideoSource::ResetInputFormat() {
	InputFormatOverridden = false;
	InputFormat = FFMS_PIX_FMT(NONE);
	InputColorSpace = AVCOL_SPC_UNSPECIFIED;
	InputColorRange = AVCOL_RANGE_UNSPECIFIED;

	ReAdjustOutputFormat();
	OutputFrame(DecodeFrame);
}
void FFMS_VideoSource::SetVideoProperties() {
	VP.RFFDenominator = CodecContext->time_base.num;
	VP.RFFNumerator = CodecContext->time_base.den;
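	// H.264's codec time base typically counts fields (two ticks per frame),
	// so halve the reported rate here to get a per-frame RFF time base.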
	if (CodecContext->codec_id == FFMS_ID(H264)) {
		if (VP.RFFNumerator & 1)
			VP.RFFDenominator *= 2;
		else
			VP.RFFNumerator /= 2;
	}
	VP.NumFrames = Frames.VisibleFrameCount();
	VP.TopFieldFirst = DecodeFrame->top_field_first;
	VP.ColorSpace = CodecContext->colorspace;
	VP.ColorRange = CodecContext->color_range;
	// These pixel formats are deprecated but still in use
	if (CodecContext->pix_fmt == FFMS_PIX_FMT(YUVJ420P) ||
		CodecContext->pix_fmt == FFMS_PIX_FMT(YUVJ422P) ||
		CodecContext->pix_fmt == FFMS_PIX_FMT(YUVJ444P)
		)
		VP.ColorRange = AVCOL_RANGE_JPEG;
	VP.FirstTime = ((Frames.front().PTS * Frames.TB.Num) / (double)Frames.TB.Den) / 1000;
	VP.LastTime = ((Frames.back().PTS * Frames.TB.Num) / (double)Frames.TB.Den) / 1000;

	if (CodecContext->width <= 0 || CodecContext->height <= 0)
		throw FFMS_Exception(FFMS_ERROR_DECODING, FFMS_ERROR_CODEC,
			"Codec returned zero size video");

	// attempt to correct framerate to the proper NTSC fraction, if applicable
	CorrectRationalFramerate(&VP.FPSNumerator, &VP.FPSDenominator);
	// correct the timebase, if necessary
	CorrectTimebase(&VP, &Frames.TB);

	// Set AR variables
	VP.SARNum = CodecContext->sample_aspect_ratio.num;
	VP.SARDen = CodecContext->sample_aspect_ratio.den;

	// Set input and output formats now that we have a CodecContext
	DetectInputFormat();

	OutputFormat = InputFormat;
	OutputColorSpace = InputColorSpace;
	OutputColorRange = InputColorRange;
}
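// Picks the destination format that loses the least relative to Src, ranked
// (roughly) by color-format family, chroma undersampling, bit-depth
// difference, and finally chroma oversampling, as computed by CalculateLoss().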
AVPixelFormat FindBestPixelFormat(const std::vector<AVPixelFormat> &Dsts, AVPixelFormat Src) {
	// some trivial special cases to make sure there's as little conversion as possible
	if (Dsts.empty())
		return FFMS_PIX_FMT(NONE);
	if (Dsts.size() == 1)
		return Dsts[0];

	// is the input in the output?
	auto i = std::find(Dsts.begin(), Dsts.end(), Src);
	if (i != Dsts.end())
		return Src;

	// If it's an evil paletted format pretend it's normal RGB when calculating loss
	if (Src == FFMS_PIX_FMT(PAL8))
		Src = FFMS_PIX_FMT(RGB32);

	i = Dsts.begin();
	LossAttributes Loss = CalculateLoss(*i++, Src);
	for (; i != Dsts.end(); ++i) {
		LossAttributes CLoss = CalculateLoss(*i, Src);
		if (Loss.CSLoss >= 3 && CLoss.CSLoss < Loss.CSLoss) { // favor the same color format output
			Loss = CLoss;
		} else if (Loss.DepthDifference >= 0 && CLoss.DepthDifference >= 0) { // focus on chroma undersampling and conversion loss once the target depth has been reached
			if ((CLoss.ChromaUndersampling < Loss.ChromaUndersampling)
				|| (CLoss.ChromaUndersampling == Loss.ChromaUndersampling && CLoss.CSLoss < Loss.CSLoss)
				|| (CLoss.ChromaUndersampling == Loss.ChromaUndersampling && CLoss.CSLoss == Loss.CSLoss && CLoss.DepthDifference < Loss.DepthDifference)
				|| (CLoss.ChromaUndersampling == Loss.ChromaUndersampling && CLoss.CSLoss == Loss.CSLoss
					&& CLoss.DepthDifference == Loss.DepthDifference && CLoss.ChromaOversampling < Loss.ChromaOversampling))
				Loss = CLoss;
		} else { // put priority on reaching the same depth as the input
			if ((CLoss.DepthDifference > Loss.DepthDifference)
				|| (CLoss.DepthDifference == Loss.DepthDifference && CLoss.ChromaUndersampling < Loss.ChromaUndersampling)
				|| (CLoss.DepthDifference == Loss.DepthDifference && CLoss.ChromaUndersampling == Loss.ChromaUndersampling && CLoss.CSLoss < Loss.CSLoss)
				|| (CLoss.DepthDifference == Loss.DepthDifference && CLoss.ChromaUndersampling == Loss.ChromaUndersampling
					&& CLoss.CSLoss == Loss.CSLoss && CLoss.ChromaOversampling < Loss.ChromaOversampling))
				Loss = CLoss;
		}
	}

	return Loss.Format;
}
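// Converts (or, when no conversion is needed, aliases) the decoded AVFrame
// into LocalFrame, rebuilding the output path first if the frame geometry or
// pixel format changed mid-stream.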
FFMS_Frame *FFMS_VideoSource::OutputFrame(AVFrame *Frame) {
	SanityCheckFrameForData(Frame);

	if (LastFrameWidth != CodecContext->width || LastFrameHeight != CodecContext->height || LastFramePixelFormat != CodecContext->pix_fmt) {
		if (TargetHeight > 0 && TargetWidth > 0 && !TargetPixelFormats.empty()) {
			if (!InputFormatOverridden) {
				InputFormat = FFMS_PIX_FMT(NONE);
				InputColorSpace = AVCOL_SPC_UNSPECIFIED;
				InputColorRange = AVCOL_RANGE_UNSPECIFIED;
			}

			ReAdjustOutputFormat();
		}
	}

	if (SWS) {
		sws_scale(SWS, Frame->data, Frame->linesize, 0, CodecContext->height, SWSFrameData, SWSFrameLinesize);
		CopyAVFrameFields(SWSFrameData, SWSFrameLinesize, LocalFrame);
	} else {
		// Special case to avoid ugly casts
		for (int i = 0; i < 4; i++) {
			LocalFrame.Data[i] = Frame->data[i];
			LocalFrame.Linesize[i] = Frame->linesize[i];
		}
	}

	LocalFrame.EncodedWidth = CodecContext->width;
	LocalFrame.EncodedHeight = CodecContext->height;
	LocalFrame.EncodedPixelFormat = CodecContext->pix_fmt;
	LocalFrame.ScaledWidth = TargetWidth;
	LocalFrame.ScaledHeight = TargetHeight;
	LocalFrame.ConvertedPixelFormat = OutputFormat;
	LocalFrame.KeyFrame = Frame->key_frame;
	LocalFrame.PictType = av_get_picture_type_char(Frame->pict_type);
	LocalFrame.RepeatPict = Frame->repeat_pict;
	LocalFrame.InterlacedFrame = Frame->interlaced_frame;
	LocalFrame.TopFieldFirst = Frame->top_field_first;
	LocalFrame.ColorSpace = OutputColorSpace;
	LocalFrame.ColorRange = OutputColorRange;
	LocalFrame.ColorPrimaries = CodecContext->color_primaries;
	LocalFrame.TransferCharateristics = CodecContext->color_trc;
	LocalFrame.ChromaLocation = CodecContext->chroma_sample_location;

	LastFrameHeight = CodecContext->height;
	LastFrameWidth = CodecContext->width;
	LastFramePixelFormat = CodecContext->pix_fmt;

	return &LocalFrame;
}
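// Overrides the detected input properties; passing AV_PIX_FMT_NONE or the
// UNSPECIFIED color values leaves the corresponding property unchanged.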
void FFMS_VideoSource::SetInputFormat(int ColorSpace, int ColorRange, AVPixelFormat Format) {
	InputFormatOverridden = true;

	if (Format != FFMS_PIX_FMT(NONE))
		InputFormat = Format;
	if (ColorRange != AVCOL_RANGE_UNSPECIFIED)
		InputColorRange = (AVColorRange)ColorRange;
	if (ColorSpace != AVCOL_SPC_UNSPECIFIED)
		InputColorSpace = (AVColorSpace)ColorSpace;

	if (!TargetPixelFormats.empty()) {
		ReAdjustOutputFormat();
		OutputFrame(DecodeFrame);
	}
}
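// Re-derives the output format from the current input properties and target
// list, and sets up a swscale context plus a conversion buffer whenever the
// output differs from the input in format, size, color space, or range.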
void FFMS_VideoSource::ReAdjustOutputFormat() {
	if (SWS) {
		sws_freeContext(SWS);
		SWS = nullptr;
	}

	DetectInputFormat();

	OutputFormat = FindBestPixelFormat(TargetPixelFormats, InputFormat);
	if (OutputFormat == FFMS_PIX_FMT(NONE)) {
		ResetOutputFormat();
		throw FFMS_Exception(FFMS_ERROR_SCALING, FFMS_ERROR_INVALID_ARGUMENT,
			"No suitable output format found");
	}

	OutputColorRange = handle_jpeg(&OutputFormat);
	if (OutputColorRange == AVCOL_RANGE_UNSPECIFIED)
		OutputColorRange = CodecContext->color_range;
	if (OutputColorRange == AVCOL_RANGE_UNSPECIFIED)
		OutputColorRange = InputColorRange;

	OutputColorSpace = CodecContext->colorspace;
	if (OutputColorSpace == AVCOL_SPC_UNSPECIFIED)
		OutputColorSpace = InputColorSpace;

	if (InputFormat != OutputFormat ||
		TargetWidth != CodecContext->width ||
		TargetHeight != CodecContext->height ||
		InputColorSpace != OutputColorSpace ||
		InputColorRange != OutputColorRange)
	{
		SWS = GetSwsContext(
			CodecContext->width, CodecContext->height, InputFormat, InputColorSpace, InputColorRange,
			TargetWidth, TargetHeight, OutputFormat, OutputColorSpace, OutputColorRange,
			TargetResizer);

		if (!SWS) {
			ResetOutputFormat();
			throw FFMS_Exception(FFMS_ERROR_SCALING, FFMS_ERROR_INVALID_ARGUMENT,
				"Failed to allocate SWScale context");
		}
	}

	av_freep(&SWSFrameData[0]);
	if (av_image_alloc(SWSFrameData, SWSFrameLinesize, TargetWidth, TargetHeight, OutputFormat, 4) < 0)
		throw FFMS_Exception(FFMS_ERROR_SCALING, FFMS_ERROR_ALLOCATION_FAILED,
			"Could not allocate frame with new resolution.");
}
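// Clears the target/output state so frames are returned in their native
// format again.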
void FFMS_VideoSource::ResetOutputFormat() {
	if (SWS) {
		sws_freeContext(SWS);
		SWS = nullptr;
	}

	TargetWidth = -1;
	TargetHeight = -1;
	TargetPixelFormats.clear();

	OutputFormat = FFMS_PIX_FMT(NONE);
	OutputColorSpace = AVCOL_SPC_UNSPECIFIED;
	OutputColorRange = AVCOL_RANGE_UNSPECIFIED;

	OutputFrame(DecodeFrame);
}
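// Fallback chain for the input properties: the format comes from the codec
// context; the color range from the J format (if any), then the codec, then
// AVCOL_RANGE_MPEG as a last resort; the color space from the codec context.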
void FFMS_VideoSource::DetectInputFormat() {
	if (InputFormat == FFMS_PIX_FMT(NONE))
		InputFormat = CodecContext->pix_fmt;

	AVColorRange RangeFromFormat = handle_jpeg(&InputFormat);

	if (InputColorRange == AVCOL_RANGE_UNSPECIFIED)
		InputColorRange = RangeFromFormat;
	if (InputColorRange == AVCOL_RANGE_UNSPECIFIED)
		InputColorRange = CodecContext->color_range;
	if (InputColorRange == AVCOL_RANGE_UNSPECIFIED)
		InputColorRange = AVCOL_RANGE_MPEG;

	if (InputColorSpace == AVCOL_SPC_UNSPECIFIED)
		InputColorSpace = CodecContext->colorspace;
}
// File: avssources.cpp  Project: jeeb/ffms2
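// Maps an Avisynth colorspace name to an FFmpeg pixel format; an empty name
// yields Default, an unknown name yields AV_PIX_FMT_NONE. High-bit-depth
// names are only accepted when HighBitDepth is set.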
static AVPixelFormat CSNameToPIXFMT(const char *CSName, AVPixelFormat Default, bool HighBitDepth) {
	if (!CSName)
		return FFMS_PIX_FMT(NONE);
	std::string s = CSName;
	std::transform(s.begin(), s.end(), s.begin(), toupper);
	if (s == "")
		return Default;
	if (s == "YUV9" || s == "YUV410P8")
		return FFMS_PIX_FMT(YUV410P);
	if (s == "YV411" || s == "YUV411P8")
		return FFMS_PIX_FMT(YUV411P);
	if (s == "YV12" || s == "YUV420P8")
		return FFMS_PIX_FMT(YUV420P);
	if (s == "YV16" || s == "YUV422P8")
		return FFMS_PIX_FMT(YUV422P);
	if (s == "YV24" || s == "YUV444P8")
		return FFMS_PIX_FMT(YUV444P);
	if (s == "Y8" || s == "GRAY8")
		return FFMS_PIX_FMT(GRAY8);
	if (s == "YUY2")
		return FFMS_PIX_FMT(YUYV422);
	if (s == "RGB24")
		return FFMS_PIX_FMT(BGR24);
	if (s == "RGB32")
		return FFMS_PIX_FMT(RGB32);
    if (HighBitDepth) {
        if (s == "YUVA420P8")
            return FFMS_PIX_FMT(YUVA420P);
        if (s == "YUVA422P8")
            return FFMS_PIX_FMT(YUVA422P);
        if (s == "YUVA444P8")
            return FFMS_PIX_FMT(YUVA444P);
        if (s == "YUV420P16")
            return FFMS_PIX_FMT(YUV420P16);
        if (s == "YUVA420P16")
            return FFMS_PIX_FMT(YUVA420P16);
        if (s == "YUV422P16")
            return FFMS_PIX_FMT(YUV422P16);
        if (s == "YUVA422P16")
            return FFMS_PIX_FMT(YUVA422P16);
        if (s == "YUV444P16")
            return FFMS_PIX_FMT(YUV444P16);
        if (s == "YUVA444P16")
            return FFMS_PIX_FMT(YUVA444P16);
        if (s == "YUV420P10")
            return FFMS_PIX_FMT(YUV420P10);
        if (s == "YUVA420P10")
            return FFMS_PIX_FMT(YUVA420P10);
        if (s == "YUV422P10")
            return FFMS_PIX_FMT(YUV422P10);
        if (s == "YUVA422P10")
            return FFMS_PIX_FMT(YUVA422P10);
        if (s == "YUV444P10")
            return FFMS_PIX_FMT(YUV444P10);
        if (s == "YUVA444P10")
            return FFMS_PIX_FMT(YUVA444P10);
        if (s == "RGBP16")
            return FFMS_PIX_FMT(GBRP16);
        if (s == "RGBAP16")
            return FFMS_PIX_FMT(GBRAP16);
        if (s == "Y16" || s == "GRAY16")
            return FFMS_PIX_FMT(GRAY16);
    }

	return FFMS_PIX_FMT(NONE);
}
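// Builds the candidate output list, lets FFMS pick the best match in a first
// FFMS_SetOutputFormatV2() pass, locks that single format in with a second
// pass, and finally translates it into the corresponding Avisynth pixel type.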
void AvisynthVideoSource::InitOutputFormat(
	int ResizeToWidth, int ResizeToHeight, const char *ResizerName,
	const char *ConvertToFormatName, IScriptEnvironment *Env) {

	ErrorInfo E;
	const FFMS_VideoProperties *VP = FFMS_GetVideoProperties(V);
	const FFMS_Frame *F = FFMS_GetFrame(V, 0, &E);
	if (!F)
		Env->ThrowError("FFVideoSource: %s", E.Buffer);

    std::vector<int> TargetFormats;
    if (HighBitDepth) {
        TargetFormats.push_back(FFMS_GetPixFmt("yuv420p16"));
        TargetFormats.push_back(FFMS_GetPixFmt("yuva420p16"));
        TargetFormats.push_back(FFMS_GetPixFmt("yuv422p16"));
        TargetFormats.push_back(FFMS_GetPixFmt("yuva422p16"));
        TargetFormats.push_back(FFMS_GetPixFmt("yuv444p16"));
        TargetFormats.push_back(FFMS_GetPixFmt("yuva444p16"));
        TargetFormats.push_back(FFMS_GetPixFmt("yuv420p10"));
        TargetFormats.push_back(FFMS_GetPixFmt("yuva420p10"));
        TargetFormats.push_back(FFMS_GetPixFmt("yuv422p10"));
        TargetFormats.push_back(FFMS_GetPixFmt("yuva422p10"));
        TargetFormats.push_back(FFMS_GetPixFmt("yuv444p10"));
        TargetFormats.push_back(FFMS_GetPixFmt("yuva444p10"));
        TargetFormats.push_back(FFMS_GetPixFmt("gbrp16"));
        TargetFormats.push_back(FFMS_GetPixFmt("gbrap16"));
        TargetFormats.push_back(FFMS_GetPixFmt("gray16"));
        TargetFormats.push_back(FFMS_GetPixFmt("yuva420p"));
        TargetFormats.push_back(FFMS_GetPixFmt("yuva422p"));
        TargetFormats.push_back(FFMS_GetPixFmt("yuva444p"));
    }
    TargetFormats.push_back(FFMS_GetPixFmt("yuv410p"));
    TargetFormats.push_back(FFMS_GetPixFmt("yuv411p"));
    TargetFormats.push_back(FFMS_GetPixFmt("yuv420p"));
    TargetFormats.push_back(FFMS_GetPixFmt("yuv422p"));
    TargetFormats.push_back(FFMS_GetPixFmt("yuv444p"));
    TargetFormats.push_back(FFMS_GetPixFmt("gray8"));
    TargetFormats.push_back(FFMS_GetPixFmt("yuyv422"));
    TargetFormats.push_back(FFMS_GetPixFmt("bgra"));

    // Remove unsupported formats (FFMS_GetPixFmt returns -1 for them) from the list so they are not mistaken for the -1 terminator
    TargetFormats.erase(std::remove(TargetFormats.begin(), TargetFormats.end(), -1), TargetFormats.end());

    TargetFormats.push_back(-1);

	// FFMS_PIX_FMT(NV21) is (mis)used here as a sentinel: a valid value that CSNameToPIXFMT can never return for a real colorspace name
	AVPixelFormat TargetPixelFormat = CSNameToPIXFMT(ConvertToFormatName, FFMS_PIX_FMT(NV21), HighBitDepth);
	if (TargetPixelFormat == FFMS_PIX_FMT(NONE))
		Env->ThrowError("FFVideoSource: Invalid colorspace name specified");

	if (TargetPixelFormat != FFMS_PIX_FMT(NV21)) {
        TargetFormats.clear();
        TargetFormats.push_back(TargetPixelFormat);
        TargetFormats.push_back(-1);
	}

	if (ResizeToWidth <= 0)
		ResizeToWidth = F->EncodedWidth;

	if (ResizeToHeight <= 0)
		ResizeToHeight = F->EncodedHeight;

	int Resizer = ResizerNameToSWSResizer(ResizerName);
	if (Resizer == 0)
		Env->ThrowError("FFVideoSource: Invalid resizer name specified");

	if (FFMS_SetOutputFormatV2(V, TargetFormats.data(),
		ResizeToWidth, ResizeToHeight, Resizer, &E))
		Env->ThrowError("FFVideoSource: No suitable output format found");

	F = FFMS_GetFrame(V, 0, &E);
	if (!F)
		Env->ThrowError("FFVideoSource: %s", E.Buffer);
    TargetFormats.clear();
    TargetFormats.push_back(F->ConvertedPixelFormat);
    TargetFormats.push_back(-1);

	// This trick is needed: the first FFMS_SetOutputFormatV2() call picked the "best" default format; now set only that format as the output
	if (FFMS_SetOutputFormatV2(V, TargetFormats.data(),
		ResizeToWidth, ResizeToHeight, Resizer, &E))
		Env->ThrowError("FFVideoSource: No suitable output format found");

	F = FFMS_GetFrame(V, 0, &E);
	if (!F)
		Env->ThrowError("FFVideoSource: %s", E.Buffer);

	if (F->ConvertedPixelFormat == FFMS_GetPixFmt("yuvj420p") || F->ConvertedPixelFormat == FFMS_GetPixFmt("yuv420p"))
		VI.pixel_type = VideoInfo::CS_I420;
    else if (F->ConvertedPixelFormat == FFMS_GetPixFmt("yuva420p"))
        VI.pixel_type = VideoInfo::CS_YUVA420;
    else if (F->ConvertedPixelFormat == FFMS_GetPixFmt("yuvj422p") || F->ConvertedPixelFormat == FFMS_GetPixFmt("yuv422p"))
        VI.pixel_type = VideoInfo::CS_YV16;
    else if (F->ConvertedPixelFormat == FFMS_GetPixFmt("yuva422p"))
        VI.pixel_type = VideoInfo::CS_YUVA422;
    else if (F->ConvertedPixelFormat == FFMS_GetPixFmt("yuvj444p") || F->ConvertedPixelFormat == FFMS_GetPixFmt("yuv444p"))
        VI.pixel_type = VideoInfo::CS_YV24;
    else if (F->ConvertedPixelFormat == FFMS_GetPixFmt("yuva444p"))
        VI.pixel_type = VideoInfo::CS_YUVA444;
    else if (F->ConvertedPixelFormat == FFMS_GetPixFmt("yuv411p"))
        VI.pixel_type = VideoInfo::CS_YV411;
    else if (F->ConvertedPixelFormat == FFMS_GetPixFmt("yuv410p"))
        VI.pixel_type = VideoInfo::CS_YUV9;
    else if (F->ConvertedPixelFormat == FFMS_GetPixFmt("gray8"))
        VI.pixel_type = VideoInfo::CS_Y8;
	else if (F->ConvertedPixelFormat == FFMS_GetPixFmt("yuyv422"))
		VI.pixel_type = VideoInfo::CS_YUY2;
	else if (F->ConvertedPixelFormat == FFMS_GetPixFmt("rgb32"))
		VI.pixel_type = VideoInfo::CS_BGR32;
	else if (F->ConvertedPixelFormat == FFMS_GetPixFmt("bgr24"))
		VI.pixel_type = VideoInfo::CS_BGR24;
    else if (F->ConvertedPixelFormat == FFMS_GetPixFmt("yuv420p16"))
        VI.pixel_type = VideoInfo::CS_YUV420P16;
    else if (F->ConvertedPixelFormat == FFMS_GetPixFmt("yuva420p16"))
        VI.pixel_type = VideoInfo::CS_YUVA420P16;
    else if (F->ConvertedPixelFormat == FFMS_GetPixFmt("yuv422p16"))
        VI.pixel_type = VideoInfo::CS_YUV422P16;
    else if (F->ConvertedPixelFormat == FFMS_GetPixFmt("yuva422p16"))
        VI.pixel_type = VideoInfo::CS_YUVA422P16;
    else if (F->ConvertedPixelFormat == FFMS_GetPixFmt("yuv444p16"))
        VI.pixel_type = VideoInfo::CS_YUV444P16;
    else if (F->ConvertedPixelFormat == FFMS_GetPixFmt("yuva444p16"))
        VI.pixel_type = VideoInfo::CS_YUVA444P16;
    else if (F->ConvertedPixelFormat == FFMS_GetPixFmt("yuv420p10"))
        VI.pixel_type = VideoInfo::CS_YUV420P10;
    else if (F->ConvertedPixelFormat == FFMS_GetPixFmt("yuva420p10"))
        VI.pixel_type = VideoInfo::CS_YUVA420P10;
    else if (F->ConvertedPixelFormat == FFMS_GetPixFmt("yuv422p10"))
        VI.pixel_type = VideoInfo::CS_YUV422P10;
    else if (F->ConvertedPixelFormat == FFMS_GetPixFmt("yuva422p10"))
        VI.pixel_type = VideoInfo::CS_YUVA422P10;
    else if (F->ConvertedPixelFormat == FFMS_GetPixFmt("yuv444p10"))
        VI.pixel_type = VideoInfo::CS_YUV444P10;
    else if (F->ConvertedPixelFormat == FFMS_GetPixFmt("yuva444p10"))
        VI.pixel_type = VideoInfo::CS_YUVA444P10;
    else if (F->ConvertedPixelFormat == FFMS_GetPixFmt("gbrp16"))
        VI.pixel_type = VideoInfo::CS_RGBP16;
    else if (F->ConvertedPixelFormat == FFMS_GetPixFmt("gbrap16"))
        VI.pixel_type = VideoInfo::CS_RGBAP16;
    else if (F->ConvertedPixelFormat == FFMS_GetPixFmt("gray16"))
        VI.pixel_type = VideoInfo::CS_Y16;
	else
		Env->ThrowError("FFVideoSource: No suitable output format found");

	if (RFFMode > 0 && ResizeToHeight != F->EncodedHeight)
		Env->ThrowError("FFVideoSource: Vertical scaling not allowed in RFF mode");

	if (RFFMode > 0 && TargetPixelFormat != FFMS_PIX_FMT(NV21))
		Env->ThrowError("FFVideoSource: Only the default output colorspace can be used in RFF mode");

	// set color information variables
	Env->SetVar(Env->Sprintf("%s%s", this->VarPrefix, "FFCOLOR_SPACE"), F->ColorSpace);
	Env->SetVar(Env->Sprintf("%s%s", this->VarPrefix, "FFCOLOR_RANGE"), F->ColorRange);

	if (VP->TopFieldFirst)
		VI.image_type = VideoInfo::IT_TFF;
	else
		VI.image_type = VideoInfo::IT_BFF;

	VI.width = F->ScaledWidth;
	VI.height = F->ScaledHeight;

	// Crop to obey subsampling width/height requirements
    VI.width -= VI.width % (1 << GetSubSamplingW(VI));
    VI.height -= VI.height % (1 << (GetSubSamplingH(VI) + (RFFMode > 0 ? 1 : 0)));
}
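// Validates the selected track against the index (bounds, track type,
// non-empty, file signature) and initializes the decode state with
// "no output format set" defaults.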
FFMS_VideoSource::FFMS_VideoSource(const char *SourceFile, FFMS_Index &Index, int Track, int Threads)
: Index(Index)
, CodecContext(nullptr)
{
	if (Track < 0 || Track >= static_cast<int>(Index.size()))
		throw FFMS_Exception(FFMS_ERROR_INDEX, FFMS_ERROR_INVALID_ARGUMENT,
			"Out of bounds track index selected");

	if (Index[Track].TT != FFMS_TYPE_VIDEO)
		throw FFMS_Exception(FFMS_ERROR_INDEX, FFMS_ERROR_INVALID_ARGUMENT,
			"Not a video track");

	if (Index[Track].empty())
		throw FFMS_Exception(FFMS_ERROR_INDEX, FFMS_ERROR_INVALID_ARGUMENT,
			"Video track contains no frames");

	if (!Index.CompareFileSignature(SourceFile))
		throw FFMS_Exception(FFMS_ERROR_INDEX, FFMS_ERROR_FILE_MISMATCH,
			"The index does not match the source file");

	Frames = Index[Track];
	VideoTrack = Track;

	VP = {};
	LocalFrame = {};
	SWS = nullptr;
	LastFrameNum = 0;
	CurrentFrame = 1;
	DelayCounter = 0;
	InitialDecode = 1;

	LastFrameHeight = -1;
	LastFrameWidth = -1;
	LastFramePixelFormat = FFMS_PIX_FMT(NONE);

	TargetHeight = -1;
	TargetWidth = -1;
	TargetResizer = 0;

	OutputFormat = FFMS_PIX_FMT(NONE);
	OutputColorSpace = AVCOL_SPC_UNSPECIFIED;
	OutputColorRange = AVCOL_RANGE_UNSPECIFIED;

	InputFormatOverridden = false;
	InputFormat = FFMS_PIX_FMT(NONE);
	InputColorSpace = AVCOL_SPC_UNSPECIFIED;
	InputColorRange = AVCOL_RANGE_UNSPECIFIED;
	if (Threads < 1)
		// libav currently has issues with more than 16 threads
		DecodingThreads = (std::min)(std::thread::hardware_concurrency(), 16u);
	else
		DecodingThreads = Threads;
	DecodeFrame = av_frame_alloc();
	LastDecodedFrame = av_frame_alloc();

	// Dummy allocations so the unallocated case doesn't have to be handled later
	if (av_image_alloc(SWSFrameData, SWSFrameLinesize, 16, 16, FFMS_PIX_FMT(GRAY8), 4) < 0)
		throw FFMS_Exception(FFMS_ERROR_INDEX, FFMS_ERROR_ALLOCATION_FAILED,
			"Could not allocate dummy frame.");

	Index.AddRef();
}