/// @brief Checks if the file is an YUV4MPEG file or not /// Note that it reports the error by throwing an exception, /// not by returning a false value. void YUV4MPEGVideoProvider::CheckFileFormat() { char buf[10]; if (fread(buf, 10, 1, sf) != 1) throw VideoNotSupported("CheckFileFormat: Failed reading header"); if (strncmp("YUV4MPEG2 ", buf, 10)) throw VideoNotSupported("CheckFileFormat: File is not a YUV4MPEG file (bad magic)"); fseeko(sf, 0, SEEK_SET); }
/// @brief Get provider /// @param video /// @return /// VideoProvider *VideoProviderFactory::GetProvider(std::string video) { //XXX std::vector<std::string> list = GetClasses(OPT_GET("Video/Provider")->GetString()); std::vector<std::string> list = GetClasses("ffmpegsource"); if (video.find("?dummy") == 0) list.insert(list.begin(), "Dummy"); list.insert(list.begin(), "YUV4MPEG"); bool fileFound = false; bool fileSupported = false; std::string errors; errors.reserve(1024); for (int i = 0; i < (signed)list.size(); ++i) { std::string err; try { VideoProvider *provider = Create(list[i], video); LOG_I("manager/video/provider") << list[i] << ": opened " << video; if (provider->WantsCaching()) { return new VideoProviderCache(provider); } return provider; } catch (agi::FileNotFoundError const&) { err = list[i] + ": file not found."; // Keep trying other providers as this one may just not be able to // open a valid path } catch (VideoNotSupported const&) { fileFound = true; err = list[i] + ": video is not in a supported format."; } catch (VideoOpenError const& ex) { fileSupported = true; err = list[i] + ": " + ex.GetMessage(); } catch (agi::vfr::Error const& ex) { fileSupported = true; err = list[i] + ": " + ex.GetMessage(); } errors += err; errors += "\n"; LOG_D("manager/video/provider") << err; } // No provider could open the file LOG_E("manager/video/provider") << "Could not open " << video; std::string msg = "Could not open " + video + ":\n" + errors; if (!fileFound) throw agi::FileNotFoundError(video); if (!fileSupported) throw VideoNotSupported(msg); throw VideoOpenError(msg); }
/// @brief Get provider /// @param video /// @return /// VideoProvider *VideoProviderFactory::GetProvider(wxString video) { std::vector<std::string> list = GetClasses(OPT_GET("Video/Provider")->GetString()); if (video.StartsWith("?dummy")) list.insert(list.begin(), "Dummy"); list.insert(list.begin(), "YUV4MPEG"); bool fileFound = false; bool fileSupported = false; std::string errors; errors.reserve(1024); for (auto const& factory : list) { std::string err; try { VideoProvider *provider = Create(factory, video); LOG_I("manager/video/provider") << factory << ": opened " << from_wx(video); if (provider->WantsCaching()) { return new VideoProviderCache(provider); } return provider; } catch (agi::FileNotFoundError const&) { err = factory + ": file not found."; // Keep trying other providers as this one may just not be able to // open a valid path } catch (VideoNotSupported const&) { fileFound = true; err = factory + ": video is not in a supported format."; } catch (VideoOpenError const& ex) { fileSupported = true; err = factory + ": " + ex.GetMessage(); } catch (agi::vfr::Error const& ex) { fileSupported = true; err = factory + ": " + ex.GetMessage(); } errors += err; errors += "\n"; LOG_D("manager/video/provider") << err; } // No provider could open the file LOG_E("manager/video/provider") << "Could not open " << from_wx(video); std::string msg = "Could not open " + from_wx(video) + ":\n" + errors; if (!fileFound) throw agi::FileNotFoundError(from_wx(video)); if (!fileSupported) throw VideoNotSupported(msg); throw VideoOpenError(msg); }
std::unique_ptr<VideoProvider> VideoProviderFactory::GetProvider(agi::fs::path const& video_file, std::string const& colormatrix) { std::vector<std::string> factories = GetClasses(OPT_GET("Video/Provider")->GetString()); factories.insert(factories.begin(), "YUV4MPEG"); factories.insert(factories.begin(), "Dummy"); bool found = false; bool supported = false; std::string errors; errors.reserve(1024); for (auto const& factory : factories) { std::string err; try { auto provider = Create(factory, video_file, colormatrix); LOG_I("manager/video/provider") << factory << ": opened " << video_file; return provider->WantsCaching() ? agi::util::make_unique<VideoProviderCache>(std::move(provider)) : std::move(provider); } catch (agi::fs::FileNotFound const&) { err = "file not found."; // Keep trying other providers as this one may just not be able to // open a valid path } catch (VideoNotSupported const&) { found = true; err = "video is not in a supported format."; } catch (VideoOpenError const& ex) { supported = true; err = ex.GetMessage(); } catch (agi::vfr::Error const& ex) { supported = true; err = ex.GetMessage(); } errors += factory + ": " + err + "\n"; LOG_D("manager/video/provider") << factory << ": " << err; } // No provider could open the file LOG_E("manager/video/provider") << "Could not open " << video_file; std::string msg = "Could not open " + video_file.string() + ":\n" + errors; if (!found) throw agi::fs::FileNotFound(video_file.string()); if (!supported) throw VideoNotSupported(msg); throw VideoOpenError(msg); }
std::unique_ptr<VideoProvider> VideoProviderFactory::GetProvider(agi::fs::path const& filename, std::string const& colormatrix, agi::BackgroundRunner *br) { auto preferred = OPT_GET("Video/Provider")->GetString(); auto sorted = GetSorted(boost::make_iterator_range(std::begin(providers), std::end(providers)), preferred); bool found = false; bool supported = false; std::string errors; errors.reserve(1024); for (auto factory : sorted) { std::string err; try { auto provider = factory->create(filename, colormatrix, br); if (!provider) continue; LOG_I("manager/video/provider") << factory->name << ": opened " << filename; return provider->WantsCaching() ? CreateCacheVideoProvider(std::move(provider)) : std::move(provider); } catch (agi::fs::FileNotFound const&) { err = "file not found."; // Keep trying other providers as this one may just not be able to // open a valid path } catch (VideoNotSupported const&) { found = true; err = "video is not in a supported format."; } catch (VideoOpenError const& ex) { supported = true; err = ex.GetMessage(); } catch (agi::vfr::Error const& ex) { supported = true; err = ex.GetMessage(); } errors += std::string(factory->name) + ": " + err + "\n"; LOG_D("manager/video/provider") << factory->name << ": " << err; } // No provider could open the file LOG_E("manager/video/provider") << "Could not open " << filename; std::string msg = "Could not open " + filename.string() + ":\n" + errors; if (!found) throw agi::fs::FileNotFound(filename.string()); if (!supported) throw VideoNotSupported(msg); throw VideoOpenError(msg); }
/// Construct an Avisynth-backed video provider.
/// @param filename    Video file to open.
/// @param colormatrix Requested color matrix ("TV.601"/"TV.709"); only
///        consulted when the clip is not already RGB.
/// @throws VideoNotSupported if the script yields no usable video clip.
/// @throws VideoOpenError    on any Avisynth error.
AvisynthVideoProvider::AvisynthVideoProvider(agi::fs::path const& filename, std::string const& colormatrix) {
	agi::acs::CheckFileRead(filename);

	// Serialize all access to the shared Avisynth environment.
	std::lock_guard<std::mutex> lock(avs.GetMutex());

#ifdef _WIN32
	if (agi::fs::HasExtension(filename, "avi")) {
		// Try to read the keyframes before actually opening the file as trying
		// to open the file while it's already open can cause problems with
		// badly written VFW decoders
		AVIFileInit();

		PAVIFILE pfile;
		long hr = AVIFileOpen(&pfile, filename.c_str(), OF_SHARE_DENY_WRITE, 0);
		if (hr) {
			// Keyframe reading is best-effort: record a warning and skip it
			// rather than failing the whole open.
			warning = "Unable to open AVI file for reading keyframes:\n";
			switch (hr) {
				case AVIERR_BADFORMAT:
					warning += "The file is corrupted, incomplete or has an otherwise bad format.";
					break;
				case AVIERR_MEMORY:
					warning += "The file could not be opened because of insufficient memory.";
					break;
				case AVIERR_FILEREAD:
					warning += "An error occurred reading the file. There might be a problem with the storage media.";
					break;
				case AVIERR_FILEOPEN:
					warning += "The file could not be opened. It might be in use by another application, or you do not have permission to access it.";
					break;
				case REGDB_E_CLASSNOTREG:
					warning += "There is no handler installed for the file extension. 
This might indicate a fundamental problem in your Video for Windows installation, and can be caused by extremely stripped Windows installations.";
					break;
				default:
					warning += "Unknown error.";
					break;
			}
			goto file_exit;
		}

		PAVISTREAM ppavi;
		// NOTE: assignment inside the condition is intentional — any nonzero
		// HRESULT means the stream could not be opened.
		if (hr = AVIFileGetStream(pfile, &ppavi, streamtypeVIDEO, 0)) {
			warning = "Unable to open AVI video stream for reading keyframes:\n";
			switch (hr) {
				case AVIERR_NODATA:
					warning += "The file does not contain a usable video stream.";
					break;
				case AVIERR_MEMORY:
					warning += "Not enough memory.";
					break;
				default:
					warning += "Unknown error.";
					break;
			}
			goto file_release;
		}

		AVISTREAMINFO avis;
		if (FAILED(AVIStreamInfo(ppavi,&avis,sizeof(avis)))) {
			warning = "Unable to read keyframes from AVI file:\nCould not get stream information.";
			goto stream_release;
		}

		// Collect the frame numbers VFW marks as keyframes.
		for (size_t i = 0; i < avis.dwLength; i++) {
			if (AVIStreamIsKeyFrame(ppavi, i))
				keyframes.push_back(i);
		}

		// If every frame is a keyframe then just discard the keyframe data as it's useless
		if (keyframes.size() == (size_t)avis.dwLength)
			keyframes.clear();

		// Clean up — the goto targets above unwind exactly the handles that
		// were successfully acquired, in reverse order.
		stream_release:
		AVIStreamRelease(ppavi);
		file_release:
		AVIFileRelease(pfile);
		file_exit:
		AVIFileExit();
	}
#endif

	try {
		auto script = Open(filename);

		// Check if video was loaded properly
		if (!script.IsClip() || !script.AsClip()->GetVideoInfo().HasVideo())
			throw VideoNotSupported("No usable video found");

		vi = script.AsClip()->GetVideoInfo();
		if (!vi.IsRGB()) {
			/// @todo maybe read ColorMatrix hints for d2v files?
			AVSValue args[2] = { script, "Rec601" };
			bool force_bt601 = OPT_GET("Video/Force BT.601")->GetBool() || colormatrix == "TV.601";
			// Heuristic: resolutions beyond SD default to BT.709.
			bool bt709 = vi.width > 1024 || vi.height >= 600;
			if (bt709 && (!force_bt601 || colormatrix == "TV.709")) {
				args[1] = "Rec709";
				colorspace = "TV.709";
			}
			else
				colorspace = "TV.601";
			const char *argnames[2] = { 0, "matrix" };
			// Convert to RGB32 so downstream consumers get a fixed pixel format.
			script = avs.GetEnv()->Invoke("ConvertToRGB32", AVSValue(args, 2), argnames);
		}
		else
			colorspace = "None";

		// Cache() keeps decoded frames around inside Avisynth.
		RGB32Video = avs.GetEnv()->Invoke("Cache", script).AsClip();
		vi = RGB32Video->GetVideoInfo();
		fps = (double)vi.fps_numerator / vi.fps_denominator;
	}
	catch (AvisynthError const& err) {
		throw VideoOpenError("Avisynth error: " + std::string(err.msg));
	}
}
/// @brief Build an Avisynth clip for the given file, trying source filters
///        from most to least preferred for its extension.
/// @param filename File to open.
/// @return AVSValue holding the opened clip/script result.
/// @throws VideoNotSupported when no suitable source function exists.
/// Side effects: sets decoder_name, and may set warning (DirectShowSource).
AVSValue AvisynthVideoProvider::Open(agi::fs::path const& filename) {
	IScriptEnvironment *env = avs.GetEnv();
	// ShortName presumably yields an 8.3 path so Avisynth can cope with
	// non-ANSI file names — TODO confirm against agi::fs::ShortName.
	char *videoFilename = env->SaveString(agi::fs::ShortName(filename).c_str());

	// Avisynth file, just import it
	if (agi::fs::HasExtension(filename, "avs")) {
		LOG_I("avisynth/video") << "Opening .avs file with Import";
		decoder_name = "Avisynth/Import";
		return env->Invoke("Import", videoFilename);
	}

	// Open avi file with AviSource
	if (agi::fs::HasExtension(filename, "avi")) {
		LOG_I("avisynth/video") << "Opening .avi file with AviSource";
		try {
			const char *argnames[2] = { 0, "audio" };
			AVSValue args[2] = { videoFilename, false };
			decoder_name = "Avisynth/AviSource";
			return env->Invoke("AviSource", AVSValue(args,2), argnames);
		}
		// On Failure, fallback to DSS
		catch (AvisynthError &err) {
			LOG_E("avisynth/video") << err.msg;
			LOG_I("avisynth/video") << "Failed to open .avi file with AviSource, trying DirectShowSource";
		}
	}

	// Open d2v with mpeg2dec3
	if (agi::fs::HasExtension(filename, "d2v") && env->FunctionExists("Mpeg2Dec3_Mpeg2Source")) {
		LOG_I("avisynth/video") << "Opening .d2v file with Mpeg2Dec3_Mpeg2Source";
		auto script = env->Invoke("Mpeg2Dec3_Mpeg2Source", videoFilename);
		decoder_name = "Avisynth/Mpeg2Dec3_Mpeg2Source";

		//if avisynth is 2.5.7 beta 2 or newer old mpeg2decs will crash without this
		if (env->FunctionExists("SetPlanarLegacyAlignment")) {
			AVSValue args[2] = { script, true };
			script = env->Invoke("SetPlanarLegacyAlignment", AVSValue(args,2));
		}
		return script;
	}

	// If that fails, try opening it with DGDecode
	if (agi::fs::HasExtension(filename, "d2v") && env->FunctionExists("DGDecode_Mpeg2Source")) {
		LOG_I("avisynth/video") << "Opening .d2v file with DGDecode_Mpeg2Source";
		decoder_name = "Avisynth/DGDecode_Mpeg2Source";
		// BUGFIX: the decoder label and the Invoke() target used to be
		// swapped, so the non-existent function "Avisynth/Mpeg2Source" was
		// invoked here. Invoke the filter we just probed for instead, and
		// label it following the "Avisynth/<function>" convention used by
		// every other branch of this method.
		return env->Invoke("DGDecode_Mpeg2Source", videoFilename);
		//note that DGDecode will also have issues like if the version is too
		// ancient but no sane person would use that anyway
	}

	// Generic Mpeg2Source as a last resort for d2v files
	if (agi::fs::HasExtension(filename, "d2v") && env->FunctionExists("Mpeg2Source")) {
		LOG_I("avisynth/video") << "Opening .d2v file with other Mpeg2Source";
		AVSValue script = env->Invoke("Mpeg2Source", videoFilename);
		decoder_name = "Avisynth/Mpeg2Source";

		//if avisynth is 2.5.7 beta 2 or newer old mpeg2decs will crash without this
		if (env->FunctionExists("SetPlanarLegacyAlignment"))
			script = env->Invoke("SetPlanarLegacyAlignment", script);
		return script;
	}

	// Try loading DirectShowSource2
	if (!env->FunctionExists("dss2")) {
		auto dss2path(config::path->Decode("?data/avss.dll"));
		if (agi::fs::FileExists(dss2path))
			env->Invoke("LoadPlugin", env->SaveString(agi::fs::ShortName(dss2path).c_str()));
	}

	// If DSS2 loaded properly, try using it
	if (env->FunctionExists("dss2")) {
		LOG_I("avisynth/video") << "Opening file with DSS2";
		decoder_name = "Avisynth/DSS2";
		return env->Invoke("DSS2", videoFilename);
	}

	// Try DirectShowSource
	// Load DirectShowSource.dll from app dir if it exists
	auto dsspath(config::path->Decode("?data/DirectShowSource.dll"));
	if (agi::fs::FileExists(dsspath))
		env->Invoke("LoadPlugin", env->SaveString(agi::fs::ShortName(dsspath).c_str()));

	// Then try using DSS
	if (env->FunctionExists("DirectShowSource")) {
		const char *argnames[3] = { 0, "video", "audio" };
		AVSValue args[3] = { videoFilename, true, false };
		decoder_name = "Avisynth/DirectShowSource";
		warning = "Warning! The file is being opened using Avisynth's DirectShowSource, which has unreliable seeking. Frame numbers might not match the real number. PROCEED AT YOUR OWN RISK!";
		LOG_I("avisynth/video") << "Opening file with DirectShowSource";
		return env->Invoke("DirectShowSource", AVSValue(args,3), argnames);
	}

	// Failed to find a suitable function
	LOG_E("avisynth/video") << "DSS function not found";
	throw VideoNotSupported("No function suitable for opening the video found");
}
/// @brief Opens video /// @param filename The filename to open void FFmpegSourceVideoProvider::LoadVideo(wxString filename) { wxString FileNameShort = wxFileName(filename).GetShortPath(); FFMS_Indexer *Indexer = FFMS_CreateIndexer(FileNameShort.utf8_str(), &ErrInfo); if (Indexer == NULL) { throw agi::FileNotFoundError(ErrInfo.Buffer); } std::map<int,wxString> TrackList = GetTracksOfType(Indexer, FFMS_TYPE_VIDEO); if (TrackList.size() <= 0) throw VideoNotSupported("no video tracks found"); // initialize the track number to an invalid value so we can detect later on // whether the user actually had to choose a track or not int TrackNumber = -1; if (TrackList.size() > 1) { TrackNumber = AskForTrackSelection(TrackList, FFMS_TYPE_VIDEO); // if it's still -1 here, user pressed cancel if (TrackNumber == -1) throw agi::UserCancelException("video loading cancelled by user"); } // generate a name for the cache file wxString CacheName = GetCacheFilename(filename); // try to read index FFMS_Index *Index = NULL; Index = FFMS_ReadIndex(CacheName.utf8_str(), &ErrInfo); bool IndexIsValid = false; if (Index != NULL) { if (FFMS_IndexBelongsToFile(Index, FileNameShort.utf8_str(), &ErrInfo)) { FFMS_DestroyIndex(Index); Index = NULL; } else IndexIsValid = true; } // time to examine the index and check if the track we want is indexed // technically this isn't really needed since all video tracks should always be indexed, // but a bit of sanity checking never hurt anyone if (IndexIsValid && TrackNumber >= 0) { FFMS_Track *TempTrackData = FFMS_GetTrackFromIndex(Index, TrackNumber); if (FFMS_GetNumFrames(TempTrackData) <= 0) { IndexIsValid = false; FFMS_DestroyIndex(Index); Index = NULL; } } // moment of truth if (!IndexIsValid) { int TrackMask = OPT_GET("Provider/FFmpegSource/Index All Tracks")->GetBool() ? 
FFMS_TRACKMASK_ALL : FFMS_TRACKMASK_NONE; try { // ignore audio decoding errors here, we don't care right now Index = DoIndexing(Indexer, CacheName, TrackMask, FFMS_IEH_IGNORE); } catch (wxString err) { throw VideoOpenError(STD_STR(err)); } } // update access time of index file so it won't get cleaned away wxFileName(CacheName).Touch(); // we have now read the index and may proceed with cleaning the index cache if (!CleanCache()) { //do something? } // track number still not set? if (TrackNumber < 0) { // just grab the first track TrackNumber = FFMS_GetFirstIndexedTrackOfType(Index, FFMS_TYPE_VIDEO, &ErrInfo); if (TrackNumber < 0) { FFMS_DestroyIndex(Index); Index = NULL; throw VideoNotSupported(std::string("Couldn't find any video tracks: ") + ErrInfo.Buffer); } } // set thread count int Threads = OPT_GET("Provider/Video/FFmpegSource/Decoding Threads")->GetInt(); // set seekmode // TODO: give this its own option? int SeekMode; if (OPT_GET("Provider/Video/FFmpegSource/Unsafe Seeking")->GetBool()) SeekMode = FFMS_SEEK_UNSAFE; else SeekMode = FFMS_SEEK_NORMAL; VideoSource = FFMS_CreateVideoSource(FileNameShort.utf8_str(), TrackNumber, Index, Threads, SeekMode, &ErrInfo); FFMS_DestroyIndex(Index); Index = NULL; if (VideoSource == NULL) { throw VideoOpenError(std::string("Failed to open video track: ") + ErrInfo.Buffer); } // load video properties VideoInfo = FFMS_GetVideoProperties(VideoSource); const FFMS_Frame *TempFrame = FFMS_GetFrame(VideoSource, 0, &ErrInfo); if (TempFrame == NULL) { throw VideoOpenError(std::string("Failed to decode first frame: ") + ErrInfo.Buffer); } Width = TempFrame->EncodedWidth; Height = TempFrame->EncodedHeight; if (FFMS_SetOutputFormatV(VideoSource, 1LL << FFMS_GetPixFmt("bgra"), Width, Height, FFMS_RESIZER_BICUBIC, &ErrInfo)) { throw VideoOpenError(std::string("Failed to set output format: ") + ErrInfo.Buffer); } // get frame info data FFMS_Track *FrameData = FFMS_GetTrackFromVideo(VideoSource); if (FrameData == NULL) throw 
VideoOpenError("failed to get frame data"); const FFMS_TrackTimeBase *TimeBase = FFMS_GetTimeBase(FrameData); if (TimeBase == NULL) throw VideoOpenError("failed to get track time base"); const FFMS_FrameInfo *CurFrameData; // build list of keyframes and timecodes std::vector<int> TimecodesVector; for (int CurFrameNum = 0; CurFrameNum < VideoInfo->NumFrames; CurFrameNum++) { CurFrameData = FFMS_GetFrameInfo(FrameData, CurFrameNum); if (CurFrameData == NULL) { throw VideoOpenError(STD_STR(wxString::Format(L"Couldn't get info about frame %d", CurFrameNum))); } // keyframe? if (CurFrameData->KeyFrame) KeyFramesList.push_back(CurFrameNum); // calculate timestamp and add to timecodes vector int Timestamp = (int)((CurFrameData->PTS * TimeBase->Num) / TimeBase->Den); TimecodesVector.push_back(Timestamp); } Timecodes = agi::vfr::Framerate(TimecodesVector); FrameNumber = 0; }
/// @brief Opens video through FFMS2, (re)indexing it when needed.
/// @param filename The filename to open
/// @throws agi::FileNotFoundError if the indexer cannot be created,
///         agi::UserCancelException if the user cancels track selection,
///         VideoNotSupported / VideoOpenError on the respective failures.
void FFmpegSourceVideoProvider::LoadVideo(wxString filename) {
	// Short (8.3) path — presumably to avoid charset issues in file names;
	// TODO confirm.
	wxString FileNameShort = wxFileName(filename).GetShortPath();

	FFMS_Indexer *Indexer = FFMS_CreateIndexer(FileNameShort.utf8_str(), &ErrInfo);
	if (!Indexer)
		throw agi::FileNotFoundError(ErrInfo.Buffer);

	std::map<int,wxString> TrackList = GetTracksOfType(Indexer, FFMS_TYPE_VIDEO);
	if (TrackList.size() <= 0)
		throw VideoNotSupported("no video tracks found");

	// initialize the track number to an invalid value so we can detect later on
	// whether the user actually had to choose a track or not
	int TrackNumber = -1;
	if (TrackList.size() > 1) {
		TrackNumber = AskForTrackSelection(TrackList, FFMS_TYPE_VIDEO);
		// if it's still -1 here, user pressed cancel
		if (TrackNumber == -1)
			throw agi::UserCancelException("video loading cancelled by user");
	}

	// generate a name for the cache file
	wxString CacheName = GetCacheFilename(filename);

	// try to read index; the scoped_holder calls FFMS_DestroyIndex on
	// reassignment and at scope exit, so no manual cleanup is needed below
	agi::scoped_holder<FFMS_Index*, void (FFMS_CC*)(FFMS_Index*)>
		Index(FFMS_ReadIndex(CacheName.utf8_str(), &ErrInfo), FFMS_DestroyIndex);
	// discard the cached index if it was built from some other file
	if (Index && FFMS_IndexBelongsToFile(Index, FileNameShort.utf8_str(), &ErrInfo))
		Index = NULL;

	// time to examine the index and check if the track we want is indexed
	// technically this isn't really needed since all video tracks should always be indexed,
	// but a bit of sanity checking never hurt anyone
	if (Index && TrackNumber >= 0) {
		FFMS_Track *TempTrackData = FFMS_GetTrackFromIndex(Index, TrackNumber);
		if (FFMS_GetNumFrames(TempTrackData) <= 0)
			Index = NULL;
	}

	// moment of truth: reindex if we have no usable cached index.
	// DoIndexing consumes the indexer; otherwise we must cancel it ourselves.
	if (!Index) {
		int TrackMask = FFMS_TRACKMASK_NONE;
		if (OPT_GET("Provider/FFmpegSource/Index All Tracks")->GetBool() || OPT_GET("Video/Open Audio")->GetBool())
			TrackMask = FFMS_TRACKMASK_ALL;
		Index = DoIndexing(Indexer, CacheName, TrackMask, GetErrorHandlingMode());
	}
	else {
		FFMS_CancelIndexing(Indexer);
	}

	// update access time of index file so it won't get cleaned away
	wxFileName(CacheName).Touch();

	// we have now read the index and may proceed with cleaning the index cache
	CleanCache();

	// track number still not set?
	if (TrackNumber < 0) {
		// just grab the first track
		TrackNumber = FFMS_GetFirstIndexedTrackOfType(Index, FFMS_TYPE_VIDEO, &ErrInfo);
		if (TrackNumber < 0)
			throw VideoNotSupported(std::string("Couldn't find any video tracks: ") + ErrInfo.Buffer);
	}

	// set thread count
	int Threads = OPT_GET("Provider/Video/FFmpegSource/Decoding Threads")->GetInt();
	// older FFMS2 with the lavf source is limited to a single decoding thread
	// (version encoded as major<<24 | minor<<16 | micro<<8 | bump)
	if (FFMS_GetVersion() < ((2 << 24) | (17 << 16) | (2 << 8) | 1) && FFMS_GetSourceType(Index) == FFMS_SOURCE_LAVF)
		Threads = 1;

	// set seekmode
	// TODO: give this its own option?
	int SeekMode;
	if (OPT_GET("Provider/Video/FFmpegSource/Unsafe Seeking")->GetBool())
		SeekMode = FFMS_SEEK_UNSAFE;
	else
		SeekMode = FFMS_SEEK_NORMAL;

	VideoSource = FFMS_CreateVideoSource(FileNameShort.utf8_str(), TrackNumber, Index, Threads, SeekMode, &ErrInfo);
	if (!VideoSource)
		throw VideoOpenError(std::string("Failed to open video track: ") + ErrInfo.Buffer);

	// load video properties
	VideoInfo = FFMS_GetVideoProperties(VideoSource);

	const FFMS_Frame *TempFrame = FFMS_GetFrame(VideoSource, 0, &ErrInfo);
	if (!TempFrame)
		throw VideoOpenError(std::string("Failed to decode first frame: ") + ErrInfo.Buffer);
	Width = TempFrame->EncodedWidth;
	Height = TempFrame->EncodedHeight;
	// display aspect ratio: use the stream's sample aspect ratio when valid,
	// otherwise fall back to the raw frame dimensions
	if (VideoInfo->SARDen > 0 && VideoInfo->SARNum > 0)
		DAR = double(Width) * VideoInfo->SARNum / ((double)Height * VideoInfo->SARDen);
	else
		DAR = double(Width) / Height;

	// Assuming TV for unspecified
	wxString ColorRange = TempFrame->ColorRange == FFMS_CR_JPEG ? "PC" : "TV";

	int CS = TempFrame->ColorSpace;
#if FFMS_VERSION >= ((2 << 24) | (17 << 16) | (1 << 8) | 0)
	// optionally force BT.601 decoding for non-RGB sources
	if (CS != FFMS_CS_RGB && CS != FFMS_CS_BT470BG && OPT_GET("Video/Force BT.601")->GetBool()) {
		if (FFMS_SetInputFormatV(VideoSource, FFMS_CS_BT470BG, TempFrame->ColorRange, FFMS_GetPixFmt(""), &ErrInfo))
			throw VideoOpenError(std::string("Failed to set input format: ") + ErrInfo.Buffer);
		CS = FFMS_CS_BT470BG;
	}
#endif

	// map the FFMS2 colorspace constant to the "<range>.<matrix>" string
	// used elsewhere in the program
	switch (CS) {
		case FFMS_CS_RGB:
			ColorSpace = "None";
			break;
		case FFMS_CS_BT709:
			ColorSpace = wxString::Format("%s.709", ColorRange);
			break;
		case FFMS_CS_UNSPECIFIED:
			// guess from resolution: HD-ish sizes default to 709
			ColorSpace = wxString::Format("%s.%s", ColorRange, Width > 1024 || Height >= 600 ? "709" : "601");
			break;
		case FFMS_CS_FCC:
			ColorSpace = wxString::Format("%s.FCC", ColorRange);
			break;
		case FFMS_CS_BT470BG:
		case FFMS_CS_SMPTE170M:
			ColorSpace = wxString::Format("%s.601", ColorRange);
			break;
		case FFMS_CS_SMPTE240M:
			ColorSpace = wxString::Format("%s.240M", ColorRange);
			break;
		default:
			throw VideoOpenError("Unknown video color space");
			break;
	}

	const int TargetFormat[] = { FFMS_GetPixFmt("bgra"), -1 };
	if (FFMS_SetOutputFormatV2(VideoSource, TargetFormat, Width, Height, FFMS_RESIZER_BICUBIC, &ErrInfo)) {
		throw VideoOpenError(std::string("Failed to set output format: ") + ErrInfo.Buffer);
	}

	// get frame info data
	FFMS_Track *FrameData = FFMS_GetTrackFromVideo(VideoSource);
	if (FrameData == NULL)
		throw VideoOpenError("failed to get frame data");
	const FFMS_TrackTimeBase *TimeBase = FFMS_GetTimeBase(FrameData);
	if (TimeBase == NULL)
		throw VideoOpenError("failed to get track time base");

	const FFMS_FrameInfo *CurFrameData;

	// build list of keyframes and timecodes
	std::vector<int> TimecodesVector;
	for (int CurFrameNum = 0; CurFrameNum < VideoInfo->NumFrames; CurFrameNum++) {
		CurFrameData = FFMS_GetFrameInfo(FrameData, CurFrameNum);
		if (CurFrameData == NULL) {
			throw VideoOpenError(STD_STR(wxString::Format("Couldn't get info about frame %d", CurFrameNum)));
		}

		// keyframe?
		if (CurFrameData->KeyFrame)
			KeyFramesList.push_back(CurFrameNum);

		// calculate timestamp and add to timecodes vector
		int Timestamp = (int)((CurFrameData->PTS * TimeBase->Num) / TimeBase->Den);
		TimecodesVector.push_back(Timestamp);
	}

	// with fewer than two samples no frame rate can be derived; assume 25 fps
	if (TimecodesVector.size() < 2)
		Timecodes = 25.0;
	else
		Timecodes = agi::vfr::Framerate(TimecodesVector);

	FrameNumber = 0;
}