void Thesaurus::OnLanguageChanged() { impl.reset(); std::string language = OPT_GET("Tool/Thesaurus/Language")->GetString(); if (language.empty()) return; wxString path = StandardPaths::DecodePath(lagi_wxString(OPT_GET("Path/Dictionary")->GetString()) + "/"); // Get index and data paths wxString idxpath = wxString::Format("%s/th_%s.idx", path, language); wxString datpath = wxString::Format("%s/th_%s.dat", path, language); // If they aren't in the user dictionary path, check the application directory if (!wxFileExists(idxpath) || !wxFileExists(datpath)) { path = StandardPaths::DecodePath("?data/dictionaries/"); idxpath = wxString::Format("%s/th_%s.idx", path, language); datpath = wxString::Format("%s/th_%s.dat", path, language); if (!wxFileExists(idxpath) || !wxFileExists(datpath)) return; } LOG_I("thesaurus/file") << "Using thesaurus: " << datpath.c_str(); impl.reset(new agi::Thesaurus(STD_STR(datpath), STD_STR(idxpath))); }
/// @brief Open a text file for line-by-line reading.
/// @param filename File to open
/// @param encoding Character set of the file; auto-detected when empty
/// @param trim     Whether lines should be trimmed (stored for later use by the reader)
TextFileReader::TextFileReader(wxString const& filename, wxString encoding, bool trim)
: trim(trim)
{
	// Fall back to charset detection when the caller gave no encoding
	if (encoding.empty())
		encoding = CharSetDetect::GetEncoding(filename);
	file.reset(agi::io::Open(STD_STR(filename), true));
	iter = agi::line_iterator<wxString>(*file, STD_STR(encoding));
}
/// Write the current frame rate data out as a timecode file and record
/// the file in the MRU list; logs an error on filesystem failure.
/// @param filename Destination path for the timecodes
void VideoContext::SaveTimecodes(wxString filename) {
	try {
		// Only pass a real frame count when video is actually loaded
		int length = IsLoaded() ? GetLength() : -1;
		FPS().Save(STD_STR(filename), length);
		config::mru->Add("Timecodes", STD_STR(filename));
	}
	catch (const agi::FileSystemError&) {
		wxLogError("Could not write to " + filename);
	}
}
/// @brief Load a subtitle file into the program context.
/// @param filename Path of the file to load
/// @param charset  Character set to interpret the file with
///
/// Prompts to close any currently open subtitles first. Files whose first
/// line begins with "# timecode" are redirected to the timecode loader.
/// On success the ?script path, MRU list and last-path option are updated,
/// and a backup copy of the original file is made when enabled.
void FrameMain::LoadSubtitles(wxString const& filename, wxString const& charset) {
	if (context->ass->loaded) {
		if (TryToCloseSubs() == wxCANCEL) return;
	}
	try {
		// Make sure that file isn't actually a timecode file
		try {
			TextFileReader testSubs(filename, charset);
			wxString cur = testSubs.ReadLineFromFile();
			if (cur.Left(10) == "# timecode") {
				context->videoController->LoadTimecodes(filename);
				return;
			}
		}
		catch (...) {
			// if trying to load the file as timecodes fails it's fairly
			// safe to assume that it is in fact not a timecode file
		}

		context->ass->Load(filename, charset);
		wxFileName file(filename);
		StandardPaths::SetPathValue("?script", file.GetPath());
		config::mru->Add("Subtitle", STD_STR(filename));
		OPT_SET("Path/Last/Subtitles")->SetString(STD_STR(file.GetPath()));

		// Save backup of file
		if (context->ass->CanSave() && OPT_GET("App/Auto/Backup")->GetBool()) {
			if (file.FileExists()) {
				// Backup goes to the configured path, defaulting to the
				// file's own directory when unset
				wxString path = lagi_wxString(OPT_GET("Path/Auto/Backup")->GetString());
				if (path.empty()) path = file.GetPath();
				wxFileName dstpath(StandardPaths::DecodePath(path + "/"));
				if (!dstpath.DirExists()) wxMkdir(dstpath.GetPath());
				dstpath.SetFullName(file.GetName() + ".ORIGINAL." + file.GetExt());
				wxCopyFile(file.GetFullPath(), dstpath.GetFullPath(), true);
			}
		}
	}
	catch (agi::FileNotFoundError const&) {
		wxMessageBox(filename + " not found.", "Error", wxOK | wxICON_ERROR | wxCENTER, this);
		config::mru->Remove("Subtitle", STD_STR(filename));
		return;
	}
	catch (agi::Exception const& err) {
		wxMessageBox(lagi_wxString(err.GetChainedMessage()), "Error", wxOK | wxICON_ERROR | wxCENTER, this);
	}
	catch (...) {
		wxMessageBox("Unknown error", "Error", wxOK | wxICON_ERROR | wxCENTER, this);
		return;
	}
}
/// @brief Get provider /// @param video /// @return /// VideoProvider *VideoProviderFactory::GetProvider(wxString video) { std::vector<std::string> list = GetClasses(OPT_GET("Video/Provider")->GetString()); if (video.StartsWith("?dummy")) list.insert(list.begin(), "Dummy"); list.insert(list.begin(), "YUV4MPEG"); bool fileFound = false; bool fileSupported = false; std::string errors; errors.reserve(1024); for (int i = 0; i < (signed)list.size(); ++i) { std::string err; try { VideoProvider *provider = Create(list[i], video); LOG_I("manager/video/provider") << list[i] << ": opened " << STD_STR(video); if (provider->WantsCaching()) { return new VideoProviderCache(provider); } return provider; } catch (agi::FileNotFoundError const&) { err = list[i] + ": file not found."; // Keep trying other providers as this one may just not be able to // open a valid path } catch (VideoNotSupported const&) { fileFound = true; err = list[i] + ": video is not in a supported format."; } catch (VideoOpenError const& ex) { fileSupported = true; err = list[i] + ": " + ex.GetMessage(); } catch (agi::vfr::Error const& ex) { fileSupported = true; err = list[i] + ": " + ex.GetMessage(); } errors += err; errors += "\n"; LOG_D("manager/video/provider") << err; } // No provider could open the file LOG_E("manager/video/provider") << "Could not open " << STD_STR(video); std::string msg = "Could not open " + STD_STR(video) + ":\n" + errors; if (!fileFound) throw agi::FileNotFoundError(STD_STR(video)); if (!fileSupported) throw VideoNotSupported(msg); throw VideoOpenError(msg); }
/// @brief Save a range of the loaded audio as an uncompressed PCM WAV file.
/// @param filename Path to write to; nothing is done if empty
/// @param range    Sample range to export
///
/// Writes a minimal RIFF/WAVE header followed by the raw sample data,
/// copied from the provider in chunks. Header fields are dumped as raw
/// bytes, so this assumes a little-endian host.
void AudioController::SaveClip(wxString const& filename, SampleRange const& range) const {
	// Nothing to do for an empty filename, an out-of-range start, or an empty range
	if (filename.empty() || range.begin() > provider->GetNumSamples() || range.length() == 0) return;

	agi::io::Save outfile(STD_STR(filename), true);
	std::ofstream& out(outfile.Get());

	size_t bytes_per_sample = provider->GetBytesPerSample() * provider->GetChannels();
	size_t bufsize = range.length() * bytes_per_sample;

	int intval;
	short shortval;

	// RIFF/WAVE header
	out << "RIFF";
	out.write((char*)&(intval=bufsize+36),4);                 // RIFF chunk size
	out<< "WAVEfmt ";
	out.write((char*)&(intval=16),4);                         // fmt chunk size
	out.write((char*)&(shortval=1),2);                        // format tag: PCM
	out.write((char*)&(shortval=provider->GetChannels()),2);
	out.write((char*)&(intval=provider->GetSampleRate()),4);
	out.write((char*)&(intval=provider->GetSampleRate()*provider->GetChannels()*provider->GetBytesPerSample()),4); // byte rate
	out.write((char*)&(intval=provider->GetChannels()*provider->GetBytesPerSample()),2); // block align
	out.write((char*)&(shortval=provider->GetBytesPerSample()<<3),2); // bits per sample
	out << "data";
	out.write((char*)&bufsize,4);

	//samples per read
	size_t spr = 65536 / bytes_per_sample;
	// Fix: allocate only one chunk's worth of buffer rather than the whole
	// range, which could be hundreds of megabytes for a long clip; each
	// iteration writes at most spr samples anyway
	std::vector<char> buf(spr * bytes_per_sample);
	for(int64_t i = range.begin(); i < range.end(); i += spr) {
		size_t len = std::min<size_t>(spr, range.end() - i);
		provider->GetAudio(&buf[0], i, len);
		out.write(&buf[0], len * bytes_per_sample);
	}
}
/// Convert a named MRU list from the config into a wxArrayString.
/// @param list Name of the MRU list to fetch (e.g. "Subtitle")
/// @return The list's entries as wxStrings, in MRU order
wxArrayString lagi_MRU_wxAS(const wxString &list) {
	const agi::MRUManager::MRUListMap *map = config::mru->Get(STD_STR(list));
	wxArrayString work;
	work.reserve(map->size());
	// Convert every std::string entry to a wxString
	transform(map->begin(), map->end(), std::back_inserter(work), lagi_wxString);
	return work;
}
/// Load a keyframe file, announce the new keyframe data, and maintain the
/// keyframes MRU list accordingly.
/// @param filename Path of the keyframe file; ignored if empty or already loaded
void VideoContext::LoadKeyframes(wxString filename) {
	// Skip redundant or empty requests
	if (filename == keyFramesFilename || filename.empty()) return;
	try {
		keyFrames = agi::keyframe::Load(STD_STR(filename));
		keyFramesFilename = filename;
		KeyframesOpen(keyFrames);
		config::mru->Add("Keyframes", STD_STR(filename));
	}
	catch (agi::keyframe::Error const& err) {
		// Parse failure: tell the user and drop the file from the MRU list
		wxMessageBox(err.GetMessage(), "Error opening keyframes file", wxOK | wxICON_ERROR | wxCENTER, context->parent);
		config::mru->Remove("Keyframes", STD_STR(filename));
	}
	catch (agi::FileSystemError const&) {
		wxLogError("Could not open file " + filename);
		config::mru->Remove("Keyframes", STD_STR(filename));
	}
}
/// Load an override timecode (VFR) file and apply it.
/// @param filename Path of the timecode file; ignored if empty or already loaded
void VideoContext::LoadTimecodes(wxString filename) {
	// Skip redundant or empty requests
	if (filename == ovrTimecodeFile || filename.empty()) return;
	try {
		ovrFPS = agi::vfr::Framerate(STD_STR(filename));
		ovrTimecodeFile = filename;
		config::mru->Add("Timecodes", STD_STR(filename));
		// Recommit subtitles and announce the new timecodes to listeners
		OnSubtitlesCommit();
		TimecodesOpen(ovrFPS);
	}
	catch (const agi::FileSystemError&) {
		wxLogError("Could not open file " + filename);
		config::mru->Remove("Timecodes", STD_STR(filename));
	}
	catch (const agi::vfr::Error& e) {
		wxLogError("Timecode file parse error: %s", e.GetMessage());
	}
}
/// @brief Prepend an entry describing the current shift to the history list
///        and write the whole history back to disk.
/// @param shifted_blocks Serialized selection blocks that were shifted
void DialogShiftTimes::SaveHistory(json::Array const& shifted_blocks) {
	// Snapshot the dialog state into a JSON object
	json::Object new_entry;
	new_entry["filename"] = STD_STR(wxFileName(context->ass->filename).GetFullName());
	new_entry["is by time"] = shift_by_time->GetValue();
	new_entry["is backward"] = shift_backward->GetValue();
	// The amount is taken from whichever input mode is active
	new_entry["amount"] = STD_STR(shift_by_time->GetValue() ? shift_time->GetValue() : shift_frames->GetValue());
	new_entry["fields"] = time_fields->GetSelection();
	new_entry["mode"] = selection_mode->GetSelection();
	new_entry["selection"] = shifted_blocks;
	history->push_front(new_entry);

	try {
		json::Writer::Write(*history, agi::io::Save(history_filename).Get());
	}
	catch (agi::FileSystemError const& e) {
		// Best-effort persistence: failure is only logged
		LOG_E("dialog_shift_times/save_history") << "Cannot save shift times history: " << e.GetChainedMessage();
	}
}
/// @brief Perform a find or find-and-replace operation with the current dialog state.
/// @param mode 0 = find next, 1 = replace next, 2 = replace all
void DialogSearchReplace::FindReplace(int mode) {
	if (mode < 0 || mode > 2) return;

	// Variables
	wxString LookFor = FindEdit->GetValue();
	if (!LookFor) return;

	// Setup: copy dialog state into the shared search settings
	Search.isReg = CheckRegExp->IsChecked() && CheckRegExp->IsEnabled();
	Search.matchCase = CheckMatchCase->IsChecked();
	Search.updateVideo = CheckUpdateVideo->IsChecked() && CheckUpdateVideo->IsEnabled();
	Search.LookFor = LookFor;
	Search.CanContinue = true;
	Search.affect = Affect->GetSelection();
	Search.field = Field->GetSelection();

	// Find
	if (mode == 0) {
		Search.FindNext();
		// Even in find mode, remember the replacement text when the dialog has it
		if (hasReplace) {
			wxString ReplaceWith = ReplaceEdit->GetValue();
			Search.ReplaceWith = ReplaceWith;
			config::mru->Add("Replace", STD_STR(ReplaceWith));
		}
	}

	// Replace
	else {
		wxString ReplaceWith = ReplaceEdit->GetValue();
		Search.ReplaceWith = ReplaceWith;
		if (mode == 1) Search.ReplaceNext();
		else Search.ReplaceAll();
		config::mru->Add("Replace", STD_STR(ReplaceWith));
	}

	// Add to history
	config::mru->Add("Find", STD_STR(LookFor));
	UpdateDropDowns();
}
/// @brief DOCME /// @param evt /// void DialogAutomation::OnAdd(wxCommandEvent &evt) { // build filename filter list wxString fnfilter, catchall; const std::vector<Automation4::ScriptFactory*> &factories = Automation4::ScriptFactory::GetFactories(); for (int i = 0; i < (int)factories.size(); i++) { const Automation4::ScriptFactory *fact = factories[i]; if (fact->GetEngineName().IsEmpty() || fact->GetFilenamePattern().IsEmpty()) continue; fnfilter = wxString::Format(_T("%s%s scripts (%s)|%s|"), fnfilter.c_str(), fact->GetEngineName().c_str(), fact->GetFilenamePattern().c_str(), fact->GetFilenamePattern().c_str()); catchall << fact->GetFilenamePattern() << _T(";"); } #ifdef __WINDOWS__ fnfilter += _T("All files|*.*"); #else fnfilter += _T("All files|*"); #endif if (!catchall.IsEmpty()) { catchall.RemoveLast(); } if (factories.size() > 1) { fnfilter = _T("All supported scripts|") + catchall + _T("|") + fnfilter; } wxString fname = wxFileSelector(_("Add Automation script"), lagi_wxString(OPT_GET("Path/Last/Automation")->GetString()), wxEmptyString, wxEmptyString, fnfilter, wxFD_OPEN|wxFD_FILE_MUST_EXIST, this); if (!fname.IsEmpty()) { wxFileName fnpath(fname); OPT_SET("Path/Last/Automation")->SetString(STD_STR(fnpath.GetPath())); // TODO: make sure each script is only loaded once. check in both local and global managers!! // it doesn't break for macros, but will for export filters, and maybe for file formats, // and makes for confusion in the UI anyhow try { ExtraScriptInfo ei; ei.script = Automation4::ScriptFactory::CreateFromFile(fname, false); local_manager->Add(ei.script); ei.is_global = false; AddScript(ei); } catch (const wchar_t *e) { wxLogError(e); } catch (...) { wxLogError(_T("Unknown error loading script")); } } }
/// @brief Decode the entire source provider's audio into the RAM cache blocks.
/// @param source Provider to read decoded audio from
/// @param ps     Progress sink for status updates and cancellation checks
void RAMAudioProvider::FillCache(AudioProvider *source, agi::ProgressSink *ps) {
	ps->SetMessage(STD_STR(_("Reading into RAM")));

	int64_t samples_per_block = CacheBlockSize / source->GetBytesPerSample();
	for (int block = 0; block < blockcount; ++block) {
		// The last block may hold fewer samples than a full cache block
		int64_t count = std::min(samples_per_block, num_samples - block * samples_per_block);
		source->GetAudio((char*)blockcache[block], block * samples_per_block, count);
		ps->SetProgress(block, (blockcount - 1));
		if (ps->IsCancelled()) {
			Clear();
			return;
		}
	}
}
/// @brief Constructor /// @param _filename /// AvisynthAudioProvider::AvisynthAudioProvider(wxString filename) : filename(filename) { try { AVSValue script; wxMutexLocker lock(avs_wrapper.GetMutex()); wxFileName fn(filename); if (!fn.FileExists()) throw agi::FileNotFoundError(STD_STR(filename)); IScriptEnvironment *env = avs_wrapper.GetEnv(); // Include if (filename.EndsWith(".avs")) { char *fname = env->SaveString(fn.GetShortPath().mb_str(csConvLocal)); script = env->Invoke("Import", fname); } // Use DirectShowSource else { const char * argnames[3] = { 0, "video", "audio" }; AVSValue args[3] = { env->SaveString(fn.GetShortPath().mb_str(csConvLocal)), false, true }; // Load DirectShowSource.dll from app dir if it exists wxFileName dsspath(StandardPaths::DecodePath("?data/DirectShowSource.dll")); if (dsspath.FileExists()) { env->Invoke("LoadPlugin",env->SaveString(dsspath.GetShortPath().mb_str(csConvLocal))); } // Load audio with DSS if it exists if (env->FunctionExists("DirectShowSource")) { script = env->Invoke("DirectShowSource", AVSValue(args,3),argnames); } // Otherwise fail else { throw agi::AudioProviderOpenError("No suitable audio source filter found. Try placing DirectShowSource.dll in the Aegisub application directory.", 0); } } LoadFromClip(script); } catch (AvisynthError &err) { std::string errmsg(err.msg); if (errmsg.find("filter graph manager won't talk to me") != errmsg.npos) throw agi::AudioDataNotFoundError("Avisynth error: " + errmsg, 0); else throw agi::AudioProviderOpenError("Avisynth error: " + errmsg, 0); } }
/// @brief Read a frame or file header at a given file position /// @param startpos The byte offset at where to start reading /// @param reset_pos If true, the function will reset the file position to what it was before the function call before returning /// @return A list of parameters std::vector<wxString> YUV4MPEGVideoProvider::ReadHeader(int64_t startpos, bool reset_pos) { int64_t oldpos = ftello(sf); std::vector<wxString> tags; wxString curtag; int bytesread = 0; int buf; if (fseeko(sf, startpos, SEEK_SET)) throw VideoOpenError(STD_STR(wxString::Format(L"YUV4MPEG video provider: ReadHeader: failed seeking to position %d", startpos))); // read header until terminating newline (0x0A) is found while ((buf = fgetc(sf)) != 0x0A) { if (ferror(sf)) throw VideoOpenError("ReadHeader: Failed to read from file"); if (feof(sf)) { // you know, this is one of the places where it would be really nice // to be able to throw an exception object that tells the caller that EOF was reached LOG_D("provider/video/yuv4mpeg") << "ReadHeader: Reached EOF, returning"; break; } // some basic low-effort sanity checking if (buf == 0x00) throw VideoOpenError("ReadHeader: Malformed header (unexpected NUL)"); if (++bytesread >= YUV4MPEG_HEADER_MAXLEN) throw VideoOpenError("ReadHeader: Malformed header (no terminating newline found)"); // found a new tag if (buf == 0x20) { tags.push_back(curtag); curtag.Clear(); } else curtag.Append(static_cast<wxChar>(buf)); } // if only one tag with no trailing space was found (possible in the // FRAME header case), make sure we get it if (!curtag.IsEmpty()) { tags.push_back(curtag); curtag.Clear(); } if (reset_pos) fseeko(sf, oldpos, SEEK_SET); return tags; }
/// @brief Constructor /// @param filename The filename to open FFmpegSourceVideoProvider::FFmpegSourceVideoProvider(wxString filename) : VideoSource(NULL) , VideoInfo(NULL) , Width(-1) , Height(-1) , FrameNumber(-1) , COMInited(false) { #ifdef WIN32 HRESULT res = CoInitializeEx(NULL, COINIT_APARTMENTTHREADED); if (SUCCEEDED(res)) COMInited = true; else if (res != RPC_E_CHANGED_MODE) throw VideoOpenError("COM initialization failure"); #endif // initialize ffmpegsource // FIXME: CPU detection? #if FFMS_VERSION >= ((2 << 24) | (14 << 16) | (0 << 8) | 0) FFMS_Init(0, 1); #else FFMS_Init(0); #endif ErrInfo.Buffer = FFMSErrMsg; ErrInfo.BufferSize = sizeof(FFMSErrMsg); ErrInfo.ErrorType = FFMS_ERROR_SUCCESS; ErrInfo.SubType = FFMS_ERROR_SUCCESS; SetLogLevel(); // and here we go try { LoadVideo(filename); } catch (wxString const& err) { Close(); throw VideoOpenError(STD_STR(err)); } catch (...) { Close(); throw; } }
/// @brief Parse a TTXT (MPEG-4 timed text) XML file into an AssFile.
/// @param target   File object to fill with the parsed lines
/// @param filename Path of the TTXT file to read
/// @param encoding Unused; the XML document declares its own encoding
void TTXTSubtitleFormat::ReadFile(AssFile *target, wxString const& filename, wxString const& encoding) const {
	target->LoadDefault(false);

	// Parse the XML document and validate its root element
	wxXmlDocument doc;
	if (!doc.Load(filename)) throw TTXTParseError("Failed loading TTXT XML file.", 0);
	if (doc.GetRoot()->GetName() != "TextStream") throw TTXTParseError("Invalid TTXT file.", 0);

	// Map the version attribute onto the internal version number
	wxString verStr = doc.GetRoot()->GetAttribute("version", "");
	int version = -1;
	if (verStr == "1.0")
		version = 0;
	else if (verStr == "1.1")
		version = 1;
	else
		throw TTXTParseError("Unknown TTXT version: " + STD_STR(verStr), 0);

	// Walk the children, converting text samples into dialogue lines
	AssDialogue *diag = 0;
	int lines = 0;
	for (wxXmlNode *node = doc.GetRoot()->GetChildren(); node; node = node->GetNext()) {
		wxString name = node->GetName();
		if (name == "TextSample") {
			if ((diag = ProcessLine(node, diag, version))) {
				lines++;
				target->Line.push_back(diag);
			}
		}
		else if (name == "TextStreamHeader") {
			ProcessHeader(node);
		}
	}

	// Guarantee at least one dialogue line in the result
	if (lines == 0) target->Line.push_back(new AssDialogue);
}
/// @brief Constructor /// @param filename The filename to open FFmpegSourceVideoProvider::FFmpegSourceVideoProvider(wxString filename) try : VideoSource(NULL, FFMS_DestroyVideoSource) , VideoInfo(NULL) , Width(-1) , Height(-1) , FrameNumber(-1) { ErrInfo.Buffer = FFMSErrMsg; ErrInfo.BufferSize = sizeof(FFMSErrMsg); ErrInfo.ErrorType = FFMS_ERROR_SUCCESS; ErrInfo.SubType = FFMS_ERROR_SUCCESS; SetLogLevel(); // and here we go LoadVideo(filename); } catch (wxString const& err) { throw VideoOpenError(STD_STR(err)); } catch (const char * err) { throw VideoOpenError(err); }
/// @brief Indexes the file /// @return The number of frames found in the file /// This function goes through the file, finds and parses all file and frame headers, /// and creates a seek table that lists the byte positions of all frames so seeking /// can easily be done. int YUV4MPEGVideoProvider::IndexFile() { int framecount = 0; int64_t curpos = ftello(sf); // the ParseFileHeader() call in LoadVideo() will already have read // the file header for us and set the seek position correctly while (true) { curpos = ftello(sf); // update position // continue reading headers until no more are found std::vector<wxString> tags = ReadHeader(curpos, false); curpos = ftello(sf); if (tags.empty()) break; // no more headers Y4M_FrameFlags flags = Y4M_FFLAG_NOTSET; if (!tags.front().Cmp("YUV4MPEG2")) { ParseFileHeader(tags); continue; } else if (!tags.front().Cmp("FRAME")) flags = ParseFrameHeader(tags); if (flags == Y4M_FFLAG_NONE) { framecount++; seek_table.push_back(curpos); // seek to next frame header start position if (fseeko(sf, frame_sz, SEEK_CUR)) throw VideoOpenError(STD_STR(wxString::Format("IndexFile: failed seeking to position %d", curpos + frame_sz))); } else { /// @todo implement rff flags etc } } return framecount; }
/// @brief Constructor for the Shift Times dialog.
/// @param context Project context the dialog operates on
///
/// Builds all controls, restores the previous dialog state from the
/// options, lays out the sizers, and wires up event handlers.
DialogShiftTimes::DialogShiftTimes(agi::Context *context)
: wxDialog(context->parent, -1, _("Shift Times"))
, context(context)
, history_filename(STD_STR(StandardPaths::DecodePath("?user/shift_history.json")))
, history(new json::Array)
, timecodes_loaded_slot(context->videoController->AddTimecodesListener(&DialogShiftTimes::OnTimecodesLoaded, this))
{
	SetIcon(GETICON(shift_times_toolbutton_16));

	// Create controls
	shift_by_time = new wxRadioButton(this, -1, _("&Time: "), wxDefaultPosition, wxDefaultSize, wxRB_GROUP);
	shift_by_time->SetToolTip(_("Shift by time"));
	shift_by_time->Bind(wxEVT_COMMAND_RADIOBUTTON_SELECTED, &DialogShiftTimes::OnByTime, this);

	shift_by_frames = new wxRadioButton(this, -1 , _("&Frames: "));
	shift_by_frames->SetToolTip(_("Shift by frames"));
	shift_by_frames->Bind(wxEVT_COMMAND_RADIOBUTTON_SELECTED, &DialogShiftTimes::OnByFrames, this);

	shift_time = new TimeEdit(this, -1, context);
	shift_time->SetToolTip(_("Enter time in h:mm:ss.cs notation"));

	shift_frames = new wxTextCtrl(this, -1);
	shift_frames->SetToolTip(_("Enter number of frames to shift by"));

	shift_forward = new wxRadioButton(this, -1, _("For&ward"), wxDefaultPosition, wxDefaultSize, wxRB_GROUP);
	shift_forward->SetToolTip(_("Shifts subs forward, making them appear later. Use if they are appearing too soon."));

	shift_backward = new wxRadioButton(this, -1, _("&Backward"));
	shift_backward->SetToolTip(_("Shifts subs backward, making them appear earlier. Use if they are appearing too late."));

	wxString selection_mode_vals[] = { _("&All rows"), _("Selected &rows"), _("Selection &onward") };
	selection_mode = new wxRadioBox(this, -1, _("Affect"), wxDefaultPosition, wxDefaultSize, 3, selection_mode_vals, 1);

	wxString time_field_vals[] = { _("Start a&nd End times"), _("&Start times only"), _("&End times only") };
	time_fields = new wxRadioBox(this, -1, _("Times"), wxDefaultPosition, wxDefaultSize, 3, time_field_vals, 1);

	history_box = new wxListBox(this, -1, wxDefaultPosition, wxSize(350, 100), 0, NULL, wxLB_HSCROLL);

	wxButton *clear_button = new wxButton(this, -1, _("&Clear"));
	clear_button->Bind(wxEVT_COMMAND_BUTTON_CLICKED, &DialogShiftTimes::OnClear, this);

	// Set initial control states
	OnTimecodesLoaded(context->videoController->FPS());
	OnSelectedSetChanged(Selection(), Selection());
	LoadHistory();

	// Restore the previous dialog state from the options
	shift_time->SetTime(OPT_GET("Tool/Shift Times/Time")->GetInt());
	*shift_frames << (int)OPT_GET("Tool/Shift Times/Frames")->GetInt();
	shift_by_frames->SetValue(!OPT_GET("Tool/Shift Times/ByTime")->GetBool() && shift_by_frames->IsEnabled());
	time_fields->SetSelection(OPT_GET("Tool/Shift Times/Type")->GetInt());
	selection_mode->SetSelection(OPT_GET("Tool/Shift Times/Affect")->GetInt());
	shift_backward->SetValue(OPT_GET("Tool/Shift Times/Direction")->GetBool());
	// Only one of the time/frame inputs is active at a time
	if (shift_by_frames->GetValue())
		shift_time->Disable();
	else
		shift_frames->Disable();

	// Position controls
	wxSizer *shift_amount_sizer = new wxFlexGridSizer(2, 2, 5, 5);
	shift_amount_sizer->Add(shift_by_time, wxSizerFlags(0).Align(wxALIGN_CENTER_VERTICAL));
	shift_amount_sizer->Add(shift_time, wxSizerFlags(1));
	shift_amount_sizer->Add(shift_by_frames, wxSizerFlags(0).Align(wxALIGN_CENTER_VERTICAL));
	shift_amount_sizer->Add(shift_frames, wxSizerFlags(1));

	wxSizer *shift_direction_sizer = new wxBoxSizer(wxHORIZONTAL);
	shift_direction_sizer->Add(shift_forward, wxSizerFlags(1).Expand());
	shift_direction_sizer->Add(shift_backward, wxSizerFlags(1).Expand().Border(wxLEFT));

	wxSizer *shift_by_sizer = new wxStaticBoxSizer(wxVERTICAL, this, _("Shift by"));
	shift_by_sizer->Add(shift_amount_sizer, wxSizerFlags().Expand());
	shift_by_sizer->Add(shift_direction_sizer, wxSizerFlags().Expand().Border(wxTOP));

	wxSizer *left_sizer = new wxBoxSizer(wxVERTICAL);
	left_sizer->Add(shift_by_sizer, wxSizerFlags().Expand().Border(wxBOTTOM));
	left_sizer->Add(selection_mode, wxSizerFlags().Expand().Border(wxBOTTOM));
	left_sizer->Add(time_fields, wxSizerFlags().Expand());

	wxSizer *history_sizer = new wxStaticBoxSizer(wxVERTICAL, this, _("Load from history"));
	history_sizer->Add(history_box, wxSizerFlags(1).Expand());
	history_sizer->Add(clear_button, wxSizerFlags().Expand().Border(wxTOP));

	wxSizer *top_sizer = new wxBoxSizer(wxHORIZONTAL);
	top_sizer->Add(left_sizer, wxSizerFlags().Border(wxALL & ~wxRIGHT).Expand());
	top_sizer->Add(history_sizer, wxSizerFlags().Border().Expand());

	wxSizer *main_sizer = new wxBoxSizer(wxVERTICAL);
	main_sizer->Add(top_sizer, wxSizerFlags().Border(wxALL & ~wxBOTTOM));
	main_sizer->Add(CreateButtonSizer(wxOK | wxCANCEL | wxHELP), wxSizerFlags().Right().Border());
	SetSizerAndFit(main_sizer);
	CenterOnParent();

	// Hook up dialog-level events
	Bind(wxEVT_COMMAND_BUTTON_CLICKED, &DialogShiftTimes::Process, this, wxID_OK);
	Bind(wxEVT_COMMAND_BUTTON_CLICKED, std::tr1::bind(&HelpButton::OpenPage, "Shift Times"), wxID_HELP);
	shift_time->Bind(wxEVT_COMMAND_TEXT_ENTER, &DialogShiftTimes::Process, this);
	history_box->Bind(wxEVT_COMMAND_LISTBOX_DOUBLECLICKED, &DialogShiftTimes::OnHistoryClick, this);

	context->selectionController->AddSelectionListener(this);
}
/// @brief Constructor /// @param filename The filename to open YUV4MPEGVideoProvider::YUV4MPEGVideoProvider(wxString fname) : sf(NULL) , inited(false) , w (0) , h (0) , num_frames(-1) , cur_fn(-1) , pixfmt(Y4M_PIXFMT_NONE) , imode(Y4M_ILACE_NOTSET) { fps_rat.num = -1; fps_rat.den = 1; try { wxString filename = wxFileName(fname).GetShortPath(); #ifdef WIN32 sf = _wfopen(filename.wc_str(), L"rb"); #else sf = fopen(filename.utf8_str(), "rb"); #endif if (sf == NULL) throw agi::FileNotFoundError(STD_STR(fname)); CheckFileFormat(); ParseFileHeader(ReadHeader(0, false)); if (w <= 0 || h <= 0) throw VideoOpenError("Invalid resolution"); if (fps_rat.num <= 0 || fps_rat.den <= 0) { fps_rat.num = 25; fps_rat.den = 1; LOG_D("provider/video/yuv4mpeg") << "framerate info unavailable, assuming 25fps"; } if (pixfmt == Y4M_PIXFMT_NONE) pixfmt = Y4M_PIXFMT_420JPEG; if (imode == Y4M_ILACE_NOTSET) imode = Y4M_ILACE_UNKNOWN; luma_sz = w * h; switch (pixfmt) { case Y4M_PIXFMT_420JPEG: case Y4M_PIXFMT_420MPEG2: case Y4M_PIXFMT_420PALDV: chroma_sz = (w * h) >> 2; break; case Y4M_PIXFMT_422: chroma_sz = (w * h) >> 1; break; /// @todo add support for more pixel formats default: throw VideoOpenError("Unsupported pixel format"); } frame_sz = luma_sz + chroma_sz*2; num_frames = IndexFile(); if (num_frames <= 0 || seek_table.empty()) throw VideoOpenError("Unable to determine file length"); cur_fn = 0; fseeko(sf, 0, SEEK_SET); } catch (...) { if (sf) fclose(sf); throw; } }
/// Look up a command by name in the global command map.
/// @param name Registered name of the command
/// @return Iterator pointing at the command's map entry
/// @throws CommandNotFound when no command with that name is registered
static iterator find_command(std::string const& name) {
	iterator pos = cmd_map.find(name);
	if (pos == cmd_map.end())
		throw CommandNotFound(STD_STR(wxString::Format(_("'%s' is not a valid command name"), lagi_wxString(name))));
	return pos;
}
// Decoder which leaves already-percent-encoded sequences untouched
using PercentDecoder = ConditionalEscaper<PercentEncodedPred, CharPercentDecoder>;

/**
 * A quick-and-dirty escaper that will just encode any "unsafe", and non-reserved characters
 */
URI_CHAR_PRED(SafePredicate, _CM_URI_SAFE);
typedef ChainedEscaper<PercentEncodedPassthruEscaper, PercentEncoder<SafePredicate>> SafeEncoder;

/*
 * Scheme escaping
 */
CHAR_ESCAPER(SchemeEncoder, ch, index, , (
	/* throws invalid_argument */
	// A scheme must start with an alphabetic character and may only
	// contain characters matching _CM_URI_SCHEME
	if (!(_CM_URI_SCHEME(ch)) || (index == 0 && !(_CM_ALPHA(ch)))) {
		throw std::invalid_argument("Invalid scheme character '" + STD_STR(ch) + "' at index " + STD_STR(index));
	}

	/* Lowercase the scheme always */
	return _CM_UPPER(ch) ? STD_STR((char) std::tolower(ch)) : opt_string();
));

/**
 * User Info escaping
 */
URI_CHAR_PRED(UserInfoPredicate, _CM_URI_USER_INFO);
typedef ChainedEscaper<PercentEncodedPassthruEscaper, PercentEncoder<UserInfoPredicate>> UserInfoEncoder;

/**
/// @brief Find an audio provider able to open the given file.
/// @param filename File to open
/// @param cache    Caching mode: -1 = use the configured default,
///                 0 = no cache, 1 = RAM cache, 2 = hard disk cache
/// @return A ready-to-use provider, possibly wrapped in a sample-format
///         converter and a cache/lock wrapper.
AudioProvider *AudioProviderFactory::GetProvider(wxString const& filename, int cache) {
	AudioProvider *provider = 0;
	bool found_file = false;
	bool found_audio = false;
	std::string msg;

	if (!OPT_GET("Provider/Audio/PCM/Disable")->GetBool()) {
		// Try a PCM provider first
		try {
			provider = CreatePCMAudioProvider(filename);
			LOG_D("audio_provider") << "Using PCM provider";
		}
		catch (agi::FileNotFoundError const& err) {
			msg = "PCM audio provider: " + err.GetMessage() + " not found.\n";
		}
		catch (agi::AudioOpenError const& err) {
			found_file = true;
			msg += err.GetChainedMessage() + "\n";
		}
	}

	if (!provider) {
		// Fall back to the configured list of decoding providers
		std::vector<std::string> list = GetClasses(OPT_GET("Audio/Provider")->GetString());
		if (list.empty()) throw agi::NoAudioProvidersError("No audio providers are available.", 0);

		for (size_t i = 0; i < list.size() ; ++i) {
			try {
				provider = Create(list[i], filename);
				if (provider) {
					LOG_D("audio_provider") << "Using audio provider: " << list[i];
					break;
				}
			}
			catch (agi::FileNotFoundError const& err) {
				msg += list[i] + ": " + err.GetMessage() + " not found.\n";
			}
			catch (agi::AudioDataNotFoundError const& err) {
				found_file = true;
				msg += list[i] + ": " + err.GetChainedMessage() + "\n";
			}
			catch (agi::AudioOpenError const& err) {
				found_audio = true;
				found_file = true;
				msg += list[i] + ": " + err.GetChainedMessage() + "\n";
			}
		}
	}

	if (!provider) {
		// Throw the most specific error class the attempts produced
		if (found_audio)
			throw agi::AudioProviderOpenError(msg, 0);
		if (found_file)
			throw agi::AudioDataNotFoundError(msg, 0);
		throw agi::FileNotFoundError(STD_STR(filename));
	}

	bool needsCache = provider->NeedsCache();

	// Give it a converter if needed
	if (provider->GetBytesPerSample() != 2 || provider->GetSampleRate() < 32000 || provider->GetChannels() != 1)
		provider = CreateConvertAudioProvider(provider);

	// Change provider to RAM/HD cache if needed
	if (cache == -1) cache = OPT_GET("Audio/Cache/Type")->GetInt();
	if (!cache || !needsCache) {
		return new LockAudioProvider(provider);
	}

	DialogProgress progress(wxGetApp().frame, _("Load audio"));

	// Convert to RAM
	if (cache == 1) return new RAMAudioProvider(provider, &progress);

	// Convert to HD
	if (cache == 2) return new HDAudioProvider(provider, &progress);

	throw agi::AudioCacheOpenError("Unknown caching method", 0);
}
/// @brief Open an audio source and make it the current audio for the project.
/// @param url Either a plain filename or one of the pseudo-URI schemes
///            documented in the body below.
/// @throws agi::InternalError when url is empty; provider and player
///         errors are rethrown after cleanup.
void AudioController::OpenAudio(const wxString &url)
{
	if (!url)
		throw agi::InternalError("AudioController::OpenAudio() was passed an empty string. This must not happen.", 0);

	wxString path_part;

	AudioProvider *new_provider = 0;

	if (url.StartsWith("dummy-audio:", &path_part))
	{
		/*
		 * scheme ::= "dummy-audio" ":" signal-specifier "?" signal-parameters
		 * signal-specifier ::= "silence" | "noise" | "sine" "/" frequency
		 * frequency ::= integer
		 * signal-parameters ::= signal-parameter [ "&" signal-parameters ]
		 * signal-parameter ::= signal-parameter-name "=" integer
		 * signal-parameter-name ::= "sr" | "bd" | "ch" | "ln"
		 *
		 * Signal types:
		 * "silence", a silent signal is generated.
		 * "noise", a white noise signal is generated.
		 * "sine", a sine wave is generated at the specified frequency.
		 *
		 * Signal parameters:
		 * "sr", sample rate to generate signal at.
		 * "bd", bit depth to generate signal at (usually 16).
		 * "ch", number of channels to generate, usually 1 or 2. The same signal is generated
		 * in every channel even if one would be LFE.
		 * "ln", length of signal in samples. ln/sr gives signal length in seconds.
		 */
		// NOTE(review): only the "noise" prefix is actually honored here;
		// all other parameters fall back to a fixed-length dummy signal
		new_provider = new DummyAudioProvider(5*30*60*1000, path_part.StartsWith("noise"));
	}
	else if (url.StartsWith("video-audio:", &path_part))
	{
		/*
		 * scheme ::= "video-audio" ":" stream-type
		 * stream-type ::= "stream" | "cache"
		 *
		 * Stream types:
		 *
		 * "stream", the audio is streamed as required directly from the video provider,
		 * and cannot be used to drive an audio display. Seeking is unreliable.
		 *
		 * "cache", the entire audio is cached to memory or disk. Audio displays can be
		 * driven and seeking is reliable. Opening takes longer because the entire audio
		 * stream has to be decoded and stored.
		 */
		// Not implemented: new_provider remains null
	}
	else if (url.StartsWith("file:", &path_part))
	{
		/*
		 * scheme ::= "file" ":" "//" file-system-path
		 *
		 * On Unix-like systems, the file system path is regular. On Windows-systems, the
		 * path uses forward slashes instead of back-slashes and the drive letter is
		 * preceded by a slash.
		 *
		 * URL-encoding??
		 */
		// Not implemented: new_provider remains null
	}
	else
	{
		/*
		 * Assume it's not a URI but instead a filename in the platform's native format.
		 */
		try {
			new_provider = AudioProviderFactory::GetProvider(url);
			StandardPaths::SetPathValue("?audio", wxFileName(url).GetPath());
		}
		catch (agi::UserCancelException const&) {
			throw;
		}
		catch (...) {
			// Drop the failed source from the MRU list before rethrowing
			config::mru->Remove("Audio", STD_STR(url));
			throw;
		}
	}

	CloseAudio();
	provider = new_provider;

	try
	{
		player = AudioPlayerFactory::GetAudioPlayer(provider);
	}
	catch (...)
	{
		// Player creation failed: release the provider and leave no audio open
		delete provider;
		provider = 0;
		throw;
	}

	audio_url = url;

	config::mru->Add("Audio", STD_STR(url));

	try
	{
		// Tell listeners about this.
		AnnounceAudioOpen(provider);
	}
	catch (...)
	{
		CloseAudio();
		throw;
	}
}
/// @brief Persist the separator/comment settings and close the dialog with OK.
void DialogTextImport::OnOK(wxCommandEvent &) {
	// Store both text fields back into the options before closing
	OPT_SET("Tool/Import/Text/Actor Separator")->SetString(STD_STR(edit_separator->GetValue()));
	OPT_SET("Tool/Import/Text/Comment Starter")->SetString(STD_STR(edit_comment->GetValue()));

	EndModal(wxID_OK);
}
/// @brief Parses a list of parameters and sets reader state accordingly /// @param tags The list of parameters to parse void YUV4MPEGVideoProvider::ParseFileHeader(const std::vector<wxString>& tags) { if (tags.size() <= 1) throw VideoOpenError("ParseFileHeader: contentless header"); if (tags.front().Cmp("YUV4MPEG2")) throw VideoOpenError("ParseFileHeader: malformed header (bad magic)"); // temporary stuff int t_w = -1; int t_h = -1; int t_fps_num = -1; int t_fps_den = -1; Y4M_InterlacingMode t_imode = Y4M_ILACE_NOTSET; Y4M_PixelFormat t_pixfmt = Y4M_PIXFMT_NONE; for (unsigned i = 1; i < tags.size(); i++) { wxString tag; long tmp_long1 = 0; long tmp_long2 = 0; if (tags[i].StartsWith("W", &tag)) { if (!tag.ToLong(&tmp_long1)) throw VideoOpenError("ParseFileHeader: invalid width"); t_w = (int)tmp_long1; } else if (tags[i].StartsWith("H", &tag)) { if (!tag.ToLong(&tmp_long1)) throw VideoOpenError("ParseFileHeader: invalid height"); t_h = (int)tmp_long1; } else if (tags[i].StartsWith("F", &tag)) { if (!(tag.BeforeFirst(':')).ToLong(&tmp_long1) && tag.AfterFirst(':').ToLong(&tmp_long2)) throw VideoOpenError("ParseFileHeader: invalid framerate"); t_fps_num = (int)tmp_long1; t_fps_den = (int)tmp_long2; } else if (tags[i].StartsWith("C", &tag)) { // technically this should probably be case sensitive, // but being liberal in what you accept doesn't hurt tag.MakeLower(); if (tag == "420") t_pixfmt = Y4M_PIXFMT_420JPEG; // is this really correct? 
else if (tag == "420jpeg") t_pixfmt = Y4M_PIXFMT_420JPEG; else if (tag == "420mpeg2") t_pixfmt = Y4M_PIXFMT_420MPEG2; else if (tag == "420paldv") t_pixfmt = Y4M_PIXFMT_420PALDV; else if (tag == "411") t_pixfmt = Y4M_PIXFMT_411; else if (tag == "422") t_pixfmt = Y4M_PIXFMT_422; else if (tag == "444") t_pixfmt = Y4M_PIXFMT_444; else if (tag == "444alpha") t_pixfmt = Y4M_PIXFMT_444ALPHA; else if (tag == "mono") t_pixfmt = Y4M_PIXFMT_MONO; else throw VideoOpenError("ParseFileHeader: invalid or unknown colorspace"); } else if (tags[i].StartsWith("I", &tag)) { tag.MakeLower(); if (tag == "p") t_imode = Y4M_ILACE_PROGRESSIVE; else if (tag == "t") t_imode = Y4M_ILACE_TFF; else if (tag == "b") t_imode = Y4M_ILACE_BFF; else if (tag == "m") t_imode = Y4M_ILACE_MIXED; else if (tag == "?") t_imode = Y4M_ILACE_UNKNOWN; else throw VideoOpenError("ParseFileHeader: invalid or unknown interlacing mode"); } else LOG_D("provider/video/yuv4mpeg") << "Unparsed tag: " << STD_STR(tags[i]); } // The point of all this is to allow multiple YUV4MPEG2 headers in a single file // (can happen if you concat several files) as long as they have identical // header flags. The spec doesn't explicitly say you have to allow this, // but the "reference implementation" (mjpegtools) does, so I'm doing it too. 
if (inited) { if (t_w > 0 && t_w != w) throw VideoOpenError("ParseFileHeader: illegal width change"); if (t_h > 0 && t_h != h) throw VideoOpenError("ParseFileHeader: illegal height change"); if ((t_fps_num > 0 && t_fps_den > 0) && (t_fps_num != fps_rat.num || t_fps_den != fps_rat.den)) throw VideoOpenError("ParseFileHeader: illegal framerate change"); if (t_pixfmt != Y4M_PIXFMT_NONE && t_pixfmt != pixfmt) throw VideoOpenError("ParseFileHeader: illegal colorspace change"); if (t_imode != Y4M_ILACE_NOTSET && t_imode != imode) throw VideoOpenError("ParseFileHeader: illegal interlacing mode change"); } else { w = t_w; h = t_h; fps_rat.num = t_fps_num; fps_rat.den = t_fps_den; pixfmt = t_pixfmt != Y4M_PIXFMT_NONE ? t_pixfmt : Y4M_PIXFMT_420JPEG; imode = t_imode != Y4M_ILACE_NOTSET ? t_imode : Y4M_ILACE_UNKNOWN; fps = double(fps_rat.num) / fps_rat.den; inited = true; } }
/// @brief React to the user selecting a different spell checker language
///
/// Maps the list box selection back to its dictionary language code, stores
/// it in the options, and restarts the search with the new dictionary.
void DialogSpellChecker::OnChangeLanguage(wxCommandEvent&) {
	int selection = language->GetSelection();
	wxString lang_code = dictionary_lang_codes[selection];
	OPT_SET("Tool/Spell Checker/Language")->SetString(STD_STR(lang_code));

	// Re-run the check so the new language takes effect immediately
	FindNext();
}
/// @brief Construct a subtitles provider error event
/// @param err Description of the failure; becomes the exception message
SubtitlesProviderErrorEvent::SubtitlesProviderErrorEvent(wxString err)
: agi::Exception(STD_STR(err), nullptr)
{
	// Tag the event so wx handlers bound to EVT_SUBTITLES_ERROR receive it
	SetEventType(EVT_SUBTITLES_ERROR);
}
/// @brief Opens video
/// @param filename The filename to open
/// @throws agi::FileNotFoundError if the indexer cannot be created
/// @throws VideoNotSupported if no usable video track exists
/// @throws agi::UserCancelException if the user cancels track selection
/// @throws VideoOpenError on indexing/decoding failures
void FFmpegSourceVideoProvider::LoadVideo(wxString filename) {
	// FFMS2 wants the short (8.3) path form on Windows
	wxString FileNameShort = wxFileName(filename).GetShortPath();

	FFMS_Indexer *Indexer = FFMS_CreateIndexer(FileNameShort.utf8_str(), &ErrInfo);
	if (Indexer == NULL) {
		// NOTE(review): this may actually be a decode/open failure rather
		// than a missing file; FileNotFoundError is the historical choice
		throw agi::FileNotFoundError(ErrInfo.Buffer);
	}

	std::map<int,wxString> TrackList = GetTracksOfType(Indexer, FFMS_TYPE_VIDEO);
	if (TrackList.size() <= 0)
		throw VideoNotSupported("no video tracks found");

	// initialize the track number to an invalid value so we can detect later on
	// whether the user actually had to choose a track or not
	int TrackNumber = -1;
	if (TrackList.size() > 1) {
		TrackNumber = AskForTrackSelection(TrackList, FFMS_TYPE_VIDEO);
		// if it's still -1 here, user pressed cancel
		if (TrackNumber == -1)
			throw agi::UserCancelException("video loading cancelled by user");
	}

	// generate a name for the cache file
	wxString CacheName = GetCacheFilename(filename);

	// try to read index
	FFMS_Index *Index = NULL;
	Index = FFMS_ReadIndex(CacheName.utf8_str(), &ErrInfo);
	bool IndexIsValid = false;
	if (Index != NULL) {
		// a cached index for a *different* file is useless; discard it
		if (FFMS_IndexBelongsToFile(Index, FileNameShort.utf8_str(), &ErrInfo)) {
			FFMS_DestroyIndex(Index);
			Index = NULL;
		}
		else
			IndexIsValid = true;
	}

	// time to examine the index and check if the track we want is indexed
	// technically this isn't really needed since all video tracks should always be indexed,
	// but a bit of sanity checking never hurt anyone
	if (IndexIsValid && TrackNumber >= 0) {
		FFMS_Track *TempTrackData = FFMS_GetTrackFromIndex(Index, TrackNumber);
		if (FFMS_GetNumFrames(TempTrackData) <= 0) {
			IndexIsValid = false;
			FFMS_DestroyIndex(Index);
			Index = NULL;
		}
	}

	// moment of truth
	// NOTE(review): when a valid cached index is found, `Indexer` appears to
	// never be released (FFMS2 consumes the indexer only via indexing, and
	// FFMS_CancelIndexing is never called here) — confirm against the FFMS2
	// API docs whether this leaks on the cache-hit path.
	if (!IndexIsValid) {
		int TrackMask = OPT_GET("Provider/FFmpegSource/Index All Tracks")->GetBool() ? FFMS_TRACKMASK_ALL : FFMS_TRACKMASK_NONE;
		try {
			// ignore audio decoding errors here, we don't care right now
			Index = DoIndexing(Indexer, CacheName, TrackMask, FFMS_IEH_IGNORE);
		}
		catch (wxString err) {
			throw VideoOpenError(STD_STR(err));
		}
	}

	// update access time of index file so it won't get cleaned away
	wxFileName(CacheName).Touch();

	// we have now read the index and may proceed with cleaning the index cache
	if (!CleanCache()) {
		//do something?
	}

	// track number still not set?
	if (TrackNumber < 0) {
		// just grab the first track
		TrackNumber = FFMS_GetFirstIndexedTrackOfType(Index, FFMS_TYPE_VIDEO, &ErrInfo);
		if (TrackNumber < 0) {
			FFMS_DestroyIndex(Index);
			Index = NULL;
			throw VideoNotSupported(std::string("Couldn't find any video tracks: ") + ErrInfo.Buffer);
		}
	}

	// set thread count
	int Threads = OPT_GET("Provider/Video/FFmpegSource/Decoding Threads")->GetInt();

	// set seekmode
	// TODO: give this its own option?
	int SeekMode;
	if (OPT_GET("Provider/Video/FFmpegSource/Unsafe Seeking")->GetBool())
		SeekMode = FFMS_SEEK_UNSAFE;
	else
		SeekMode = FFMS_SEEK_NORMAL;

	VideoSource = FFMS_CreateVideoSource(FileNameShort.utf8_str(), TrackNumber, Index, Threads, SeekMode, &ErrInfo);
	// the video source keeps its own reference to the data it needs, so the
	// index can be destroyed as soon as the source is created (or fails)
	FFMS_DestroyIndex(Index);
	Index = NULL;
	if (VideoSource == NULL) {
		throw VideoOpenError(std::string("Failed to open video track: ") + ErrInfo.Buffer);
	}

	// load video properties
	VideoInfo = FFMS_GetVideoProperties(VideoSource);

	// decode frame 0 to learn the encoded dimensions
	const FFMS_Frame *TempFrame = FFMS_GetFrame(VideoSource, 0, &ErrInfo);
	if (TempFrame == NULL) {
		throw VideoOpenError(std::string("Failed to decode first frame: ") + ErrInfo.Buffer);
	}
	Width = TempFrame->EncodedWidth;
	Height = TempFrame->EncodedHeight;

	// request BGRA output at the encoded size
	if (FFMS_SetOutputFormatV(VideoSource, 1LL << FFMS_GetPixFmt("bgra"), Width, Height, FFMS_RESIZER_BICUBIC, &ErrInfo)) {
		throw VideoOpenError(std::string("Failed to set output format: ") + ErrInfo.Buffer);
	}

	// get frame info data
	FFMS_Track *FrameData = FFMS_GetTrackFromVideo(VideoSource);
	if (FrameData == NULL)
		throw VideoOpenError("failed to get frame data");
	const FFMS_TrackTimeBase *TimeBase = FFMS_GetTimeBase(FrameData);
	if (TimeBase == NULL)
		throw VideoOpenError("failed to get track time base");

	const FFMS_FrameInfo *CurFrameData;

	// build list of keyframes and timecodes
	std::vector<int> TimecodesVector;
	for (int CurFrameNum = 0; CurFrameNum < VideoInfo->NumFrames; CurFrameNum++) {
		CurFrameData = FFMS_GetFrameInfo(FrameData, CurFrameNum);
		if (CurFrameData == NULL) {
			throw VideoOpenError(STD_STR(wxString::Format(L"Couldn't get info about frame %d", CurFrameNum)));
		}

		// keyframe?
		if (CurFrameData->KeyFrame)
			KeyFramesList.push_back(CurFrameNum);

		// calculate timestamp and add to timecodes vector
		// (PTS is in the track's own time base; converted here to ms)
		int Timestamp = (int)((CurFrameData->PTS * TimeBase->Num) / TimeBase->Den);
		TimecodesVector.push_back(Timestamp);
	}

	Timecodes = agi::vfr::Framerate(TimecodesVector);

	// start at the first frame
	FrameNumber = 0;
}