void
job_dialog::on_view_log(wxCommandEvent &) {
  // Concatenate the captured output of every selected job into one text
  // blob and display it. Does nothing if no job is selected.
  wxString text;

  for (size_t idx = 0; jobs.size() > idx; ++idx) {
    if (!lv_jobs->IsSelected(idx))
      continue;

    // Header: job ID, description and the "added"/"started" timestamps.
    text += wxString::Format(Z("--- BEGIN job %d (%s, added on %s)"), jobs[idx].id, jobs[idx].description->c_str(), format_date_time(jobs[idx].added_on).c_str());
    if (-1 != jobs[idx].started_on)
      text += wxString::Format(Z(", started on %s"), format_date_time(jobs[idx].started_on).c_str());
    text += wxT("\n");

    // Body: the job's log, or a placeholder if nothing was captured.
    if (!jobs[idx].log->empty())
      text += *jobs[idx].log;
    else
      text += Z("--- No job output found.\n");

    // Make sure the footer starts on its own line.
    if (wxT('\n') != text.Last())
      text += wxT("\n");

    // Footer: job ID again plus the finish timestamp if available.
    text += wxString::Format(Z("--- END job %d"), jobs[idx].id);
    if (-1 != jobs[idx].finished_on)
      text += wxString::Format(Z(", finished on %s"), format_date_time(jobs[idx].finished_on).c_str());
    text += wxT("\n\n");
  }

  if (!text.empty()) {
    // NOTE(review): the dialog object is discarded immediately, so it
    // presumably runs modally inside its constructor — confirm.
    job_log_dialog dlg(this, text);
  }
}
void job_dialog::create_list_item(int i) { wxString s; s.Printf(wxT("%d"), jobs[i].id); long dummy = lv_jobs->InsertItem(i, s); s = JOBS_PENDING == jobs[i].status ? Z("pending") : JOBS_DONE == jobs[i].status ? Z("done") : JOBS_DONE_WARNINGS == jobs[i].status ? Z("done/warnings") : JOBS_ABORTED == jobs[i].status ? Z("aborted") : Z("failed"); lv_jobs->SetItem(dummy, 1, s); s = *jobs[i].description; while (s.length() < 15) s += wxT(" "); lv_jobs->SetItem(dummy, 2, s); lv_jobs->SetItem(dummy, 3, format_date_time(jobs[i].added_on)); if (jobs[i].started_on != -1) s = format_date_time(jobs[i].started_on); else s = wxT(" "); lv_jobs->SetItem(dummy, 4, s); if (jobs[i].finished_on != -1) s = format_date_time(jobs[i].finished_on); else s = wxT(" "); lv_jobs->SetItem(dummy, 5, s); }
// Draws one menu row for an event stored in Pebble persistent storage.
// The title comes from the PERSIST_EVENT_TITLE slot; the subtitle is the
// formatted start date, optionally followed by ", <hours>h" when the event
// is not exactly 24 hours long.
// NOTE(review): start/end dates appear to be Unix timestamps in seconds
// (duration / 3600.0 yields hours) — confirm against the writer side.
static void draw_row_callback(GContext *ctx, const Layer *cell_layer, MenuIndex *cell_index, void *callback_context) {
  int event_no = get_event_no_from_row_index(cell_index->row);

  // No title stored for this slot => nothing to draw.
  if (!persist_exists(event_no * PERSIST_EVENT_FIELDCOUNT + PERSIST_EVENT_TITLE))
    return;

  char event_title[PERSIST_STRING_MAX_LENGTH];
  persist_read_string(event_no * PERSIST_EVENT_FIELDCOUNT + PERSIST_EVENT_TITLE, event_title, PERSIST_STRING_MAX_LENGTH);

  // FIX: initialize the subtitle buffer. It was previously passed to
  // menu_cell_basic_draw() uninitialized when no start date was persisted.
  char event_subtitle[55] = "";
  char duration_string[10] = "";

  if (persist_exists(event_no * PERSIST_EVENT_FIELDCOUNT + PERSIST_EVENT_START_DATE)) {
    int start_date = persist_read_int(event_no * PERSIST_EVENT_FIELDCOUNT + PERSIST_EVENT_START_DATE);

    if (persist_exists(event_no * PERSIST_EVENT_FIELDCOUNT + PERSIST_EVENT_END_DATE)) {
      int end_date = persist_read_int(event_no * PERSIST_EVENT_FIELDCOUNT + PERSIST_EVENT_END_DATE);
      int duration = end_date - start_date;

      // FIX: exact integer comparison instead of the fragile float test
      // "duration / 3600.0 != 24" (identical result for integer seconds).
      if (duration != 24 * 3600) {
        // Not an all-day event: show start time plus duration in hours.
        format_date_time(event_subtitle, 40, (time_t)start_date, 1);
        float_to_string(duration_string, 10, duration / 3600.0);
        strcat(event_subtitle, ", ");
        strcat(event_subtitle, duration_string);
        strcat(event_subtitle, "h");
      } else {
        // Exactly 24 hours: treat as all-day, show the date only.
        format_date_time(event_subtitle, 40, (time_t)start_date, 0);
      }
    } else {
      // Start date but no end date: show the start time only.
      format_date_time(event_subtitle, 40, (time_t)start_date, 1);
    }
  }

  menu_cell_basic_draw(ctx, cell_layer, event_title, event_subtitle, NULL);
}
// Global Boost.Log logger initialization: builds a multithreaded severity
// logger with a local-clock "TimeStamp" attribute and registers two
// asynchronous sinks — warnings and above to std::cout, everything to a
// time-rotated log file.
// NOTE(review): `severity` and `timestamp` are presumably Boost.Log
// attribute keyword placeholders declared elsewhere in this file — confirm.
BOOST_LOG_GLOBAL_LOGGER_INIT(__logger, bl::sources::severity_logger_mt) {
  bl::sources::severity_logger_mt<boost::log::trivial::severity_level> logger;

  // Stamp every record with the local wall-clock time.
  logger.add_attribute("TimeStamp", bl::attributes::local_clock());

  // Console sink: wraps the global std::cout without taking ownership
  // (null_deleter keeps the shared_ptr from deleting it).
  using text_sink = bl::sinks::asynchronous_sink<bl::sinks::text_ostream_backend>;
  auto consoleSink = boost::make_shared<text_sink>();
  consoleSink->locked_backend()->add_stream(boost::shared_ptr<std::ostream>(&std::cout, boost::null_deleter()));
  // Only warning-or-higher records reach the console.
  consoleSink->set_filter(severity >= boost::log::trivial::severity_level::warning);

  // File sink: date-stamped file name, rotation at midnight (00:00:00),
  // flush after each record; rotated files are collected into Logs\Archive.
  using file_sink = bl::sinks::asynchronous_sink<bl::sinks::text_file_backend>;
  auto logfileSink = boost::make_shared<file_sink>();
  {
    auto logfileBackend = logfileSink->locked_backend();
    logfileBackend->set_time_based_rotation(bl::sinks::file::rotation_at_time_point(0, 0, 0));
    logfileBackend->set_file_name_pattern("Logs\\ashbot_%Y%m%d.log");
    logfileBackend->auto_flush();
    logfileBackend->set_file_collector(
      bl::sinks::file::make_collector(bl::keywords::target = "Logs\\Archive")
    );
  }

  // File record format: "<timestamp> [<severity>] <message>".
  namespace expr = bl::expressions;
  bl::formatter fmt = expr::stream << format_date_time(timestamp, "%Y-%m-%d %H:%M:%S.%f") << " [" << boost::log::trivial::severity << "] " << expr::smessage;
  logfileSink->set_formatter(fmt);

  // Register both sinks with the logging core; the console sink keeps the
  // default formatter.
  bl::core::get()->add_sink(consoleSink);
  bl::core::get()->add_sink(logfileSink);

  return logger;
}
void
job_run_dialog::on_end_process(wxProcessEvent &evt) {
  // Handler for the mkvmerge child process terminating: drain remaining
  // output, record the job's final status, optionally remove the job from
  // the queue (per the "clear job after run" option) and start the next one.
  process_input();

  int ndx        = jobs_to_start[current_job];
  int exit_code  = evt.GetExitCode();
  auto clear_mode = mdlg->options.clear_job_after_run_mode;

  wxString status;
  bool remove_job = false;

  // Map the abort flag / exit code onto a job status and decide whether the
  // finished job should be dropped from the queue.
  if (abort) {
    jobs[ndx].status = JOBS_ABORTED;
    status           = Z("aborted");

  } else if (0 == exit_code) {
    jobs[ndx].status = JOBS_DONE;
    status           = Z("completed OK");
    remove_job       = CJAR_NEVER != clear_mode;

  } else if (1 == exit_code) {
    jobs[ndx].status = JOBS_DONE_WARNINGS;
    status           = Z("completed with warnings");
    remove_job       = (CJAR_ALWAYS == clear_mode) || (CJAR_WARNINGS == clear_mode);

  } else {
    jobs[ndx].status = JOBS_FAILED;
    status           = Z("failed");
    remove_job       = CJAR_ALWAYS == clear_mode;
  }

  jobs[ndx].finished_on = wxGetUTCTime();

  add_to_log(wxString::Format(Z("Finished job ID %d on %s: status '%s'"), jobs[ndx].id, format_date_time(jobs[ndx].finished_on).c_str(), status.c_str()));

  if (remove_job) {
    // Drop the finished job and shift every queued index past it down by one.
    jobs.erase(jobs.begin() + ndx);
    for (auto &queued_ndx : jobs_to_start)
      if (queued_ndx >= ndx)
        --queued_ndx;
  }

  mdlg->save_job_queue();

  delete process;
  process = nullptr;
  out     = nullptr;
  wxRemoveFile(opt_file_name);

  if (!abort)
    g_jobs->SetValue((current_job + 1) * 100);

  start_next_job();
}
void job_run_dialog::start_next_job() { t_update->Stop(); ++current_job; if ((static_cast<int>(jobs_to_start.size()) <= current_job) || cb_abort_after_current->IsChecked() || abort) { if ( abort || ( cb_abort_after_current->IsChecked() && (current_job < static_cast<int>(jobs_to_start.size())))) add_to_log(wxString::Format(Z("Aborted processing on %s"), format_date_time(wxGetUTCTime()).c_str())); else add_to_log(wxString::Format(Z("Finished processing on %s"), format_date_time(wxGetUTCTime()).c_str())); b_abort->Enable(false); cb_abort_after_current->Enable(false); b_ok->Enable(true); b_ok->SetFocus(); SetTitle(Z("mkvmerge has finished")); st_remaining_time->SetLabel(wxT("---")); st_remaining_time_total->SetLabel(wxT("---")); #if defined(SYS_WINDOWS) if (m_taskbar_progress) m_taskbar_progress->set_state(TBPF_NOPROGRESS); #endif return; } m_start_time = mtx::sys::get_current_time_millis(); m_next_remaining_time_update = m_start_time + 8000; st_remaining_time->SetLabel(Z("is being estimated")); #if defined(SYS_WINDOWS) if (m_taskbar_progress) { m_taskbar_progress->set_state(TBPF_NORMAL); m_taskbar_progress->set_value(current_job * 100, jobs_to_start.size() * 100); } #endif int ndx = jobs_to_start[current_job]; st_jobs->SetLabel(wxString::Format(Z("Processing job %d/%d"), current_job + 1, (int)jobs_to_start.size())); st_current->SetLabel(wxString::Format(Z("Current job ID %d:"), jobs[ndx].id)); mdlg->load(wxString::Format(wxT("%s/%d.mmg"), app->get_jobs_folder().c_str(), jobs[ndx].id), true); opt_file_name = get_temp_settings_file_name(); wxFile *opt_file; try { opt_file = new wxFile(opt_file_name, wxFile::write); } catch (...) 
{ jobs[ndx].log->Printf(Z("Could not create a temporary file for mkvmerge's command line option called '%s' (error code %d, %s)."), opt_file_name.c_str(), errno, wxUCS(strerror(errno))); jobs[ndx].status = JOBS_FAILED; mdlg->save_job_queue(); if (process) { delete process; process = nullptr; } start_next_job(); return; } static const unsigned char utf8_bom[3] = {0xef, 0xbb, 0xbf}; opt_file->Write(utf8_bom, 3); opt_file->Write(wxT("--gui-mode\n")); mdlg->update_command_line(); wxArrayString *arg_list = &mdlg->get_command_line_args(); size_t i; for (i = 1; i < arg_list->Count(); i++) { if ((*arg_list)[i].Length() == 0) opt_file->Write(wxT("#EMPTY#")); else { std::string arg_utf8 = escape(wxMB((*arg_list)[i])); opt_file->Write(arg_utf8.c_str(), arg_utf8.length()); } opt_file->Write(wxT("\n")); } delete opt_file; process = new wxProcess(this, 1); process->Redirect(); wxString command_line = wxString::Format(wxT("\"%s\" \"@%s\""), (*arg_list)[0].c_str(), opt_file_name.c_str()); pid = wxExecute(command_line, wxEXEC_ASYNC, process); if (0 == pid) { wxLogError(wxT("Execution of '%s' failed."), command_line.c_str()); return; } out = process->GetInputStream(); *jobs[ndx].log = wxEmptyString; jobs[ndx].started_on = wxGetUTCTime(); jobs[ndx].finished_on = -1; add_to_log(wxString::Format(Z("Starting job ID %d (%s) on %s"), jobs[ndx].id, jobs[ndx].description->c_str(), format_date_time(jobs[ndx].started_on).c_str())); t_update->Start(100); }
// MPI master-side work distributor: hands out batches of proteins to child
// processes on request until every child has been told the search is over.
// Protocol per iteration (tags): child announces itself and its CPU count
// (tag 0xFF, twice); master replies with the batch size (tag 0x99), then —
// if work remains — the serialized batch length (tag 0x00) and payload
// (tag 0x01). batchSize == 0 tells a child to stop.
// NOTE(review): `st` is presumably a global MPI_Status declared elsewhere;
// `workerHandles` is never used within this function — confirm both.
int TransmitProteinsToChildProcesses() {
  int numProteins = (int) proteins.size();
  vector< simplethread_handle_t > workerHandles;
  int sourceProcess, batchSize;
  bool IsFinished = false;

  Timer searchTime( true );
  float totalSearchTime = 0.01f;
  float lastUpdate = 0.0f;

  int i = 0;                       // index of the next protein to hand out
  int numChildrenFinished = 0;
  while( numChildrenFinished < g_numChildren ) {
#ifdef MPI_DEBUG
    cout << g_hostString << " is listening for a child process to offer to search some proteins." << endl;
#endif
    // Listen for a process requesting proteins.
    // Extract the number of CPUs available on the process.
    MPI_Recv( &sourceProcess, 1, MPI_INT, MPI_ANY_SOURCE, 0xFF, MPI_COMM_WORLD, &st );
    int sourceCPUs = 0;
    MPI_Recv( &sourceCPUs, 1, MPI_INT, sourceProcess, 0xFF, MPI_COMM_WORLD, &st );

    int pOffset = i;
    // Scale the batchSize with the number of cpus in the requested process.
    batchSize = min( numProteins-i, g_rtConfig->ProteinBatchSize*sourceCPUs );

    // Serialize the batch offset plus the proteins in FASTA form.
    // NOTE(review): this archive is built even when no work remains
    // (batchSize <= 0 leaves proteinStream empty and the pack unused).
    stringstream packStream;
    binary_oarchive packArchive( packStream );
    try {
      packArchive & pOffset;
      string proteinStream;
      for( int j = i; j < i + batchSize; ++j ) {
        proteinStream += ">" + proteins[j].getName() + " " + proteins[j].getDescription() + "\n" + proteins[j].getSequence() + "\n";
      }
      packArchive & proteinStream;
    } catch( exception& e ) {
      // Serialization failure is fatal for the whole run.
      cerr << g_hostString << " had an error: " << e.what() << endl;
      exit(1);
    }
#ifdef MPI_DEBUG
    cout << "Process #" << sourceProcess << " has " << sourceCPUs << " cpus. Sending " << batchSize << " proteins." << endl;
#endif

    if( i < numProteins ) {
      // Work remains: announce the batch size, then ship the serialized pack.
      MPI_Ssend( &batchSize, 1, MPI_INT, sourceProcess, 0x99, MPI_COMM_WORLD );
#ifdef MPI_DEBUG
      cout << g_hostString << " is sending " << batchSize << " proteins." << endl;
      Timer sendTime(true);
#endif
      string pack = packStream.str();
      int len = (int) pack.length();
      MPI_Send( &len, 1, MPI_INT, sourceProcess, 0x00, MPI_COMM_WORLD );
      MPI_Send( (void*) pack.c_str(), len, MPI_CHAR, sourceProcess, 0x01, MPI_COMM_WORLD );
#ifdef MPI_DEBUG
      cout << g_hostString << " finished sending " << batchSize << " proteins; " << sendTime.End() << " seconds elapsed." << endl;
#endif
      i += batchSize;
    } else {
      // No work left: batchSize 0 is the shutdown signal for this child.
      batchSize = 0;
      MPI_Ssend( &batchSize, 1, MPI_INT, sourceProcess, 0x99, MPI_COMM_WORLD );
#ifdef MPI_DEBUG
      cout << "Process #" << sourceProcess << " has been informed that all proteins have been searched." << endl;
#endif
      ++numChildrenFinished;
    }

    // Periodic progress report (throttled by StatusUpdateFrequency).
    // NOTE(review): the "i+1 == numProteins" tests look off by one when a
    // batch ends exactly at numProteins — confirm the intended condition.
    totalSearchTime = searchTime.TimeElapsed();
    if( !IsFinished && ( ( totalSearchTime - lastUpdate > g_rtConfig->StatusUpdateFrequency ) || i+1 == numProteins ) ) {
      if( i+1 == numProteins )
        IsFinished = true;
      float proteinsPerSec = float(i+1) / totalSearchTime;
      bpt::time_duration estimatedTimeRemaining(0, 0, round((numProteins - i) / proteinsPerSec));
      cout << "Searched " << i << " of " << numProteins << " proteins; " << round(proteinsPerSec) << " per second, " << format_date_time("%H:%M:%S", bpt::time_duration(0, 0, round(totalSearchTime))) << " elapsed, " << format_date_time("%H:%M:%S", estimatedTimeRemaining) << " remaining." << endl;
      lastUpdate = totalSearchTime;
    }
  }
  return 0;
}