void CloudUploader::Upload (const UploadJob& job)
{
	// Start the upload immediately when idle; otherwise defer it until
	// the currently running job finishes (see handleUploadFinished).
	if (!IsRunning ())
		StartJob (job);
	else
		Queue_ << job;
}
/**
 * Start a metadata job, hopping to the main thread first if necessary.
 *
 * \param aMediaItemsArray    items to process
 * \param aRequiredProperties properties to write (write jobs only)
 * \param aJobType            read or write job
 * \param _retval             receives the job progress object
 */
nsresult sbFileMetadataService::ProxiedStartJob(nsIArray* aMediaItemsArray, nsIStringEnumerator* aRequiredProperties, sbMetadataJob::JobType aJobType, sbIJobProgress** _retval)
{
  TRACE(("%s[%.8x]", __FUNCTION__, this));

  // Already on the main thread: invoke StartJob directly.
  if (NS_IsMainThread()) {
    return StartJob(aMediaItemsArray, aRequiredProperties, aJobType, _retval);
  }

  LOG(("%s[%.8x] proxying main thread StartJob()", __FUNCTION__, this));

  nsCOMPtr<nsIThread> mainThread;
  nsresult rv = NS_GetMainThread(getter_AddRefs(mainThread));
  NS_ENSURE_SUCCESS(rv, rv);

  nsCOMPtr<sbIFileMetadataService> proxy;
  rv = do_GetProxyForObject(mainThread,
                            NS_GET_IID(sbIFileMetadataService),
                            static_cast<sbIFileMetadataService*>(this),
                            NS_PROXY_SYNC | NS_PROXY_ALWAYS,
                            getter_AddRefs(proxy));
  NS_ENSURE_SUCCESS(rv, rv);

  // StartJob is not an interface method, so it cannot be called through
  // the proxy; dispatch through the public Write/Read entry points instead.
  if (aJobType == sbMetadataJob::TYPE_WRITE) {
    return proxy->Write(aMediaItemsArray, aRequiredProperties, _retval);
  }
  return proxy->Read(aMediaItemsArray, _retval);
}
void CloudUploader::handleUploadFinished (const QString& localPath, CloudStorageError error, const QString& errorStr) { emit finishedCopying (); const bool remove = CurrentJob_.RemoveOnFinish_; CurrentJob_ = UploadJob (); if (!Queue_.isEmpty ()) StartJob (Queue_.takeFirst ()); if (error == CloudStorageError::NoError && remove) QFile::remove (localPath); if (!errorStr.isEmpty () && error != CloudStorageError::NoError) Core::Instance ().SendEntity (Util::MakeNotification ("LMP", tr ("Error uploading file %1 to cloud: %2.") .arg (QFileInfo (localPath).fileName ()) .arg (errorStr), PWarning_)); }
void VanillaProc::restartCheckpointedJob() { // For the same reason that we call recordFinalUsage() from the reaper // in normal exit cases, we should get the final usage of the checkpointed // process now, add it to the running total of checkpointed processes, // and then add the running total to the current when we publish the // update ad. FIXME (#4971) if( Starter->jic->uploadWorkingFiles() ) { notifySuccessfulPeriodicCheckpoint(); } else { // We assume this is a transient failure and will try // to transfer again after the next periodic checkpoint. dprintf( D_ALWAYS, "Failed to transfer checkpoint.\n" ); } // While it's arguably sensible to kill the process family // before we restart the job, that would mean that checkpointing // would behave differently during ssh-to-job, which seems bad. // killFamilyIfWarranted(); StartJob(); }
// Drive the post-processing queue: clean up finished jobs, then launch as
// many new jobs as the scheduler allows, persisting the queue if anything
// changed. Runs with the download queue locked for its whole duration.
void PrePostProcessor::CheckPostQueue()
{
	// RAII guard: the download queue stays locked until this function returns.
	GuardedDownloadQueue downloadQueue = DownloadQueue::Guard();

	// Track whether the active-job list changed, so we only save/update state
	// when necessary.
	size_t countBefore = m_activeJobs.size();
	CheckRequestPar(downloadQueue);
	CleanupJobs(downloadQueue);
	bool changed = m_activeJobs.size() != countBefore;

	// allowPar is an out-parameter of CanRunMoreJobs: whether the next job may
	// include par processing.
	bool allowPar;
	while (CanRunMoreJobs(&allowPar) && !IsStopped())
	{
		NzbInfo* postJob = PickNextJob(downloadQueue, allowPar);
		if (!postJob)
		{
			// Nothing eligible to run right now.
			break;
		}
		m_activeJobs.push_back(postJob);

		PostInfo* postInfo = postJob->GetPostInfo();
		// Start only queued jobs; a paused post-processor is overridden by
		// force-priority items.
		if (postInfo->GetStage() == PostInfo::ptQueued &&
			(!g_Options->GetPausePostProcess() || postInfo->GetNzbInfo()->GetForcePriority()))
		{
			StartJob(downloadQueue, postInfo, allowPar);
			// Starting a job may complete/request par work immediately, so
			// re-run the bookkeeping before the next iteration.
			CheckRequestPar(downloadQueue);
			CleanupJobs(downloadQueue);
			changed = true;
		}
	}

	if (changed)
	{
		downloadQueue->Save();
		UpdatePauseState();
	}

	// Signal standby when no post-processing jobs remain active.
	Util::SetStandByMode(m_activeJobs.empty());
}
void PlayTest_Batch(unsigned int rollbacks, unsigned int plays, BoardDB* boarddb) { if(boarddb != NULL && rollbacks != 0) { std::cout << "PlayTest_Batch: Don't combine BoardDB with rollbacks!" << std::endl; exit(1); } std::mutex boarddb_mutex; HeuristicParameters parameters; GetDefaultHeuristicParameters(¶meters); std::vector<std::future<unsigned int> > scores(plays); for(unsigned int p = 0; p < plays; ++p) { std::cout << "Batch progress: " << 100 * p / plays << "%" << std::endl; scores[p] = StartJob(PlayTest, parameters, rollbacks, boarddb, &boarddb_mutex); } std::cout << "Finishing ..." << std::endl; for(unsigned int p = 0; p < plays; ++p) { scores[p].wait(); } std::cout << "scores = array([\n\t"; for(unsigned int p = 0; p < plays; ++p) { std::cout << scores[p].get(); if(p != plays - 1) { if(p % 20 == 19) std::cout << ",\n\t"; else std::cout << ", "; } } std::cout << "])" << std::endl; PrintExpectiMaxStats(); }
// Tune heuristic parameters with a steady-state genetic algorithm.
// Each iteration: harvest one finished play (after a pipeline delay of
// `latency` in-flight jobs), pick two parents by tournament selection,
// average them into a child, mutate it, and launch it as a new async play.
// Finally prints all scores and the population-average parameters.
//
// NOTE(review): futures are indexed with `p % latency` — a latency of 0
// would divide by zero; presumably callers always pass latency >= 1 — TODO confirm.
void PlayTest_Tune(unsigned int rollbacks, unsigned int plays, unsigned int population_size, unsigned int tournament_size, unsigned int latency) {
	std::mt19937 rng(RandomSeed());

	// create initial population
	std::vector<TuneElement> population(population_size);
	for(unsigned int i = 0; i < population_size; ++i) {
		GetDefaultHeuristicParameters(&population[i].m_parameters);
		population[i].m_score = 20000; // guess, should be relatively low
	}

	// simulate plays; `futures` is a ring buffer of the `latency` in-flight jobs
	std::vector<TuneElement> history(plays);
	std::vector<std::future<unsigned int> > futures(latency);
	for(unsigned int p = 0; p < plays + latency; ++p) {
		std::cout << "Tune progress: " << 100 * p / (plays + latency) << "%" << std::endl;

		// add completed play to the population (the job started `latency`
		// iterations ago has its slot reused below, so harvest it first)
		if(p >= latency) {
			history[p - latency].m_score = futures[p % latency].get();
			population[(p - latency) % population_size] = history[p - latency];
		}

		// tournament selection: pick the best of `tournament_size` random
		// candidates, independently for each of the two parents
		TuneElement best1, best2;
		GetDefaultHeuristicParameters(&best1.m_parameters);
		GetDefaultHeuristicParameters(&best2.m_parameters);
		best1.m_score = 0;
		best2.m_score = 0;
		for(unsigned int t = 0; t < tournament_size; ++t) {
			unsigned int sel1 = rng() % population_size;
			if(population[sel1].m_score > best1.m_score)
				best1 = population[sel1];
			unsigned int sel2 = rng() % population_size;
			if(population[sel2].m_score > best2.m_score)
				best2 = population[sel2];
		}

		// create winner: per-parameter average of the two parents with
		// random tie-break rounding (rng() & 1)
		HeuristicParameters winner;
		std::cout << "Winner (" << best1.m_score << "|" << best2.m_score << "): ";
		for(unsigned int i = 0; i < PARAM_COUNT; ++i) {
			winner.m_values[i] = (best1.m_parameters.m_values[i] + best2.m_parameters.m_values[i] + (rng() & 1)) / 2;
			std::cout << winner.m_values[i] << " ";
		}
		std::cout << std::endl;

		// only the first `plays` iterations launch new jobs; the last
		// `latency` iterations merely drain the pipeline
		if(p < plays) {
			// do some mutations
			for(unsigned int i = 0; i < PARAM_COUNT; ++i) {
				winner.m_values[i] = Mutate(winner.m_values[i], PARAMETERS_MIN[i], PARAMETERS_MAX[i], PARAMETERS_STEP[i], rng);
			}
			// start the job
			history[p].m_parameters = winner;
			futures[p % latency] = StartJob(PlayTest, winner, rollbacks, (BoardDB*) NULL, (std::mutex*) NULL);
		}
	}

	// print all recorded scores in a numpy-friendly format, 20 per line
	std::cout << "scores = array([\n\t";
	for(unsigned int p = 0; p < plays; ++p) {
		std::cout << history[p].m_score;
		if(p != plays - 1) {
			if(p % 20 == 19)
				std::cout << ",\n\t";
			else
				std::cout << ", ";
		}
	}
	std::cout << "])" << std::endl;

	// calculate population average (integer average with round-half-up)
	HeuristicParameters population_average;
	std::cout << "Population average: ";
	for(unsigned int i = 0; i < PARAM_COUNT; ++i) {
		population_average.m_values[i] = 0;
		for(unsigned int p = 0; p < population_size; ++p) {
			population_average.m_values[i] += population[p].m_parameters.m_values[i];
		}
		population_average.m_values[i] = (population_average.m_values[i] + population_size / 2) / population_size;
		std::cout << population_average.m_values[i] << " ";
	}
	std::cout << std::endl;
}
// Advance the (single-job) post-processing queue: handle par-check/par-rename
// re-queue requests on the front job, then start it, complete it, or report an
// inconsistent state. The download queue is locked for the whole function.
void PrePostProcessor::CheckPostQueue()
{
	DownloadQueue* pDownloadQueue = g_pQueueCoordinator->LockQueue();

	if (!pDownloadQueue->GetPostQueue()->empty())
	{
		// Only the front job is ever processed; later entries wait their turn.
		PostInfo* pPostInfo = pDownloadQueue->GetPostQueue()->front();
		if (!pPostInfo->GetWorking())
		{
#ifndef DISABLE_PARCHECK
			// Automatic par-check requested: reset par state and re-queue the
			// job so the check runs on the next pass.
			if (pPostInfo->GetRequestParCheck() && pPostInfo->GetNZBInfo()->GetParStatus() <= NZBInfo::psSkipped &&
				g_pOptions->GetParCheck() != Options::pcManual)
			{
				pPostInfo->GetNZBInfo()->SetParStatus(NZBInfo::psNone);
				pPostInfo->SetRequestParCheck(false);
				pPostInfo->SetStage(PostInfo::ptQueued);
				pPostInfo->GetNZBInfo()->GetScriptStatuses()->Clear();
				DeletePostThread(pPostInfo);
			}
			// Manual par-check mode: mark for manual handling and resume any
			// remaining (paused) files of the group so the user can check.
			else if (pPostInfo->GetRequestParCheck() && pPostInfo->GetNZBInfo()->GetParStatus() <= NZBInfo::psSkipped &&
				g_pOptions->GetParCheck() == Options::pcManual)
			{
				pPostInfo->SetRequestParCheck(false);
				pPostInfo->GetNZBInfo()->SetParStatus(NZBInfo::psManual);
				DeletePostThread(pPostInfo);

				FileInfo* pFileInfo = GetQueueGroup(pDownloadQueue, pPostInfo->GetNZBInfo());
				if (pFileInfo)
				{
					// Files still queued: resume them and finish post-processing
					// without running scripts (user takes over).
					info("Downloading all remaining files for manual par-check for %s", pPostInfo->GetNZBInfo()->GetName());
					g_pQueueCoordinator->GetQueueEditor()->LockedEditEntry(pDownloadQueue, pFileInfo->GetID(), false, QueueEditor::eaGroupResume, 0, NULL);
					pPostInfo->SetStage(PostInfo::ptFinished);
					pPostInfo->GetNZBInfo()->SetPostProcess(false);
				}
				else
				{
					// Nothing left to download; just re-queue the job.
					info("There are no par-files remain for download for %s", pPostInfo->GetNZBInfo()->GetName());
					pPostInfo->SetStage(PostInfo::ptQueued);
				}
			}
			// Par-rename requested: reset rename state and re-queue.
			else if (pPostInfo->GetRequestParRename())
			{
				pPostInfo->GetNZBInfo()->SetRenameStatus(NZBInfo::rsNone);
				pPostInfo->SetRequestParRename(false);
				pPostInfo->SetStage(PostInfo::ptQueued);
				DeletePostThread(pPostInfo);
			}
#endif
			// A deleted job skips straight to completion.
			if (pPostInfo->GetDeleted())
			{
				pPostInfo->SetStage(PostInfo::ptFinished);
			}

			if (pPostInfo->GetStage() == PostInfo::ptQueued && !g_pOptions->GetPausePostProcess())
			{
				DeletePostThread(pPostInfo);
				StartJob(pDownloadQueue, pPostInfo);
			}
			else if (pPostInfo->GetStage() == PostInfo::ptFinished)
			{
				UpdatePauseState(false, NULL);
				JobCompleted(pDownloadQueue, pPostInfo);
			}
			// A non-working job that is neither queued nor finished (and the
			// processor isn't paused) indicates a broken state machine.
			else if (!g_pOptions->GetPausePostProcess())
			{
				error("Internal error: invalid state in post-processor");
			}
		}
	}

	g_pQueueCoordinator->UnlockQueue();
}
// Advance the current post-processing job: pick the next job if none is
// active, handle par-check re-queue requests, then start, complete, or
// flag an inconsistent state. Runs with the download queue guard held.
void PrePostProcessor::CheckPostQueue()
{
	// RAII guard: the download queue stays locked until return.
	GuardedDownloadQueue downloadQueue = DownloadQueue::Guard();

	// Only one job runs at a time; pick the next when idle and work remains.
	if (!m_curJob && m_jobCount > 0)
	{
		m_curJob = GetNextJob(downloadQueue);
	}

	if (m_curJob)
	{
		PostInfo* postInfo = m_curJob->GetPostInfo();
		// Skip while the job thread is running or its nzb is still downloading.
		if (!postInfo->GetWorking() && !IsNzbFileDownloading(m_curJob))
		{
#ifndef DISABLE_PARCHECK
			// Automatic par-check requested: reset par state and re-queue so
			// the check runs on the next pass. ForceRepair additionally
			// triggers when only a quick (non-full) par verify was done.
			if (postInfo->GetRequestParCheck() &&
				(postInfo->GetNzbInfo()->GetParStatus() <= NzbInfo::psSkipped ||
				 (postInfo->GetForceRepair() && !postInfo->GetNzbInfo()->GetParFull())) &&
				g_Options->GetParCheck() != Options::pcManual)
			{
				// Remember whether a full par check must be forced this time.
				postInfo->SetForceParFull(postInfo->GetNzbInfo()->GetParStatus() > NzbInfo::psSkipped);
				postInfo->GetNzbInfo()->SetParStatus(NzbInfo::psNone);
				postInfo->SetRequestParCheck(false);
				postInfo->SetStage(PostInfo::ptQueued);
				postInfo->GetNzbInfo()->GetScriptStatuses()->clear();
				DeletePostThread(postInfo);
			}
			// Manual par-check mode: mark for manual handling and resume the
			// group's remaining files so the user can run the check.
			else if (postInfo->GetRequestParCheck() && postInfo->GetNzbInfo()->GetParStatus() <= NzbInfo::psSkipped &&
				g_Options->GetParCheck() == Options::pcManual)
			{
				postInfo->SetRequestParCheck(false);
				postInfo->GetNzbInfo()->SetParStatus(NzbInfo::psManual);
				DeletePostThread(postInfo);

				if (!postInfo->GetNzbInfo()->GetFileList()->empty())
				{
					// Files still queued: resume them and finish post-processing
					// (user takes over from here).
					postInfo->GetNzbInfo()->PrintMessage(Message::mkInfo,
						"Downloading all remaining files for manual par-check for %s", postInfo->GetNzbInfo()->GetName());
					downloadQueue->EditEntry(postInfo->GetNzbInfo()->GetId(), DownloadQueue::eaGroupResume, 0, nullptr);
					postInfo->SetStage(PostInfo::ptFinished);
				}
				else
				{
					// Nothing left to download; just re-queue the job.
					postInfo->GetNzbInfo()->PrintMessage(Message::mkInfo,
						"There are no par-files remain for download for %s", postInfo->GetNzbInfo()->GetName());
					postInfo->SetStage(PostInfo::ptQueued);
				}
			}
#endif
			// A deleted job skips straight to completion.
			if (postInfo->GetDeleted())
			{
				postInfo->SetStage(PostInfo::ptFinished);
			}

			// Start queued jobs; a paused post-processor is overridden by
			// force-priority items.
			if (postInfo->GetStage() == PostInfo::ptQueued &&
				(!g_Options->GetPausePostProcess() || postInfo->GetNzbInfo()->GetForcePriority()))
			{
				DeletePostThread(postInfo);
				StartJob(downloadQueue, postInfo);
			}
			else if (postInfo->GetStage() == PostInfo::ptFinished)
			{
				UpdatePauseState(false, nullptr);
				JobCompleted(downloadQueue, postInfo);
			}
			// A non-working job that is neither queued nor finished (and the
			// processor isn't paused) indicates a broken state machine.
			else if (!g_Options->GetPausePostProcess())
			{
				error("Internal error: invalid state in post-processor");
				// TODO: cancel (delete) current job
			}
		}
	}
}