TEST_F(RunManagerTestFixture, ClearJobsPerformanceTest)
{
  openstudio::Application::instance().application(false);

  double oldway = 0;
  {
    RunManager rm;
    populateJobs(rm);
    ASSERT_EQ(465, static_cast<int>(rm.getJobs().size()));

    // Old way: remove each job individually.
    boost::timer t;
    for (openstudio::runmanager::Job job : rm.getJobs()) {
      rm.remove(job);
    }
    oldway = t.elapsed();

    ASSERT_TRUE(rm.getJobs().empty());
    ASSERT_FALSE(rm.workPending());
  }

  double newway = 0;
  {
    RunManager rm;
    populateJobs(rm);
    ASSERT_EQ(465, static_cast<int>(rm.getJobs().size()));

    // New way: clear all jobs in a single call.
    boost::timer t;
    rm.clearJobs();
    newway = t.elapsed();

    ASSERT_TRUE(rm.getJobs().empty());
    ASSERT_FALSE(rm.workPending());
  }

  // Assert that the new way is at least 2x faster than the old way.
  ASSERT_LT(newway * 2, oldway);
  LOG(Info, "Oldway: " << oldway << " Newway: " << newway);
}
TEST_F(AnalysisDriverFixture, RuntimeBehavior_StopCustomAnalysis)
{
  // Tests that stopping time is < 20s.

  // RETRIEVE PROBLEM
  Problem problem = retrieveProblem("UserScriptContinuous", true, false);

  // DEFINE SEED
  Model model = model::exampleModel();
  openstudio::path p = toPath("./example.osm");
  model.save(p, true);
  FileReference seedModel(p);

  // CREATE ANALYSIS
  Analysis analysis("Stop Custom Analysis", problem, seedModel);

  // generate 100 random points
  boost::mt19937 mt;
  typedef boost::uniform_real<> dist_type;
  typedef boost::variate_generator<boost::mt19937&, dist_type> gen_type;
  InputVariableVector variables = problem.variables();
  ContinuousVariable cvar = variables[0].cast<ContinuousVariable>();
  gen_type generator0(mt, dist_type(cvar.minimum().get(), cvar.maximum().get()));
  cvar = variables[1].cast<ContinuousVariable>();
  gen_type generator1(mt, dist_type(cvar.minimum().get(), cvar.maximum().get()));
  cvar = variables[2].cast<ContinuousVariable>();
  gen_type generator2(mt, dist_type(cvar.minimum().get(), cvar.maximum().get()));

  for (int i = 0, n = 100; i < n; ++i) {
    std::vector<QVariant> values;
    double value = generator0();
    values.push_back(value);
    value = generator1();
    values.push_back(value);
    value = generator2();
    values.push_back(value);
    OptionalDataPoint dataPoint = problem.createDataPoint(values);
    ASSERT_TRUE(dataPoint);
    ASSERT_TRUE(analysis.addDataPoint(*dataPoint));
  }

  // RUN ANALYSIS
  ProjectDatabase database = getCleanDatabase("StopCustomAnalysis");
  AnalysisDriver analysisDriver(database);
  AnalysisRunOptions runOptions = standardRunOptions(analysisDriver.database().path().parent_path());
  runOptions.setQueueSize(2);
  StopWatcher watcher(analysisDriver);
  watcher.watch(analysis.uuid());
  CurrentAnalysis currentAnalysis = analysisDriver.run(analysis, runOptions);
  EXPECT_EQ(2, currentAnalysis.numQueuedJobs());
  EXPECT_EQ(0, currentAnalysis.numQueuedDakotaJobs());
  EXPECT_EQ(100, currentAnalysis.totalNumJobsInOSIteration());
  EXPECT_EQ(0, currentAnalysis.numCompletedJobsInOSIteration());
  analysisDriver.waitForFinished();
  EXPECT_FALSE(analysisDriver.isRunning());
  EXPECT_GE(watcher.nComplete(), watcher.stopNum());
  EXPECT_LE(watcher.stoppingTime(), openstudio::Time(0, 0, 0, 20));

  // check conditions afterward
  RunManager runManager = analysisDriver.database().runManager();
  EXPECT_FALSE(runManager.workPending());
  BOOST_FOREACH(const Job& job, runManager.getJobs()) {
    EXPECT_FALSE(job.running());
    EXPECT_FALSE(job.treeRunning());
  }
  EXPECT_TRUE(currentAnalysis.numCompletedJobsInOSIteration() > 0);
  EXPECT_TRUE(currentAnalysis.analysis().dataPointsToQueue().size() > 0u);
  EXPECT_TRUE(currentAnalysis.analysis().dataPointsToQueue().size() < 100u);
  EXPECT_EQ(0u, analysisDriver.currentAnalyses().size());
}