// Verifies that a Dakota sampling analysis can be stopped mid-run (via StopWatcher)
// and then restarted from the records persisted in the project database, with the
// restarted run completing all 10 requested samples. Skipped when Dakota is not installed.
TEST_F(AnalysisDriverFixture,RuntimeBehavior_StopAndRestartDakotaAnalysis) {
  // RETRIEVE PROBLEM
  Problem problem = retrieveProblem("SimpleHistogramBinUQ",true,false);

  // DEFINE SEED
  Model model = model::exampleModel();
  openstudio::path p = toPath("./example.osm");
  model.save(p,true);
  FileReference seedModel(p);

  // CREATE ANALYSIS
  SamplingAlgorithmOptions algOptions;
  algOptions.setSamples(10);
  Analysis analysis("Stop and Restart Dakota Analysis",
                    problem,
                    SamplingAlgorithm(algOptions),
                    seedModel);

  // RUN ANALYSIS (only possible when a Dakota executable is available)
  if (!dakotaExePath().empty()) {
    ProjectDatabase database = getCleanDatabase("StopAndRestartDakotaAnalysis");
    AnalysisDriver analysisDriver(database);
    AnalysisRunOptions runOptions =
        standardRunOptions(analysisDriver.database().path().parent_path());
    // StopWatcher halts the analysis partway through; see fixture for the trigger.
    StopWatcher watcher(analysisDriver);
    watcher.watch(analysis.uuid());
    CurrentAnalysis currentAnalysis = analysisDriver.run(analysis,runOptions);
    analysisDriver.waitForFinished();
    EXPECT_FALSE(analysisDriver.isRunning());

    // check conditions afterward: the stop should leave a partially complete,
    // error-free analysis whose algorithm is not yet marked complete.
    boost::optional<runmanager::JobErrors> jobErrors = currentAnalysis.dakotaJobErrors();
    ASSERT_TRUE(jobErrors);
    EXPECT_FALSE(jobErrors->errors().empty());
    EXPECT_FALSE(currentAnalysis.analysis().dataPoints().empty());
    EXPECT_FALSE(currentAnalysis.analysis().dataPointsToQueue().empty());
    EXPECT_FALSE(currentAnalysis.analysis().completeDataPoints().empty());
    EXPECT_FALSE(currentAnalysis.analysis().successfulDataPoints().empty());
    EXPECT_TRUE(currentAnalysis.analysis().failedDataPoints().empty());
    EXPECT_FALSE(currentAnalysis.analysis().algorithm()->isComplete());
    EXPECT_FALSE(currentAnalysis.analysis().algorithm()->failed());
    EXPECT_EQ(0u,analysisDriver.currentAnalyses().size());
    LOG(Debug,"After initial stop, there are "
        << currentAnalysis.analysis().dataPoints().size()
        << " data points, of which "
        << currentAnalysis.analysis().completeDataPoints().size()
        << " are complete.");

    // try to restart from database contents
    // NOTE: deserialized into 'restartAnalysis' rather than redeclaring 'analysis',
    // which would shadow the function-scope variable above (-Wshadow).
    std::vector<AnalysisRecord> analysisRecords = AnalysisRecord::getAnalysisRecords(database);
    ASSERT_FALSE(analysisRecords.empty()); // guard the [0] access below
    Analysis restartAnalysis = analysisRecords[0].analysis();
    ASSERT_TRUE(restartAnalysis.algorithm());
    EXPECT_FALSE(restartAnalysis.algorithm()->isComplete());
    EXPECT_FALSE(restartAnalysis.algorithm()->failed());

    // Re-run; this time it should finish all 10 samples with no failures.
    currentAnalysis = analysisDriver.run(restartAnalysis,runOptions);
    analysisDriver.waitForFinished();
    EXPECT_EQ(10u,restartAnalysis.dataPoints().size());
    EXPECT_EQ(0u,restartAnalysis.dataPointsToQueue().size());
    EXPECT_EQ(10u,restartAnalysis.completeDataPoints().size());
    EXPECT_EQ(10u,restartAnalysis.successfulDataPoints().size());
    EXPECT_EQ(0u,restartAnalysis.failedDataPoints().size());
  }
}
TEST_F(AnalysisFixture, DataPoint_Selected) { // Create analysis Analysis analysis = analysis1(PreRun); // See how many to queue unsigned totalToRun = analysis.dataPointsToQueue().size(); ASSERT_LT(0u,totalToRun); // Turn one off ASSERT_FALSE(analysis.dataPoints().empty()); EXPECT_EQ(totalToRun,analysis.dataPoints().size()); DataPoint dataPoint = analysis.dataPoints()[0]; dataPoint.setSelected(false); EXPECT_FALSE(dataPoint.selected()); // Make sure shows up in "ToQueue" EXPECT_EQ(totalToRun - 1u,analysis.dataPointsToQueue().size()); }
// Two-phase integration test of the DDACE Latin hypercube algorithm on a
// continuous problem. Phase 1 (inner scope): run once without setting the
// sample count (expected to error), then set 4 samples and run to completion.
// Phase 2: reopen the project from disk, add one custom data point, and
// verify that only the new point runs — completed points are not re-simulated.
TEST_F(AnalysisDriverFixture, DDACE_LatinHypercube_Continuous) {
  {
    // GET SIMPLE PROJECT
    SimpleProject project = getCleanSimpleProject("DDACE_LatinHypercube_Continuous");
    Analysis analysis = project.analysis();

    // SET PROBLEM
    Problem problem = retrieveProblem("Continuous",true,false);
    analysis.setProblem(problem);

    // DEFINE SEED
    Model model = model::exampleModel();
    openstudio::path p = toPath("./example.osm");
    model.save(p,true);
    FileReference seedModel(p);
    analysis.setSeed(seedModel);

    // CREATE ANALYSIS
    DDACEAlgorithmOptions algOptions(DDACEAlgorithmType::lhs);
    DDACEAlgorithm algorithm(algOptions);
    analysis.setAlgorithm(algorithm);

    // RUN ANALYSIS -- first attempt deliberately omits the sample count.
    AnalysisDriver driver = project.analysisDriver();
    AnalysisRunOptions runOptions = standardRunOptions(project.projectDir());
    CurrentAnalysis currentAnalysis = driver.run(analysis,runOptions);
    EXPECT_TRUE(driver.waitForFinished());
    boost::optional<runmanager::JobErrors> jobErrors = currentAnalysis.dakotaJobErrors();
    ASSERT_TRUE(jobErrors);
    EXPECT_FALSE(jobErrors->errors().empty()); // require specification of number of samples
    EXPECT_TRUE(driver.currentAnalyses().empty());
    Table summary = currentAnalysis.analysis().summaryTable();
    EXPECT_EQ(1u,summary.nRows()); // no points

    // Second attempt: clear the failed run, then set the sample count.
    // NOTE(review): setSamples on algOptions after constructing the algorithm is
    // expected to be visible through analysis.algorithm() (shared implementation);
    // the EXPECT_EQ below pins that assumption.
    project.clearAllResults();
    algOptions.setSamples(4);
    EXPECT_EQ(4,analysis.algorithm()->cast<DDACEAlgorithm>().ddaceAlgorithmOptions().samples());
    currentAnalysis = driver.run(analysis,runOptions);
    EXPECT_TRUE(driver.waitForFinished());
    jobErrors = currentAnalysis.dakotaJobErrors();
    ASSERT_TRUE(jobErrors);
    EXPECT_TRUE(jobErrors->errors().empty());
    EXPECT_TRUE(driver.currentAnalyses().empty());
    summary = currentAnalysis.analysis().summaryTable();
    EXPECT_EQ(5u,summary.nRows()); // header row + 4 sample points
    summary.save(project.projectDir() / toPath("summary.csv"));

    // Every generated point should have completed successfully.
    BOOST_FOREACH(const DataPoint& dataPoint,analysis.dataPoints()) {
      EXPECT_TRUE(dataPoint.isComplete());
      EXPECT_FALSE(dataPoint.failed());
      // EXPECT_FALSE(dataPoint.responseValues().empty());
    }
    ASSERT_TRUE(analysis.algorithm());
    EXPECT_TRUE(analysis.algorithm()->isComplete());
    EXPECT_FALSE(analysis.algorithm()->failed());

    // Round-trip check: the persisted record reports the same algorithm state.
    {
      AnalysisRecord analysisRecord = project.analysisRecord();
      Analysis analysisCopy = analysisRecord.analysis();
      ASSERT_TRUE(analysisCopy.algorithm());
      EXPECT_TRUE(analysisCopy.algorithm()->isComplete());
      EXPECT_FALSE(analysisCopy.algorithm()->failed());
    }
  }

  LOG(Info,"Restart from existing project.");

  // Get existing project -- phase 2 reloads everything from disk.
  SimpleProject project = getSimpleProject("DDACE_LatinHypercube_Continuous");
  EXPECT_FALSE(project.analysisIsLoaded()); // make sure starting fresh
  Analysis analysis = project.analysis();
  EXPECT_FALSE(analysis.isDirty());

  // Add custom data point
  std::vector<QVariant> values;
  values.push_back(0.0);
  values.push_back(0.8);
  values.push_back(int(0));
  OptionalDataPoint dataPoint = analysis.problem().createDataPoint(values);
  ASSERT_TRUE(dataPoint);
  analysis.addDataPoint(*dataPoint);
  EXPECT_EQ(1u,analysis.dataPointsToQueue().size()); // only the new point queues
  ASSERT_TRUE(analysis.algorithm());
  EXPECT_TRUE(analysis.algorithm()->isComplete());
  EXPECT_FALSE(analysis.algorithm()->failed());
  // Adding a point dirties the analysis but must not invalidate prior results.
  EXPECT_TRUE(analysis.isDirty());
  EXPECT_FALSE(analysis.resultsAreInvalid());
  EXPECT_FALSE(analysis.dataPointsAreInvalid());

  // get last modified time of a file in a completed data point to make sure nothing is re-run
  DataPointVector completePoints = analysis.completeDataPoints();
  ASSERT_FALSE(completePoints.empty());
  OptionalFileReference inputFileRef = completePoints[0].osmInputData();
  ASSERT_TRUE(inputFileRef);
  QFileInfo inputFileInfo(toQString(inputFileRef->path()));
  QDateTime inputFileModifiedTestTime = inputFileInfo.lastModified();

  EXPECT_EQ(1u,analysis.dataPointsToQueue().size());
  AnalysisDriver driver = project.analysisDriver();
  CurrentAnalysis currentAnalysis = driver.run(
      analysis,
      standardRunOptions(project.projectDir()));
  EXPECT_TRUE(driver.waitForFinished());
  boost::optional<runmanager::JobErrors> jobErrors = currentAnalysis.dakotaJobErrors();
  EXPECT_FALSE(jobErrors); // should not try to re-run DakotaAlgorithm
  EXPECT_TRUE(driver.currentAnalyses().empty());
  EXPECT_TRUE(analysis.dataPointsToQueue().empty());
  Table summary = currentAnalysis.analysis().summaryTable();
  EXPECT_EQ(6u,summary.nRows()); // header + 4 algorithm points + 1 custom point
  summary.save(project.projectDir() / toPath("summary_post_restart.csv"));

  // RunManager should not re-run any data points
  EXPECT_EQ(inputFileModifiedTestTime,inputFileInfo.lastModified());
}