TEST_F(AnalysisDriverFixture, DDACE_LatinHypercube_MixedOsmIdf_MoveProjectDatabase) {
  openstudio::path oldDir, newDir;
  {
    // GET SIMPLE PROJECT
    SimpleProject project = getCleanSimpleProject("DDACE_LatinHypercube_MixedOsmIdf");
    Analysis analysis = project.analysis();
    analysis.setName("DDACE Latin Hypercube Sampling - MixedOsmIdf");

    // SET PROBLEM
    Problem problem = retrieveProblem("MixedOsmIdf",false,false);
    analysis.setProblem(problem);

    // SET SEED
    Model model = model::exampleModel();
    openstudio::path p = toPath("./example.osm");
    model.save(p,true);
    FileReference seedModel(p);
    analysis.setSeed(seedModel);

    // SET ALGORITHM
    DDACEAlgorithmOptions algOptions(DDACEAlgorithmType::lhs);
    algOptions.setSamples(12); // test reprinting results.out for copies of same point
    DDACEAlgorithm algorithm(algOptions);
    analysis.setAlgorithm(algorithm);

    // RUN ANALYSIS
    AnalysisDriver driver = project.analysisDriver();
    AnalysisRunOptions runOptions = standardRunOptions(project.projectDir());
    CurrentAnalysis currentAnalysis = driver.run(analysis,runOptions);
    EXPECT_TRUE(driver.waitForFinished());
    boost::optional<runmanager::JobErrors> jobErrors = currentAnalysis.dakotaJobErrors();
    ASSERT_TRUE(jobErrors);
    EXPECT_TRUE(jobErrors->errors().empty());
    EXPECT_TRUE(driver.currentAnalyses().empty());
    Table summary = analysis.summaryTable();
    EXPECT_EQ(5u,summary.nRows()); // 4 points (all combinations)
    summary.save(project.projectDir() / toPath("summary.csv"));

    EXPECT_EQ(4u,analysis.dataPoints().size());
    BOOST_FOREACH(const DataPoint& dataPoint,analysis.dataPoints()) {
      EXPECT_TRUE(dataPoint.isComplete());
      EXPECT_FALSE(dataPoint.failed());
      EXPECT_TRUE(dataPoint.workspace()); // should be able to load data from disk
    }

    oldDir = project.projectDir();
    newDir = project.projectDir().parent_path() / toPath("DDACELatinHypercubeMixedOsmIdfCopy");

    // Make copy of project
    boost::filesystem::remove_all(newDir);
    ASSERT_TRUE(project.saveAs(newDir));
  }

  // Blow away old project.
  // TODO: Reinstate. This was failing on Windows and isn't absolutely necessary.
  // try {
  //   boost::filesystem::remove_all(oldDir);
  // }
  // catch (std::exception& e) {
  //   EXPECT_TRUE(false) << "Boost filesystem was unable to delete the old folder, because " << e.what();
  // }

  // Open new project
  SimpleProject project = getSimpleProject("DDACE_LatinHypercube_MixedOsmIdf_Copy");
  EXPECT_TRUE(project.projectDir() == newDir);
  EXPECT_EQ(toString(newDir),toString(project.projectDir()));

  // After move, should be able to retrieve results.
  EXPECT_FALSE(project.analysisIsLoaded());
  Analysis analysis = project.analysis();
  EXPECT_TRUE(project.analysisIsLoaded());
  EXPECT_EQ(4u,analysis.dataPoints().size());
  BOOST_FOREACH(const DataPoint& dataPoint,analysis.dataPoints()) {
    EXPECT_TRUE(dataPoint.isComplete());
    EXPECT_FALSE(dataPoint.failed());
    LOG(Debug,"Attempting to load workspace for data point at '" << dataPoint.directory() << "'.");
    if (dataPoint.idfInputData()) {
      LOG(Debug,"Says there should be input data at " << toString(dataPoint.idfInputData()->path()));
    }
    EXPECT_TRUE(dataPoint.workspace()); // should be able to load data from disk
    if (!dataPoint.workspace()) {
      LOG(Debug,"Unsuccessful.");
    }
  }

  // Should be able to blow away results and run again
  project.removeAllDataPoints();
  EXPECT_EQ(0u,analysis.dataPoints().size());
  EXPECT_FALSE(analysis.algorithm()->isComplete());
  EXPECT_FALSE(analysis.algorithm()->failed());
  EXPECT_EQ(-1,analysis.algorithm()->iter());
  EXPECT_FALSE(analysis.algorithm()->cast<DakotaAlgorithm>().restartFileReference());
  EXPECT_FALSE(analysis.algorithm()->cast<DakotaAlgorithm>().outFileReference());

  AnalysisRunOptions runOptions = standardRunOptions(project.projectDir());
  AnalysisDriver driver = project.analysisDriver();
  CurrentAnalysis currentAnalysis = driver.run(analysis,runOptions);
  EXPECT_TRUE(driver.waitForFinished());
  boost::optional<runmanager::JobErrors> jobErrors = currentAnalysis.dakotaJobErrors();
  ASSERT_TRUE(jobErrors);
  EXPECT_TRUE(jobErrors->errors().empty());
  EXPECT_TRUE(driver.currentAnalyses().empty());
  Table summary = analysis.summaryTable();
  EXPECT_EQ(5u,summary.nRows()); // 4 points (all combinations)
  summary.save(project.projectDir() / toPath("summary.csv"));

  BOOST_FOREACH(const DataPoint& dataPoint,analysis.dataPoints()) {
    EXPECT_TRUE(dataPoint.isComplete());
    EXPECT_FALSE(dataPoint.failed());
    EXPECT_TRUE(dataPoint.workspace()); // should be able to load data from disk
  }
}
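// The following test exercises the LHS workflow on an all-continuous problem in
// three stages: (1) run without setting the number of samples and expect Dakota to
// report an error, (2) set samples to 4, clear the failed results, and re-run to
// completion, and (3) reopen the saved project, add a manually created data point,
// and re-run, verifying that already-complete points are not simulated again.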
TEST_F(AnalysisDriverFixture, DDACE_LatinHypercube_Continuous) {
  {
    // GET SIMPLE PROJECT
    SimpleProject project = getCleanSimpleProject("DDACE_LatinHypercube_Continuous");
    Analysis analysis = project.analysis();

    // SET PROBLEM
    Problem problem = retrieveProblem("Continuous",true,false);
    analysis.setProblem(problem);

    // DEFINE SEED
    Model model = model::exampleModel();
    openstudio::path p = toPath("./example.osm");
    model.save(p,true);
    FileReference seedModel(p);
    analysis.setSeed(seedModel);

    // CREATE ANALYSIS
    DDACEAlgorithmOptions algOptions(DDACEAlgorithmType::lhs);
    DDACEAlgorithm algorithm(algOptions);
    analysis.setAlgorithm(algorithm);

    // RUN ANALYSIS
    AnalysisDriver driver = project.analysisDriver();
    AnalysisRunOptions runOptions = standardRunOptions(project.projectDir());
    CurrentAnalysis currentAnalysis = driver.run(analysis,runOptions);
    EXPECT_TRUE(driver.waitForFinished());
    boost::optional<runmanager::JobErrors> jobErrors = currentAnalysis.dakotaJobErrors();
    ASSERT_TRUE(jobErrors);
    EXPECT_FALSE(jobErrors->errors().empty()); // require specification of number of samples
    EXPECT_TRUE(driver.currentAnalyses().empty());
    Table summary = currentAnalysis.analysis().summaryTable();
    EXPECT_EQ(1u,summary.nRows()); // no points

    project.clearAllResults();
    algOptions.setSamples(4);
    EXPECT_EQ(4,analysis.algorithm()->cast<DDACEAlgorithm>().ddaceAlgorithmOptions().samples());
    currentAnalysis = driver.run(analysis,runOptions);
    EXPECT_TRUE(driver.waitForFinished());
    jobErrors = currentAnalysis.dakotaJobErrors();
    ASSERT_TRUE(jobErrors);
    EXPECT_TRUE(jobErrors->errors().empty());
    EXPECT_TRUE(driver.currentAnalyses().empty());
    summary = currentAnalysis.analysis().summaryTable();
    EXPECT_EQ(5u,summary.nRows());
    summary.save(project.projectDir() / toPath("summary.csv"));

    BOOST_FOREACH(const DataPoint& dataPoint,analysis.dataPoints()) {
      EXPECT_TRUE(dataPoint.isComplete());
      EXPECT_FALSE(dataPoint.failed());
      // EXPECT_FALSE(dataPoint.responseValues().empty());
    }

    ASSERT_TRUE(analysis.algorithm());
    EXPECT_TRUE(analysis.algorithm()->isComplete());
    EXPECT_FALSE(analysis.algorithm()->failed());

    {
      AnalysisRecord analysisRecord = project.analysisRecord();
      Analysis analysisCopy = analysisRecord.analysis();
      ASSERT_TRUE(analysisCopy.algorithm());
      EXPECT_TRUE(analysisCopy.algorithm()->isComplete());
      EXPECT_FALSE(analysisCopy.algorithm()->failed());
    }
  }

  LOG(Info,"Restart from existing project.");

  // Get existing project
  SimpleProject project = getSimpleProject("DDACE_LatinHypercube_Continuous");
  EXPECT_FALSE(project.analysisIsLoaded()); // make sure starting fresh
  Analysis analysis = project.analysis();
  EXPECT_FALSE(analysis.isDirty());

  // Add custom data point
  std::vector<QVariant> values;
  values.push_back(0.0);
  values.push_back(0.8);
  values.push_back(int(0));
  OptionalDataPoint dataPoint = analysis.problem().createDataPoint(values);
  ASSERT_TRUE(dataPoint);
  analysis.addDataPoint(*dataPoint);
  EXPECT_EQ(1u,analysis.dataPointsToQueue().size());
  ASSERT_TRUE(analysis.algorithm());
  EXPECT_TRUE(analysis.algorithm()->isComplete());
  EXPECT_FALSE(analysis.algorithm()->failed());
  EXPECT_TRUE(analysis.isDirty());
  EXPECT_FALSE(analysis.resultsAreInvalid());
  EXPECT_FALSE(analysis.dataPointsAreInvalid());

  // get last modified time of a file in a completed data point to make sure nothing is re-run
  DataPointVector completePoints = analysis.completeDataPoints();
  ASSERT_FALSE(completePoints.empty());
  OptionalFileReference inputFileRef = completePoints[0].osmInputData();
  ASSERT_TRUE(inputFileRef);
  QFileInfo inputFileInfo(toQString(inputFileRef->path()));
  QDateTime inputFileModifiedTestTime = inputFileInfo.lastModified();

  EXPECT_EQ(1u,analysis.dataPointsToQueue().size());
  AnalysisDriver driver = project.analysisDriver();
  CurrentAnalysis currentAnalysis = driver.run(analysis,
                                               standardRunOptions(project.projectDir()));
  EXPECT_TRUE(driver.waitForFinished());
  boost::optional<runmanager::JobErrors> jobErrors = currentAnalysis.dakotaJobErrors();
  EXPECT_FALSE(jobErrors); // should not try to re-run DakotaAlgorithm
  EXPECT_TRUE(driver.currentAnalyses().empty());
  EXPECT_TRUE(analysis.dataPointsToQueue().empty());
  Table summary = currentAnalysis.analysis().summaryTable();
  EXPECT_EQ(6u,summary.nRows());
  summary.save(project.projectDir() / toPath("summary_post_restart.csv"));

  // RunManager should not re-run any data points
  inputFileInfo.refresh(); // QFileInfo caches by default; re-read metadata before comparing
  EXPECT_EQ(inputFileModifiedTestTime,inputFileInfo.lastModified());
}