TEST_F(AnalysisDriverFixture, DDACE_MonteCarlo_Continuous) {
  // RETRIEVE PROBLEM
  Problem problem = retrieveProblem("Continuous",true,false);

  // DEFINE SEED
  Model model = model::exampleModel();
  openstudio::path p = toPath("./example.osm");
  model.save(p,true);
  FileReference seedModel(p);

  // CREATE ANALYSIS
  DDACEAlgorithmOptions algOptions(DDACEAlgorithmType::random);
  Analysis analysis("DDACE Monte Carlo Sampling",
                    problem,
                    DDACEAlgorithm(algOptions),
                    seedModel);

  // RUN ANALYSIS
  {
    ProjectDatabase database = getCleanDatabase("DDACEMonteCarlo_Continuous_NoSamples");
    AnalysisDriver analysisDriver(database);
    AnalysisRunOptions runOptions = standardRunOptions(analysisDriver.database().path().parent_path());
    CurrentAnalysis currentAnalysis = analysisDriver.run(analysis,runOptions);
    EXPECT_TRUE(analysisDriver.waitForFinished());
    boost::optional<runmanager::JobErrors> jobErrors = currentAnalysis.dakotaJobErrors();
    ASSERT_TRUE(jobErrors);
    EXPECT_FALSE(jobErrors->errors().empty()); // require specification of number of samples
    EXPECT_TRUE(analysisDriver.currentAnalyses().empty());
    Table summary = currentAnalysis.analysis().summaryTable();
    EXPECT_EQ(1u,summary.nRows()); // no points
  }

  {
    algOptions.setSamples(6);
    analysis = Analysis("DDACE MonteCarlo Sampling",
                        problem,
                        DDACEAlgorithm(algOptions),
                        seedModel);
    ProjectDatabase database = getCleanDatabase("DDACEMonteCarlo_Continuous");
    AnalysisDriver analysisDriver = AnalysisDriver(database);
    AnalysisRunOptions runOptions = standardRunOptions(analysisDriver.database().path().parent_path());
    runOptions.setQueueSize(4);
    CurrentAnalysis currentAnalysis = analysisDriver.run(analysis,runOptions);
    EXPECT_TRUE(analysisDriver.waitForFinished());
    boost::optional<runmanager::JobErrors> jobErrors = currentAnalysis.dakotaJobErrors();
    ASSERT_TRUE(jobErrors);
    EXPECT_TRUE(jobErrors->errors().empty());
    EXPECT_TRUE(analysisDriver.currentAnalyses().empty());
    Table summary = currentAnalysis.analysis().summaryTable();
    EXPECT_EQ(7u,summary.nRows()); // 6 points
    summary.save(analysisDriver.database().path().parent_path() / toPath("summary.csv"));
    BOOST_FOREACH(const DataPoint& dataPoint,analysis.dataPoints()) {
      EXPECT_TRUE(dataPoint.isComplete());
      EXPECT_FALSE(dataPoint.failed());
      // EXPECT_FALSE(dataPoint.responseValues().empty());
    }
  }
}
TEST_F(AnalysisDriverFixture, DDACE_MonteCarlo_MixedOsmIdf_ProjectDatabaseOpen) {
  // RETRIEVE PROBLEM
  Problem problem = retrieveProblem("MixedOsmIdf",false,false);

  // DEFINE SEED
  Model model = model::exampleModel();
  openstudio::path p = toPath("./example.osm");
  model.save(p,true);
  FileReference seedModel(p);

  // CREATE ANALYSIS
  DDACEAlgorithmOptions algOptions(DDACEAlgorithmType::oas);
  algOptions.setSamples(4);
  Analysis analysis("DDACE Monte Carlo Sampling",
                    problem,
                    DDACEAlgorithm(algOptions),
                    seedModel);

  // RUN ANALYSIS
  {
    analysis = Analysis("DDACE Monte Carlo Sampling - MixedOsmIdf",
                        problem,
                        DDACEAlgorithm(algOptions),
                        seedModel);
    ProjectDatabase database = getCleanDatabase("DDACEMonteCarlo_MixedOsmIdf");
    AnalysisDriver analysisDriver = AnalysisDriver(database);
    AnalysisRunOptions runOptions = standardRunOptions(analysisDriver.database().path().parent_path());
    CurrentAnalysis currentAnalysis = analysisDriver.run(analysis,runOptions);
    EXPECT_TRUE(analysisDriver.waitForFinished());
    boost::optional<runmanager::JobErrors> jobErrors = currentAnalysis.dakotaJobErrors();
    ASSERT_TRUE(jobErrors);
    EXPECT_TRUE(jobErrors->errors().empty());
    EXPECT_TRUE(analysisDriver.currentAnalyses().empty());
    Table summary = currentAnalysis.analysis().summaryTable();
    EXPECT_EQ(5u,summary.nRows()); // 4 points
    summary.save(analysisDriver.database().path().parent_path() / toPath("summary.csv"));
    BOOST_FOREACH(const DataPoint& dataPoint,analysis.dataPoints()) {
      EXPECT_TRUE(dataPoint.isComplete());
      EXPECT_FALSE(dataPoint.failed());
    }
  }

  {
    // re-open the project database from disk and verify the analysis deserializes
    project::OptionalProjectDatabase oDatabase = project::ProjectDatabase::open(
        toPath("AnalysisDriverFixtureData/DDACEMonteCarloMixedOsmIdf/DDACEMonteCarlo_MixedOsmIdf.osp"));
    ASSERT_TRUE(oDatabase);
    project::AnalysisRecordVector analysisRecords = project::AnalysisRecord::getAnalysisRecords(*oDatabase);
    EXPECT_EQ(1u,analysisRecords.size());
    if (!analysisRecords.empty()) {
      EXPECT_NO_THROW(analysisRecords[0].analysis());
    }
  }
}
TEST_F(AnalysisDriverFixture, FSUDace_CVT_MixedOsmIdf) {
  // RETRIEVE PROBLEM
  Problem problem = retrieveProblem("MixedOsmIdf",false,false);

  // DEFINE SEED
  Model model = model::exampleModel();
  openstudio::path p = toPath("./example.osm");
  model.save(p,true);
  FileReference seedModel(p);

  // CREATE ANALYSIS
  FSUDaceAlgorithmOptions algOptions(FSUDaceAlgorithmType::cvt);
  algOptions.setSamples(3);
  Analysis analysis("FSUDace CVT",
                    problem,
                    FSUDaceAlgorithm(algOptions),
                    seedModel);

  // RUN ANALYSIS
  analysis = Analysis("FSUDace CVT - MixedOsmIdf",
                      problem,
                      FSUDaceAlgorithm(algOptions),
                      seedModel);
  ProjectDatabase database = getCleanDatabase("FSUDaceCVT_MixedOsmIdf");
  AnalysisDriver analysisDriver = AnalysisDriver(database);
  AnalysisRunOptions runOptions = standardRunOptions(analysisDriver.database().path().parent_path());
  CurrentAnalysis currentAnalysis = analysisDriver.run(analysis,runOptions);
  EXPECT_TRUE(analysisDriver.waitForFinished());
  boost::optional<runmanager::JobErrors> jobErrors = currentAnalysis.dakotaJobErrors();
  ASSERT_TRUE(jobErrors);
  EXPECT_TRUE(jobErrors->errors().empty());
  EXPECT_TRUE(analysisDriver.currentAnalyses().empty());
  Table summary = currentAnalysis.analysis().summaryTable();
  // EXPECT_EQ(10u,summary.nRows()); // 9 points
  summary.save(analysisDriver.database().path().parent_path() / toPath("summary.csv"));
  BOOST_FOREACH(const DataPoint& dataPoint,analysis.dataPoints()) {
    EXPECT_TRUE(dataPoint.isComplete());
    EXPECT_FALSE(dataPoint.failed());
  }
}
TEST_F(AnalysisDriverFixture, DDACE_LatinHypercube_UserScriptContinuous) {
  // RETRIEVE PROBLEM
  Problem problem = retrieveProblem("UserScriptContinuous",true,false);

  // DEFINE SEED
  Model model = model::exampleModel();
  openstudio::path p = toPath("./example.osm");
  model.save(p,true);
  FileReference seedModel(p);

  // CREATE ANALYSIS
  DDACEAlgorithmOptions algOptions(DDACEAlgorithmType::lhs);
  algOptions.setSamples(10);

  // RUN ANALYSIS
  Analysis analysis("DDACE Latin Hypercube Sampling - UserScriptContinuous",
                    problem,
                    DDACEAlgorithm(algOptions),
                    seedModel);
  ProjectDatabase database = getCleanDatabase("DDACELatinHypercube_UserScriptContinuous");
  AnalysisDriver analysisDriver = AnalysisDriver(database);
  AnalysisRunOptions runOptions = standardRunOptions(analysisDriver.database().path().parent_path());
  CurrentAnalysis currentAnalysis = analysisDriver.run(analysis,runOptions);
  EXPECT_TRUE(analysisDriver.waitForFinished());
  boost::optional<runmanager::JobErrors> jobErrors = currentAnalysis.dakotaJobErrors();
  ASSERT_TRUE(jobErrors);
  EXPECT_TRUE(jobErrors->errors().empty());
  EXPECT_TRUE(analysisDriver.currentAnalyses().empty());
  Table summary = currentAnalysis.analysis().summaryTable();
  EXPECT_EQ(11u,summary.nRows());
  summary.save(analysisDriver.database().path().parent_path() / toPath("summary.csv"));
  BOOST_FOREACH(const DataPoint& dataPoint,analysis.dataPoints()) {
    EXPECT_TRUE(dataPoint.isComplete());
    EXPECT_FALSE(dataPoint.failed());
  }
}
TEST_F(AnalysisDriverFixture, DDACE_Grid_Continuous) {
  // RETRIEVE PROBLEM
  Problem problem = retrieveProblem("Continuous",true,false);

  // DEFINE SEED
  Model model = model::exampleModel();
  openstudio::path p = toPath("./example.osm");
  model.save(p,true);
  FileReference seedModel(p);

  // CREATE ANALYSIS
  DDACEAlgorithmOptions algOptions(DDACEAlgorithmType::grid);
  Analysis analysis("DDACE Grid Sampling",
                    problem,
                    DDACEAlgorithm(algOptions),
                    seedModel);

  // RUN ANALYSIS
  {
    ProjectDatabase database = getCleanDatabase("DDACEGridSampling_NoSamples");
    AnalysisDriver analysisDriver(database);
    AnalysisRunOptions runOptions = standardRunOptions(analysisDriver.database().path().parent_path());
    runOptions.setQueueSize(4);
    CurrentAnalysis currentAnalysis = analysisDriver.run(analysis,runOptions);
    EXPECT_TRUE(analysisDriver.waitForFinished());
    boost::optional<runmanager::JobErrors> jobErrors = currentAnalysis.dakotaJobErrors();
    ASSERT_TRUE(jobErrors);
    EXPECT_FALSE(jobErrors->errors().empty()); // require specification of number of samples
    EXPECT_TRUE(analysisDriver.currentAnalyses().empty());
    Table summary = currentAnalysis.analysis().summaryTable();
    EXPECT_EQ(1u,summary.nRows()); // no points
  }

  // ETH@20120120 At first tried just using the same database, analysisdriver, etc., but
  // it did not go well. Restarting an initially failed Dakota analysis should be part of
  // enabling Dakota restart more generally.

  {
    // algorithm rounds samples up to next one that fits n**(problem.numVariables())
    algOptions.setSamples(6);
    analysis = Analysis("DDACE Grid Sampling - Wrong Samples",
                        problem,
                        DDACEAlgorithm(algOptions),
                        seedModel);
    ProjectDatabase database = getCleanDatabase("DDACEGridSampling_WrongSamples");
    AnalysisDriver analysisDriver = AnalysisDriver(database);
    AnalysisRunOptions runOptions = standardRunOptions(analysisDriver.database().path().parent_path());
    CurrentAnalysis currentAnalysis = analysisDriver.run(analysis,runOptions);
    EXPECT_TRUE(analysisDriver.waitForFinished());
    boost::optional<runmanager::JobErrors> jobErrors = currentAnalysis.dakotaJobErrors();
    ASSERT_TRUE(jobErrors);
    EXPECT_TRUE(jobErrors->errors().empty());
    EXPECT_TRUE(analysisDriver.currentAnalyses().empty());
    Table summary = currentAnalysis.analysis().summaryTable();
    EXPECT_EQ(10u,summary.nRows()); // 9 points
    summary.save(analysisDriver.database().path().parent_path() / toPath("summary.csv"));
    BOOST_FOREACH(const DataPoint& dataPoint,analysis.dataPoints()) {
      EXPECT_TRUE(dataPoint.isComplete());
      EXPECT_FALSE(dataPoint.failed());
      // EXPECT_FALSE(dataPoint.responseValues().empty());
    }
  }

  {
    // algorithm rounds samples up to next one that fits n**(problem.numVariables())
    algOptions.setSamplesForGrid(2,problem);
    analysis = Analysis("DDACE Grid Sampling - Correct Samples",
                        problem,
                        DDACEAlgorithm(algOptions),
                        seedModel);
    ProjectDatabase database = getCleanDatabase("DDACEGridSampling_CorrectSamples");
    AnalysisDriver analysisDriver = AnalysisDriver(database);
    AnalysisRunOptions runOptions = standardRunOptions(analysisDriver.database().path().parent_path());
    CurrentAnalysis currentAnalysis = analysisDriver.run(analysis,runOptions);
    EXPECT_TRUE(analysisDriver.waitForFinished());
    boost::optional<runmanager::JobErrors> jobErrors = currentAnalysis.dakotaJobErrors();
    ASSERT_TRUE(jobErrors);
    EXPECT_TRUE(jobErrors->errors().empty());
    EXPECT_TRUE(analysisDriver.currentAnalyses().empty());
    Table summary = currentAnalysis.analysis().summaryTable();
    EXPECT_EQ(5u,summary.nRows()); // 4 points
    summary.save(analysisDriver.database().path().parent_path() / toPath("summary.csv"));
    BOOST_FOREACH(const DataPoint& dataPoint,analysis.dataPoints()) {
      EXPECT_TRUE(dataPoint.isComplete());
      EXPECT_FALSE(dataPoint.failed());
      // EXPECT_FALSE(dataPoint.responseValues().empty());
    }
  }
}
TEST_F(AnalysisDriverFixture, DDACE_OrthogonalArray_Continuous) {
  // RETRIEVE PROBLEM
  Problem problem = retrieveProblem("Continuous",true,false);

  // DEFINE SEED
  Model model = model::exampleModel();
  openstudio::path p = toPath("./example.osm");
  model.save(p,true);
  FileReference seedModel(p);

  // CREATE ANALYSIS
  DDACEAlgorithmOptions algOptions(DDACEAlgorithmType::oas);
  Analysis analysis("DDACE Orthogonal Array Sampling",
                    problem,
                    DDACEAlgorithm(algOptions),
                    seedModel);

  // RUN ANALYSIS
  {
    ProjectDatabase database = getCleanDatabase("DDACEOrthogonalArray_NoSamples");
    AnalysisDriver analysisDriver(database);
    AnalysisRunOptions runOptions = standardRunOptions(analysisDriver.database().path().parent_path());
    CurrentAnalysis currentAnalysis = analysisDriver.run(analysis,runOptions);
    EXPECT_TRUE(analysisDriver.waitForFinished());
    boost::optional<runmanager::JobErrors> jobErrors = currentAnalysis.dakotaJobErrors();
    ASSERT_TRUE(jobErrors);
    EXPECT_FALSE(jobErrors->errors().empty()); // require specification of number of samples
    EXPECT_TRUE(analysisDriver.currentAnalyses().empty());
    Table summary = currentAnalysis.analysis().summaryTable();
    EXPECT_EQ(1u,summary.nRows()); // no points
  }

  {
    algOptions.setSamples(6); // symbols = 0
    analysis = Analysis("DDACE Orthogonal Array Sampling - Wrong Samples",
                        problem,
                        DDACEAlgorithm(algOptions),
                        seedModel);
    ProjectDatabase database = getCleanDatabase("DDACEOrthogonalArray_WrongSamples");
    AnalysisDriver analysisDriver = AnalysisDriver(database);
    AnalysisRunOptions runOptions = standardRunOptions(analysisDriver.database().path().parent_path());
    CurrentAnalysis currentAnalysis = analysisDriver.run(analysis,runOptions);
    EXPECT_TRUE(analysisDriver.waitForFinished());
    boost::optional<runmanager::JobErrors> jobErrors = currentAnalysis.dakotaJobErrors();
    ASSERT_TRUE(jobErrors);
    EXPECT_TRUE(jobErrors->errors().empty());
    EXPECT_TRUE(analysisDriver.currentAnalyses().empty());
    Table summary = currentAnalysis.analysis().summaryTable();
    EXPECT_EQ(10u,summary.nRows()); // 9 points
    summary.save(analysisDriver.database().path().parent_path() / toPath("summary.csv"));
    BOOST_FOREACH(const DataPoint& dataPoint,analysis.dataPoints()) {
      EXPECT_TRUE(dataPoint.isComplete());
      EXPECT_FALSE(dataPoint.failed());
      // EXPECT_FALSE(dataPoint.responseValues().empty());
    }
  }

  {
    bool ok = algOptions.setSamplesAndSymbolsForOrthogonalArray(6,1);
    EXPECT_FALSE(ok);
    ok = algOptions.setSamplesAndSymbolsForOrthogonalArray(2,2); // 2,3 not ok for no apparent reason
    EXPECT_TRUE(ok);
    ASSERT_TRUE(algOptions.symbols());
    EXPECT_EQ(2,algOptions.symbols().get());
    ASSERT_TRUE(algOptions.samples());
    EXPECT_EQ(8,algOptions.samples().get());
    analysis = Analysis("DDACE Orthogonal Array Sampling - Correct Samples",
                        problem,
                        DDACEAlgorithm(algOptions),
                        seedModel);
    ProjectDatabase database = getCleanDatabase("DDACEOrthogonalArray_CorrectSamples");
    AnalysisDriver analysisDriver = AnalysisDriver(database);
    AnalysisRunOptions runOptions = standardRunOptions(analysisDriver.database().path().parent_path());
    CurrentAnalysis currentAnalysis = analysisDriver.run(analysis,runOptions);
    EXPECT_TRUE(analysisDriver.waitForFinished());
    boost::optional<runmanager::JobErrors> jobErrors = currentAnalysis.dakotaJobErrors();
    ASSERT_TRUE(jobErrors);
    EXPECT_TRUE(jobErrors->errors().empty());
    EXPECT_TRUE(analysisDriver.currentAnalyses().empty());
    Table summary = currentAnalysis.analysis().summaryTable();
    EXPECT_EQ(9u,summary.nRows()); // 8 points, matching algOptions.samples() above
    summary.save(analysisDriver.database().path().parent_path() / toPath("summary.csv"));
    BOOST_FOREACH(const DataPoint& dataPoint,analysis.dataPoints()) {
      EXPECT_TRUE(dataPoint.isComplete());
      EXPECT_FALSE(dataPoint.failed());
    }
  }
}