TEST_F(AnalysisDriverFixture, DesignOfExperiments_MeshAnalysis) {

  // GET SIMPLE PROJECT
  SimpleProject project = getCleanSimpleProject("DesignOfExperiments_MeshAnalysis");
  Analysis analysis = project.analysis();

  // SET PROBLEM
  Problem problem = retrieveProblem("MixedOsmIdf",false,false);
  analysis.setProblem(problem);

  // SET SEED
  Model model = model::exampleModel();
  openstudio::path p = toPath("./example.osm");
  model.save(p,true);
  FileReference seedModel(p);
  analysis.setSeed(seedModel);

  // SET ALGORITHM
  DesignOfExperimentsOptions algOptions(DesignOfExperimentsType::FullFactorial);
  DesignOfExperiments algorithm(algOptions);
  analysis.setAlgorithm(algorithm);

  // RUN ANALYSIS
  AnalysisDriver driver = project.analysisDriver();
  AnalysisRunOptions runOptions = standardRunOptions(project.projectDir());
  driver.run(analysis,runOptions);
  EXPECT_TRUE(driver.waitForFinished());

  // CHECK RESULTS
  AnalysisRecord analysisRecord = project.analysisRecord();
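  // the problem has three discrete variables with 2 x 1 x 2 selections (the middle
  // variable has a single perturbation), so the full-factorial mesh contains 4 points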
  EXPECT_EQ(4,analysisRecord.problemRecord().combinatorialSize(true).get());
  EXPECT_EQ(4u, analysisRecord.dataPointRecords().size());
  BOOST_FOREACH(const DataPointRecord& dataPointRecord, analysisRecord.dataPointRecords()) {
    EXPECT_TRUE(dataPointRecord.isComplete());
    EXPECT_FALSE(dataPointRecord.failed());
  }

  // look up data point records by variable values, and the corresponding perturbations
  std::vector<DataPointRecord> testDataPoints;
  std::vector<QVariant> testVariableValues;

  // all data points are successful
  testDataPoints = analysisRecord.successfulDataPointRecords();
  EXPECT_EQ(4u,testDataPoints.size());

  // empty variableValues returns all data points
  testDataPoints = analysisRecord.getDataPointRecords(testVariableValues);
  EXPECT_EQ(4u, testDataPoints.size());

  // find the baseline
  testVariableValues.clear();
  testVariableValues.push_back(0);
  testVariableValues.push_back(QVariant(QVariant::Int)); // only one perturbation, null works too
  testVariableValues.push_back(0);
  ASSERT_TRUE(testVariableValues[1].isNull());
  testDataPoints = analysisRecord.getDataPointRecords(testVariableValues);
  ASSERT_EQ(1u, testDataPoints.size());

  // find model with improved wall and roof
  testVariableValues.clear();
  testVariableValues.push_back(1);
  testVariableValues.push_back(0);
  testVariableValues.push_back(1);
  testDataPoints = analysisRecord.getDataPointRecords(testVariableValues);
  ASSERT_EQ(1u, testDataPoints.size());
  DataPoint testDataPoint = testDataPoints[0].dataPoint();
  std::vector<OptionalDiscretePerturbation> perturbations =
      analysis.problem().getDiscretePerturbations(testVariableValues);
  ASSERT_EQ(3u,perturbations.size());
  ASSERT_TRUE(perturbations[0] && perturbations[1] && perturbations[2]);
  EXPECT_TRUE(perturbations[0]->uuid() == problem.variables()[0].cast<DiscreteVariable>().perturbations(false)[1].uuid());
  EXPECT_TRUE(perturbations[1]->uuid() == problem.variables()[1].cast<DiscreteVariable>().perturbations(false)[0].uuid());
  EXPECT_TRUE(perturbations[2]->uuid() == problem.variables()[2].cast<DiscreteVariable>().perturbations(false)[1].uuid());
  EXPECT_TRUE(perturbations[0]->optionalCast<RubyPerturbation>());
  EXPECT_TRUE(perturbations[1]->optionalCast<RubyPerturbation>());
  EXPECT_TRUE(perturbations[2]->optionalCast<RubyPerturbation>());

  // find models with improved wall
  testVariableValues.clear();
  testVariableValues.push_back(1);
  testDataPoints = analysisRecord.getDataPointRecords(testVariableValues);
  ASSERT_EQ(2u, testDataPoints.size());

  // infeasible: four values for a three-variable problem, so nothing matches
  testVariableValues.clear();
  testVariableValues.push_back(0);
  testVariableValues.push_back(0);
  testVariableValues.push_back(0);
  testVariableValues.push_back(0);
  testDataPoints = analysisRecord.getDataPointRecords(testVariableValues);
  ASSERT_EQ(0u, testDataPoints.size());
}

TEST_F(AnalysisDriverFixture, DDACE_LatinHypercube_Continuous) {
  {
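    // Scope this first run so the project, driver, and analysis are released before
    // the same project is re-opened below to exercise restart behavior.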
    // GET SIMPLE PROJECT
    SimpleProject project = getCleanSimpleProject("DDACE_LatinHypercube_Continuous");
    Analysis analysis = project.analysis();

    // SET PROBLEM
    Problem problem = retrieveProblem("Continuous",true,false);
    analysis.setProblem(problem);

    // DEFINE SEED
    Model model = model::exampleModel();
    openstudio::path p = toPath("./example.osm");
    model.save(p,true);
    FileReference seedModel(p);
    analysis.setSeed(seedModel);

    // SET ALGORITHM
    DDACEAlgorithmOptions algOptions(DDACEAlgorithmType::lhs);
    DDACEAlgorithm algorithm(algOptions);
    analysis.setAlgorithm(algorithm);

    // RUN ANALYSIS
    AnalysisDriver driver = project.analysisDriver();
    AnalysisRunOptions runOptions = standardRunOptions(project.projectDir());
    CurrentAnalysis currentAnalysis = driver.run(analysis,runOptions);
    EXPECT_TRUE(driver.waitForFinished());
    boost::optional<runmanager::JobErrors> jobErrors = currentAnalysis.dakotaJobErrors();
    ASSERT_TRUE(jobErrors);
    EXPECT_FALSE(jobErrors->errors().empty()); // fails because the number of samples was not specified
    EXPECT_TRUE(driver.currentAnalyses().empty());
    Table summary = currentAnalysis.analysis().summaryTable();
    EXPECT_EQ(1u,summary.nRows()); // header row only; no data points

    project.clearAllResults();
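    // algOptions shares its implementation with the algorithm already attached to the
    // analysis, so setting the sample count here is visible through analysis.algorithm().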
    algOptions.setSamples(4);
    EXPECT_EQ(4,analysis.algorithm()->cast<DDACEAlgorithm>().ddaceAlgorithmOptions().samples());
    currentAnalysis = driver.run(analysis,runOptions);
    EXPECT_TRUE(driver.waitForFinished());
    jobErrors = currentAnalysis.dakotaJobErrors();
    ASSERT_TRUE(jobErrors);
    EXPECT_TRUE(jobErrors->errors().empty());
    EXPECT_TRUE(driver.currentAnalyses().empty());
    summary = currentAnalysis.analysis().summaryTable();
    EXPECT_EQ(5u,summary.nRows()); // header row + 4 data points
    summary.save(project.projectDir() / toPath("summary.csv"));

    BOOST_FOREACH(const DataPoint& dataPoint,analysis.dataPoints()) {
      EXPECT_TRUE(dataPoint.isComplete());
      EXPECT_FALSE(dataPoint.failed());
      // EXPECT_FALSE(dataPoint.responseValues().empty());
    }

    ASSERT_TRUE(analysis.algorithm());
    EXPECT_TRUE(analysis.algorithm()->isComplete());
    EXPECT_FALSE(analysis.algorithm()->failed());

    {
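      // reload the analysis from its database record and verify that the algorithm's
      // completed, non-failed state survives the round trip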
      AnalysisRecord analysisRecord = project.analysisRecord();
      Analysis analysisCopy = analysisRecord.analysis();
      ASSERT_TRUE(analysisCopy.algorithm());
      EXPECT_TRUE(analysisCopy.algorithm()->isComplete());
      EXPECT_FALSE(analysisCopy.algorithm()->failed());
    }
  }

  LOG(Info,"Restart from existing project.");

  // Get existing project
  SimpleProject project = getSimpleProject("DDACE_LatinHypercube_Continuous");
  EXPECT_FALSE(project.analysisIsLoaded()); // make sure starting fresh
  Analysis analysis = project.analysis();
  EXPECT_FALSE(analysis.isDirty());

  // Add custom data point
  std::vector<QVariant> values;
  values.push_back(0.0);
  values.push_back(0.8);
  values.push_back(int(0));
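  // one value per problem variable; createDataPoint returns an unset optional if the
  // values cannot be mapped onto the problem, hence the ASSERT below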
  OptionalDataPoint dataPoint = analysis.problem().createDataPoint(values);
  ASSERT_TRUE(dataPoint);
  analysis.addDataPoint(*dataPoint);
  EXPECT_EQ(1u,analysis.dataPointsToQueue().size());
  ASSERT_TRUE(analysis.algorithm());
  EXPECT_TRUE(analysis.algorithm()->isComplete());
  EXPECT_FALSE(analysis.algorithm()->failed());
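  // adding a data point marks the analysis dirty, but does not invalidate existing
  // results or data points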
  EXPECT_TRUE(analysis.isDirty());
  EXPECT_FALSE(analysis.resultsAreInvalid());
  EXPECT_FALSE(analysis.dataPointsAreInvalid());

  // get last modified time of a file in a completed data point to make sure nothing is re-run
  DataPointVector completePoints = analysis.completeDataPoints();
  ASSERT_FALSE(completePoints.empty());
  OptionalFileReference inputFileRef = completePoints[0].osmInputData();
  ASSERT_TRUE(inputFileRef);
  QFileInfo inputFileInfo(toQString(inputFileRef->path()));
  QDateTime inputFileModifiedTestTime = inputFileInfo.lastModified();
  EXPECT_EQ(1u,analysis.dataPointsToQueue().size());

  AnalysisDriver driver = project.analysisDriver();
  CurrentAnalysis currentAnalysis = driver.run(
        analysis,
        standardRunOptions(project.projectDir()));
  EXPECT_TRUE(driver.waitForFinished());
  boost::optional<runmanager::JobErrors> jobErrors = currentAnalysis.dakotaJobErrors();
  EXPECT_FALSE(jobErrors); // should not try to re-run DakotaAlgorithm
  EXPECT_TRUE(driver.currentAnalyses().empty());
  EXPECT_TRUE(analysis.dataPointsToQueue().empty());
  Table summary = currentAnalysis.analysis().summaryTable();
  EXPECT_EQ(6u,summary.nRows()); // header row + 4 previous points + 1 custom point
  summary.save(project.projectDir() / toPath("summary_post_restart.csv"));
  // RunManager should not re-run any data points
  EXPECT_EQ(inputFileModifiedTestTime,inputFileInfo.lastModified());
}