TEST_F(AnalysisDriverFixture, DDACE_LatinHypercube_Continuous) {
  {
    // GET SIMPLE PROJECT
    SimpleProject project = getCleanSimpleProject("DDACE_LatinHypercube_Continuous");
    Analysis analysis = project.analysis();

    // SET PROBLEM
    Problem problem = retrieveProblem("Continuous",true,false);
    analysis.setProblem(problem);

    // DEFINE SEED
    Model model = model::exampleModel();
    openstudio::path p = toPath("./example.osm");
    model.save(p,true);
    FileReference seedModel(p);
    analysis.setSeed(seedModel);

    // CREATE ANALYSIS
    DDACEAlgorithmOptions algOptions(DDACEAlgorithmType::lhs);
    DDACEAlgorithm algorithm(algOptions);
    analysis.setAlgorithm(algorithm);

    // RUN ANALYSIS
    AnalysisDriver driver = project.analysisDriver();
    AnalysisRunOptions runOptions = standardRunOptions(project.projectDir());
    CurrentAnalysis currentAnalysis = driver.run(analysis,runOptions);
    EXPECT_TRUE(driver.waitForFinished());
    boost::optional<runmanager::JobErrors> jobErrors = currentAnalysis.dakotaJobErrors();
    ASSERT_TRUE(jobErrors);
    EXPECT_FALSE(jobErrors->errors().empty()); // errors expected: the number of samples was never specified
    EXPECT_TRUE(driver.currentAnalyses().empty());
    Table summary = currentAnalysis.analysis().summaryTable();
    EXPECT_EQ(1u,summary.nRows()); // header row only; no data points

    project.clearAllResults();
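    // setting samples on algOptions should carry through to the algorithm already attached to the
    // analysis (the options object appears to share its implementation), as verified below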
    algOptions.setSamples(4);
    EXPECT_EQ(4,analysis.algorithm()->cast<DDACEAlgorithm>().ddaceAlgorithmOptions().samples());
    currentAnalysis = driver.run(analysis,runOptions);
    EXPECT_TRUE(driver.waitForFinished());
    jobErrors = currentAnalysis.dakotaJobErrors();
    ASSERT_TRUE(jobErrors);
    EXPECT_TRUE(jobErrors->errors().empty());
    EXPECT_TRUE(driver.currentAnalyses().empty());
    summary = currentAnalysis.analysis().summaryTable();
    EXPECT_EQ(5u,summary.nRows()); // header row + 4 data points
    summary.save(project.projectDir() / toPath("summary.csv"));

    BOOST_FOREACH(const DataPoint& dataPoint,analysis.dataPoints()) {
      EXPECT_TRUE(dataPoint.isComplete());
      EXPECT_FALSE(dataPoint.failed());
      // EXPECT_FALSE(dataPoint.responseValues().empty());
    }

    ASSERT_TRUE(analysis.algorithm());
    EXPECT_TRUE(analysis.algorithm()->isComplete());
    EXPECT_FALSE(analysis.algorithm()->failed());

    {
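      // round-trip the analysis through the project database and confirm that the
      // algorithm's completion status is preserved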
      AnalysisRecord analysisRecord = project.analysisRecord();
      Analysis analysisCopy = analysisRecord.analysis();
      ASSERT_TRUE(analysisCopy.algorithm());
      EXPECT_TRUE(analysisCopy.algorithm()->isComplete());
      EXPECT_FALSE(analysisCopy.algorithm()->failed());
    }
  }

  LOG(Info,"Restart from existing project.");

  // Get existing project
  SimpleProject project = getSimpleProject("DDACE_LatinHypercube_Continuous");
  EXPECT_FALSE(project.analysisIsLoaded()); // make sure starting fresh
  Analysis analysis = project.analysis();
  EXPECT_FALSE(analysis.isDirty());

  // Add custom data point
  std::vector<QVariant> values;
  values.push_back(0.0);
  values.push_back(0.8);
  values.push_back(int(0));
  OptionalDataPoint dataPoint = analysis.problem().createDataPoint(values);
  ASSERT_TRUE(dataPoint);
  analysis.addDataPoint(*dataPoint);
  EXPECT_EQ(1u,analysis.dataPointsToQueue().size());
  ASSERT_TRUE(analysis.algorithm());
  EXPECT_TRUE(analysis.algorithm()->isComplete());
  EXPECT_FALSE(analysis.algorithm()->failed());
  EXPECT_TRUE(analysis.isDirty());
  EXPECT_FALSE(analysis.resultsAreInvalid());
  EXPECT_FALSE(analysis.dataPointsAreInvalid());

  // get last modified time of a file in a completed data point to make sure nothing is re-run
  DataPointVector completePoints = analysis.completeDataPoints();
  ASSERT_FALSE(completePoints.empty());
  OptionalFileReference inputFileRef = completePoints[0].osmInputData();
  ASSERT_TRUE(inputFileRef);
  QFileInfo inputFileInfo(toQString(inputFileRef->path()));
  QDateTime inputFileModifiedTestTime = inputFileInfo.lastModified();
  EXPECT_EQ(1u,analysis.dataPointsToQueue().size());

  AnalysisDriver driver = project.analysisDriver();
  CurrentAnalysis currentAnalysis = driver.run(
        analysis,
        standardRunOptions(project.projectDir()));
  EXPECT_TRUE(driver.waitForFinished());
  boost::optional<runmanager::JobErrors> jobErrors = currentAnalysis.dakotaJobErrors();
  EXPECT_FALSE(jobErrors); // should not try to re-run DakotaAlgorithm
  EXPECT_TRUE(driver.currentAnalyses().empty());
  EXPECT_TRUE(analysis.dataPointsToQueue().empty());
  Table summary = currentAnalysis.analysis().summaryTable();
  EXPECT_EQ(6u,summary.nRows()); // header row + 4 algorithm points + 1 custom point
  summary.save(project.projectDir() / toPath("summary_post_restart.csv"));
  // RunManager should not re-run any data points
  EXPECT_EQ(inputFileModifiedTestTime,inputFileInfo.lastModified());
}
TEST_F(ProjectFixture,AnalysisRecord_SetProblem) {
  // create an analysis with data points
  Problem problem1("Minimal Problem",VariableVector(),runmanager::Workflow());
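  // the seed is specified only by file type (OSM) here, rather than by an actual model file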
  Analysis analysis("Analysis",
                    problem1,
                    FileReferenceType::OSM);
  OptionalDataPoint dataPoint = problem1.createDataPoint(std::vector<QVariant>());
  ASSERT_TRUE(dataPoint);
  bool test = analysis.addDataPoint(*dataPoint);
  EXPECT_TRUE(test);

  // save to database, make sure everything is there, make sure clean signal filters down
  ProjectDatabase database = getCleanDatabase("AnalysisRecord_SetProblem");
  {
    bool transactionStarted = database.startTransaction();
    EXPECT_TRUE(transactionStarted);
    AnalysisRecord analysisRecord(analysis,database);
    database.save();
    test = database.commitTransaction();
    EXPECT_TRUE(test);
    ASSERT_NO_THROW(analysisRecord.problemRecord());
    ProblemRecord problemRecord = analysisRecord.problemRecord();
    EXPECT_EQ(problem1.uuid(),problemRecord.handle());
    EXPECT_EQ(problem1.versionUUID(),problemRecord.uuidLast());
    EXPECT_EQ(1u,analysisRecord.dataPointRecords().size());
  }
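  // after a successful save, clearing the analysis's dirty flag should propagate down to the
  // problem and the data points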
  analysis.clearDirtyFlag();
  EXPECT_FALSE(analysis.isDirty());
  EXPECT_FALSE(problem1.isDirty());
  BOOST_FOREACH(const DataPoint& dataPoint, analysis.dataPoints()) {
    EXPECT_FALSE(dataPoint.isDirty());
  }
  EXPECT_FALSE(analysis.dataPointsAreInvalid());

  // set analysis to have new problem
  // make sure dirty flag bubbles up, data points marked invalid
  Problem problem2 = problem1.clone().cast<Problem>();
  problem2.setName("Real Problem");
  EXPECT_NE(problem1.uuid(),problem2.uuid());
  analysis.setProblem(problem2);
  EXPECT_TRUE(analysis.isDirty());
  EXPECT_FALSE(problem1.isDirty());
  EXPECT_TRUE(problem2.isDirty());
  EXPECT_TRUE(analysis.dataPointsAreInvalid());
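  // try to add a Ruby-measure-backed continuous variable; the IDF-to-IDF version should be
  // rejected because an IDF measure is not compatible with the OSM seed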
  RubyPerturbation userScript(toPath("measure.rb"),
                              FileReferenceType::IDF,
                              FileReferenceType::IDF,
                              true);
  RubyContinuousVariable cvar("Script Argument Variable",
                              ruleset::OSArgument::makeDoubleArgument("COP"),
                              userScript);
  test = problem2.push(cvar);
  EXPECT_FALSE(test); // IDF not compatible with seed
  EXPECT_TRUE(problem2.variables().empty());
  test = userScript.setPerturbationScript(toPath("measure.rb"),
                                          FileReferenceType::OSM,
                                          FileReferenceType::OSM,
                                          true);
  EXPECT_TRUE(test);
  test = problem2.push(cvar);
  EXPECT_TRUE(test);
  EXPECT_EQ(1u,analysis.problem().variables().size());
  EXPECT_EQ(1u,analysis.dataPoints().size());
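  // a data point for the new problem can be created, but adding it should fail while the
  // analysis's existing data points are marked invalid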
  dataPoint = problem2.createDataPoint(std::vector<QVariant>(1u,3.5));
  ASSERT_TRUE(dataPoint);
  test = analysis.addDataPoint(*dataPoint);
  EXPECT_FALSE(test);

  // save to database, make sure dataPointsAreInvalid flag was saved, old problem
  // should still be there, new problem there, and clean signal won't go through
  Analysis analysisCopy = analysis; // will replace with deserialized version momentarily
  {
    bool transactionStarted = database.startTransaction();
    EXPECT_TRUE(transactionStarted);
    database.unloadUnusedCleanRecords(); // ETH@20130201 - Having to call this is awkward.
                                         // (As are the brackets) Where is the best place?
    AnalysisRecord analysisRecord(analysis,database);
    database.save();
    test = database.commitTransaction();
    EXPECT_TRUE(test);
    ASSERT_NO_THROW(analysisRecord.problemRecord());
    ProblemRecord problemRecord = analysisRecord.problemRecord();
    EXPECT_EQ(problem2.uuid(),problemRecord.handle());
    EXPECT_EQ(problem2.versionUUID(),problemRecord.uuidLast());
    EXPECT_EQ(2u,ProblemRecord::getProblemRecords(database).size()); // old one still around
    EXPECT_EQ(1u,analysisRecord.dataPointRecords().size());
    test = analysis.clearDirtyFlag();
    EXPECT_FALSE(test);
    analysisCopy = analysisRecord.analysis(); // deserialized copy; should work b/c both problems are still around
  }
  EXPECT_EQ(analysis.uuid(),analysisCopy.uuid());
  EXPECT_EQ(analysis.versionUUID(),analysisCopy.versionUUID());
  EXPECT_TRUE(analysisCopy.isDirty());
  EXPECT_TRUE(analysisCopy.dataPointsAreInvalid());
  ASSERT_FALSE(analysisCopy.dataPoints().empty());
  EXPECT_NE(analysisCopy.problem().uuid(),analysisCopy.dataPoints()[0].problem().uuid());
  test = analysisCopy.clearDirtyFlag();
  EXPECT_FALSE(test);

  // remove the data points and save again. The dataPointsAreInvalid flag should clear, the new
  // problem should still be there and ok, and the old problem will hang around because it was not
  // removed while an AnalysisRecord was still pointing to it. The clean signal should filter all
  // the way down.
  analysisCopy.removeAllDataPoints();
  {
    bool transactionStarted = database.startTransaction();
    EXPECT_TRUE(transactionStarted);
    database.unloadUnusedCleanRecords(); // ETH@20130201 - Having to call this is awkward.
                                         // Where is the best place?
    AnalysisRecord analysisRecord(analysisCopy,database);
    database.save();
    test = database.commitTransaction();
    EXPECT_TRUE(test);
    ASSERT_NO_THROW(analysisRecord.problemRecord());
    ProblemRecord problemRecord = analysisRecord.problemRecord();
    EXPECT_EQ(problem2.uuid(),problemRecord.handle());
    EXPECT_EQ(problem2.versionUUID(),problemRecord.uuidLast());
    // old problem record still hanging around.
    // TODO: Have use count as child, resource, and be able to (selectively) purge.
    EXPECT_EQ(2u,ProblemRecord::getProblemRecords(database).size());
    EXPECT_TRUE(analysisRecord.dataPointRecords().empty());
    analysis = analysisRecord.analysis();
  }
  test = analysis.clearDirtyFlag();
  EXPECT_TRUE(test);
  EXPECT_FALSE(analysis.isDirty());
  EXPECT_FALSE(analysis.dataPointsAreInvalid());
  EXPECT_FALSE(analysis.resultsAreInvalid());
  EXPECT_TRUE(analysis.dataPoints().empty());
}