Code example #1
File: DakotaAlgorithm.cpp — Project: Anto-F/OpenStudio
  boost::optional<DataPoint> DakotaAlgorithm_Impl::createNextDataPoint(
      Analysis& analysis,const DakotaParametersFile& params)
  {
    // Sanity check: this algorithm must be the one attached to the analysis.
    OS_ASSERT(analysis.algorithm().get() == getPublicObject<DakotaAlgorithm>());

    // TODO: Update iteration counter.
    OptionalDataPoint newPoint = analysis.problem().createDataPoint(
        params,getPublicObject<DakotaAlgorithm>());
    if (!newPoint) {
      return newPoint;
    }

    if (!analysis.addDataPoint(*newPoint)) {
      // An equivalent point is already in the analysis; use that one instead.
      DataPointVector matches = analysis.getDataPoints(newPoint->variableValues());
      OS_ASSERT(matches.size() == 1u);
      newPoint = matches[0];
    }

    // Tag the point with "<algorithm name>_<iteration>".
    std::stringstream tagStream;
    tagStream << name() << "_" << m_iter;
    newPoint->addTag(tagStream.str());

    return newPoint;
  }
Code example #2
TEST_F(AnalysisDriverFixture, DDACE_LatinHypercube_Continuous) {
  // End-to-end test of the DDACE Latin hypercube algorithm on a continuous
  // problem. Phase 1 (inner scope): run once without specifying the number of
  // samples (Dakota is expected to report an error), then set samples and run
  // successfully. Phase 2: reopen the persisted project, add one custom data
  // point, and verify that only the new point is run — nothing is re-run.
  {
    // GET SIMPLE PROJECT
    SimpleProject project = getCleanSimpleProject("DDACE_LatinHypercube_Continuous");
    Analysis analysis = project.analysis();

    // SET PROBLEM
    Problem problem = retrieveProblem("Continuous",true,false);
    analysis.setProblem(problem);

    // DEFINE SEED
    Model model = model::exampleModel();
    openstudio::path p = toPath("./example.osm");
    model.save(p,true);
    FileReference seedModel(p);
    analysis.setSeed(seedModel);

    // CREATE ANALYSIS
    DDACEAlgorithmOptions algOptions(DDACEAlgorithmType::lhs);
    DDACEAlgorithm algorithm(algOptions);
    analysis.setAlgorithm(algorithm);

    // RUN ANALYSIS — samples not set yet, so Dakota should fail
    AnalysisDriver driver = project.analysisDriver();
    AnalysisRunOptions runOptions = standardRunOptions(project.projectDir());
    CurrentAnalysis currentAnalysis = driver.run(analysis,runOptions);
    EXPECT_TRUE(driver.waitForFinished());
    boost::optional<runmanager::JobErrors> jobErrors = currentAnalysis.dakotaJobErrors();
    ASSERT_TRUE(jobErrors);
    EXPECT_FALSE(jobErrors->errors().empty()); // require specification of number of samples
    EXPECT_TRUE(driver.currentAnalyses().empty());
    Table summary = currentAnalysis.analysis().summaryTable();
    EXPECT_EQ(1u,summary.nRows()); // no points

    // Second run: with samples set, Dakota should run cleanly.
    project.clearAllResults();
    algOptions.setSamples(4);
    // setting samples on the local options object is expected to be visible
    // through the algorithm stored on the analysis (verified here)
    EXPECT_EQ(4,analysis.algorithm()->cast<DDACEAlgorithm>().ddaceAlgorithmOptions().samples());
    currentAnalysis = driver.run(analysis,runOptions);
    EXPECT_TRUE(driver.waitForFinished());
    jobErrors = currentAnalysis.dakotaJobErrors();
    ASSERT_TRUE(jobErrors);
    EXPECT_TRUE(jobErrors->errors().empty());
    EXPECT_TRUE(driver.currentAnalyses().empty());
    summary = currentAnalysis.analysis().summaryTable();
    EXPECT_EQ(5u,summary.nRows()); // 4 more rows than the empty-analysis table above
    summary.save(project.projectDir() / toPath("summary.csv"));

    // every sampled point should have completed without failure
    BOOST_FOREACH(const DataPoint& dataPoint,analysis.dataPoints()) {
      EXPECT_TRUE(dataPoint.isComplete());
      EXPECT_FALSE(dataPoint.failed());
      // EXPECT_FALSE(dataPoint.responseValues().empty());
    }

    ASSERT_TRUE(analysis.algorithm());
    EXPECT_TRUE(analysis.algorithm()->isComplete());
    EXPECT_FALSE(analysis.algorithm()->failed());

    // algorithm completion state should survive a round trip through the
    // project database
    {
      AnalysisRecord analysisRecord = project.analysisRecord();
      Analysis analysisCopy = analysisRecord.analysis();
      ASSERT_TRUE(analysisCopy.algorithm());
      EXPECT_TRUE(analysisCopy.algorithm()->isComplete());
      EXPECT_FALSE(analysisCopy.algorithm()->failed());
    }
  }

  LOG(Info,"Restart from existing project.");

  // Get existing project
  SimpleProject project = getSimpleProject("DDACE_LatinHypercube_Continuous");
  EXPECT_FALSE(project.analysisIsLoaded()); // make sure starting fresh
  Analysis analysis = project.analysis();
  EXPECT_FALSE(analysis.isDirty());

  // Add custom data point
  std::vector<QVariant> values;
  values.push_back(0.0);
  values.push_back(0.8);
  values.push_back(int(0));
  OptionalDataPoint dataPoint = analysis.problem().createDataPoint(values);
  ASSERT_TRUE(dataPoint);
  analysis.addDataPoint(*dataPoint);
  EXPECT_EQ(1u,analysis.dataPointsToQueue().size());
  ASSERT_TRUE(analysis.algorithm());
  EXPECT_TRUE(analysis.algorithm()->isComplete());
  EXPECT_FALSE(analysis.algorithm()->failed());
  EXPECT_TRUE(analysis.isDirty()); // adding a point dirties the analysis
  EXPECT_FALSE(analysis.resultsAreInvalid());
  EXPECT_FALSE(analysis.dataPointsAreInvalid());

  // get last modified time of a file in a completed data point to make sure nothing is re-run
  DataPointVector completePoints = analysis.completeDataPoints();
  ASSERT_FALSE(completePoints.empty());
  OptionalFileReference inputFileRef = completePoints[0].osmInputData();
  ASSERT_TRUE(inputFileRef);
  QFileInfo inputFileInfo(toQString(inputFileRef->path()));
  QDateTime inputFileModifiedTestTime = inputFileInfo.lastModified();
  EXPECT_EQ(1u,analysis.dataPointsToQueue().size());

  AnalysisDriver driver = project.analysisDriver();
  CurrentAnalysis currentAnalysis = driver.run(
        analysis,
        standardRunOptions(project.projectDir()));
  EXPECT_TRUE(driver.waitForFinished());
  boost::optional<runmanager::JobErrors> jobErrors = currentAnalysis.dakotaJobErrors();
  EXPECT_FALSE(jobErrors); // should not try to re-run DakotaAlgorithm
  EXPECT_TRUE(driver.currentAnalyses().empty());
  EXPECT_TRUE(analysis.dataPointsToQueue().empty());
  Table summary = currentAnalysis.analysis().summaryTable();
  EXPECT_EQ(6u,summary.nRows()); // the previous 4 points plus the custom one
  summary.save(project.projectDir() / toPath("summary_post_restart.csv"));
  // RunManager should not re-run any data points.
  // QFileInfo caches file metadata, so without refresh() the comparison below
  // would re-read the cached timestamp and pass vacuously.
  inputFileInfo.refresh();
  EXPECT_EQ(inputFileModifiedTestTime,inputFileInfo.lastModified());
}
Code example #3
TEST_F(AnalysisDriverFixture,DataPersistence_DataPointErrors) {
  // Verifies that per-job error information (result value, errors, warnings,
  // infos) recorded while running a data point survives closing and re-opening
  // the project. The problem uses BuggyBCLMeasure, so the run is expected to
  // fail partway through the 5-step workflow; the second scope repeats the
  // same expectations against the deserialized project.
  {
    // Create and populate project
    SimpleProject project = getCleanSimpleProject("DataPersistence_DataPointErrors");
    Analysis analysis = project.analysis();
    Problem problem = retrieveProblem(AnalysisDriverFixtureProblem::BuggyBCLMeasure,
                                      true,
                                      false);
    EXPECT_EQ(5u,problem.workflow().size());
    analysis.setProblem(problem);
    model::Model model =fastExampleModel();
    openstudio::path p = toPath("./example.osm");
    model.save(p,true);
    FileReference seedModel(p);
    project.setSeed(seedModel);
    // single data point with every variable set to its 0th value
    DataPoint dataPoint = problem.createDataPoint(std::vector<QVariant>(problem.numVariables(),0)).get();
    analysis.addDataPoint(dataPoint);

    // Run analysis
    AnalysisRunOptions runOptions = standardRunOptions(project.projectDir());
    project.analysisDriver().run(analysis,runOptions);
    project.analysisDriver().waitForFinished();

    // Check DataPoint job and error information
    ASSERT_EQ(1u,analysis.dataPoints().size());
    dataPoint = analysis.dataPoints()[0];
    EXPECT_TRUE(dataPoint.isComplete());
    EXPECT_TRUE(dataPoint.failed());
    EXPECT_TRUE(dataPoint.topLevelJob());
    WorkflowStepJobVector jobResults = problem.getJobsByWorkflowStep(dataPoint);
    EXPECT_EQ(problem.workflow().size(),jobResults.size());
    ASSERT_EQ(5u,jobResults.size());

    // Step 0: a measure that ran and returned NA (info messages only)
    WorkflowStepJob jobResult = jobResults[0];
    ASSERT_TRUE(jobResult.job);
    EXPECT_TRUE(jobResult.measure);
    Job job = jobResult.job.get();
    ASSERT_TRUE(jobResult.mergedJobIndex);
    EXPECT_EQ(0u,jobResult.mergedJobIndex.get());
    EXPECT_FALSE(job.running());
    EXPECT_FALSE(job.outOfDate());
    EXPECT_FALSE(job.canceled());
    EXPECT_TRUE(job.lastRun());
    JobErrors treeErrors = job.treeErrors(); // get all tree errors now, test later
    JobErrors errors = jobResult.errors().get();
    EXPECT_EQ(OSResultValue(OSResultValue::NA),errors.result);
    EXPECT_TRUE(errors.succeeded());
    EXPECT_TRUE(errors.errors().empty());
    EXPECT_TRUE(errors.warnings().empty());
    EXPECT_FALSE(errors.infos().empty());

    // Step 1: a UserScript work item that succeeded with warnings
    jobResult = jobResults[1];
    ASSERT_TRUE(jobResult.job);
    EXPECT_FALSE(jobResult.measure);
    ASSERT_TRUE(jobResult.step.isWorkItem());
    EXPECT_EQ(JobType(JobType::UserScript),jobResult.step.workItemType());
    job = jobResult.job.get();
    ASSERT_TRUE(jobResult.mergedJobIndex);
    EXPECT_EQ(1u,jobResult.mergedJobIndex.get());
    EXPECT_FALSE(job.running());
    EXPECT_FALSE(job.outOfDate());
    EXPECT_FALSE(job.canceled());
    EXPECT_TRUE(job.lastRun());
    errors = jobResult.errors().get();
    EXPECT_EQ(OSResultValue(OSResultValue::Success),errors.result);
    EXPECT_TRUE(errors.succeeded());
    EXPECT_TRUE(errors.errors().empty());
    EXPECT_FALSE(errors.warnings().empty());
    EXPECT_TRUE(errors.infos().empty());

    // Step 2: a measure that failed with error messages — presumably the
    // "buggy" measure this fixture problem is named for (TODO confirm)
    jobResult = jobResults[2];
    ASSERT_TRUE(jobResult.job);
    EXPECT_TRUE(jobResult.measure);
    job = jobResult.job.get();
    ASSERT_TRUE(jobResult.mergedJobIndex);
    EXPECT_EQ(2u,jobResult.mergedJobIndex.get());
    EXPECT_FALSE(job.running());
    EXPECT_FALSE(job.outOfDate());
    EXPECT_FALSE(job.canceled());
    EXPECT_TRUE(job.lastRun());
    errors = jobResult.errors().get();
    EXPECT_EQ(OSResultValue(OSResultValue::Fail),errors.result);
    EXPECT_FALSE(errors.succeeded());
    EXPECT_FALSE(errors.errors().empty());
    EXPECT_TRUE(errors.warnings().empty());
    EXPECT_TRUE(errors.infos().empty());

    // Step 3: a UserScript in the same merged job as the failed step;
    // marked Fail with exactly one error
    jobResult = jobResults[3];
    ASSERT_TRUE(jobResult.job);
    EXPECT_FALSE(jobResult.measure);
    ASSERT_TRUE(jobResult.step.isWorkItem());
    EXPECT_EQ(JobType(JobType::UserScript),jobResult.step.workItemType());
    job = jobResult.job.get();
    ASSERT_TRUE(jobResult.mergedJobIndex);
    EXPECT_EQ(3u,jobResult.mergedJobIndex.get());
    EXPECT_FALSE(job.running());
    EXPECT_FALSE(job.outOfDate()); // now all four scripts are in same job
    EXPECT_FALSE(job.canceled());
    EXPECT_TRUE(job.lastRun());    // now all four scripts are in same job
    errors = jobResult.errors().get();
    // this script not actually run, so result in default state
    EXPECT_EQ(OSResultValue(OSResultValue::Fail),errors.result);
    EXPECT_FALSE(errors.succeeded());
    EXPECT_EQ(1u, errors.errors().size());
    EXPECT_TRUE(errors.warnings().empty());
    EXPECT_TRUE(errors.infos().empty());

    // Step 4: the ModelToIdf job downstream of the failure — never executed
    jobResult = jobResults[4];
    ASSERT_TRUE(jobResult.job);
    EXPECT_FALSE(jobResult.measure);
    ASSERT_TRUE(jobResult.step.isWorkItem());
    EXPECT_EQ(JobType(JobType::ModelToIdf),jobResult.step.workItemType());
    job = jobResult.job.get();
    EXPECT_FALSE(jobResult.mergedJobIndex);
    EXPECT_TRUE(job.outOfDate()); // never run
    EXPECT_FALSE(job.canceled());
    EXPECT_FALSE(job.lastRun());
    errors = jobResult.errors().get();
    EXPECT_EQ(OSResultValue(OSResultValue::Fail),errors.result);
    EXPECT_FALSE(errors.succeeded());
    EXPECT_TRUE(errors.errors().empty());
    EXPECT_TRUE(errors.warnings().empty());
    EXPECT_TRUE(errors.infos().empty());

    // Aggregated tree errors collect messages from all jobs in the tree
    EXPECT_EQ(OSResultValue(OSResultValue::Fail),treeErrors.result);
    EXPECT_FALSE(treeErrors.succeeded());
    EXPECT_FALSE(treeErrors.errors().empty());
    EXPECT_FALSE(treeErrors.warnings().empty());
    EXPECT_FALSE(treeErrors.infos().empty());
  }

  {
    // Re-open project
    SimpleProject project = getSimpleProject("DataPersistence_DataPointErrors");
    Analysis analysis = project.analysis();
    Problem problem = analysis.problem();

    // Verify job and error information still there
    // Check DataPoint job and error information
    ASSERT_EQ(1u,analysis.dataPoints().size());
    DataPoint dataPoint = analysis.dataPoints()[0];
    EXPECT_TRUE(dataPoint.isComplete());
    EXPECT_TRUE(dataPoint.failed());
    EXPECT_TRUE(dataPoint.topLevelJob());
    WorkflowStepJobVector jobResults = problem.getJobsByWorkflowStep(dataPoint);
    EXPECT_EQ(problem.workflow().size(),jobResults.size());
    ASSERT_EQ(5u,jobResults.size());

    // Step 0 after reload: same NA-with-infos expectations as above
    WorkflowStepJob jobResult = jobResults[0];
    ASSERT_TRUE(jobResult.job);
    EXPECT_TRUE(jobResult.measure);
    Job job = jobResult.job.get();
    EXPECT_FALSE(job.running());
    EXPECT_FALSE(job.outOfDate());
    EXPECT_FALSE(job.canceled());
    EXPECT_TRUE(job.lastRun());
    JobErrors treeErrors = job.treeErrors(); // get all tree errors now, test later
    JobErrors errors = jobResult.errors().get();
    EXPECT_EQ(OSResultValue(OSResultValue::NA),errors.result);
    EXPECT_TRUE(errors.succeeded());
    EXPECT_TRUE(errors.errors().empty());
    EXPECT_TRUE(errors.warnings().empty());
    EXPECT_FALSE(errors.infos().empty());

    // Step 1 after reload: success with warnings
    jobResult = jobResults[1];
    ASSERT_TRUE(jobResult.job);
    EXPECT_FALSE(jobResult.measure);
    ASSERT_TRUE(jobResult.step.isWorkItem());
    EXPECT_EQ(JobType(JobType::UserScript),jobResult.step.workItemType());
    job = jobResult.job.get();
    EXPECT_FALSE(job.running());
    EXPECT_FALSE(job.outOfDate());
    EXPECT_FALSE(job.canceled());
    EXPECT_TRUE(job.lastRun());
    errors = jobResult.errors().get();
    EXPECT_EQ(OSResultValue(OSResultValue::Success),errors.result);
    EXPECT_TRUE(errors.succeeded());
    EXPECT_TRUE(errors.errors().empty());
    EXPECT_FALSE(errors.warnings().empty());
    EXPECT_TRUE(errors.infos().empty());

    // Step 2 after reload: failed measure with errors
    jobResult = jobResults[2];
    ASSERT_TRUE(jobResult.job);
    EXPECT_TRUE(jobResult.measure);
    job = jobResult.job.get();
    EXPECT_FALSE(job.running());
    EXPECT_FALSE(job.outOfDate());
    EXPECT_FALSE(job.canceled());
    EXPECT_TRUE(job.lastRun());
    errors = jobResult.errors().get();
    EXPECT_EQ(OSResultValue(OSResultValue::Fail),errors.result);
    EXPECT_FALSE(errors.succeeded());
    EXPECT_FALSE(errors.errors().empty());
    EXPECT_TRUE(errors.warnings().empty());
    EXPECT_TRUE(errors.infos().empty());

    // Step 3 after reload: Fail with exactly one error
    jobResult = jobResults[3];
    ASSERT_TRUE(jobResult.job);
    EXPECT_FALSE(jobResult.measure);
    ASSERT_TRUE(jobResult.step.isWorkItem());
    EXPECT_EQ(JobType(JobType::UserScript),jobResult.step.workItemType());
    job = jobResult.job.get();
    EXPECT_FALSE(job.running());
    EXPECT_FALSE(job.outOfDate()); // now all four scripts are in same job
    EXPECT_FALSE(job.canceled());
    EXPECT_TRUE(job.lastRun());    // now all four scripts are in same job
    errors = jobResult.errors().get();
    EXPECT_EQ(OSResultValue(OSResultValue::Fail),errors.result);
    EXPECT_FALSE(errors.succeeded());
    EXPECT_EQ(1u, errors.errors().size());
    EXPECT_TRUE(errors.warnings().empty());
    EXPECT_TRUE(errors.infos().empty());

    // Step 4 after reload: ModelToIdf still marked as never run
    jobResult = jobResults[4];
    ASSERT_TRUE(jobResult.job);
    EXPECT_FALSE(jobResult.measure);
    ASSERT_TRUE(jobResult.step.isWorkItem());
    EXPECT_EQ(JobType(JobType::ModelToIdf),jobResult.step.workItemType());
    job = jobResult.job.get();
    EXPECT_TRUE(job.outOfDate()); // never run
    EXPECT_FALSE(job.canceled());
    EXPECT_FALSE(job.lastRun());
    errors = jobResult.errors().get();
    EXPECT_EQ(OSResultValue(OSResultValue::Fail),errors.result);
    EXPECT_FALSE(errors.succeeded());
    EXPECT_TRUE(errors.errors().empty());
    EXPECT_TRUE(errors.warnings().empty());
    EXPECT_TRUE(errors.infos().empty());

    // Aggregated tree errors are also preserved across the reload
    EXPECT_EQ(OSResultValue(OSResultValue::Fail),treeErrors.result);
    EXPECT_FALSE(treeErrors.succeeded());
    EXPECT_FALSE(treeErrors.errors().empty());
    EXPECT_FALSE(treeErrors.warnings().empty());
    EXPECT_FALSE(treeErrors.infos().empty());
  }

}
Code example #4
  int DesignOfExperiments_Impl::createNextIteration(Analysis& analysis) {
    int result(0);

    // Caller contract (stated in the header): the problem-type check has
    // already occurred, and this algorithm is the one attached to the analysis.
    OS_ASSERT(analysis.algorithm().get() == getPublicObject<DesignOfExperiments>());
    // nothing other than full factorial is supported yet
    DesignOfExperimentsOptions options = designOfExperimentsOptions();
    OS_ASSERT(options.designType() == DesignOfExperimentsType::FullFactorial);

    if (isComplete()) {
      LOG(Info,"Algorithm is already marked as complete. Returning without creating new points.");
      return result;
    }

    if (options.maxIter() && options.maxIter().get() < 1) {
      LOG(Info,"Maximum iterations set to less than one. No DataPoints will be added to Analysis '"
          << analysis.name() << "', and the Algorithm will be marked complete.");
      markComplete();
      return result;
    }

    // Respect the optional cap on total simulations (counting only points this
    // algorithm previously tagged "DOE").
    OptionalInt maxSimulations = options.maxSims();
    int totalDoePoints = int(analysis.getDataPoints("DOE").size());
    if (maxSimulations && (totalDoePoints >= *maxSimulations)) {
      LOG(Info,"Analysis '" << analysis.name() << "' already contains " << totalDoePoints
          << " DataPoints added by the DesignOfExperiments algorithm, which meets or exceeds the "
          << "maximum number specified in this algorithm's options object, " << *maxSimulations << ". "
          << "No data points will be added and the Algorithm will be marked complete.");
      markComplete();
      return result;
    }

    m_iter = 1;

    // Build the full-factorial cross product of all discrete variable values.
    std::vector< std::vector<QVariant> > combinations;
    for (const Variable& variable : analysis.problem().variables()) {
      // every variable is a DiscreteVariable, otherwise
      // !isCompatibleProblemType(analysis.problem())
      IntVector levels = variable.cast<DiscreteVariable>().validValues(true);
      if (levels.empty()) {
        // no admissible values for this variable; leave the existing
        // combinations untouched (matches the original inner-loop no-op)
        continue;
      }
      if (combinations.empty()) {
        // first variable with values: one single-element combination per level
        for (int level : levels) {
          combinations.push_back(std::vector<QVariant>(1u,QVariant(level)));
        }
      }
      else {
        // expand every existing combination once per level, level-major order
        std::vector< std::vector<QVariant> > expanded;
        expanded.reserve(combinations.size() * levels.size());
        for (int level : levels) {
          for (const std::vector<QVariant>& base : combinations) {
            std::vector<QVariant> extended(base);
            extended.push_back(QVariant(level));
            expanded.push_back(extended);
          }
        }
        combinations = expanded;
      }
    }

    // Create data points (tagged "DOE") and add them to the analysis, stopping
    // early once the simulation cap is reached.
    for (const std::vector<QVariant>& combination : combinations) {
      DataPoint dataPoint = analysis.problem().createDataPoint(combination).get();
      dataPoint.addTag("DOE");
      if (analysis.addDataPoint(dataPoint)) {
        ++result;
        ++totalDoePoints;
        if (maxSimulations && (totalDoePoints == maxSimulations.get())) {
          break;
        }
      }
    }

    if (result == 0) {
      LOG(Trace,"No new points were added, so marking this DesignOfExperiments complete.");
      markComplete();
    }

    return result;
  }