Code Example #1
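Regression test for Bug 1189: inserts an IDF measure group into a PAT project's problem, saves the project, and verifies that both of the resulting RunManager workflows can be loaded back from the project database.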
TEST_F(AnalysisDriverFixture,SimpleProject_EditProblemWithTwoWorkflows) {
  // Written to reproduce Bug 1189
  {
    // Create PAT project with an IDF measure
    SimpleProject project = getCleanPATProject("SimpleProject_EditProblemWithTwoWorkflows");

    Problem problem = project.analysis().problem();
    OptionalInt index = problem.getWorkflowStepIndexByJobType(JobType::EnergyPlusPreProcess);
    ASSERT_TRUE(index);
    MeasureGroup dv("Idf Measure",MeasureVector(1u,NullMeasure()));
    problem.insert(*index,dv);
    BOOST_FOREACH(const BCLMeasure& measure,BCLMeasure::patApplicationMeasures()) {
      if (measure.inputFileType() == FileReferenceType::IDF) {
        RubyMeasure rubyMeasure(measure); // distinct name; reusing 'measure' here would shadow the loop variable and self-initialize
        dv.push(rubyMeasure);
        break;
      }
    }

    project.save();

    ProjectDatabase database = project.projectDatabase();
    RunManager runManager = project.runManager();
    WorkflowRecordVector workflowRecords = WorkflowRecord::getWorkflowRecords(database);
    EXPECT_EQ(2u,workflowRecords.size());
    BOOST_FOREACH(const WorkflowRecord& wr,workflowRecords) {
      EXPECT_NO_THROW(runManager.loadWorkflow(wr.runManagerWorkflowKey()));
    }
  }
}
Code Example #2
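Creates and populates a PAT project (seed model plus a full-factorial design-of-experiments algorithm), saves it, reopens it, and checks that saveAs produces a copy that also opens as a valid PAT project.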
TEST_F(AnalysisDriverFixture,PATProject_CreateOpenAndSaveAs) {
  openstudio::path projectDir;
  {
    // Create a PAT project
    SimpleProject patProject = getCleanPATProject("PATProject_CreateOpenAndSaveAs");

    // Populate it
    patProject.analysis().setName("PAT Project: Create, Open, and Save As");

    Model model = model::exampleModel();
    openstudio::path p = toPath("./example.osm");
    model.save(p,true);
    FileReference seedModel(p);
    patProject.setSeed(seedModel);

    // (feature not in PAT, but not incompatible)
    DesignOfExperimentsOptions algOptions(DesignOfExperimentsType::FullFactorial);
    DesignOfExperiments algorithm(algOptions);
    patProject.analysis().setAlgorithm(algorithm);

    // Save it
    patProject.save();

    projectDir = patProject.projectDir();
  }

  // Open Project
  OptionalSimpleProject project = openPATProject(projectDir);
  ASSERT_TRUE(project);
  EXPECT_TRUE(project->isPATProject());

  // Save Project As
  openstudio::path newProjectDir = toPath("AnalysisDriverFixtureData") / toPath("PATProjectCreateOpenAndSaveAsCopy");
  boost::filesystem::remove_all(newProjectDir);
  ASSERT_TRUE(project->saveAs(newProjectDir));
  OptionalSimpleProject projectCopy = openPATProject(newProjectDir);
  ASSERT_TRUE(projectCopy);
  EXPECT_TRUE(projectCopy->isPATProject());
  EXPECT_TRUE(projectCopy->analysis().algorithm());
}
Code Example #3
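Runs a full-factorial mesh analysis on the MixedOsmIdf problem, then exercises the AnalysisRecord query API: looking up data points by variable values (including null entries) and mapping variable values back to their discrete perturbations.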
TEST_F(AnalysisDriverFixture, DesignOfExperiments_MeshAnalysis) {

  openstudio::path rubyLibDirPath = openstudio::toPath(rubyLibDir());

  // GET SIMPLE PROJECT
  SimpleProject project = getCleanSimpleProject("DesignOfExperiments_MeshAnalysis");
  Analysis analysis = project.analysis();

  // SET PROBLEM
  Problem problem = retrieveProblem("MixedOsmIdf",false,false);
  analysis.setProblem(problem);

  // SET SEED
  Model model = model::exampleModel();
  openstudio::path p = toPath("./example.osm");
  model.save(p,true);
  FileReference seedModel(p);
  analysis.setSeed(seedModel);

  // SET ALGORITHM
  DesignOfExperimentsOptions algOptions(DesignOfExperimentsType::FullFactorial);
  DesignOfExperiments algorithm(algOptions);
  analysis.setAlgorithm(algorithm);

  // RUN ANALYSIS
  AnalysisDriver driver = project.analysisDriver();
  AnalysisRunOptions runOptions = standardRunOptions(project.projectDir());
  driver.run(analysis,runOptions);
  EXPECT_TRUE(driver.waitForFinished());

  // CHECK RESULTS
  AnalysisRecord analysisRecord = project.analysisRecord();
  EXPECT_EQ(4,analysisRecord.problemRecord().combinatorialSize(true).get());
  EXPECT_EQ(4u, analysisRecord.dataPointRecords().size());
  BOOST_FOREACH(const DataPointRecord& dataPointRecord, analysisRecord.dataPointRecords()) {
    EXPECT_TRUE(dataPointRecord.isComplete());
    EXPECT_FALSE(dataPointRecord.failed());
  }

  // get data points by perturbations and vice versa
  std::vector<DataPointRecord> testDataPoints;
  std::vector<QVariant> testVariableValues;

  // all data points are successful
  testDataPoints = analysisRecord.successfulDataPointRecords();
  EXPECT_EQ(4u,testDataPoints.size());

  // empty variableValues returns all data points
  testDataPoints = analysisRecord.getDataPointRecords(testVariableValues);
  EXPECT_EQ(4u, testDataPoints.size());

  // find the baseline
  testVariableValues.clear();
  testVariableValues.push_back(0);
  testVariableValues.push_back(QVariant(QVariant::Int)); // only one perturbation, null works too
  testVariableValues.push_back(0);
  ASSERT_TRUE(testVariableValues[1].isNull());
  testDataPoints = analysisRecord.getDataPointRecords(testVariableValues);
  ASSERT_EQ(1u, testDataPoints.size());

  // find model with improved wall and roof
  testVariableValues.clear();
  testVariableValues.push_back(1);
  testVariableValues.push_back(0);
  testVariableValues.push_back(1);
  testDataPoints = analysisRecord.getDataPointRecords(testVariableValues);
  ASSERT_EQ(1u, testDataPoints.size());
  DataPoint testDataPoint = testDataPoints[0].dataPoint();
  std::vector<OptionalDiscretePerturbation> perturbations =
      analysis.problem().getDiscretePerturbations(testVariableValues);
  ASSERT_EQ(3u,perturbations.size());
  ASSERT_TRUE(perturbations[0] && perturbations[1] && perturbations[2]);
  EXPECT_TRUE(perturbations[0]->uuid() == problem.variables()[0].cast<DiscreteVariable>().perturbations(false)[1].uuid());
  EXPECT_TRUE(perturbations[1]->uuid() == problem.variables()[1].cast<DiscreteVariable>().perturbations(false)[0].uuid());
  EXPECT_TRUE(perturbations[2]->uuid() == problem.variables()[2].cast<DiscreteVariable>().perturbations(false)[1].uuid());
  EXPECT_TRUE(perturbations[0]->optionalCast<RubyPerturbation>());
  EXPECT_TRUE(perturbations[1]->optionalCast<RubyPerturbation>());
  EXPECT_TRUE(perturbations[2]->optionalCast<RubyPerturbation>());

  // find models with improved wall
  testVariableValues.clear();
  testVariableValues.push_back(1);
  testDataPoints = analysisRecord.getDataPointRecords(testVariableValues);
  ASSERT_EQ(2u, testDataPoints.size());

  // infeasible
  testVariableValues.clear();
  testVariableValues.push_back(0);
  testVariableValues.push_back(0);
  testVariableValues.push_back(0);
  testVariableValues.push_back(0);
  testDataPoints = analysisRecord.getDataPointRecords(testVariableValues);
  ASSERT_EQ(0u, testDataPoints.size());
}
Code Example #4
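Runs a DDACE Latin hypercube analysis, copies the completed project to a new directory via saveAs, reopens the copy, verifies that results still load from disk, and finally clears all data points and reruns the analysis.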
TEST_F(AnalysisDriverFixture, DDACE_LatinHypercube_MixedOsmIdf_MoveProjectDatabase) {
  openstudio::path oldDir, newDir;
  {
    // GET SIMPLE PROJECT
    SimpleProject project = getCleanSimpleProject("DDACE_LatinHypercube_MixedOsmIdf");
    Analysis analysis = project.analysis();
    analysis.setName("DDACE Latin Hypercube Sampling - MixedOsmIdf");

    // SET PROBLEM
    Problem problem = retrieveProblem("MixedOsmIdf",false,false);
    analysis.setProblem(problem);

    // SET SEED
    Model model = model::exampleModel();
    openstudio::path p = toPath("./example.osm");
    model.save(p,true);
    FileReference seedModel(p);
    analysis.setSeed(seedModel);

    // SET ALGORITHM
    DDACEAlgorithmOptions algOptions(DDACEAlgorithmType::lhs);
    algOptions.setSamples(12); // test reprinting results.out for copies of same point
    DDACEAlgorithm algorithm(algOptions);
    analysis.setAlgorithm(algorithm);

    // RUN ANALYSIS
    AnalysisDriver driver = project.analysisDriver();
    AnalysisRunOptions runOptions = standardRunOptions(project.projectDir());
    CurrentAnalysis currentAnalysis = driver.run(analysis,runOptions);
    EXPECT_TRUE(driver.waitForFinished());
    boost::optional<runmanager::JobErrors> jobErrors = currentAnalysis.dakotaJobErrors();
    ASSERT_TRUE(jobErrors);
    EXPECT_TRUE(jobErrors->errors().empty());
    EXPECT_TRUE(driver.currentAnalyses().empty());
    Table summary = analysis.summaryTable();
    EXPECT_EQ(5u,summary.nRows()); // header row + 4 points (all combinations)
    summary.save(project.projectDir() / toPath("summary.csv"));

    EXPECT_EQ(4u,analysis.dataPoints().size());
    BOOST_FOREACH(const DataPoint& dataPoint,analysis.dataPoints()) {
      EXPECT_TRUE(dataPoint.isComplete());
      EXPECT_FALSE(dataPoint.failed());
      EXPECT_TRUE(dataPoint.workspace()); // should be able to load data from disk
    }

    oldDir = project.projectDir();
    newDir = project.projectDir().parent_path() / toPath("DDACELatinHypercubeMixedOsmIdfCopy");
    // Make copy of project
    boost::filesystem::remove_all(newDir);
    ASSERT_TRUE(project.saveAs(newDir));
  }
  // Blow away old project.
  // TODO: Reinstate. This was failing on Windows and isn't absolutely necessary.
//     try {
//       boost::filesystem::remove_all(oldDir);
//     }
//     catch (std::exception& e) {
//       EXPECT_TRUE(false) << "Boost filesystem was unable to delete the old folder, because " << e.what();
//     }

  // Open new project
  SimpleProject project = getSimpleProject("DDACE_LatinHypercube_MixedOsmIdf_Copy");
  EXPECT_TRUE(project.projectDir() == newDir);
  EXPECT_EQ(toString(newDir),toString(project.projectDir()));

  // After move, should be able to retrieve results.
  EXPECT_FALSE(project.analysisIsLoaded());
  Analysis analysis = project.analysis();
  EXPECT_TRUE(project.analysisIsLoaded());
  EXPECT_EQ(4u,analysis.dataPoints().size());
  BOOST_FOREACH(const DataPoint& dataPoint,analysis.dataPoints()) {
    EXPECT_TRUE(dataPoint.isComplete());
    EXPECT_FALSE(dataPoint.failed());
    LOG(Debug,"Attempting to load workspace for data point at '" << dataPoint.directory() << "'.");
    if (dataPoint.idfInputData()) {
      LOG(Debug,"Says there should be input data at " << toString(dataPoint.idfInputData()->path()));
    }
    EXPECT_TRUE(dataPoint.workspace()); // should be able to load data from disk
    if (!dataPoint.workspace()) {
      LOG(Debug,"Unsuccessful.")
    }
  }

  // Should be able to blow away results and run again
  project.removeAllDataPoints();
  EXPECT_EQ(0u,analysis.dataPoints().size());
  EXPECT_FALSE(analysis.algorithm()->isComplete());
  EXPECT_FALSE(analysis.algorithm()->failed());
  EXPECT_EQ(-1,analysis.algorithm()->iter());
  EXPECT_FALSE(analysis.algorithm()->cast<DakotaAlgorithm>().restartFileReference());
  EXPECT_FALSE(analysis.algorithm()->cast<DakotaAlgorithm>().outFileReference());
  AnalysisRunOptions runOptions = standardRunOptions(project.projectDir());
  AnalysisDriver driver = project.analysisDriver();
  CurrentAnalysis currentAnalysis = driver.run(analysis,runOptions);
  EXPECT_TRUE(driver.waitForFinished());
  boost::optional<runmanager::JobErrors> jobErrors = currentAnalysis.dakotaJobErrors();
  ASSERT_TRUE(jobErrors);
  EXPECT_TRUE(jobErrors->errors().empty());
  EXPECT_TRUE(driver.currentAnalyses().empty());
  Table summary = analysis.summaryTable();
  EXPECT_EQ(5u,summary.nRows()); // header row + 4 points (all combinations)
  summary.save(project.projectDir() / toPath("summary.csv"));

  BOOST_FOREACH(const DataPoint& dataPoint,analysis.dataPoints()) {
    EXPECT_TRUE(dataPoint.isComplete());
    EXPECT_FALSE(dataPoint.failed());
    EXPECT_TRUE(dataPoint.workspace()); // should be able to load data from disk
  }
}
Code Example #5
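Runs DDACE Latin hypercube sampling on a continuous problem. The first run errors out because the number of samples is not set; after setting it, the run succeeds. The project is then reopened, a custom data point is added, and the test verifies that only the new point is run (completed results are not re-simulated).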
TEST_F(AnalysisDriverFixture, DDACE_LatinHypercube_Continuous) {
  {
    // GET SIMPLE PROJECT
    SimpleProject project = getCleanSimpleProject("DDACE_LatinHypercube_Continuous");
    Analysis analysis = project.analysis();

    // SET PROBLEM
    Problem problem = retrieveProblem("Continuous",true,false);
    analysis.setProblem(problem);

    // DEFINE SEED
    Model model = model::exampleModel();
    openstudio::path p = toPath("./example.osm");
    model.save(p,true);
    FileReference seedModel(p);
    analysis.setSeed(seedModel);

    // CREATE ANALYSIS
    DDACEAlgorithmOptions algOptions(DDACEAlgorithmType::lhs);
    DDACEAlgorithm algorithm(algOptions);
    analysis.setAlgorithm(algorithm);

    // RUN ANALYSIS
    AnalysisDriver driver = project.analysisDriver();
    AnalysisRunOptions runOptions = standardRunOptions(project.projectDir());
    CurrentAnalysis currentAnalysis = driver.run(analysis,runOptions);
    EXPECT_TRUE(driver.waitForFinished());
    boost::optional<runmanager::JobErrors> jobErrors = currentAnalysis.dakotaJobErrors();
    ASSERT_TRUE(jobErrors);
    EXPECT_FALSE(jobErrors->errors().empty()); // require specification of number of samples
    EXPECT_TRUE(driver.currentAnalyses().empty());
    Table summary = currentAnalysis.analysis().summaryTable();
    EXPECT_EQ(1u,summary.nRows()); // header row only; no data points

    project.clearAllResults();
    algOptions.setSamples(4);
    EXPECT_EQ(4,analysis.algorithm()->cast<DDACEAlgorithm>().ddaceAlgorithmOptions().samples());
    currentAnalysis = driver.run(analysis,runOptions);
    EXPECT_TRUE(driver.waitForFinished());
    jobErrors = currentAnalysis.dakotaJobErrors();
    ASSERT_TRUE(jobErrors);
    EXPECT_TRUE(jobErrors->errors().empty());
    EXPECT_TRUE(driver.currentAnalyses().empty());
    summary = currentAnalysis.analysis().summaryTable();
    EXPECT_EQ(5u,summary.nRows());
    summary.save(project.projectDir() / toPath("summary.csv"));

    BOOST_FOREACH(const DataPoint& dataPoint,analysis.dataPoints()) {
      EXPECT_TRUE(dataPoint.isComplete());
      EXPECT_FALSE(dataPoint.failed());
      // EXPECT_FALSE(dataPoint.responseValues().empty());
    }

    ASSERT_TRUE(analysis.algorithm());
    EXPECT_TRUE(analysis.algorithm()->isComplete());
    EXPECT_FALSE(analysis.algorithm()->failed());

    {
      AnalysisRecord analysisRecord = project.analysisRecord();
      Analysis analysisCopy = analysisRecord.analysis();
      ASSERT_TRUE(analysisCopy.algorithm());
      EXPECT_TRUE(analysisCopy.algorithm()->isComplete());
      EXPECT_FALSE(analysisCopy.algorithm()->failed());
    }
  }

  LOG(Info,"Restart from existing project.");

  // Get existing project
  SimpleProject project = getSimpleProject("DDACE_LatinHypercube_Continuous");
  EXPECT_FALSE(project.analysisIsLoaded()); // make sure starting fresh
  Analysis analysis = project.analysis();
  EXPECT_FALSE(analysis.isDirty());

  // Add custom data point
  std::vector<QVariant> values;
  values.push_back(0.0);
  values.push_back(0.8);
  values.push_back(int(0));
  OptionalDataPoint dataPoint = analysis.problem().createDataPoint(values);
  ASSERT_TRUE(dataPoint);
  analysis.addDataPoint(*dataPoint);
  EXPECT_EQ(1u,analysis.dataPointsToQueue().size());
  ASSERT_TRUE(analysis.algorithm());
  EXPECT_TRUE(analysis.algorithm()->isComplete());
  EXPECT_FALSE(analysis.algorithm()->failed());
  EXPECT_TRUE(analysis.isDirty());
  EXPECT_FALSE(analysis.resultsAreInvalid());
  EXPECT_FALSE(analysis.dataPointsAreInvalid());

  // get last modified time of a file in a completed data point to make sure nothing is re-run
  DataPointVector completePoints = analysis.completeDataPoints();
  ASSERT_FALSE(completePoints.empty());
  OptionalFileReference inputFileRef = completePoints[0].osmInputData();
  ASSERT_TRUE(inputFileRef);
  QFileInfo inputFileInfo(toQString(inputFileRef->path()));
  QDateTime inputFileModifiedTestTime = inputFileInfo.lastModified();
  EXPECT_EQ(1u,analysis.dataPointsToQueue().size());

  AnalysisDriver driver = project.analysisDriver();
  CurrentAnalysis currentAnalysis = driver.run(
        analysis,
        standardRunOptions(project.projectDir()));
  EXPECT_TRUE(driver.waitForFinished());
  boost::optional<runmanager::JobErrors> jobErrors = currentAnalysis.dakotaJobErrors();
  EXPECT_FALSE(jobErrors); // should not try to re-run DakotaAlgorithm
  EXPECT_TRUE(driver.currentAnalyses().empty());
  EXPECT_TRUE(analysis.dataPointsToQueue().empty());
  Table summary = currentAnalysis.analysis().summaryTable();
  EXPECT_EQ(6u,summary.nRows());
  summary.save(project.projectDir() / toPath("summary_post_restart.csv"));
  // RunManager should not re-run any data points
  EXPECT_EQ(inputFileModifiedTestTime,inputFileInfo.lastModified());
}
Code Example #6
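Verifies conversion of a non-PAT project to a PAT project: after conversion the baseline data point carries two variable values instead of one, and the conversion is saved on first open (hence the analysis is not dirty).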
TEST_F(AnalysisDriverFixture,SimpleProject_NonPATToPATProject) {
  {
    // create project with one discrete variable
    SimpleProject project = getCleanSimpleProject("SimpleProject_NonPATToPATProject");
    openstudio::path measuresDir = resourcesPath() / toPath("/utilities/BCL/Measures");
    openstudio::path dir = measuresDir / toPath("SetWindowToWallRatioByFacade");
    BCLMeasure measure = BCLMeasure::load(dir).get();
    RubyMeasure rmeasure(measure);
    project.analysis().problem().push(MeasureGroup("My Variable",MeasureVector(1u,rmeasure)));
    DataPoint baseline = project.baselineDataPoint();
    EXPECT_EQ(1u,baseline.variableValues().size());
    EXPECT_TRUE(project.analysis().isDirty());
    EXPECT_EQ(1u,project.analysis().dataPoints().size());
    project.save();
  }

  {
    SimpleProject project = getPATProject("SimpleProject_NonPATToPATProject");
    DataPoint baseline = project.baselineDataPoint();
    EXPECT_EQ(2u,baseline.variableValues().size());
    EXPECT_FALSE(project.analysis().isDirty()); // openPATProject calls save in this case
    EXPECT_EQ(1u,project.analysis().dataPoints().size());
    project.save();
  }

  {
    SimpleProject project = getPATProject("SimpleProject_NonPATToPATProject");
    DataPoint baseline = project.baselineDataPoint();
    EXPECT_EQ(2u,baseline.variableValues().size());
    EXPECT_FALSE(project.analysis().isDirty()); // nothing to change in this case
    EXPECT_EQ(1u,project.analysis().dataPoints().size());
  }
}
Code Example #7
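Covers SimpleProject::updateMeasure: a measure is inserted and used in a RubyMeasure with fully specified arguments; updating the measure to a new version with an additional argument updates the stored copy and its registered arguments, leaving the RubyMeasure's arguments incomplete again.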
TEST_F(AnalysisDriverFixture,SimpleProject_UpdateMeasure) {
  // create a new project
  SimpleProject project = getCleanSimpleProject("SimpleProject_UpdateMeasure");
  Problem problem = project.analysis().problem();

  // insert a measure into the project, extract and register its arguments
  openstudio::path measuresDir = resourcesPath() / toPath("/utilities/BCL/Measures");
  openstudio::path dir = measuresDir / toPath("SetWindowToWallRatioByFacade");
  BCLMeasure measure = BCLMeasure::load(dir).get();
  BCLMeasure projectMeasure = project.insertMeasure(measure);
  OSArgumentVector args = argumentGetter->getArguments(projectMeasure,
                                                       project.seedModel(),
                                                       project.seedIdf());
  project.registerArguments(projectMeasure,args);
  EXPECT_EQ(1u,project.measures().size());

  // use the measure to create a new variable/ruby measure
  MeasureGroup dv("New Measure Group",MeasureVector());
  EXPECT_TRUE(problem.push(dv));
  RubyMeasure rp(projectMeasure);
  rp.setArguments(args);
  EXPECT_TRUE(dv.push(rp));
  EXPECT_EQ(args.size(),rp.arguments().size());
  EXPECT_TRUE(rp.hasIncompleteArguments());
  BOOST_FOREACH(const OSArgument& arg,args) {
    if (arg.name() == "wwr") {
      OSArgument temp = arg.clone();
      temp.setValue(0.6);
      rp.setArgument(temp);
    }
    if (arg.name() == "sillHeight") {
      OSArgument temp = arg.clone();
      temp.setValue(1.0);
      rp.setArgument(temp);
    }
    if (arg.name() == "facade") {
      OSArgument temp = arg.clone();
      temp.setValue("South");
      rp.setArgument(temp);
    }
  }
  EXPECT_FALSE(rp.hasIncompleteArguments());

  openstudio::path tempDir = measuresDir / toPath(toString(createUUID()));
  {
    // create fake new version of the measure
    BCLMeasure newVersion = measure.clone(tempDir).get();
    newVersion.incrementVersionId();
    newVersion.save();
    OSArgumentVector newArgs = args;
    newArgs.push_back(OSArgument::makeDoubleArgument("frame_width"));

    // update the measure
    project.updateMeasure(newVersion,newArgs);

    // verify the final state of SimpleProject and RubyMeasure
    EXPECT_EQ(1u,project.measures().size());
    BCLMeasure retrievedMeasure = project.getMeasureByUUID(measure.uuid()).get();
    EXPECT_NE(measure.versionUUID(),retrievedMeasure.versionUUID());
    EXPECT_EQ(newVersion.versionUUID(),retrievedMeasure.versionUUID());
    ASSERT_TRUE(project.hasStoredArguments(retrievedMeasure));
    OSArgumentVector retrievedArgs = project.getStoredArguments(retrievedMeasure);
    EXPECT_EQ(args.size() + 1u,retrievedArgs.size());

    EXPECT_EQ(retrievedArgs.size(),rp.arguments().size());
    EXPECT_TRUE(rp.hasIncompleteArguments());
  }
  boost::filesystem::remove_all(tempDir);
}
Code Example #8
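Shows the intended flow for SimpleProject::insertMeasure: the project makes its own copy of the BCL measure under its scripts directory; the arguments are then extracted with embedded Ruby, registered, retrieved, and the project's copy is used to build a new measure group.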
TEST_F(AnalysisDriverFixture,SimpleProject_InsertMeasure) {
  // create a new project
  SimpleProject project = getCleanSimpleProject("SimpleProject_InsertMeasure");

  // open a measure
  openstudio::path dir = resourcesPath() / toPath("/utilities/BCL/Measures/SetWindowToWallRatioByFacade");
  BCLMeasure measure = BCLMeasure::load(dir).get();

  // insert it into the project
  BCLMeasure projectMeasure = project.insertMeasure(measure);

  // verify that there is a project-specific copy
  EXPECT_NE(toString(measure.directory()),toString(projectMeasure.directory()));
  openstudio::path scriptsDir = project.projectDir() / toPath("scripts");
  EXPECT_EQ(toString(completeAndNormalize(scriptsDir)),
            toString(completeAndNormalize(projectMeasure.directory().parent_path())));
  EXPECT_EQ(measure.uuid(),projectMeasure.uuid());
  EXPECT_EQ(measure.versionUUID(),projectMeasure.versionUUID()); // DLM: should this be not equal?

  // verify that it can be retrieved from the project, but its arguments cannot
  ASSERT_TRUE(project.getMeasureByUUID(measure.uuid()));
  BCLMeasure temp = project.getMeasureByUUID(measure.uuid()).get();
  EXPECT_EQ(toString(projectMeasure.directory()),toString(temp.directory()));
  EXPECT_EQ(projectMeasure.uuid(),temp.uuid());
  EXPECT_EQ(projectMeasure.versionUUID(),temp.versionUUID());
  EXPECT_EQ(1u,project.measures().size());

  // use embedded Ruby to extract the arguments
  // (see AnalysisDriverFixture.cpp for interpreter set-up)
  OSArgumentVector args = argumentGetter->getArguments(projectMeasure,
                                                       project.seedModel(),
                                                       project.seedIdf());
  EXPECT_FALSE(args.empty());

  // register and subsequently retrieve those arguments
  project.registerArguments(measure,args); // doesn't matter which copy of the measure we use
  EXPECT_TRUE(project.hasStoredArguments(temp));
  OSArgumentVector argsCopy = project.getStoredArguments(projectMeasure);
  ASSERT_EQ(args.size(),argsCopy.size());
  for (int i = 0, n = args.size(); i < n; ++i) {
    EXPECT_EQ(args[i].name(),argsCopy[i].name());
    EXPECT_EQ(args[i].type(),argsCopy[i].type());
  }

  // use this measure to create a new variable/ruby measure
  // now it is important to use the copy in the project
  MeasureGroup dv("New Measure Group",MeasureVector());
  Problem problem = project.analysis().problem();
  problem.push(dv);
  // here, expect this test to pass.
  // in pat, if it fails, let user know and call problem.erase(dv).
  EXPECT_TRUE(problem.fileTypesAreCompatible(dv,
                                             projectMeasure.inputFileType(),
                                             projectMeasure.outputFileType()));
  EXPECT_TRUE(dv.push(RubyMeasure(projectMeasure)));
}
Code Example #9
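Runs a single data point through a deliberately buggy BCL measure workflow and checks the per-step job and error information (NA, Success, and Fail results, plus the aggregated tree errors), then reopens the project to verify the same information persists.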
TEST_F(AnalysisDriverFixture,DataPersistence_DataPointErrors) {
  {
    // Create and populate project
    SimpleProject project = getCleanSimpleProject("DataPersistence_DataPointErrors");
    Analysis analysis = project.analysis();
    Problem problem = retrieveProblem(AnalysisDriverFixtureProblem::BuggyBCLMeasure,
                                      true,
                                      false);
    EXPECT_EQ(5u,problem.workflow().size());
    analysis.setProblem(problem);
    model::Model model = fastExampleModel();
    openstudio::path p = toPath("./example.osm");
    model.save(p,true);
    FileReference seedModel(p);
    project.setSeed(seedModel);
    DataPoint dataPoint = problem.createDataPoint(std::vector<QVariant>(problem.numVariables(),0)).get();
    analysis.addDataPoint(dataPoint);

    // Run analysis
    AnalysisRunOptions runOptions = standardRunOptions(project.projectDir());
    project.analysisDriver().run(analysis,runOptions);
    project.analysisDriver().waitForFinished();

    // Check DataPoint job and error information
    ASSERT_EQ(1u,analysis.dataPoints().size());
    dataPoint = analysis.dataPoints()[0];
    EXPECT_TRUE(dataPoint.isComplete());
    EXPECT_TRUE(dataPoint.failed());
    EXPECT_TRUE(dataPoint.topLevelJob());
    WorkflowStepJobVector jobResults = problem.getJobsByWorkflowStep(dataPoint);
    EXPECT_EQ(problem.workflow().size(),jobResults.size());
    ASSERT_EQ(5u,jobResults.size());

    WorkflowStepJob jobResult = jobResults[0];
    ASSERT_TRUE(jobResult.job);
    EXPECT_TRUE(jobResult.measure);
    Job job = jobResult.job.get();
    ASSERT_TRUE(jobResult.mergedJobIndex);
    EXPECT_EQ(0u,jobResult.mergedJobIndex.get());
    EXPECT_FALSE(job.running());
    EXPECT_FALSE(job.outOfDate());
    EXPECT_FALSE(job.canceled());
    EXPECT_TRUE(job.lastRun());
    JobErrors treeErrors = job.treeErrors(); // get all tree errors now, test later
    JobErrors errors = jobResult.errors().get();
    EXPECT_EQ(OSResultValue(OSResultValue::NA),errors.result);
    EXPECT_TRUE(errors.succeeded());
    EXPECT_TRUE(errors.errors().empty());
    EXPECT_TRUE(errors.warnings().empty());
    EXPECT_FALSE(errors.infos().empty());

    jobResult = jobResults[1];
    ASSERT_TRUE(jobResult.job);
    EXPECT_FALSE(jobResult.measure);
    ASSERT_TRUE(jobResult.step.isWorkItem());
    EXPECT_EQ(JobType(JobType::UserScript),jobResult.step.workItemType());
    job = jobResult.job.get();
    ASSERT_TRUE(jobResult.mergedJobIndex);
    EXPECT_EQ(1u,jobResult.mergedJobIndex.get());
    EXPECT_FALSE(job.running());
    EXPECT_FALSE(job.outOfDate());
    EXPECT_FALSE(job.canceled());
    EXPECT_TRUE(job.lastRun());
    errors = jobResult.errors().get();
    EXPECT_EQ(OSResultValue(OSResultValue::Success),errors.result);
    EXPECT_TRUE(errors.succeeded());
    EXPECT_TRUE(errors.errors().empty());
    EXPECT_FALSE(errors.warnings().empty());
    EXPECT_TRUE(errors.infos().empty());

    jobResult = jobResults[2];
    ASSERT_TRUE(jobResult.job);
    EXPECT_TRUE(jobResult.measure);
    job = jobResult.job.get();
    ASSERT_TRUE(jobResult.mergedJobIndex);
    EXPECT_EQ(2u,jobResult.mergedJobIndex.get());
    EXPECT_FALSE(job.running());
    EXPECT_FALSE(job.outOfDate());
    EXPECT_FALSE(job.canceled());
    EXPECT_TRUE(job.lastRun());
    errors = jobResult.errors().get();
    EXPECT_EQ(OSResultValue(OSResultValue::Fail),errors.result);
    EXPECT_FALSE(errors.succeeded());
    EXPECT_FALSE(errors.errors().empty());
    EXPECT_TRUE(errors.warnings().empty());
    EXPECT_TRUE(errors.infos().empty());

    jobResult = jobResults[3];
    ASSERT_TRUE(jobResult.job);
    EXPECT_FALSE(jobResult.measure);
    ASSERT_TRUE(jobResult.step.isWorkItem());
    EXPECT_EQ(JobType(JobType::UserScript),jobResult.step.workItemType());
    job = jobResult.job.get();
    ASSERT_TRUE(jobResult.mergedJobIndex);
    EXPECT_EQ(3u,jobResult.mergedJobIndex.get());
    EXPECT_FALSE(job.running());
    EXPECT_FALSE(job.outOfDate()); // now all four scripts are in same job
    EXPECT_FALSE(job.canceled());
    EXPECT_TRUE(job.lastRun());    // now all four scripts are in same job
    errors = jobResult.errors().get();
    // this script was not actually run, so the result is in its default state
    EXPECT_EQ(OSResultValue(OSResultValue::Fail),errors.result);
    EXPECT_FALSE(errors.succeeded());
    EXPECT_EQ(1u, errors.errors().size());
    EXPECT_TRUE(errors.warnings().empty());
    EXPECT_TRUE(errors.infos().empty());

    jobResult = jobResults[4];
    ASSERT_TRUE(jobResult.job);
    EXPECT_FALSE(jobResult.measure);
    ASSERT_TRUE(jobResult.step.isWorkItem());
    EXPECT_EQ(JobType(JobType::ModelToIdf),jobResult.step.workItemType());
    job = jobResult.job.get();
    EXPECT_FALSE(jobResult.mergedJobIndex);
    EXPECT_TRUE(job.outOfDate()); // never run
    EXPECT_FALSE(job.canceled());
    EXPECT_FALSE(job.lastRun());
    errors = jobResult.errors().get();
    EXPECT_EQ(OSResultValue(OSResultValue::Fail),errors.result);
    EXPECT_FALSE(errors.succeeded());
    EXPECT_TRUE(errors.errors().empty());
    EXPECT_TRUE(errors.warnings().empty());
    EXPECT_TRUE(errors.infos().empty());

    EXPECT_EQ(OSResultValue(OSResultValue::Fail),treeErrors.result);
    EXPECT_FALSE(treeErrors.succeeded());
    EXPECT_FALSE(treeErrors.errors().empty());
    EXPECT_FALSE(treeErrors.warnings().empty());
    EXPECT_FALSE(treeErrors.infos().empty());
  }

  {
    // Re-open project
    SimpleProject project = getSimpleProject("DataPersistence_DataPointErrors");
    Analysis analysis = project.analysis();
    Problem problem = analysis.problem();

    // Verify job and error information still there
    // Check DataPoint job and error information
    ASSERT_EQ(1u,analysis.dataPoints().size());
    DataPoint dataPoint = analysis.dataPoints()[0];
    EXPECT_TRUE(dataPoint.isComplete());
    EXPECT_TRUE(dataPoint.failed());
    EXPECT_TRUE(dataPoint.topLevelJob());
    WorkflowStepJobVector jobResults = problem.getJobsByWorkflowStep(dataPoint);
    EXPECT_EQ(problem.workflow().size(),jobResults.size());
    ASSERT_EQ(5u,jobResults.size());

    WorkflowStepJob jobResult = jobResults[0];
    ASSERT_TRUE(jobResult.job);
    EXPECT_TRUE(jobResult.measure);
    Job job = jobResult.job.get();
    EXPECT_FALSE(job.running());
    EXPECT_FALSE(job.outOfDate());
    EXPECT_FALSE(job.canceled());
    EXPECT_TRUE(job.lastRun());
    JobErrors treeErrors = job.treeErrors(); // get all tree errors now, test later
    JobErrors errors = jobResult.errors().get();
    EXPECT_EQ(OSResultValue(OSResultValue::NA),errors.result);
    EXPECT_TRUE(errors.succeeded());
    EXPECT_TRUE(errors.errors().empty());
    EXPECT_TRUE(errors.warnings().empty());
    EXPECT_FALSE(errors.infos().empty());

    jobResult = jobResults[1];
    ASSERT_TRUE(jobResult.job);
    EXPECT_FALSE(jobResult.measure);
    ASSERT_TRUE(jobResult.step.isWorkItem());
    EXPECT_EQ(JobType(JobType::UserScript),jobResult.step.workItemType());
    job = jobResult.job.get();
    EXPECT_FALSE(job.running());
    EXPECT_FALSE(job.outOfDate());
    EXPECT_FALSE(job.canceled());
    EXPECT_TRUE(job.lastRun());
    errors = jobResult.errors().get();
    EXPECT_EQ(OSResultValue(OSResultValue::Success),errors.result);
    EXPECT_TRUE(errors.succeeded());
    EXPECT_TRUE(errors.errors().empty());
    EXPECT_FALSE(errors.warnings().empty());
    EXPECT_TRUE(errors.infos().empty());

    jobResult = jobResults[2];
    ASSERT_TRUE(jobResult.job);
    EXPECT_TRUE(jobResult.measure);
    job = jobResult.job.get();
    EXPECT_FALSE(job.running());
    EXPECT_FALSE(job.outOfDate());
    EXPECT_FALSE(job.canceled());
    EXPECT_TRUE(job.lastRun());
    errors = jobResult.errors().get();
    EXPECT_EQ(OSResultValue(OSResultValue::Fail),errors.result);
    EXPECT_FALSE(errors.succeeded());
    EXPECT_FALSE(errors.errors().empty());
    EXPECT_TRUE(errors.warnings().empty());
    EXPECT_TRUE(errors.infos().empty());

    jobResult = jobResults[3];
    ASSERT_TRUE(jobResult.job);
    EXPECT_FALSE(jobResult.measure);
    ASSERT_TRUE(jobResult.step.isWorkItem());
    EXPECT_EQ(JobType(JobType::UserScript),jobResult.step.workItemType());
    job = jobResult.job.get();
    EXPECT_FALSE(job.running());
    EXPECT_FALSE(job.outOfDate()); // now all four scripts are in same job
    EXPECT_FALSE(job.canceled());
    EXPECT_TRUE(job.lastRun());    // now all four scripts are in same job
    errors = jobResult.errors().get();
    EXPECT_EQ(OSResultValue(OSResultValue::Fail),errors.result);
    EXPECT_FALSE(errors.succeeded());
    EXPECT_EQ(1u, errors.errors().size());
    EXPECT_TRUE(errors.warnings().empty());
    EXPECT_TRUE(errors.infos().empty());

    jobResult = jobResults[4];
    ASSERT_TRUE(jobResult.job);
    EXPECT_FALSE(jobResult.measure);
    ASSERT_TRUE(jobResult.step.isWorkItem());
    EXPECT_EQ(JobType(JobType::ModelToIdf),jobResult.step.workItemType());
    job = jobResult.job.get();
    EXPECT_TRUE(job.outOfDate()); // never run
    EXPECT_FALSE(job.canceled());
    EXPECT_FALSE(job.lastRun());
    errors = jobResult.errors().get();
    EXPECT_EQ(OSResultValue(OSResultValue::Fail),errors.result);
    EXPECT_FALSE(errors.succeeded());
    EXPECT_TRUE(errors.errors().empty());
    EXPECT_TRUE(errors.warnings().empty());
    EXPECT_TRUE(errors.infos().empty());

    EXPECT_EQ(OSResultValue(OSResultValue::Fail),treeErrors.result);
    EXPECT_FALSE(treeErrors.succeeded());
    EXPECT_FALSE(treeErrors.errors().empty());
    EXPECT_FALSE(treeErrors.warnings().empty());
    EXPECT_FALSE(treeErrors.infos().empty());
  }

}
Code Example #10
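Runs a DDACE analysis without setting the number of samples, so the Dakota job fails; the test then reopens the project and verifies that the Dakota job's error information persists.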
TEST_F(AnalysisDriverFixture,DataPersistence_DakotaErrors) {

  {
    // Create and populate project
    SimpleProject project = getCleanSimpleProject("DataPersistence_DakotaErrors");
    Analysis analysis = project.analysis();
    Problem problem = retrieveProblem(AnalysisDriverFixtureProblem::BuggyBCLMeasure,
                                      true,
                                      false);
    analysis.setProblem(problem);
    model::Model model = model::exampleModel();
    openstudio::path p = toPath("./example.osm");
    model.save(p,true);
    FileReference seedModel(p);
    project.setSeed(seedModel);
    DDACEAlgorithmOptions algOpts(DDACEAlgorithmType::lhs);
    // do not set samples so Dakota job will have errors
    DDACEAlgorithm alg(algOpts);
    analysis.setAlgorithm(alg);

    // Run analysis
    AnalysisRunOptions runOptions = standardRunOptions(project.projectDir());
    project.analysisDriver().run(analysis,runOptions);
    project.analysisDriver().waitForFinished();

    // Check Dakota job and error information
    ASSERT_TRUE(alg.job());
    Job job = alg.job().get();
    EXPECT_FALSE(job.running());
    EXPECT_FALSE(job.outOfDate());
    EXPECT_FALSE(job.canceled());
    EXPECT_TRUE(job.lastRun());
    JobErrors errors = job.errors();
    EXPECT_EQ(OSResultValue(OSResultValue::Fail),errors.result);
    EXPECT_FALSE(errors.succeeded());
    EXPECT_FALSE(errors.errors().empty());
    EXPECT_TRUE(errors.warnings().empty());
    EXPECT_TRUE(errors.infos().empty());
  }

  {
    // Re-open project
    SimpleProject project = getSimpleProject("DataPersistence_DakotaErrors");
    Analysis analysis = project.analysis();
    DDACEAlgorithm alg = analysis.algorithm()->cast<DDACEAlgorithm>();

    // Verify job and error information still there
    ASSERT_TRUE(alg.job());
    Job job = alg.job().get();
    EXPECT_FALSE(job.running());
    EXPECT_FALSE(job.outOfDate());
    EXPECT_FALSE(job.canceled());
    EXPECT_TRUE(job.lastRun());
    JobErrors errors = job.errors();
    EXPECT_EQ(OSResultValue(OSResultValue::Fail),errors.result);
    EXPECT_FALSE(errors.succeeded());
    EXPECT_FALSE(errors.errors().empty());
    EXPECT_TRUE(errors.warnings().empty());
    EXPECT_TRUE(errors.infos().empty());
  }

}
Code Example #11
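Runs a prototype PAT project in the cloud with slim data points, reopens the project to confirm the results persist, then requests detailed results for each data point and verifies they download. (provider is presumably a CloudFixture member; see the fixture set-up.)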
TEST_F(CloudFixture,CloudAnalysisDriver_RunPrototypeProject) {
  {
    // open prototype project
    openstudio::path projectDir = vagrantServerPath().parent_path().parent_path() / 
                                   toPath("prototype/pat/PATTest");  
    SimpleProjectOptions options;
    options.setLogLevel(Debug);
    SimpleProject project = openPATProject(projectDir,options).get();

    // save as into new folder
    projectDir = AnalysisDriverTestLibrary::instance().outputDataDirectory() / toPath("CloudAnalysisDriver_RunPrototypeProject");
    if (boost::filesystem::exists(projectDir)) {
      boost::filesystem::remove_all(projectDir);
    }
    OptionalSimpleProject temp = saveAs(project,projectDir);
    ASSERT_TRUE(temp);
    project = temp.get();

    // run it
    CloudAnalysisDriver driver(provider->session(),project);
    driver.run();
    EXPECT_TRUE(driver.waitForFinished()); // run() is asynchronous; wait before checking results (mirrors the download path below)
    EXPECT_TRUE(driver.lastRunSuccess());

    // check data points
    BOOST_FOREACH(const DataPoint& dataPoint,project.analysis().dataPoints()) {
      EXPECT_TRUE(dataPoint.isComplete());
      EXPECT_TRUE(dataPoint.runType() == DataPointRunType::CloudSlim);
      EXPECT_FALSE(dataPoint.outputAttributes().empty());
      EXPECT_TRUE(dataPoint.directory().empty());
      std::vector<WorkflowStepJob> jobsByStep = project.analysis().problem().getJobsByWorkflowStep(dataPoint);
      unsigned jobCount(0);
      unsigned messageCount(0);
      BOOST_FOREACH(const WorkflowStepJob& jobStep,jobsByStep) {
        if (jobStep.job) {
          ++jobCount;
          messageCount += jobStep.job.get().errors().errors().size();
          messageCount += jobStep.job.get().errors().warnings().size();
          messageCount += jobStep.job.get().errors().infos().size();
          messageCount += jobStep.job.get().errors().initialConditions().size();
          messageCount += jobStep.job.get().errors().finalConditions().size();
        }
      }
      EXPECT_GT(jobCount,0u);
      EXPECT_GT(messageCount,0u);
    }
  }

  {
    // reopen prototype project and make sure data is still there
    openstudio::path projectDir = AnalysisDriverTestLibrary::instance().outputDataDirectory() / toPath("CloudAnalysisDriver_RunPrototypeProject");
    SimpleProjectOptions options;
    options.setLogLevel(Debug);
    SimpleProject project = openPATProject(projectDir,options).get();

    // check data points
    BOOST_FOREACH(const DataPoint& dataPoint,project.analysis().dataPoints()) {
      EXPECT_TRUE(dataPoint.isComplete());
      EXPECT_TRUE(dataPoint.runType() == DataPointRunType::CloudSlim);
      EXPECT_FALSE(dataPoint.outputAttributes().empty());
      EXPECT_TRUE(dataPoint.directory().empty());
      std::vector<WorkflowStepJob> jobsByStep = project.analysis().problem().getJobsByWorkflowStep(dataPoint);
      unsigned jobCount(0);
      unsigned messageCount(0);
      BOOST_FOREACH(const WorkflowStepJob& jobStep,jobsByStep) {
        if (jobStep.job) {
          ++jobCount;
          messageCount += jobStep.job.get().errors().errors().size();
          messageCount += jobStep.job.get().errors().warnings().size();
          messageCount += jobStep.job.get().errors().infos().size();
          messageCount += jobStep.job.get().errors().initialConditions().size();
          messageCount += jobStep.job.get().errors().finalConditions().size();
        }
      }
      EXPECT_GT(jobCount,0u);
      EXPECT_GT(messageCount,0u);
    }

    // now request detailed results
    CloudAnalysisDriver driver(provider->session(),project);
    DataPointVector dataPoints = project.analysis().dataPoints();
    BOOST_FOREACH(DataPoint& dataPoint,dataPoints) {
      driver.requestDownloadDetailedResults(dataPoint);
    }
    driver.waitForFinished();
    EXPECT_TRUE(driver.lastDownloadDetailedResultsSuccess());
    // check outcome
    BOOST_FOREACH(const DataPoint& dataPoint,project.analysis().dataPoints()) {
      EXPECT_TRUE(dataPoint.isComplete());
      EXPECT_TRUE(dataPoint.runType() == DataPointRunType::CloudDetailed);
      EXPECT_FALSE(dataPoint.outputAttributes().empty());
      EXPECT_FALSE(dataPoint.directory().empty());
      EXPECT_TRUE(dataPoint.model());
      if (OptionalModel model = dataPoint.model()) {
        LOG(Debug,"DataPoint '" << dataPoint.name() << "' has " << model->numObjects() 
            << " in its OpenStudio Model.");
      }
    }
  }
}