TEST_F(AnalysisDriverFixture,SimpleProject_Create) {
  // Ensure the fixture data directory exists, then start from a clean project folder.
  openstudio::path fixtureDir = toPath("AnalysisDriverFixtureData");
  if (!boost::filesystem::exists(fixtureDir)) {
    boost::filesystem::create_directory(fixtureDir);
  }
  openstudio::path newProjectDir = fixtureDir / toPath("NewProject");
  boost::filesystem::remove_all(newProjectDir);

  // Creating a SimpleProject should succeed and lay down the expected files on disk.
  OptionalSimpleProject project = SimpleProject::create(newProjectDir);
  ASSERT_TRUE(project);
  EXPECT_TRUE(boost::filesystem::exists(newProjectDir));
  EXPECT_TRUE(boost::filesystem::is_directory(newProjectDir));
  EXPECT_TRUE(boost::filesystem::exists(newProjectDir / toPath("project.osp")));
  EXPECT_TRUE(boost::filesystem::exists(newProjectDir / toPath("run.db")));
  EXPECT_TRUE(boost::filesystem::exists(newProjectDir / toPath("project.log")));

  // A freshly created project wraps an empty analysis: no variables, no algorithm,
  // no data points.
  Analysis analysis = project->analysis();
  EXPECT_EQ(0,analysis.problem().numVariables());
  EXPECT_FALSE(analysis.algorithm());
  EXPECT_EQ(0u,analysis.dataPoints().size());

  // The serialized record mirrors that empty state.
  AnalysisRecord analysisRecord = project->analysisRecord();
  EXPECT_EQ(0u,analysisRecord.problemRecord().inputVariableRecords().size());
  EXPECT_EQ(0u,analysisRecord.dataPointRecords().size());
}
TEST_F(ProjectFixture,Profile_ProblemSave) {
  // Profiles how long it takes to serialize a Problem (100 variables, 0 data
  // points) into a clean ProjectDatabase inside a single transaction.
  Analysis analysis = getAnalysisToRun(100,0);

  // time the process of saving to database
  ptime timerStart = microsec_clock::local_time();
  ProjectDatabase db = getCleanDatabase(toPath("./ProblemSave"));
  ASSERT_TRUE(db.startTransaction());
  ProblemRecord problemRecord = ProblemRecord::factoryFromProblem(analysis.problem(),db);
  db.save();
  ASSERT_TRUE(db.commitTransaction());
  time_duration elapsed = microsec_clock::local_time() - timerStart;

  // Report the measured wall-clock duration.
  std::cout << "Time: " << to_simple_string(elapsed) << std::endl;
}
// Creates (or retrieves) the DataPoint corresponding to a set of variable values
// written by DAKOTA in params, tags it with this algorithm's name and iteration
// number, and returns it. Returns boost::none if the problem cannot construct a
// point from params.
boost::optional<DataPoint> DakotaAlgorithm_Impl::createNextDataPoint(
    Analysis& analysis,const DakotaParametersFile& params)
{
  // Stated precondition: this algorithm instance is the one attached to analysis.
  OS_ASSERT(analysis.algorithm().get() == getPublicObject<DakotaAlgorithm>());
  // TODO: Update iteration counter.
  // NOTE(review): m_iter is used in the tag below but is not incremented in this
  // method (see TODO) — confirm the counter is advanced elsewhere.
  OptionalDataPoint result = analysis.problem().createDataPoint(params,
      getPublicObject<DakotaAlgorithm>());
  if (result) {
    bool added = analysis.addDataPoint(*result);
    if (!added) {
      // get equivalent point already in analysis
      DataPointVector candidates = analysis.getDataPoints(result->variableValues());
      OS_ASSERT(candidates.size() == 1u);
      result = candidates[0];
    }
    // Tag the point as produced by this algorithm in this iteration, e.g. "LHS_1".
    std::stringstream ss;
    ss << name() << "_" << m_iter;
    result->addTag(ss.str());
  }
  return result;
}
// Runs a full-factorial (mesh) DesignOfExperiments over the "MixedOsmIdf" problem
// and verifies that all 4 combinations complete, then exercises looking up
// DataPointRecords by variable values (including null wildcards) and mapping
// variable values back to the perturbations that produced them.
TEST_F(AnalysisDriverFixture, DesignOfExperiments_MeshAnalysis) {
  openstudio::path rubyLibDirPath = openstudio::toPath(rubyLibDir());

  // GET SIMPLE PROJECT
  SimpleProject project = getCleanSimpleProject("DesignOfExperiments_MeshAnalysis");
  Analysis analysis = project.analysis();

  // SET PROBLEM
  Problem problem = retrieveProblem("MixedOsmIdf",false,false);
  analysis.setProblem(problem);

  // SET SEED
  Model model = model::exampleModel();
  openstudio::path p = toPath("./example.osm");
  model.save(p,true);
  FileReference seedModel(p);
  analysis.setSeed(seedModel);

  // SET ALGORITHM
  DesignOfExperimentsOptions algOptions(DesignOfExperimentsType::FullFactorial);
  DesignOfExperiments algorithm(algOptions);
  analysis.setAlgorithm(algorithm);

  // RUN ANALYSIS
  AnalysisDriver driver = project.analysisDriver();
  AnalysisRunOptions runOptions = standardRunOptions(project.projectDir());
  driver.run(analysis,runOptions);
  EXPECT_TRUE(driver.waitForFinished());

  // CHECK RESULTS
  // Full factorial of this problem yields 4 combinations; every point should
  // have completed without failure.
  AnalysisRecord analysisRecord = project.analysisRecord();
  EXPECT_EQ(4,analysisRecord.problemRecord().combinatorialSize(true).get());
  EXPECT_EQ(4u, analysisRecord.dataPointRecords().size());
  BOOST_FOREACH(const DataPointRecord& dataPointRecord, analysisRecord.dataPointRecords()) {
    EXPECT_TRUE(dataPointRecord.isComplete());
    EXPECT_FALSE(dataPointRecord.failed());
  }

  // get data points by perturbations and vice versa
  std::vector<DataPointRecord> testDataPoints;
  std::vector<QVariant> testVariableValues;

  // all data points are successful
  testDataPoints = analysisRecord.successfulDataPointRecords();
  EXPECT_EQ(4u,testDataPoints.size());

  // empty variableValues returns all data points
  testDataPoints = analysisRecord.getDataPointRecords(testVariableValues);
  EXPECT_EQ(4u, testDataPoints.size());

  // find the baseline
  testVariableValues.clear();
  testVariableValues.push_back(0);
  testVariableValues.push_back(QVariant(QVariant::Int)); // only one perturbation, null works too
  testVariableValues.push_back(0);
  ASSERT_TRUE(testVariableValues[1].isNull());
  testDataPoints = analysisRecord.getDataPointRecords(testVariableValues);
  ASSERT_EQ(1u, testDataPoints.size());

  // find model with improved wall and roof
  testVariableValues.clear();
  testVariableValues.push_back(1);
  testVariableValues.push_back(0);
  testVariableValues.push_back(1);
  testDataPoints = analysisRecord.getDataPointRecords(testVariableValues);
  ASSERT_EQ(1u, testDataPoints.size());
  DataPoint testDataPoint = testDataPoints[0].dataPoint();
  // Map the variable values back to the discrete perturbations that produced
  // them, and check they match the problem definition by UUID.
  std::vector<OptionalDiscretePerturbation> perturbations =
      analysis.problem().getDiscretePerturbations(testVariableValues);
  ASSERT_EQ(3u,perturbations.size());
  ASSERT_TRUE(perturbations[0] && perturbations[1] && perturbations[2]);
  EXPECT_TRUE(perturbations[0]->uuid() == problem.variables()[0].cast<DiscreteVariable>().perturbations(false)[1].uuid());
  EXPECT_TRUE(perturbations[1]->uuid() == problem.variables()[1].cast<DiscreteVariable>().perturbations(false)[0].uuid());
  EXPECT_TRUE(perturbations[2]->uuid() == problem.variables()[2].cast<DiscreteVariable>().perturbations(false)[1].uuid());
  EXPECT_TRUE(perturbations[0]->optionalCast<RubyPerturbation>());
  EXPECT_TRUE(perturbations[1]->optionalCast<RubyPerturbation>());
  EXPECT_TRUE(perturbations[2]->optionalCast<RubyPerturbation>());

  // find models with improved wall
  testVariableValues.clear();
  testVariableValues.push_back(1);
  testDataPoints = analysisRecord.getDataPointRecords(testVariableValues);
  ASSERT_EQ(2u, testDataPoints.size());

  // infeasible (more values than variables) — should match nothing
  testVariableValues.clear();
  testVariableValues.push_back(0);
  testVariableValues.push_back(0);
  testVariableValues.push_back(0);
  testVariableValues.push_back(0);
  testDataPoints = analysisRecord.getDataPointRecords(testVariableValues);
  ASSERT_EQ(0u, testDataPoints.size());
}
// Verifies DDACE Latin hypercube sampling over continuous variables:
// (1) a run with no sample count set fails with DAKOTA errors and produces no
//     points; (2) after setting samples=4 the run completes with 4 points;
// (3) reopening the project and adding one custom point re-runs only that point,
//     without re-invoking DAKOTA or touching completed points' files.
TEST_F(AnalysisDriverFixture, DDACE_LatinHypercube_Continuous) {
  {
    // GET SIMPLE PROJECT
    SimpleProject project = getCleanSimpleProject("DDACE_LatinHypercube_Continuous");
    Analysis analysis = project.analysis();

    // SET PROBLEM
    Problem problem = retrieveProblem("Continuous",true,false);
    analysis.setProblem(problem);

    // DEFINE SEED
    Model model = model::exampleModel();
    openstudio::path p = toPath("./example.osm");
    model.save(p,true);
    FileReference seedModel(p);
    analysis.setSeed(seedModel);

    // CREATE ANALYSIS
    DDACEAlgorithmOptions algOptions(DDACEAlgorithmType::lhs);
    DDACEAlgorithm algorithm(algOptions);
    analysis.setAlgorithm(algorithm);

    // RUN ANALYSIS -- first attempt, with no sample count specified.
    AnalysisDriver driver = project.analysisDriver();
    AnalysisRunOptions runOptions = standardRunOptions(project.projectDir());
    CurrentAnalysis currentAnalysis = driver.run(analysis,runOptions);
    EXPECT_TRUE(driver.waitForFinished());
    boost::optional<runmanager::JobErrors> jobErrors = currentAnalysis.dakotaJobErrors();
    ASSERT_TRUE(jobErrors);
    EXPECT_FALSE(jobErrors->errors().empty()); // require specification of number of samples
    EXPECT_TRUE(driver.currentAnalyses().empty());
    Table summary = currentAnalysis.analysis().summaryTable();
    EXPECT_EQ(1u,summary.nRows()); // no points

    // Second attempt: clear results, set the sample count, and re-run.
    project.clearAllResults();
    algOptions.setSamples(4);
    EXPECT_EQ(4,analysis.algorithm()->cast<DDACEAlgorithm>().ddaceAlgorithmOptions().samples());
    currentAnalysis = driver.run(analysis,runOptions);
    EXPECT_TRUE(driver.waitForFinished());
    jobErrors = currentAnalysis.dakotaJobErrors();
    ASSERT_TRUE(jobErrors);
    EXPECT_TRUE(jobErrors->errors().empty());
    EXPECT_TRUE(driver.currentAnalyses().empty());
    summary = currentAnalysis.analysis().summaryTable();
    EXPECT_EQ(5u,summary.nRows()); // header row + 4 sample points
    summary.save(project.projectDir() / toPath("summary.csv"));

    // Every sampled point should have completed successfully.
    BOOST_FOREACH(const DataPoint& dataPoint,analysis.dataPoints()) {
      EXPECT_TRUE(dataPoint.isComplete());
      EXPECT_FALSE(dataPoint.failed());
      // EXPECT_FALSE(dataPoint.responseValues().empty());
    }
    ASSERT_TRUE(analysis.algorithm());
    EXPECT_TRUE(analysis.algorithm()->isComplete());
    EXPECT_FALSE(analysis.algorithm()->failed());

    // The algorithm's completed state should round-trip through the database.
    {
      AnalysisRecord analysisRecord = project.analysisRecord();
      Analysis analysisCopy = analysisRecord.analysis();
      ASSERT_TRUE(analysisCopy.algorithm());
      EXPECT_TRUE(analysisCopy.algorithm()->isComplete());
      EXPECT_FALSE(analysisCopy.algorithm()->failed());
    }
  }

  LOG(Info,"Restart from existing project.");

  // Get existing project
  SimpleProject project = getSimpleProject("DDACE_LatinHypercube_Continuous");
  EXPECT_FALSE(project.analysisIsLoaded()); // make sure starting fresh
  Analysis analysis = project.analysis();
  EXPECT_FALSE(analysis.isDirty());

  // Add custom data point
  std::vector<QVariant> values;
  values.push_back(0.0);
  values.push_back(0.8);
  values.push_back(int(0));
  OptionalDataPoint dataPoint = analysis.problem().createDataPoint(values);
  ASSERT_TRUE(dataPoint);
  analysis.addDataPoint(*dataPoint);
  EXPECT_EQ(1u,analysis.dataPointsToQueue().size());
  ASSERT_TRUE(analysis.algorithm());
  EXPECT_TRUE(analysis.algorithm()->isComplete());
  EXPECT_FALSE(analysis.algorithm()->failed());
  EXPECT_TRUE(analysis.isDirty());
  EXPECT_FALSE(analysis.resultsAreInvalid());
  EXPECT_FALSE(analysis.dataPointsAreInvalid());

  // get last modified time of a file in a completed data point to make sure nothing is re-run
  DataPointVector completePoints = analysis.completeDataPoints();
  ASSERT_FALSE(completePoints.empty());
  OptionalFileReference inputFileRef = completePoints[0].osmInputData();
  ASSERT_TRUE(inputFileRef);
  QFileInfo inputFileInfo(toQString(inputFileRef->path()));
  QDateTime inputFileModifiedTestTime = inputFileInfo.lastModified();

  // Run again: only the one queued custom point should execute.
  EXPECT_EQ(1u,analysis.dataPointsToQueue().size());
  AnalysisDriver driver = project.analysisDriver();
  CurrentAnalysis currentAnalysis = driver.run(
      analysis,
      standardRunOptions(project.projectDir()));
  EXPECT_TRUE(driver.waitForFinished());
  boost::optional<runmanager::JobErrors> jobErrors = currentAnalysis.dakotaJobErrors();
  EXPECT_FALSE(jobErrors); // should not try to re-run DakotaAlgorithm
  EXPECT_TRUE(driver.currentAnalyses().empty());
  EXPECT_TRUE(analysis.dataPointsToQueue().empty());
  Table summary = currentAnalysis.analysis().summaryTable();
  EXPECT_EQ(6u,summary.nRows()); // previous 5 rows plus the custom point
  summary.save(project.projectDir() / toPath("summary_post_restart.csv"));

  // RunManager should not re-run any data points
  EXPECT_EQ(inputFileModifiedTestTime,inputFileInfo.lastModified());
}
// Test not yet to scale re: total data points.
// Profiles the time needed to re-save an AnalysisRecord after one DataPoint is
// replaced with a completed version carrying fabricated response values and
// output file references.
TEST_F(ProjectFixture,Profile_UpdateAnalysis) {
  Analysis analysis = getAnalysisToRun(100,500);

  // save to database
  ProjectDatabase db = getCleanDatabase(toPath("./UpdateAnalysis"));
  ASSERT_TRUE(db.startTransaction());
  AnalysisRecord record(analysis,db);
  db.save();
  ASSERT_TRUE(db.commitTransaction());

  // add output data to 1 data point
  DataPointVector dataPoints = analysis.dataPoints();
  boost::mt19937 mt;
  typedef boost::uniform_real<> uniform_dist_type;
  typedef boost::variate_generator<boost::mt19937&, uniform_dist_type> uniform_gen_type;
  uniform_gen_type responseGenerator(mt,uniform_dist_type(50.0,500.0));
  for (int i = 0; i < 1; ++i) {
    std::stringstream ss;
    ss << "dataPoint" << i + 1;
    // Fabricate one random response value per problem response.
    DoubleVector responseValues;
    for (int j = 0, n = analysis.problem().responses().size(); j < n; ++j) {
      responseValues.push_back(responseGenerator());
    }
    openstudio::path runDir = toPath(ss.str());
    // Rebuild the DataPoint as complete (complete=true, failed=false) with a
    // fresh version UUID, the fabricated responses, and the file references a
    // local run would have produced.
    dataPoints[i] = DataPoint(dataPoints[i].uuid(),
                              createUUID(),
                              dataPoints[i].name(),
                              dataPoints[i].displayName(),
                              dataPoints[i].description(),
                              analysis.problem(),
                              true,
                              false,
                              true,
                              DataPointRunType::Local,
                              dataPoints[i].variableValues(),
                              responseValues,
                              runDir,
                              FileReference(runDir / toPath("ModelToIdf/in.osm")),
                              FileReference(runDir / toPath("ModelToIdf/out.idf")),
                              FileReference(runDir / toPath("EnergyPlus/eplusout.sql")),
                              FileReferenceVector(1u,FileReference(runDir / toPath("Ruby/report.xml"))),
                              boost::optional<runmanager::Job>(),
                              std::vector<openstudio::path>(),
                              TagVector(),
                              AttributeVector());
    dataPoints[i].setName(dataPoints[i].name()); // set dirty
  }
  // Rebuild the Analysis around the modified data point vector.
  analysis = Analysis(analysis.uuid(),
                      analysis.versionUUID(),
                      analysis.name(),
                      analysis.displayName(),
                      analysis.description(),
                      analysis.problem(),
                      analysis.algorithm(),
                      analysis.seed(),
                      analysis.weatherFile(),
                      dataPoints,
                      false,
                      false);
  analysis.setName(analysis.name()); // set dirty

  // time the process of updating the database
  ptime start = microsec_clock::local_time();
  db.unloadUnusedCleanRecords();
  ASSERT_TRUE(db.startTransaction());
  record = AnalysisRecord(analysis,db);
  db.save();
  ASSERT_TRUE(db.commitTransaction());
  time_duration updateTime = microsec_clock::local_time() - start;
  std::cout << "Time: " << to_simple_string(updateTime) << std::endl;
}
// Runs the deliberately buggy BCL-measure problem against one DataPoint and
// checks, job by job, that success/NA/fail results, errors, warnings, and infos
// are reported correctly — then re-opens the project and verifies the same job
// and error information was persisted.
TEST_F(AnalysisDriverFixture,DataPersistence_DataPointErrors) {
  {
    // Create and populate project
    SimpleProject project = getCleanSimpleProject("DataPersistence_DataPointErrors");
    Analysis analysis = project.analysis();
    Problem problem = retrieveProblem(AnalysisDriverFixtureProblem::BuggyBCLMeasure,
                                      true,
                                      false);
    EXPECT_EQ(5u,problem.workflow().size());
    analysis.setProblem(problem);
    model::Model model = fastExampleModel();
    openstudio::path p = toPath("./example.osm");
    model.save(p,true);
    FileReference seedModel(p);
    project.setSeed(seedModel);
    DataPoint dataPoint = problem.createDataPoint(std::vector<QVariant>(problem.numVariables(),0)).get();
    analysis.addDataPoint(dataPoint);

    // Run analysis
    AnalysisRunOptions runOptions = standardRunOptions(project.projectDir());
    project.analysisDriver().run(analysis,runOptions);
    project.analysisDriver().waitForFinished();

    // Check DataPoint job and error information
    ASSERT_EQ(1u,analysis.dataPoints().size());
    dataPoint = analysis.dataPoints()[0];
    EXPECT_TRUE(dataPoint.isComplete());
    EXPECT_TRUE(dataPoint.failed());
    EXPECT_TRUE(dataPoint.topLevelJob());
    WorkflowStepJobVector jobResults = problem.getJobsByWorkflowStep(dataPoint);
    EXPECT_EQ(problem.workflow().size(),jobResults.size());
    ASSERT_EQ(5u,jobResults.size());

    // Step 0: measure job ran and reported NA with info messages only.
    WorkflowStepJob jobResult = jobResults[0];
    ASSERT_TRUE(jobResult.job);
    EXPECT_TRUE(jobResult.measure);
    Job job = jobResult.job.get();
    ASSERT_TRUE(jobResult.mergedJobIndex);
    EXPECT_EQ(0u,jobResult.mergedJobIndex.get());
    EXPECT_FALSE(job.running());
    EXPECT_FALSE(job.outOfDate());
    EXPECT_FALSE(job.canceled());
    EXPECT_TRUE(job.lastRun());
    JobErrors treeErrors = job.treeErrors(); // get all tree errors now, test later
    JobErrors errors = jobResult.errors().get();
    EXPECT_EQ(OSResultValue(OSResultValue::NA),errors.result);
    EXPECT_TRUE(errors.succeeded());
    EXPECT_TRUE(errors.errors().empty());
    EXPECT_TRUE(errors.warnings().empty());
    EXPECT_FALSE(errors.infos().empty());

    // Step 1: user script succeeded but produced warnings.
    jobResult = jobResults[1];
    ASSERT_TRUE(jobResult.job);
    EXPECT_FALSE(jobResult.measure);
    ASSERT_TRUE(jobResult.step.isWorkItem());
    EXPECT_EQ(JobType(JobType::UserScript),jobResult.step.workItemType());
    job = jobResult.job.get();
    ASSERT_TRUE(jobResult.mergedJobIndex);
    EXPECT_EQ(1u,jobResult.mergedJobIndex.get());
    EXPECT_FALSE(job.running());
    EXPECT_FALSE(job.outOfDate());
    EXPECT_FALSE(job.canceled());
    EXPECT_TRUE(job.lastRun());
    errors = jobResult.errors().get();
    EXPECT_EQ(OSResultValue(OSResultValue::Success),errors.result);
    EXPECT_TRUE(errors.succeeded());
    EXPECT_TRUE(errors.errors().empty());
    EXPECT_FALSE(errors.warnings().empty());
    EXPECT_TRUE(errors.infos().empty());

    // Step 2: measure job failed with error messages.
    jobResult = jobResults[2];
    ASSERT_TRUE(jobResult.job);
    EXPECT_TRUE(jobResult.measure);
    job = jobResult.job.get();
    ASSERT_TRUE(jobResult.mergedJobIndex);
    EXPECT_EQ(2u,jobResult.mergedJobIndex.get());
    EXPECT_FALSE(job.running());
    EXPECT_FALSE(job.outOfDate());
    EXPECT_FALSE(job.canceled());
    EXPECT_TRUE(job.lastRun());
    errors = jobResult.errors().get();
    EXPECT_EQ(OSResultValue(OSResultValue::Fail),errors.result);
    EXPECT_FALSE(errors.succeeded());
    EXPECT_FALSE(errors.errors().empty());
    EXPECT_TRUE(errors.warnings().empty());
    EXPECT_TRUE(errors.infos().empty());

    // Step 3: merged into the same job as the earlier scripts; not actually run.
    jobResult = jobResults[3];
    ASSERT_TRUE(jobResult.job);
    EXPECT_FALSE(jobResult.measure);
    ASSERT_TRUE(jobResult.step.isWorkItem());
    EXPECT_EQ(JobType(JobType::UserScript),jobResult.step.workItemType());
    job = jobResult.job.get();
    ASSERT_TRUE(jobResult.mergedJobIndex);
    EXPECT_EQ(3u,jobResult.mergedJobIndex.get());
    EXPECT_FALSE(job.running());
    EXPECT_FALSE(job.outOfDate()); // now all four scripts are in same job
    EXPECT_FALSE(job.canceled());
    EXPECT_TRUE(job.lastRun()); // now all four scripts are in same job
    errors = jobResult.errors().get();
    // this script not actually run, so result in default state
    EXPECT_EQ(OSResultValue(OSResultValue::Fail),errors.result);
    EXPECT_FALSE(errors.succeeded());
    EXPECT_EQ(1u, errors.errors().size());
    EXPECT_TRUE(errors.warnings().empty());
    EXPECT_TRUE(errors.infos().empty());

    // Step 4: ModelToIdf job never ran because an upstream step failed.
    jobResult = jobResults[4];
    ASSERT_TRUE(jobResult.job);
    EXPECT_FALSE(jobResult.measure);
    ASSERT_TRUE(jobResult.step.isWorkItem());
    EXPECT_EQ(JobType(JobType::ModelToIdf),jobResult.step.workItemType());
    job = jobResult.job.get();
    EXPECT_FALSE(jobResult.mergedJobIndex);
    EXPECT_TRUE(job.outOfDate()); // never run
    EXPECT_FALSE(job.canceled());
    EXPECT_FALSE(job.lastRun());
    errors = jobResult.errors().get();
    EXPECT_EQ(OSResultValue(OSResultValue::Fail),errors.result);
    EXPECT_FALSE(errors.succeeded());
    EXPECT_TRUE(errors.errors().empty());
    EXPECT_TRUE(errors.warnings().empty());
    EXPECT_TRUE(errors.infos().empty());

    // Tree-level errors aggregate messages from all jobs.
    EXPECT_EQ(OSResultValue(OSResultValue::Fail),treeErrors.result);
    EXPECT_FALSE(treeErrors.succeeded());
    EXPECT_FALSE(treeErrors.errors().empty());
    EXPECT_FALSE(treeErrors.warnings().empty());
    EXPECT_FALSE(treeErrors.infos().empty());
  }

  {
    // Re-open project
    SimpleProject project = getSimpleProject("DataPersistence_DataPointErrors");
    Analysis analysis = project.analysis();
    Problem problem = analysis.problem();

    // Verify job and error information still there
    // Check DataPoint job and error information
    ASSERT_EQ(1u,analysis.dataPoints().size());
    DataPoint dataPoint = analysis.dataPoints()[0];
    EXPECT_TRUE(dataPoint.isComplete());
    EXPECT_TRUE(dataPoint.failed());
    EXPECT_TRUE(dataPoint.topLevelJob());
    WorkflowStepJobVector jobResults = problem.getJobsByWorkflowStep(dataPoint);
    EXPECT_EQ(problem.workflow().size(),jobResults.size());
    ASSERT_EQ(5u,jobResults.size());

    // Step 0 (persisted): NA result with infos.
    WorkflowStepJob jobResult = jobResults[0];
    ASSERT_TRUE(jobResult.job);
    EXPECT_TRUE(jobResult.measure);
    Job job = jobResult.job.get();
    EXPECT_FALSE(job.running());
    EXPECT_FALSE(job.outOfDate());
    EXPECT_FALSE(job.canceled());
    EXPECT_TRUE(job.lastRun());
    JobErrors treeErrors = job.treeErrors(); // get all tree errors now, test later
    JobErrors errors = jobResult.errors().get();
    EXPECT_EQ(OSResultValue(OSResultValue::NA),errors.result);
    EXPECT_TRUE(errors.succeeded());
    EXPECT_TRUE(errors.errors().empty());
    EXPECT_TRUE(errors.warnings().empty());
    EXPECT_FALSE(errors.infos().empty());

    // Step 1 (persisted): success with warnings.
    jobResult = jobResults[1];
    ASSERT_TRUE(jobResult.job);
    EXPECT_FALSE(jobResult.measure);
    ASSERT_TRUE(jobResult.step.isWorkItem());
    EXPECT_EQ(JobType(JobType::UserScript),jobResult.step.workItemType());
    job = jobResult.job.get();
    EXPECT_FALSE(job.running());
    EXPECT_FALSE(job.outOfDate());
    EXPECT_FALSE(job.canceled());
    EXPECT_TRUE(job.lastRun());
    errors = jobResult.errors().get();
    EXPECT_EQ(OSResultValue(OSResultValue::Success),errors.result);
    EXPECT_TRUE(errors.succeeded());
    EXPECT_TRUE(errors.errors().empty());
    EXPECT_FALSE(errors.warnings().empty());
    EXPECT_TRUE(errors.infos().empty());

    // Step 2 (persisted): failure with errors.
    jobResult = jobResults[2];
    ASSERT_TRUE(jobResult.job);
    EXPECT_TRUE(jobResult.measure);
    job = jobResult.job.get();
    EXPECT_FALSE(job.running());
    EXPECT_FALSE(job.outOfDate());
    EXPECT_FALSE(job.canceled());
    EXPECT_TRUE(job.lastRun());
    errors = jobResult.errors().get();
    EXPECT_EQ(OSResultValue(OSResultValue::Fail),errors.result);
    EXPECT_FALSE(errors.succeeded());
    EXPECT_FALSE(errors.errors().empty());
    EXPECT_TRUE(errors.warnings().empty());
    EXPECT_TRUE(errors.infos().empty());

    // Step 3 (persisted): merged script that never ran; default Fail state.
    jobResult = jobResults[3];
    ASSERT_TRUE(jobResult.job);
    EXPECT_FALSE(jobResult.measure);
    ASSERT_TRUE(jobResult.step.isWorkItem());
    EXPECT_EQ(JobType(JobType::UserScript),jobResult.step.workItemType());
    job = jobResult.job.get();
    EXPECT_FALSE(job.running());
    EXPECT_FALSE(job.outOfDate()); // now all four scripts are in same job
    EXPECT_FALSE(job.canceled());
    EXPECT_TRUE(job.lastRun()); // now all four scripts are in same job
    errors = jobResult.errors().get();
    EXPECT_EQ(OSResultValue(OSResultValue::Fail),errors.result);
    EXPECT_FALSE(errors.succeeded());
    EXPECT_EQ(1u, errors.errors().size());
    EXPECT_TRUE(errors.warnings().empty());
    EXPECT_TRUE(errors.infos().empty());

    // Step 4 (persisted): ModelToIdf never ran.
    jobResult = jobResults[4];
    ASSERT_TRUE(jobResult.job);
    EXPECT_FALSE(jobResult.measure);
    ASSERT_TRUE(jobResult.step.isWorkItem());
    EXPECT_EQ(JobType(JobType::ModelToIdf),jobResult.step.workItemType());
    job = jobResult.job.get();
    EXPECT_TRUE(job.outOfDate()); // never run
    EXPECT_FALSE(job.canceled());
    EXPECT_FALSE(job.lastRun());
    errors = jobResult.errors().get();
    EXPECT_EQ(OSResultValue(OSResultValue::Fail),errors.result);
    EXPECT_FALSE(errors.succeeded());
    EXPECT_TRUE(errors.errors().empty());
    EXPECT_TRUE(errors.warnings().empty());
    EXPECT_TRUE(errors.infos().empty());

    // Tree-level error aggregation also survives the round trip.
    EXPECT_EQ(OSResultValue(OSResultValue::Fail),treeErrors.result);
    EXPECT_FALSE(treeErrors.succeeded());
    EXPECT_FALSE(treeErrors.errors().empty());
    EXPECT_FALSE(treeErrors.warnings().empty());
    EXPECT_FALSE(treeErrors.infos().empty());
  }
}
// Exercises swapping a new Problem into an Analysis: dirty flags must bubble up,
// existing DataPoints must be marked invalid, and both the invalid and the
// cleaned-up states must round-trip through the ProjectDatabase.
TEST_F(ProjectFixture,AnalysisRecord_SetProblem) {
  // create an analysis with data points
  Problem problem1("Minimal Problem",VariableVector(),runmanager::Workflow());
  Analysis analysis("Analysis",problem1,FileReferenceType::OSM);
  OptionalDataPoint dataPoint = problem1.createDataPoint(std::vector<QVariant>());
  ASSERT_TRUE(dataPoint);
  bool test = analysis.addDataPoint(*dataPoint);
  EXPECT_TRUE(test);

  // save to database, make sure everything is there, make sure clean signal filters down
  ProjectDatabase database = getCleanDatabase("AnalysisRecord_SetProblem");
  {
    bool transactionStarted = database.startTransaction();
    EXPECT_TRUE(transactionStarted);
    AnalysisRecord analysisRecord(analysis,database);
    database.save();
    test = database.commitTransaction();
    EXPECT_TRUE(test);
    ASSERT_NO_THROW(analysisRecord.problemRecord());
    ProblemRecord problemRecord = analysisRecord.problemRecord();
    EXPECT_EQ(problem1.uuid(),problemRecord.handle());
    EXPECT_EQ(problem1.versionUUID(),problemRecord.uuidLast());
    EXPECT_EQ(1u,analysisRecord.dataPointRecords().size());
  }
  analysis.clearDirtyFlag();
  EXPECT_FALSE(analysis.isDirty());
  EXPECT_FALSE(problem1.isDirty());
  BOOST_FOREACH(const DataPoint& dataPoint, analysis.dataPoints()) {
    EXPECT_FALSE(dataPoint.isDirty());
  }
  EXPECT_FALSE(analysis.dataPointsAreInvalid());

  // set analysis to have new problem
  // make sure dirty flag bubbles up, data points marked invalid
  Problem problem2 = problem1.clone().cast<Problem>();
  problem2.setName("Real Problem");
  EXPECT_NE(problem1.uuid(),problem2.uuid());
  analysis.setProblem(problem2);
  EXPECT_TRUE(analysis.isDirty());
  EXPECT_FALSE(problem1.isDirty());
  EXPECT_TRUE(problem2.isDirty());
  EXPECT_TRUE(analysis.dataPointsAreInvalid());
  RubyPerturbation userScript(toPath("measure.rb"),
                              FileReferenceType::IDF,
                              FileReferenceType::IDF,
                              true);
  RubyContinuousVariable cvar("Script Argument Variable",
                              ruleset::OSArgument::makeDoubleArgument("COP"),
                              userScript);
  test = problem2.push(cvar);
  EXPECT_FALSE(test); // IDF not compatible with seed
  EXPECT_TRUE(problem2.variables().empty());
  test = userScript.setPerturbationScript(toPath("measure.rb"),
                                          FileReferenceType::OSM,
                                          FileReferenceType::OSM,
                                          true);
  EXPECT_TRUE(test);
  test = problem2.push(cvar);
  EXPECT_TRUE(test);
  EXPECT_EQ(1u,analysis.problem().variables().size());
  EXPECT_EQ(1u,analysis.dataPoints().size());
  dataPoint = problem2.createDataPoint(std::vector<QVariant>(1u,3.5));
  ASSERT_TRUE(dataPoint);
  test = analysis.addDataPoint(*dataPoint);
  EXPECT_FALSE(test); // presumably rejected while dataPointsAreInvalid — confirm

  // save to database, make sure dataPointsAreInvalid flag was saved, old problem
  // should still be there, new problem there, and clean signal won't go through
  Analysis analysisCopy = analysis; // will replace with deserialized version momentarily
  {
    bool transactionStarted = database.startTransaction();
    EXPECT_TRUE(transactionStarted);
    database.unloadUnusedCleanRecords(); // ETH@20130201 - Having to call this is awkward.
                                         // (As are the brackets) Where is the best place?
    AnalysisRecord analysisRecord(analysis,database);
    database.save();
    test = database.commitTransaction();
    EXPECT_TRUE(test);
    ASSERT_NO_THROW(analysisRecord.problemRecord());
    ProblemRecord problemRecord = analysisRecord.problemRecord();
    EXPECT_EQ(problem2.uuid(),problemRecord.handle());
    EXPECT_EQ(problem2.versionUUID(),problemRecord.uuidLast());
    EXPECT_EQ(2u,ProblemRecord::getProblemRecords(database).size()); // old one still around
    EXPECT_EQ(1u,analysisRecord.dataPointRecords().size());
    test = analysis.clearDirtyFlag();
    EXPECT_FALSE(test);
    // NOTE(review): this declaration shadows the outer analysisCopy, so the outer
    // copy is never replaced by the deserialized version despite the comment
    // above — confirm whether the shadowing is intentional.
    Analysis analysisCopy = analysisRecord.analysis(); // should work b/c both problems around
  }
  EXPECT_EQ(analysis.uuid(),analysisCopy.uuid());
  EXPECT_EQ(analysis.versionUUID(),analysisCopy.versionUUID());
  EXPECT_TRUE(analysisCopy.isDirty());
  EXPECT_TRUE(analysisCopy.dataPointsAreInvalid());
  ASSERT_FALSE(analysisCopy.dataPoints().empty());
  EXPECT_NE(analysisCopy.problem().uuid(),analysisCopy.dataPoints()[0].problem().uuid());
  test = analysisCopy.clearDirtyFlag();
  EXPECT_FALSE(test);

  // remove data points, save again, make sure dataPointsAreInvalid flag a-okay, new problem
  // still there and ok, old problem is going to hang around because didn't get rid of it when
  // analysisRecord still pointing to it, clean signal filters all the way down
  analysisCopy.removeAllDataPoints();
  {
    bool transactionStarted = database.startTransaction();
    EXPECT_TRUE(transactionStarted);
    database.unloadUnusedCleanRecords(); // ETH@20130201 - Having to call this is awkward.
                                         // Where is the best place?
    AnalysisRecord analysisRecord(analysisCopy,database);
    database.save();
    test = database.commitTransaction();
    EXPECT_TRUE(test);
    ASSERT_NO_THROW(analysisRecord.problemRecord());
    ProblemRecord problemRecord = analysisRecord.problemRecord();
    EXPECT_EQ(problem2.uuid(),problemRecord.handle());
    EXPECT_EQ(problem2.versionUUID(),problemRecord.uuidLast());
    // old problem record still hanging around.
    // TODO: Have use count as child, resource, and be able to (selectively) purge.
    EXPECT_EQ(2u,ProblemRecord::getProblemRecords(database).size());
    EXPECT_TRUE(analysisRecord.dataPointRecords().empty());
    analysis = analysisRecord.analysis();
  }
  test = analysis.clearDirtyFlag();
  EXPECT_TRUE(test);
  EXPECT_FALSE(analysis.isDirty());
  EXPECT_FALSE(analysis.dataPointsAreInvalid());
  EXPECT_FALSE(analysis.resultsAreInvalid());
  EXPECT_TRUE(analysis.dataPoints().empty());
}
// Generates the full-factorial mesh of DataPoints for analysis, tags each new
// point "DOE", and adds them to the analysis, honoring the maxIter and maxSims
// options. Returns the number of DataPoints actually added; marks the algorithm
// complete when nothing (more) can be added.
int DesignOfExperiments_Impl::createNextIteration(Analysis& analysis) {
  int result(0);

  // to make sure problem type check has already occurred. this is stated usage in header.
  OS_ASSERT(analysis.algorithm().get() == getPublicObject<DesignOfExperiments>());

  // nothing else is supported yet
  DesignOfExperimentsOptions options = designOfExperimentsOptions();
  OS_ASSERT(options.designType() == DesignOfExperimentsType::FullFactorial);

  if (isComplete()) {
    LOG(Info,"Algorithm is already marked as complete. Returning without creating new points.");
    return result;
  }

  if (options.maxIter() && options.maxIter().get() < 1) {
    LOG(Info,"Maximum iterations set to less than one. No DataPoints will be added to Analysis '"
        << analysis.name() << "', and the Algorithm will be marked complete.");
    markComplete();
    return result;
  }

  // Count the points this algorithm has already contributed (tagged "DOE") and
  // bail out if the maxSims cap is already met.
  OptionalInt mxSim = options.maxSims();
  DataPointVector dataPoints = analysis.getDataPoints("DOE");
  int totPoints = dataPoints.size();
  if (mxSim && (totPoints >= *mxSim)) {
    LOG(Info,"Analysis '" << analysis.name() << "' already contains " << totPoints
        << " DataPoints added by the DesignOfExperiments algorithm, which meets or exceeds the "
        << "maximum number specified in this algorithm's options object, " << *mxSim << ". "
        << "No data points will be added and the Algorithm will be marked complete.");
    markComplete();
    return result;
  }

  // Full factorial is generated in one pass, so the iteration counter is fixed at 1.
  m_iter = 1;

  // determine all combinations
  // Builds the cross product incrementally: for each variable, every existing
  // combination is extended with each of the variable's valid values.
  std::vector< std::vector<QVariant> > variableValues;
  for (const Variable& variable : analysis.problem().variables()) {
    // variable must be DiscreteVariable, otherwise !isCompatibleProblemType(analysis.problem())
    DiscreteVariable discreteVariable = variable.cast<DiscreteVariable>();
    IntVector dvValues = discreteVariable.validValues(true);
    std::vector< std::vector<QVariant> > currentValues = variableValues;
    for (IntVector::const_iterator it = dvValues.begin(), itEnd = dvValues.end();
         it != itEnd; ++it)
    {
      std::vector< std::vector<QVariant> > nextSet = currentValues;
      if (currentValues.empty()) {
        // First variable: seed the combination list with single-value vectors.
        variableValues.push_back(std::vector<QVariant>(1u,QVariant(*it)));
      }
      else {
        // Extend every prior combination with this value; the first value
        // replaces the list, later values append to it.
        for (std::vector<QVariant>& point : nextSet) {
          point.push_back(QVariant(*it));
        }
        if (it == dvValues.begin()) {
          variableValues = nextSet;
        }
        else {
          variableValues.insert(variableValues.end(),nextSet.begin(),nextSet.end());
        }
      }
    }
  }

  // create data points and add to analysis
  for (const std::vector<QVariant>& value : variableValues) {
    DataPoint dataPoint = analysis.problem().createDataPoint(value).get();
    dataPoint.addTag("DOE");
    bool added = analysis.addDataPoint(dataPoint);
    if (added) {
      ++result;
      ++totPoints;
      // Stop as soon as the simulation cap is reached.
      if (mxSim && (totPoints == mxSim.get())) {
        break;
      }
    }
  }

  if (result == 0) {
    LOG(Trace,"No new points were added, so marking this DesignOfExperiments complete.");
    markComplete();
  }

  return result;
}