TEST_F(ProjectFixture, AnalysisRecord_AddAndRemoveDataPoints) {
  // create an analysis with data points, no results
  Analysis analysis("My Analysis",
                    Problem("My Problem", VariableVector(), runmanager::Workflow()),
                    FileReferenceType::OSM);
  Problem problem = analysis.problem();
  DiscretePerturbationVector perturbations;
  int pi = 1;
  std::stringstream ss;
  for (int i = 0; i < 3; ++i) {
    perturbations.push_back(NullPerturbation());
    for (int j = 0; j < 4; ++j) {
      ss << "measure" << pi << ".rb";
      perturbations.push_back(RubyPerturbation(toPath(ss.str()),
                                               FileReferenceType::OSM,
                                               FileReferenceType::OSM,
                                               true));
      ss.str("");
      ++pi;
    }
    ss << "Variable " << i+1;
    problem.push(DiscreteVariable(ss.str(), perturbations));
    perturbations.clear();
    ss.str("");
  }
  EXPECT_EQ(3u, analysis.problem().variables().size());
  ASSERT_FALSE(problem.variables().empty());
  EXPECT_EQ(5u, problem.variables()[0].cast<DiscreteVariable>().numPerturbations(true));
  problem.pushResponse(
      LinearFunction("Energy Use",
                     VariableVector(1u, OutputAttributeVariable("Energy Use", "Total.Energy.Use"))));
  EXPECT_EQ(1u, problem.responses().size());

  std::vector<QVariant> values(3u, 0);
  OptionalDataPoint dataPoint = problem.createDataPoint(values);
  ASSERT_TRUE(dataPoint);
  bool test = analysis.addDataPoint(*dataPoint);
  EXPECT_TRUE(test);
  values[1] = 3;
  dataPoint = problem.createDataPoint(values);
  ASSERT_TRUE(dataPoint);
  test = analysis.addDataPoint(*dataPoint);
  EXPECT_TRUE(test);
  values[0] = 1;
  values[1] = 2;
  values[2] = 4;
  dataPoint = problem.createDataPoint(values);
  ASSERT_TRUE(dataPoint);
  test = analysis.addDataPoint(*dataPoint);
  EXPECT_TRUE(test);
  EXPECT_TRUE(analysis.isDirty());
  EXPECT_TRUE(problem.isDirty());
  BOOST_FOREACH(const Variable& variable, problem.variables()) {
    EXPECT_TRUE(variable.isDirty());
    ASSERT_TRUE(variable.optionalCast<DiscreteVariable>());
    BOOST_FOREACH(const DiscretePerturbation& perturbation, variable.cast<DiscreteVariable>().perturbations(false)) {
      EXPECT_TRUE(perturbation.isDirty());
    }
  }
  BOOST_FOREACH(const Function& response, problem.responses()) {
    EXPECT_TRUE(response.isDirty());
  }
  BOOST_FOREACH(const DataPoint& dataPoint, analysis.dataPoints()) {
    EXPECT_TRUE(dataPoint.isDirty());
  }

  // save to database, make sure everything is there, make sure clean signal filters down
  ProjectDatabase database = getCleanDatabase("AnalysisRecord_AddAndRemoveDataPoints");
  {
    bool transactionStarted = database.startTransaction();
    EXPECT_TRUE(transactionStarted);
    AnalysisRecord analysisRecord(analysis, database);
    database.save();
    test = database.commitTransaction();
    EXPECT_TRUE(test);

    EXPECT_EQ(analysis.uuid(), analysisRecord.handle());
    EXPECT_EQ(analysis.versionUUID(), analysisRecord.uuidLast());
    ASSERT_NO_THROW(analysisRecord.problemRecord());
    ProblemRecord problemRecord = analysisRecord.problemRecord();
    InputVariableRecordVector variableRecords = problemRecord.inputVariableRecords();
    EXPECT_EQ(3u, variableRecords.size());
    ASSERT_FALSE(variableRecords.empty());
    ASSERT_TRUE(variableRecords[0].optionalCast<DiscreteVariableRecord>());
    DiscretePerturbationRecordVector perturbationRecords =
        variableRecords[0].cast<DiscreteVariableRecord>().discretePerturbationRecords(false);
    EXPECT_EQ(5u, perturbationRecords.size());
    ASSERT_TRUE(perturbationRecords.size() > 1);
    EXPECT_TRUE(perturbationRecords[0].optionalCast<NullPerturbationRecord>());
    EXPECT_TRUE(perturbationRecords[1].optionalCast<RubyPerturbationRecord>());
    EXPECT_EQ(3u, analysisRecord.dataPointRecords().size());
    EXPECT_TRUE(analysisRecord.completeDataPointRecords().empty());
  }
  analysis.clearDirtyFlag();
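  // clearing the dirty flag on the analysis is expected to propagate the clean state
  // down to the problem, its variables and perturbations, the responses, and all data points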
  EXPECT_FALSE(analysis.isDirty());
  EXPECT_FALSE(problem.isDirty());
  BOOST_FOREACH(const Variable& variable, problem.variables()) {
    EXPECT_FALSE(variable.isDirty());
    ASSERT_TRUE(variable.optionalCast<DiscreteVariable>());
    BOOST_FOREACH(const DiscretePerturbation& perturbation, variable.cast<DiscreteVariable>().perturbations(false)) {
      EXPECT_FALSE(perturbation.isDirty());
    }
  }
  BOOST_FOREACH(const Function& response, problem.responses()) {
    EXPECT_FALSE(response.isDirty());
  }
  BOOST_FOREACH(const DataPoint& dataPoint, analysis.dataPoints()) {
    EXPECT_FALSE(dataPoint.isDirty());
  }

  // make some more data points and make sure dirty flag bubbles up
  values[0] = 3;
  values[1] = 2;
  values[2] = 2;
  dataPoint = problem.createDataPoint(values);
  ASSERT_TRUE(dataPoint);
  test = analysis.addDataPoint(*dataPoint);
  EXPECT_TRUE(test);
  values[0] = 0;
  values[1] = 4;
  values[2] = 0;
  dataPoint = problem.createDataPoint(values);
  ASSERT_TRUE(dataPoint);
  test = analysis.addDataPoint(*dataPoint);
  EXPECT_TRUE(test);
  EXPECT_EQ(5u, analysis.dataPoints().size());
  EXPECT_TRUE(analysis.isDirty());
  EXPECT_FALSE(problem.isDirty());
  BOOST_FOREACH(const Variable& variable, problem.variables()) {
    EXPECT_FALSE(variable.isDirty());
    ASSERT_TRUE(variable.optionalCast<DiscreteVariable>());
    BOOST_FOREACH(const DiscretePerturbation& perturbation, variable.cast<DiscreteVariable>().perturbations(false)) {
      EXPECT_FALSE(perturbation.isDirty());
    }
  }
  BOOST_FOREACH(const Function& response, problem.responses()) {
    EXPECT_FALSE(response.isDirty());
  }
  int i = 0;
  BOOST_FOREACH(const DataPoint& dataPoint, analysis.dataPoints()) {
    if (i < 3) {
      EXPECT_FALSE(dataPoint.isDirty());
    }
    else {
      EXPECT_TRUE(dataPoint.isDirty());
    }
    ++i;
  }

  // save to database, make sure changes made it in
  {
    bool transactionStarted = database.startTransaction();
    EXPECT_TRUE(transactionStarted);
    database.unloadUnusedCleanRecords(); // ETH@20130201 - Having to call this is awkward.
                                         // Where is the best place?
    AnalysisRecord analysisRecord(analysis, database);
    database.save();
    test = database.commitTransaction();
    EXPECT_TRUE(test);

    EXPECT_EQ(analysis.uuid(), analysisRecord.handle());
    EXPECT_EQ(analysis.versionUUID(), analysisRecord.uuidLast());
    ASSERT_NO_THROW(analysisRecord.problemRecord());
    ProblemRecord problemRecord = analysisRecord.problemRecord();
    EXPECT_EQ(problemRecord.handle(), analysisRecord.problemRecord().handle());
    EXPECT_EQ(problemRecord.uuidLast(), analysisRecord.problemRecord().uuidLast());
    InputVariableRecordVector variableRecords = problemRecord.inputVariableRecords();
    EXPECT_EQ(3u, variableRecords.size());
    ASSERT_FALSE(variableRecords.empty());
    ASSERT_TRUE(variableRecords[0].optionalCast<DiscreteVariableRecord>());
    DiscretePerturbationRecordVector perturbationRecords =
        variableRecords[0].cast<DiscreteVariableRecord>().discretePerturbationRecords(false);
    EXPECT_EQ(5u, perturbationRecords.size());
    ASSERT_TRUE(perturbationRecords.size() > 1);
    EXPECT_TRUE(perturbationRecords[0].optionalCast<NullPerturbationRecord>());
    EXPECT_TRUE(perturbationRecords[1].optionalCast<RubyPerturbationRecord>());
    EXPECT_EQ(5u, analysisRecord.dataPointRecords().size());
    EXPECT_TRUE(analysisRecord.completeDataPointRecords().empty());
  }
  analysis.clearDirtyFlag();
  EXPECT_FALSE(analysis.isDirty());
  EXPECT_FALSE(problem.isDirty());
  BOOST_FOREACH(const Variable& variable, problem.variables()) {
    EXPECT_FALSE(variable.isDirty());
    ASSERT_TRUE(variable.optionalCast<DiscreteVariable>());
    BOOST_FOREACH(const DiscretePerturbation& perturbation, variable.cast<DiscreteVariable>().perturbations(false)) {
      EXPECT_FALSE(perturbation.isDirty());
    }
  }
  BOOST_FOREACH(const Function& response, problem.responses()) {
    EXPECT_FALSE(response.isDirty());
  }
  BOOST_FOREACH(const DataPoint& dataPoint, analysis.dataPoints()) {
    EXPECT_FALSE(dataPoint.isDirty());
  }

  // remove a data point and save database. make sure point actually disappears
  DataPoint toRemove = analysis.dataPoints()[3];
  test = analysis.removeDataPoint(toRemove);
  EXPECT_TRUE(test);
  EXPECT_TRUE(analysis.isDirty());
  {
    bool transactionStarted = database.startTransaction();
    EXPECT_TRUE(transactionStarted);
    database.unloadUnusedCleanRecords(); // ETH@20130201 - Having to call this is awkward.
                                         // Where is the best place?
    AnalysisRecord analysisRecord(analysis, database);
    database.save();
    test = database.commitTransaction();
    EXPECT_TRUE(test);

    EXPECT_EQ(analysis.uuid(), analysisRecord.handle());
    EXPECT_EQ(analysis.versionUUID(), analysisRecord.uuidLast());
    ASSERT_NO_THROW(analysisRecord.problemRecord());
    ProblemRecord problemRecord = analysisRecord.problemRecord();
    EXPECT_EQ(problemRecord.handle(), analysisRecord.problemRecord().handle());
    EXPECT_EQ(problemRecord.uuidLast(), analysisRecord.problemRecord().uuidLast());
    InputVariableRecordVector variableRecords = problemRecord.inputVariableRecords();
    EXPECT_EQ(3u, variableRecords.size());
    ASSERT_FALSE(variableRecords.empty());
    ASSERT_TRUE(variableRecords[0].optionalCast<DiscreteVariableRecord>());
    DiscretePerturbationRecordVector perturbationRecords =
        variableRecords[0].cast<DiscreteVariableRecord>().discretePerturbationRecords(false);
    EXPECT_EQ(5u, perturbationRecords.size());
    ASSERT_TRUE(perturbationRecords.size() > 1);
    EXPECT_TRUE(perturbationRecords[0].optionalCast<NullPerturbationRecord>());
    EXPECT_TRUE(perturbationRecords[1].optionalCast<RubyPerturbationRecord>());
    EXPECT_EQ(4u, analysisRecord.dataPointRecords().size());
    OptionalDataPointRecord searchResult = database.getObjectRecordByHandle<DataPointRecord>(toRemove.uuid());
    EXPECT_FALSE(searchResult);
    EXPECT_EQ(4u, DataPointRecord::getDataPointRecords(database).size());
  }
  EXPECT_EQ(4u, analysis.dataPoints().size());
  test = analysis.clearDirtyFlag();
  EXPECT_TRUE(test);
}
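// The next test exercises persistence of DataPoint job and error information. It runs a
// deliberately buggy measure workflow once, inspects each WorkflowStepJob's JobErrors,
// then re-opens the project and verifies the same information round-trips through the
// database.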
TEST_F(AnalysisDriverFixture, DataPersistence_DataPointErrors) {
  {
    // Create and populate project
    SimpleProject project = getCleanSimpleProject("DataPersistence_DataPointErrors");
    Analysis analysis = project.analysis();
    Problem problem = retrieveProblem(AnalysisDriverFixtureProblem::BuggyBCLMeasure, true, false);
    EXPECT_EQ(5u, problem.workflow().size());
    analysis.setProblem(problem);
    model::Model model = fastExampleModel();
    openstudio::path p = toPath("./example.osm");
    model.save(p, true);
    FileReference seedModel(p);
    project.setSeed(seedModel);
    DataPoint dataPoint = problem.createDataPoint(std::vector<QVariant>(problem.numVariables(), 0)).get();
    analysis.addDataPoint(dataPoint);

    // Run analysis
    AnalysisRunOptions runOptions = standardRunOptions(project.projectDir());
    project.analysisDriver().run(analysis, runOptions);
    project.analysisDriver().waitForFinished();

    // Check DataPoint job and error information
    ASSERT_EQ(1u, analysis.dataPoints().size());
    dataPoint = analysis.dataPoints()[0];
    EXPECT_TRUE(dataPoint.isComplete());
    EXPECT_TRUE(dataPoint.failed());
    EXPECT_TRUE(dataPoint.topLevelJob());

    WorkflowStepJobVector jobResults = problem.getJobsByWorkflowStep(dataPoint);
    EXPECT_EQ(problem.workflow().size(), jobResults.size());
    ASSERT_EQ(5u, jobResults.size());

    WorkflowStepJob jobResult = jobResults[0];
    ASSERT_TRUE(jobResult.job);
    EXPECT_TRUE(jobResult.measure);
    Job job = jobResult.job.get();
    ASSERT_TRUE(jobResult.mergedJobIndex);
    EXPECT_EQ(0u, jobResult.mergedJobIndex.get());
    EXPECT_FALSE(job.running());
    EXPECT_FALSE(job.outOfDate());
    EXPECT_FALSE(job.canceled());
    EXPECT_TRUE(job.lastRun());
    JobErrors treeErrors = job.treeErrors(); // get all tree errors now, test later
    JobErrors errors = jobResult.errors().get();
    EXPECT_EQ(OSResultValue(OSResultValue::NA), errors.result);
    EXPECT_TRUE(errors.succeeded());
    EXPECT_TRUE(errors.errors().empty());
    EXPECT_TRUE(errors.warnings().empty());
    EXPECT_FALSE(errors.infos().empty());

    jobResult = jobResults[1];
    ASSERT_TRUE(jobResult.job);
    EXPECT_FALSE(jobResult.measure);
    ASSERT_TRUE(jobResult.step.isWorkItem());
    EXPECT_EQ(JobType(JobType::UserScript), jobResult.step.workItemType());
    job = jobResult.job.get();
    ASSERT_TRUE(jobResult.mergedJobIndex);
    EXPECT_EQ(1u, jobResult.mergedJobIndex.get());
    EXPECT_FALSE(job.running());
    EXPECT_FALSE(job.outOfDate());
    EXPECT_FALSE(job.canceled());
    EXPECT_TRUE(job.lastRun());
    errors = jobResult.errors().get();
    EXPECT_EQ(OSResultValue(OSResultValue::Success), errors.result);
    EXPECT_TRUE(errors.succeeded());
    EXPECT_TRUE(errors.errors().empty());
    EXPECT_FALSE(errors.warnings().empty());
    EXPECT_TRUE(errors.infos().empty());

    jobResult = jobResults[2];
    ASSERT_TRUE(jobResult.job);
    EXPECT_TRUE(jobResult.measure);
    job = jobResult.job.get();
    ASSERT_TRUE(jobResult.mergedJobIndex);
    EXPECT_EQ(2u, jobResult.mergedJobIndex.get());
    EXPECT_FALSE(job.running());
    EXPECT_FALSE(job.outOfDate());
    EXPECT_FALSE(job.canceled());
    EXPECT_TRUE(job.lastRun());
    errors = jobResult.errors().get();
    EXPECT_EQ(OSResultValue(OSResultValue::Fail), errors.result);
    EXPECT_FALSE(errors.succeeded());
    EXPECT_FALSE(errors.errors().empty());
    EXPECT_TRUE(errors.warnings().empty());
    EXPECT_TRUE(errors.infos().empty());

    jobResult = jobResults[3];
    ASSERT_TRUE(jobResult.job);
    EXPECT_FALSE(jobResult.measure);
    ASSERT_TRUE(jobResult.step.isWorkItem());
    EXPECT_EQ(JobType(JobType::UserScript), jobResult.step.workItemType());
    job = jobResult.job.get();
    ASSERT_TRUE(jobResult.mergedJobIndex);
    EXPECT_EQ(3u, jobResult.mergedJobIndex.get());
    EXPECT_FALSE(job.running());
    EXPECT_FALSE(job.outOfDate()); // now all four scripts are in same job
    EXPECT_FALSE(job.canceled());
    EXPECT_TRUE(job.lastRun()); // now all four scripts are in same job
    errors = jobResult.errors().get(); // this script not actually run, so result in default state
    EXPECT_EQ(OSResultValue(OSResultValue::Fail), errors.result);
    EXPECT_FALSE(errors.succeeded());
    EXPECT_EQ(1u, errors.errors().size());
    EXPECT_TRUE(errors.warnings().empty());
    EXPECT_TRUE(errors.infos().empty());

    jobResult = jobResults[4];
    ASSERT_TRUE(jobResult.job);
    EXPECT_FALSE(jobResult.measure);
    ASSERT_TRUE(jobResult.step.isWorkItem());
    EXPECT_EQ(JobType(JobType::ModelToIdf), jobResult.step.workItemType());
    job = jobResult.job.get();
    EXPECT_FALSE(jobResult.mergedJobIndex);
    EXPECT_TRUE(job.outOfDate()); // never run
    EXPECT_FALSE(job.canceled());
    EXPECT_FALSE(job.lastRun());
    errors = jobResult.errors().get();
    EXPECT_EQ(OSResultValue(OSResultValue::Fail), errors.result);
    EXPECT_FALSE(errors.succeeded());
    EXPECT_TRUE(errors.errors().empty());
    EXPECT_TRUE(errors.warnings().empty());
    EXPECT_TRUE(errors.infos().empty());

    EXPECT_EQ(OSResultValue(OSResultValue::Fail), treeErrors.result);
    EXPECT_FALSE(treeErrors.succeeded());
    EXPECT_FALSE(treeErrors.errors().empty());
    EXPECT_FALSE(treeErrors.warnings().empty());
    EXPECT_FALSE(treeErrors.infos().empty());
  }

  {
    // Re-open project
    SimpleProject project = getSimpleProject("DataPersistence_DataPointErrors");
    Analysis analysis = project.analysis();
    Problem problem = analysis.problem();

    // Verify job and error information still there
    // Check DataPoint job and error information
    ASSERT_EQ(1u, analysis.dataPoints().size());
    DataPoint dataPoint = analysis.dataPoints()[0];
    EXPECT_TRUE(dataPoint.isComplete());
    EXPECT_TRUE(dataPoint.failed());
    EXPECT_TRUE(dataPoint.topLevelJob());

    WorkflowStepJobVector jobResults = problem.getJobsByWorkflowStep(dataPoint);
    EXPECT_EQ(problem.workflow().size(), jobResults.size());
    ASSERT_EQ(5u, jobResults.size());

    WorkflowStepJob jobResult = jobResults[0];
    ASSERT_TRUE(jobResult.job);
    EXPECT_TRUE(jobResult.measure);
    Job job = jobResult.job.get();
    EXPECT_FALSE(job.running());
    EXPECT_FALSE(job.outOfDate());
    EXPECT_FALSE(job.canceled());
    EXPECT_TRUE(job.lastRun());
    JobErrors treeErrors = job.treeErrors(); // get all tree errors now, test later
    JobErrors errors = jobResult.errors().get();
    EXPECT_EQ(OSResultValue(OSResultValue::NA), errors.result);
    EXPECT_TRUE(errors.succeeded());
    EXPECT_TRUE(errors.errors().empty());
    EXPECT_TRUE(errors.warnings().empty());
    EXPECT_FALSE(errors.infos().empty());

    jobResult = jobResults[1];
    ASSERT_TRUE(jobResult.job);
    EXPECT_FALSE(jobResult.measure);
    ASSERT_TRUE(jobResult.step.isWorkItem());
    EXPECT_EQ(JobType(JobType::UserScript), jobResult.step.workItemType());
    job = jobResult.job.get();
    EXPECT_FALSE(job.running());
    EXPECT_FALSE(job.outOfDate());
    EXPECT_FALSE(job.canceled());
    EXPECT_TRUE(job.lastRun());
    errors = jobResult.errors().get();
    EXPECT_EQ(OSResultValue(OSResultValue::Success), errors.result);
    EXPECT_TRUE(errors.succeeded());
    EXPECT_TRUE(errors.errors().empty());
    EXPECT_FALSE(errors.warnings().empty());
    EXPECT_TRUE(errors.infos().empty());

    jobResult = jobResults[2];
    ASSERT_TRUE(jobResult.job);
    EXPECT_TRUE(jobResult.measure);
    job = jobResult.job.get();
    EXPECT_FALSE(job.running());
    EXPECT_FALSE(job.outOfDate());
    EXPECT_FALSE(job.canceled());
    EXPECT_TRUE(job.lastRun());
    errors = jobResult.errors().get();
    EXPECT_EQ(OSResultValue(OSResultValue::Fail), errors.result);
    EXPECT_FALSE(errors.succeeded());
    EXPECT_FALSE(errors.errors().empty());
    EXPECT_TRUE(errors.warnings().empty());
    EXPECT_TRUE(errors.infos().empty());

    jobResult = jobResults[3];
    ASSERT_TRUE(jobResult.job);
    EXPECT_FALSE(jobResult.measure);
    ASSERT_TRUE(jobResult.step.isWorkItem());
    EXPECT_EQ(JobType(JobType::UserScript), jobResult.step.workItemType());
    job = jobResult.job.get();
    EXPECT_FALSE(job.running());
    EXPECT_FALSE(job.outOfDate()); // now all four scripts are in same job
    EXPECT_FALSE(job.canceled());
    EXPECT_TRUE(job.lastRun()); // now all four scripts are in same job
    errors = jobResult.errors().get();
    EXPECT_EQ(OSResultValue(OSResultValue::Fail), errors.result);
    EXPECT_FALSE(errors.succeeded());
    EXPECT_EQ(1u, errors.errors().size());
    EXPECT_TRUE(errors.warnings().empty());
    EXPECT_TRUE(errors.infos().empty());

    jobResult = jobResults[4];
    ASSERT_TRUE(jobResult.job);
    EXPECT_FALSE(jobResult.measure);
    ASSERT_TRUE(jobResult.step.isWorkItem());
    EXPECT_EQ(JobType(JobType::ModelToIdf), jobResult.step.workItemType());
    job = jobResult.job.get();
    EXPECT_TRUE(job.outOfDate()); // never run
    EXPECT_FALSE(job.canceled());
    EXPECT_FALSE(job.lastRun());
    errors = jobResult.errors().get();
    EXPECT_EQ(OSResultValue(OSResultValue::Fail), errors.result);
    EXPECT_FALSE(errors.succeeded());
    EXPECT_TRUE(errors.errors().empty());
    EXPECT_TRUE(errors.warnings().empty());
    EXPECT_TRUE(errors.infos().empty());

    EXPECT_EQ(OSResultValue(OSResultValue::Fail), treeErrors.result);
    EXPECT_FALSE(treeErrors.succeeded());
    EXPECT_FALSE(treeErrors.errors().empty());
    EXPECT_FALSE(treeErrors.warnings().empty());
    EXPECT_FALSE(treeErrors.infos().empty());
  }
}
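// The final test checks runtime behavior when a running custom analysis is stopped. It
// queues 100 randomly generated data points, stops the run via a StopWatcher, and then
// verifies that the driver shuts down within 20 seconds with only part of the iteration
// complete.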
TEST_F(AnalysisDriverFixture, RuntimeBehavior_StopCustomAnalysis) {
  // Tests for stopping time < 20s.

  // RETRIEVE PROBLEM
  Problem problem = retrieveProblem("UserScriptContinuous", true, false);

  // DEFINE SEED
  Model model = model::exampleModel();
  openstudio::path p = toPath("./example.osm");
  model.save(p, true);
  FileReference seedModel(p);

  // CREATE ANALYSIS
  Analysis analysis("Stop Custom Analysis", problem, seedModel);
  // generate 100 random points
  boost::mt19937 mt;
  typedef boost::uniform_real<> dist_type;
  typedef boost::variate_generator<boost::mt19937&, dist_type> gen_type;
  InputVariableVector variables = problem.variables();
  ContinuousVariable cvar = variables[0].cast<ContinuousVariable>();
  gen_type generator0(mt, dist_type(cvar.minimum().get(), cvar.maximum().get()));
  cvar = variables[1].cast<ContinuousVariable>();
  gen_type generator1(mt, dist_type(cvar.minimum().get(), cvar.maximum().get()));
  cvar = variables[2].cast<ContinuousVariable>();
  gen_type generator2(mt, dist_type(cvar.minimum().get(), cvar.maximum().get()));

  for (int i = 0, n = 100; i < n; ++i) {
    std::vector<QVariant> values;
    double value = generator0();
    values.push_back(value);
    value = generator1();
    values.push_back(value);
    value = generator2();
    values.push_back(value);
    OptionalDataPoint dataPoint = problem.createDataPoint(values);
    ASSERT_TRUE(dataPoint);
    ASSERT_TRUE(analysis.addDataPoint(*dataPoint));
  }

  // RUN ANALYSIS
  ProjectDatabase database = getCleanDatabase("StopCustomAnalysis");
  AnalysisDriver analysisDriver(database);
  AnalysisRunOptions runOptions = standardRunOptions(analysisDriver.database().path().parent_path());
  runOptions.setQueueSize(2);
  StopWatcher watcher(analysisDriver);
  watcher.watch(analysis.uuid());
  CurrentAnalysis currentAnalysis = analysisDriver.run(analysis, runOptions);
  EXPECT_EQ(2, currentAnalysis.numQueuedJobs());
  EXPECT_EQ(0, currentAnalysis.numQueuedDakotaJobs());
  EXPECT_EQ(100, currentAnalysis.totalNumJobsInOSIteration());
  EXPECT_EQ(0, currentAnalysis.numCompletedJobsInOSIteration());
  analysisDriver.waitForFinished();
  EXPECT_FALSE(analysisDriver.isRunning());
  EXPECT_GE(watcher.nComplete(), watcher.stopNum());
  EXPECT_LE(watcher.stoppingTime(), openstudio::Time(0, 0, 0, 20));

  // check conditions afterward
  RunManager runManager = analysisDriver.database().runManager();
  EXPECT_FALSE(runManager.workPending());
  BOOST_FOREACH(const Job& job, runManager.getJobs()) {
    EXPECT_FALSE(job.running());
    EXPECT_FALSE(job.treeRunning());
  }
  EXPECT_TRUE(currentAnalysis.numCompletedJobsInOSIteration() > 0);
  EXPECT_TRUE(currentAnalysis.analysis().dataPointsToQueue().size() > 0u);
  EXPECT_TRUE(currentAnalysis.analysis().dataPointsToQueue().size() < 100u);
  EXPECT_EQ(0u, analysisDriver.currentAnalyses().size());
}