void tst_QDBusPerformance::oneWayVariant() { QFETCH(QVariant, data); QFETCH(int, size); QVERIFY(executeTest("size", size, qVariantFromValue(QDBusVariant(data)))); }
// Benchmark a one-way ("size") call with the payload sent as a plain QVariant.
void tst_QDBusPerformance::oneWay()
{
    // Pull the current row from the data-driven test table
    // (fetch order is irrelevant; columns are looked up by name).
    QFETCH(int, size);
    QFETCH(QVariant, data);

    QVERIFY(executeTest("size", size, data));
}
void tst_QDBusPerformance::roundTripVariant() { QFETCH(QVariant, data); QFETCH(int, size); QVERIFY(executeTest("echo", size, qVariantFromValue(QDBusVariant(data)))); }
// Benchmark a round-trip ("echo") call with the payload sent as a plain QVariant.
void tst_QDBusPerformance::roundTrip()
{
    // Pull the current row from the data-driven test table
    // (fetch order is irrelevant; columns are looked up by name).
    QFETCH(int, size);
    QFETCH(QVariant, data);

    QVERIFY(executeTest("echo", size, data));
}
void EvalRunner::runConstraintsPerThread(int algo) { string name = "ConstraintsPerThread"; paramHandler->resetToDefault(); for (int i=1; i<=20; i++) { paramHandler->setConstraintsPerThread(i); executeTest(algo, 0, 9, name, i); } }
void EvalRunner::runInterfacesScenario(int algo) { string name = "Interfaces"; paramHandler->resetToDefault(); for (int i=10; i<=100; i+=10) { paramHandler->setNumIf(i); executeTest(algo, 0, 9, name, i); } }
void EvalRunner::runValuesScenario(int algo) { string name = "Values"; paramHandler->resetToDefault(); for (int i=1; i<=200; ) { paramHandler->setNumAttVal(i*100); executeTest(algo, 0, 9, name, i); if (i<10) i+=3; else i+=10; } }
void EvalRunner::runTypeScenario(int algo) { string name = "Type"; paramHandler->resetToDefault(); workloadGenerator->resetNames(); for (int i=0; i<=100; i+=10) { paramHandler->setNumIntNames(i); workloadGenerator->resetStringValues(); executeTest(algo, 0, 9, name, i); } }
void EvalRunner::runAttributeScenario(int algo) { string name = "Attribute"; paramHandler->resetToDefault(); workloadGenerator->resetNames(); workloadGenerator->resetStringValues(); for (int i=1; i<=9; i++) { paramHandler->setMinAttr(i); paramHandler->setMaxAttr(i); executeTest(algo, 0, 9, name, i); } }
void EvalRunner::runOperatorScenario(int algo) { string name = "Operator"; paramHandler->resetToDefault(); for (int i=0; i<=100; i+=10) { paramHandler->setPercIntEq(i); int perc = (100-i)/3; paramHandler->setPercIntGt(perc); paramHandler->setPercIntLt(perc); paramHandler->setPercIntDf(perc); executeTest(algo, 0, 9, name, i); } }
void EvalRunner::runNamesScenario(int algo) { string name = "Names"; paramHandler->resetToDefault(); for (int i=10; i<=1000; ) { paramHandler->setNumNames(i); paramHandler->setNumIntNames(i); workloadGenerator->resetNames(); executeTest(algo, 0, 9, name, i); if (i<100) i+=10; else i+=100; } }
void EvalRunner::runConstraintsPerFilterScenario(int algo) { string name = "ConstraintsPerFilter"; paramHandler->resetToDefault(); for (int constr=1; constr<=9; constr++) { paramHandler->setMinConstr(constr-1); paramHandler->setMaxConstr(constr+1); if (constr==1) { paramHandler->setMinConstr(1); paramHandler->setMaxConstr(1); } executeTest(algo, 0, 9, name, constr); } }
void EvalRunner::runInterfacesFixedScenario(int algo, int numConstraints) { stringstream stream; stream << numConstraints; string stringConstraints = stream.str(); string name = "InterfacesFixed_" + stringConstraints; paramHandler->resetToDefault(); for (int i=10; i<=100; i+=10) { paramHandler->setNumIf(i); int numFilters = numConstraints/i; paramHandler->setMinFilters(numFilters); paramHandler->setMaxFilters(numFilters); executeTest(algo, 0, 9, name, i); } }
void EvalRunner::runFiltersPerInterfaceScenario(int algo) { string name = "FiltersPerInterface"; paramHandler->resetToDefault(); for (int filter=100; filter<=250000; ) { int filterPerc = filter/10; paramHandler->setMinFilters(filter-filterPerc); paramHandler->setMaxFilters(filter+filterPerc); executeTest(algo, 0, 9, name, filter); if (filter<1000) filter+=300; else if (filter<10000) filter+=3000; else if (filter<100000) filter+=30000; else filter+=50000; } }
/// /// Start simulation. /// void simulation::start(int argc, char ** argv){ arguments testArguments; // Set version revision number testArguments.versionNumber = (char*) malloc (10); strcpy(testArguments.versionNumber, "1.0.2"); testArguments.parseArguments(argc, argv); ///< Parse user arguments testArguments.inputCNF.parseCNF( testArguments.filename); ///< Parse DIMACS CNF testArguments = executeTest(testArguments); ///< Execute algorithm testArguments.writeOutput(); ///< Write output results // Print debug arguments if(testArguments.debug){ testArguments.printArguments(); } }
void EvalRunner::runConstraintsPerFilterFixedScenario(int algo, int numConstraints) { stringstream stream; stream << numConstraints; string stringConstraints = stream.str(); string name = "ConstraintsPerFilterFixed_" + stringConstraints; paramHandler->resetToDefault(); for (int constr=1; constr<=9; constr++) { int numFilters = numConstraints/(constr*paramHandler->getNumIf()); paramHandler->setMinFilters(numFilters); paramHandler->setMaxFilters(numFilters); paramHandler->setMinConstr(constr-1); paramHandler->setMaxConstr(constr+1); if (constr==1) { paramHandler->setMinConstr(1); paramHandler->setMaxConstr(1); } executeTest(algo, 0, 9, name, constr); } }
int main(int argc, char** argv) { QString style = "keramik"; // KApplication app(argc, argv); KAboutData about("decobenchmark", "DecoBenchmark", "0.1", "kwin decoration performance tester...", KAboutData::License_LGPL, "(C) 2005 Sandro Giessl"); KCmdLineArgs::init(argc, argv, &about); KCmdLineArgs::addCmdLineOptions( options ); KCmdLineArgs *args = KCmdLineArgs::parsedArgs(); if (args->count() != 3) KCmdLineArgs::usage("Wrong number of arguments!"); QString library = QString(args->arg(0) ); QString t = QString(args->arg(1) ); int count = QString(args->arg(2) ).toInt(); Tests test; if (t == "all") test = AllTests; else if (t == "repaint") test = RepaintTest; else if (t == "caption") test = CaptionTest; else if (t == "resize") test = ResizeTest; else if (t == "recreation") test = RecreationTest; else KCmdLineArgs::usage("Specify a valid test!"); DecoBenchApplication app(library, test, count); QTimer::singleShot(0, &app, SLOT(executeTest())); app.exec(); }
void EvalRunner::runZipfScenario(int algo) { string name = "Zipf"; paramHandler->resetToDefault(); paramHandler->setZipfNames(true); executeTest(algo, 0, 9, name, 2); }
void EvalRunner::runDefaultScenario(int algo) { string name = "Default"; paramHandler->resetToDefault(); executeTest(algo, 0, 9, name, 2); }
/// Export the simulation code, generate and run the test executables, and
/// finally run the generated "compare" tool against the reference results.
///
/// \param dirName     Export/working directory for the generated code.
/// \param initStates  File name handed to the generated test program (initial states).
/// \param controls    File name handed to the generated test program (controls).
/// \param results     Output file name used by the integrator run.
/// \param ref         Output file name used by the reference run.
///
/// \return SUCCESSFUL_RETURN, or RET_INVALID_OPTION for unsupported
///         measurement-grid configurations.
returnValue SIMexport::exportAndRun( const String& dirName, const String& initStates, const String& controls, const String& results, const String& ref )
{
    set( GENERATE_TEST_FILE, 1 );

    // Query the integration and output grids from the model data.
    Grid integrationGrid;
    modelData.getIntegrationGrid(integrationGrid);
    std::vector<Grid> outputGrids;
    modelData.getOutputGrids(outputGrids);

    int measGrid;
    get( MEASUREMENT_GRID, measGrid );
    // ONLINE_GRID is rejected outright; EQUIDISTANT_SUBGRID additionally
    // requires an equidistant integration grid.
    if( (MeasurementGrid)measGrid == ONLINE_GRID || ((MeasurementGrid)measGrid == EQUIDISTANT_SUBGRID && !modelData.hasEquidistantIntegrationGrid()) )
        return ACADOERROR( RET_INVALID_OPTION );

    // Remember the file names for the generated test programs.
    _initStates = initStates;
    _controls = controls;
    _results = results;
    _ref = ref;

    uint i, j;
    // Measurement counts per output grid; the reference run uses factorRef
    // times as many measurements.
    Vector meas( (uint)outputGrids.size() );
    Vector measRef( (uint)outputGrids.size() );
    for( i = 0; i < outputGrids.size(); i++ ) {
        meas(i) = (double)outputGrids[i].getNumIntervals();
        measRef(i) = (double)outputGrids[i].getNumIntervals()*factorRef;
    }

    // For a non-equidistant grid, build an explicit reference grid that
    // subdivides every integration interval into factorRef equal steps.
    Vector intGrid( integrationGrid.getNumIntervals()+1 );
    Vector refIntGrid( factorRef*integrationGrid.getNumIntervals()+1 );
    if( !modelData.hasEquidistantIntegrationGrid() ) {
        intGrid(0) = integrationGrid.getTime( 0 );
        refIntGrid(0) = integrationGrid.getTime( 0 );
        for( i = 0; i < integrationGrid.getNumIntervals(); i++ ) {
            intGrid(i+1) = integrationGrid.getTime( i+1 );
            double step = (integrationGrid.getTime( i+1 ) - integrationGrid.getTime( i ))/factorRef;
            for( j = 0; j < factorRef; j++ ) {
                refIntGrid(i*factorRef+1+j) = refIntGrid(i*factorRef+j) + step;
            }
        }
    }

    int numSteps;
    get( NUM_INTEGRATOR_STEPS, numSteps );
    // Round timingCalls/timingSteps to mutually consistent whole numbers;
    // the 10.0*EPS term guards ceil() against floating-point noise.
    timingCalls = (uint) ceil((double)(timingSteps*modelData.getN())/((double) numSteps) - 10.0*EPS);
    timingSteps = (uint) ceil((double)timingCalls*((double) numSteps/((double) modelData.getN())) - 10.0*EPS);

    if( !referenceProvided ) {
    // REFERENCE:
        // Export and run a finer-grained simulation (factorRef refinement)
        // to produce the reference results; timing is disabled (BT_FALSE).
        if( !modelData.hasEquidistantIntegrationGrid() ) {
            modelData.setMeasurements( meas );			// EQUIDISTANT_GRID option is used
            modelData.setIntegrationGrid( refIntGrid );
            exportCode( dirName );
            exportTest( dirName, String( "test.c" ), _ref, _refOutputFiles, BT_FALSE, 1 );
        }
        else if( (MeasurementGrid)measGrid == EQUIDISTANT_GRID ) {
            modelData.setMeasurements( meas );
            set( NUM_INTEGRATOR_STEPS, (int)factorRef*numSteps );
            exportCode( dirName );
            exportTest( dirName, String( "test.c" ), _ref, _refOutputFiles, BT_FALSE, 1 );
        }
        else {
            modelData.setMeasurements( measRef );
            set( NUM_INTEGRATOR_STEPS, (int)factorRef*numSteps );
            exportCode( dirName );
            exportTest( dirName, String( "test.c" ), _ref, _refOutputFiles, BT_FALSE, factorRef );
        }
        executeTest( dirName );
    }
    modelData.clearIntegrationGrid();

    // THE INTEGRATOR:
    // Export and run the actual integrator; restore the original step count
    // and grid, and enable timing only when the computed counts allow it.
    modelData.setMeasurements( meas );
    set( NUM_INTEGRATOR_STEPS, numSteps );
    if( !modelData.hasEquidistantIntegrationGrid() ) {
        modelData.setIntegrationGrid( intGrid );
    }
    exportCode( dirName );
    if(timingSteps > 0 && timingCalls > 0)
        exportTest( dirName, String( "test.c" ), _results, _outputFiles, BT_TRUE, 1 );
    else
        exportTest( dirName, String( "test.c" ), _results, _outputFiles, BT_FALSE, 1 );
    executeTest( dirName );

    // THE EVALUATION:
    // Run the generated "compare" executable; its exit status is
    // deliberately ignored (captured into `nil` and dropped).
    int nil;
    nil = system( (String(dirName) << "/./compare").getName() );
    return SUCCESSFUL_RETURN;
}
// Run the pre-registered windowed-sum case stored at slot 0 of allTests.
TEST_F(TestWindowedSum, test_min_last_row)
{
    executeTest(allTests[0]);
}
// Run the pre-registered join plan stored at slot 1 of allTests.
TEST_F(TestGeneratedPlans, test_join)
{
    executeTest(allTests[1]);
}
// Run the pre-registered order-by plan stored at slot 0 of allTests.
TEST_F(TestGeneratedPlans, test_order_by)
{
    executeTest(allTests[0]);
}
// A correct colouring of the complete graph on 50 vertices must be accepted.
TEST(ColouringValidator, AcceptsCorrectSolutionForC50)
{
    const char *graphFile = "resources/test/complete50.adjl";
    const char *solutionFile = "resources/test/C50.csol";
    executeTest(graphFile, solutionFile, true);
}
// An incorrect colouring of the simple test graph must be rejected.
TEST(ColouringValidator, RejectsIncorrectSolutionForSTG)
{
    const char *graphFile = "resources/test/SimpleTestGraph.adjl";
    const char *solutionFile = "resources/test/STG_incorrect.csol";
    executeTest(graphFile, solutionFile, false);
}
// A correct colouring of the simple test graph must be accepted.
TEST(ColouringValidator, AcceptsCorrectSolutionForSTG)
{
    const char *graphFile = "resources/test/SimpleTestGraph.adjl";
    const char *solutionFile = "resources/test/STG.csol";
    executeTest(graphFile, solutionFile, true);
}
// Run the pre-registered windowed-count case stored at slot 1 of allTests.
TEST_F(TestWindowedCount, test_count)
{
    executeTest(allTests[1]);
}
bool TestManager::executeFilteredTests(const std::vector<std::string> & included_tags, const std::vector<std::string> & excluded_tags) { bool final_result = true; bool include_all = included_tags.size() == 0; bool no_excludes = excluded_tags.size() == 0; size_t test_index = 0; for (auto ti : _registered_tests) { // Build text for headers std::string full_test_desc = BuildFullTestDescription(ti, test_index++, _registered_tests.size()); // // apply test filter // bool should_run = false; if (no_excludes && include_all) { // no filter should_run = true; } else if (ti->tags) { // has tags, split test tags by space std::vector<std::string> test_tags = detail::SplitString(std::string(ti->tags), ' '); if (test_tags.size() > 0) { bool is_included = false; if (!include_all) { for (std::string incl : included_tags) { if (std::find(test_tags.begin(), test_tags.end(), incl) != test_tags.end()) { is_included = true; // found tag from included vector break; } } } else { is_included = true; } bool is_excluded = false; for (std::string excl : excluded_tags) { if (std::find(test_tags.begin(), test_tags.end(), excl) != test_tags.end()) { is_excluded = true; // found tag from excluded vector break; } } should_run = is_included && !is_excluded; } else { // test tags string has wrong format, assume that there's no tag at all should_run = include_all; } } else { // test has no tags, if included is not present, then ignore this test should_run = include_all; } if (should_run) { bool test_result = executeTest(ti, full_test_desc); if (test_result) { tl().addPassedTest(); } else { tl().addFailedTest(); } final_result = final_result && test_result; } else { std::string skipped = full_test_desc + " ::: SKIPPED"; logMessage(skipped); tl().addSkippedTest(); } } return final_result; }