int main( void ) { using namespace std; Test t; t.Run(); return 0; }
void Benchmarker::RunAll(){ double total; std::printf("============ Benchmarking %d tests =============\n\n", Instance()->configs.size()); for (auto config : Instance()->configs){ Test *test = config->NewTest(); total = 0; #ifdef _DEBUG config->runs = 1; config->iters = Math::Max(1, config->iters / 40); #endif for (uint i = 0; i < config->runs; i++) total += test->Run(config->iters); std::printf("%-20s\t\t%f ms\n", config->name, total / config->runs); delete test; } std::printf("\n============ All tests finished =============\n"); }
//#undef DEBUG //#undef _DEBUG int WINAPI WinMain(HINSTANCE hInstance,HINSTANCE hPrevInstance, PSTR cmdLine, int nShowCmd) { #if !defined(DEBUG)&!defined(_DEBUG) try #endif { Test* game = new Test(hInstance,nShowCmd,L"Test",100,50); game->Initialize(); game->Run(); delete game; } #if !defined(DEBUG)&!defined(_DEBUG) catch(exception e) { MessageBoxA(NULL,e.what(),"Exception!!!",MB_OK|MB_ICONERROR); } #endif return 0; }
int main() { Test b; b.Run(); Graph<char, int> a; a.InsertNode('A'); a.InsertNode('B'); a.InsertNode('C'); a.InsertNode('D'); a.InsertNode('E'); a.InsertEdge('A','B',4,1); a.InsertEdge('A','C',3,1); a.InsertEdge('B','C',1,1); a.InsertEdge('D','A',6,1); a.InsertEdge('B','A',4,1); a.InsertEdge('C','E',10,1); a.InsertEdge('E','D',2,1); a.Print(); Dijkstra(a,'A','E'); return 0; }
// Entry point for a UnitTest++-based runner with optional Visual Studio
// "vutpp" plugin integration: when launched with
// --vutpp:<readHandle>,<writeHandle> it serves test-execution requests over
// two inherited pipe handles using fixed 1024-byte messages; otherwise it
// simply runs all registered tests.
int main(int argc, char const *argv[]) {
    for( int i = 1; i < argc; i++ ) {
        if( strncmp( argv[i], "--vutpp:", 8 ) == 0 ) {
            // Parameter format: --vutpp:<readPipe>,<writePipe> (decimal values).
            std::string strVutppParam = argv[i] + 8;
            const size_t seperator = strVutppParam.find( ',' );
            if( seperator == std::string::npos )
                return -1;

            HANDLE readPipe, writePipe;
            // NOTE(review): %d scans an int into a pointer-sized HANDLE --
            // presumably only safe on 32-bit builds / small handle values;
            // confirm against the plugin that launches this process.
            sscanf( strVutppParam.substr( 0, seperator ).c_str(), "%d", &readPipe );
            sscanf( strVutppParam.substr( seperator+1 ).c_str(), "%d", &writePipe );

            char readBuffer[1024], writeBuffer[1024];
            DWORD dwSize = 0;

            // Handshake; every message on the pipe is exactly 1024 bytes.
            strcpy( writeBuffer, "connect" );
            if( WriteFile( writePipe, writeBuffer, 1024, &dwSize, NULL ) == false || dwSize != 1024 )
                return -1;

            TestList& rTestList = Test::GetTestList();

            // Request loop: each message names one "suite,test" to execute,
            // until the __VUTPP_FINISH__ sentinel arrives.
            while( true ) {
                if( ReadFile( readPipe, readBuffer, 1024, &dwSize, NULL ) == false || dwSize != 1024 )
                    return -1;
                if( strncmp( readBuffer, "__VUTPP_FINISH__", 16 ) == 0 )
                    break;

                const char* pSeperator = strchr( readBuffer, ',' );
                std::string suiteName( readBuffer, pSeperator - readBuffer ), testName( pSeperator+1 );
                bool bRun = false;

                // Linear scan of the registered test list for a matching
                // suite/test pair; only the first match runs (break below).
                Test* pTest = rTestList.GetHead();
                while( pTest != NULL ) {
                    if( pTest->m_details.testName == testName && pTest->m_details.suiteName == suiteName ) {
                        // Route assertion results back through the write pipe.
                        VUTPP_Reporter reporter( writePipe );
                        TestResults testResult( &reporter );
                        CurrentTest::Results() = &testResult;
                        pTest->Run();
                        // "-1," acknowledges a dispatched test.
                        strcpy( writeBuffer, "-1," );
                        bRun = true;
                        if( WriteFile( writePipe, writeBuffer, 1024, &dwSize, NULL ) == false || dwSize != 1024 )
                            return -1;
                        break;
                    }
                    pTest = pTest->next;
                }

                if( bRun == false ) {
                    // "-2" reports an error (unknown test) back to the IDE.
                    sprintf( writeBuffer, "%d,,%s", -2, "can't find test" );
                    if( WriteFile( writePipe, writeBuffer, 1024, &dwSize, NULL ) == false || dwSize != 1024 )
                        return -1;
                }
            }
            return 0;
        }
    }

    // No --vutpp argument: normal console run of the full suite.
    return UnitTest::RunAllTests();
}
/// Run all benchmarking tests. static void RunAllTests() { Benchmarker& instance = Instance(); std::vector<Outputter*>& outputters = instance._outputters; // Get the tests for execution. std::vector<TestDescriptor*> tests = instance.GetTests(); // Begin output. for (std::size_t outputterIndex = 0; outputterIndex < outputters.size(); outputterIndex++) outputters[outputterIndex]->Begin(tests.size()); // Run through all the tests in ascending order. std::size_t index = 0; while (index < tests.size()) { // Get the test descriptor. TestDescriptor* descriptor = tests[index++]; // Check if test matches include filters if (instance._include.size() > 0) { bool included = false; std::string name = descriptor->FixtureName + "." + descriptor->TestName + descriptor->Parameters; for (std::size_t i = 0; i <instance._include.size(); i++) { if (name.find(instance._include[i]) != std::string::npos) { included = true; break; } } if (!included) continue; } // Describe the beginning of the run. for (std::size_t outputterIndex = 0; outputterIndex < outputters.size(); outputterIndex++) outputters[outputterIndex]->BeginTest( descriptor->FixtureName, descriptor->TestName, descriptor->Parameters, descriptor->Runs, descriptor->Iterations ); // Execute each individual run. int64_t timeTotal = 0, timeRunMin = std::numeric_limits<int64_t>::max(), timeRunMax = std::numeric_limits<int64_t>::min(); std::size_t run = descriptor->Runs; while (run--) { // Construct a test instance. Test* test = descriptor->Factory->CreateTest(); // Run the test. int64_t time = test->Run(descriptor->Iterations); // Store the test time. timeTotal += time; if (timeRunMin > time) timeRunMin = time; if (timeRunMax < time) timeRunMax = time; // Dispose of the test instance. delete test; } // Calculate the test result. TestResult testResult(descriptor->Runs, descriptor->Iterations, timeTotal, timeRunMin, timeRunMax); // Describe the end of the run. 
for (std::size_t outputterIndex = 0; outputterIndex < outputters.size(); outputterIndex++) outputters[outputterIndex]->EndTest( descriptor->FixtureName, descriptor->TestName, descriptor->Parameters, testResult ); } // Begin output. for (std::size_t outputterIndex = 0; outputterIndex < outputters.size(); outputterIndex++) outputters[outputterIndex]->End(tests.size()); }
int main(int argc, char** argv) { // add all tests AddTest(new CamerasTest::CamerasTest); AddTest(new ConnectionsTest::ConnectionsTest); AddTest(new DisplayTest::DisplayTest); AddTest(new EventBindingsTest::EventBindingsTest); AddTest(new EventsTest::EventsTest); AddTest(new FollowPathTest::FollowPathTest); AddTest(new GuiTest::GuiTest); AddTest(new InputTest::InputTest); AddTest(new LoggerTest::LoggerTest); AddTest(new MouseCursorTest::MouseCursorTest); AddTest(new MusicFadeTest::MusicFadeTest); AddTest(new MusicTest::MusicTest); AddTest(new NamesTest::NamesTest); AddTest(new NetworkTest::NetworkTest); AddTest(new ParticlesTest::ParticlesTest); AddTest(new PhysicsSimpleTest::PhysicsSimpleTest); AddTest(new PhysicsStressTest::PhysicsStressTest); AddTest(new PrimitivesTest::PrimitivesTest); AddTest(new QObjectTest::QObjectTest); AddTest(new RandomTest::RandomTest); AddTest(new ResourceManagerTest::ResourceManagerTest); AddTest(new ScriptComponentTest::ScriptComponentTest); AddTest(new ScriptingTest::ScriptingTest); AddTest(new ShadowsTest::ShadowsTest); AddTest(new SignalsTest::SignalsTest); AddTest(new SoundTest::SoundTest); AddTest(new StatesTest::StatesTest); AddTest(new TextTest::TextTest); AddTest(new TimerTest::TimerTest); AddTest(new TerrainTest::TerrainTest); if(argc < 2) { std::cout << "TestFramework usage: " << std::endl; std::cout << " ./TestFramework <test name>" << std::endl; std::cout << std::endl << "Available tests:" << std::endl; for(auto iter = Tests.begin(); iter != Tests.end(); ++iter) { std::cout << " - " << dt::Utils::ToStdString(iter->first) << std::endl; } } else { bool failure = false; for(int i = 1; i < argc; ++i) { QString name(argv[i]); if(name == "client" || name == "server") // ignore parameters of network continue; std::cout << "Running test " + dt::Utils::ToStdString(name) + "..." << std::endl; Test* test = GetTest(name); if(test == nullptr) { std::cerr << "Test " + dt::Utils::ToStdString(name) + " not found. Skipping." 
<< std::endl; } else if(!test->Run(argc, argv)) { failure = true; std::cerr << "Test " + dt::Utils::ToStdString(name) + " FAILED." << std::endl; } else { std::cout << "Test " + dt::Utils::ToStdString(name) + ": OK." << std::endl; } } if(failure) { std::cerr << std::endl << "Not all tests successful, check log for details." << std::endl; return 1; } } return 0; }
// Standalone render test "sprite1": builds a single rotated sprite entity,
// wires it into a frame with custom orthographic projection/view matrices,
// and hands control to the application idle loop. Any std::exception is
// reported on stdout. The setup below is order-sensitive (resources before
// entities, property maps bound before population).
int main ()
{
  printf ("Results of sprite1_test:\n");

  try
  {
    Test test (L"Sprite 1", true);

    RenderManager render_manager = test.RenderManager ();

    // Load the effect, texture and material resources the sprite needs.
    render_manager.LoadResource ("data/sprites.rfx");
    render_manager.LoadResource ("data/bottom.jpg");
    render_manager.LoadResource ("data/sprites.xmtl");

    Entity entity = render_manager.CreateEntity ();
    Primitive primitive = render_manager.CreatePrimitive ();

    // Alternative sprite-list modes, kept for manual experimentation:
//    SpriteList sprites = primitive.AddStandaloneSpriteList (SpriteMode_Oriented, math::vec3f (0, 1.0f, 0));
//    SpriteList sprites = primitive.AddStandaloneSpriteList (SpriteMode_Billboard, math::vec3f (0, 1.0f, 0));
//    SpriteList sprites = primitive.AddBatchingSpriteList (SpriteMode_Oriented, math::vec3f (0, 1.0f, 0));
    SpriteList sprites = primitive.AddBatchingSpriteList (SpriteMode_Oriented, math::vec3f (0, 1.0f, 0));

    Sprite sprite;

    primitive.Buffers ().ReserveDynamicBuffers (8192, 8192);

    // Clip the entity to the unit box in world space.
    entity.SetWorldScissor (Box (math::vec3f (0.0f), math::vec3f (1.0f)));
    entity.EnableScissor ();

    // One unit-size, white, unrotated sprite at the origin, facing +Z.
    sprite.position = math::vec3f (0.0f);
    sprite.size = math::vec2f (1.0f);
    sprite.color = math::vec4f (1.0f);
    sprite.tex_offset = math::vec2f (0.0f);
    sprite.tex_size = math::vec2f (1.0f);
    sprite.normal = math::vec3f (0, 0, 1.0f);
    sprite.rotation = math::anglef ();

    sprites.SetMaterial ("sprite");
    sprites.Add (1, &sprite);

    entity.SetPrimitive (primitive);
    entity.SetWorldMatrix (math::rotate (math::degree (45.0f), math::vec3f (0, 0, 1)));

    Frame frame = render_manager.CreateFrame ();

    frame.DisableAutoCleanup ();
    frame.SetRenderTarget ("main_color_target", test.Window ().ColorBuffer ());
    frame.SetRenderTarget ("main_depth_stencil_target", test.Window ().DepthStencilBuffer ());
    frame.SetEffect ("main");
    frame.SetClearColor (math::vec4f (0.0f, 0.0f, 1.0f, 1.0f));
    frame.SetEntityDrawHandler (&update_entity_frame_transformations);

    // Per-entity shader properties. The map is bound before being populated
    // -- presumably PropertyMap has reference semantics so the later
    // SetProperty is still visible to the frame; verify in common::PropertyMap.
    common::PropertyMap entity_dependent_properties;

    frame.SetEntityDependentProperties (entity_dependent_properties);

    entity_dependent_properties.SetProperty ("myObjectMatrix", math::mat4f (1.0f));

    common::PropertyMap frame_properties = frame.Properties ();
    // NOTE(review): entity_properties is never used below.
    common::PropertyMap entity_properties = entity.Properties ();

    // Orthographic camera at (0,0,10) looking at the origin, +Y up.
    frame_properties.SetProperty ("myProjMatrix", get_ortho_proj (-2, 2, -2, 2, -15, 15));
    frame_properties.SetProperty ("myViewMatrix", inverse (math::lookat (math::vec3f (0, 0, 10), math::vec3f (0.0f), math::vec3f (0, 1, 0))));

    frame.SetViewProjectionMatrix (frame_properties.GetMatrix ("myProjMatrix") * frame_properties.GetMatrix ("myViewMatrix"));

    frame.AddEntity (entity);

    test.ShowWindow ();

    // Redraw every idle tick via the registered handler.
    syslib::Application::RegisterEventHandler (syslib::ApplicationEvent_OnIdle, xtl::bind (&idle, xtl::ref (test), xtl::ref (entity), xtl::ref (frame)));

    return test.Run ();
  }
  catch (std::exception& e)
  {
    printf ("%s\n", e.what ());
  }

  return 0;
}
/// Run all benchmarking tests. static void RunAllTests() { Benchmarker& instance = Instance(); // Initial output std::cout << std::fixed; std::cout << Console::TextGreen << "[==========]" << Console::TextDefault << " Running " << instance._tests.size() << (instance._tests.size() == 1 ? " benchmark." : " benchmarks.") << std::endl; // Run through all the tests in ascending order. #define PAD(x) std::cout << std::setw(34) << x << std::endl; #define PAD_DEVIATION(description, \ deviated, \ average, \ unit) \ { \ double _d_ = \ double(deviated) - double(average); \ \ PAD(description << \ deviated << " "unit" (" << \ (deviated < average ? \ Console::TextRed : \ Console::TextGreen) << \ (deviated > average ? "+" : "") << \ _d_ << " "unit" / " << \ (deviated > average ? "+" : "") << \ (_d_ * 100.0 / average) << " %" << \ Console::TextDefault << ")"); \ } #define PAD_DEVIATION_INVERSE(description, \ deviated, \ average, \ unit) \ { \ double _d_ = \ double(deviated) - double(average); \ \ PAD(description << \ deviated << " "unit" (" << \ (deviated > average ? \ Console::TextRed : \ Console::TextGreen) << \ (deviated > average ? "+" : "") << \ _d_ << " "unit" / " << \ (deviated > average ? "+" : "") << \ (_d_ * 100.0 / average) << " %" << \ Console::TextDefault << ")"); \ } std::size_t index = 0; std::size_t ran = 0; /// Number of executed tests while (index < instance._tests.size()) { // Get the test descriptor. TestDescriptor* descriptor = instance._tests[index++]; // Check if test matches include filters if(instance._include.size() > 0) { bool included = false; std::string name = descriptor->FixtureName + "." + descriptor->TestName+descriptor->Parameters; for(std::size_t i = 0; i <instance._include.size(); i++) { if(name.find(instance._include[i]) != std::string::npos) { included = true; break; } } if(!included) { continue; } } ran++; // Get test instance, which will handle BeforeTest() and AfterTest() hooks. 
Test* hooks = descriptor->Factory->CreateTest(); hooks->BeforeTest(descriptor->FixtureName, descriptor->TestName, descriptor->Runs, descriptor->Iterations); // Describe the beginning of the run. std::cout << Console::TextGreen << "[ RUN ]" << Console::TextYellow << " " << descriptor->FixtureName << "." << descriptor->TestName << descriptor->Parameters << Console::TextDefault << " (" << descriptor->Runs << (descriptor->Runs == 1 ? " run, " : " runs, ") << descriptor->Iterations << (descriptor->Iterations == 1 ? " iteration per run)" : " iterations per run)") << std::endl; // Execute each individual run. int64_t timeTotal = 0, timeRunMin = std::numeric_limits<int64_t>::max(), timeRunMax = std::numeric_limits<int64_t>::min(); std::size_t run = descriptor->Runs; while (run--) { // Construct a test instance. Test* test = descriptor->Factory->CreateTest(); // Run the test. int64_t time = test->Run(descriptor->Iterations); // Store the test time. timeTotal += time; if (timeRunMin > time) timeRunMin = time; if (timeRunMax < time) timeRunMax = time; // Dispose of the test instance. delete test; } // Calculate different metrics. double timeRunAverage = double(timeTotal) / double(descriptor->Runs); double runsPerSecondAverage = 1000000.0 / timeRunAverage; double runsPerSecondMax = 1000000.0 / double(timeRunMin); double runsPerSecondMin = 1000000.0 / double(timeRunMax); double timeIterationAverage = timeRunAverage / double(descriptor->Iterations); double timeIterationMin = double(timeRunMin) / double(descriptor->Iterations); double timeIterationMax = double(timeRunMax) / double(descriptor->Iterations); double iterationsPerSecondAverage = 1000000.0 / timeIterationAverage; double iterationsPerSecondMax = 1000000.0 / timeIterationMin; double iterationsPerSecondMin = 1000000.0 / timeIterationMax; // Describe the end of the run. std::cout << Console::TextGreen << "[ DONE ]" << Console::TextYellow << " " << descriptor->FixtureName << "." 
<< descriptor->TestName << descriptor->Parameters << Console::TextDefault << " (" << (double(timeTotal) / 1000.0) << " ms)" << std::endl; std::cout << Console::TextBlue << "[ RUNS ] " << Console::TextDefault << " Average time: " << timeRunAverage << " us" << std::endl; PAD_DEVIATION_INVERSE("Fastest: ", timeRunMin, timeRunAverage, "us"); PAD_DEVIATION_INVERSE("Slowest: ", timeRunMax, timeRunAverage, "us"); PAD(""); PAD("Average performance: " << runsPerSecondAverage << " runs/s"); PAD_DEVIATION("Best performance: ", runsPerSecondMax, runsPerSecondAverage, "runs/s"); PAD_DEVIATION("Worst performance: ", runsPerSecondMin, runsPerSecondAverage, "runs/s"); std::cout << Console::TextBlue << "[ITERATIONS] " << Console::TextDefault << " Average time: " << timeIterationAverage << " us" << std::endl; PAD_DEVIATION_INVERSE("Fastest: ", timeIterationMin, timeIterationAverage, "us"); PAD_DEVIATION_INVERSE("Slowest: ", timeIterationMax, timeIterationAverage, "us"); PAD(""); PAD("Average performance: " << iterationsPerSecondAverage << " iterations/s"); PAD_DEVIATION("Best performance: ", iterationsPerSecondMax, iterationsPerSecondAverage, "iterations/s"); PAD_DEVIATION("Worst performance: ", iterationsPerSecondMin, iterationsPerSecondAverage, "iterations/s"); hooks->AfterRun( timeRunAverage, runsPerSecondAverage, runsPerSecondMax, runsPerSecondMin, timeIterationAverage, timeIterationMax, timeIterationMin, iterationsPerSecondAverage, iterationsPerSecondMax, iterationsPerSecondMin ); delete hooks; } #undef PAD // Final output. std::cout << Console::TextGreen << "[==========]" << Console::TextDefault << " Ran " << ran << (ran == 1 ? " benchmark." : " benchmarks.") << std::endl; }
// Entry point with optional Visual Studio "vutpp" plugin integration: given
// --vutpp:<readHandle>,<writeHandle> it serves test-execution requests over
// two inherited pipe handles using fixed 1024-byte messages; otherwise it
// runs the whole registry and reports failures on stderr.
int main(int argc, char const *argv[]) {
    for( int i = 1; i < argc; i++ ) {
        if( strncmp( argv[i], "--vutpp:", 8 ) == 0 ) {
            // Parameter format: --vutpp:<readPipe>,<writePipe> (decimal values).
            std::string strVutppParam = argv[i] + 8;
            const size_t seperator = strVutppParam.find( ',' );
            if( seperator == std::string::npos )
                return -1;

            HANDLE readPipe, writePipe;
            // NOTE(review): %d scans an int into a pointer-sized HANDLE --
            // presumably only safe on 32-bit builds / small handle values;
            // confirm against the plugin that launches this process.
            sscanf( strVutppParam.substr( 0, seperator ).c_str(), "%d", &readPipe );
            sscanf( strVutppParam.substr( seperator+1 ).c_str(), "%d", &writePipe );

            char readBuffer[1024], writeBuffer[1024];
            DWORD dwSize = 0;

            // Handshake; every pipe message is exactly 1024 bytes.
            strcpy( writeBuffer, "connect" );
            if( WriteFile( writePipe, writeBuffer, 1024, &dwSize, NULL ) == false || dwSize != 1024 )
                return -1;

            // Request loop: each message names a "suite,test" to run, until
            // the __VUTPP_FINISH__ sentinel arrives.
            while( true ) {
                if( ReadFile( readPipe, readBuffer, 1024, &dwSize, NULL ) == false || dwSize != 1024 )
                    return -1;
                if( strncmp( readBuffer, "__VUTPP_FINISH__", 16 ) == 0 )
                    break;

                const char* pSeperator = strchr( readBuffer, ',' );
                std::string suiteName( readBuffer, pSeperator - readBuffer ), testName( pSeperator+1 );
                // Registered test classes carry a "Test" suffix that the IDE
                // request omits.
                testName += "Test";

                bool bRun = false;
                // Linear scan of the registry; there is no break, so every
                // registered test with a matching name is executed.
                for( int testIndex = 0; testIndex < TestRegistry::Instance().TestCount(); testIndex++ ) {
                    Test* pTest = TestRegistry::Instance().Tests()[testIndex];
                    if( strcmp( pTest->Name(), testName.c_str() ) == 0 ) {
                        // Stream results back through the write pipe.
                        VUTPP_Result testResult( writePipe );
                        pTest->Run(testResult);
                        // "-1," acknowledges a dispatched test.
                        strcpy( writeBuffer, "-1," );
                        bRun = true;
                        if( WriteFile( writePipe, writeBuffer, 1024, &dwSize, NULL ) == false || dwSize != 1024 )
                            return -1;
                    }
                }

                if( bRun == false ) {
                    // "-2" reports an error (unknown test) back to the IDE.
                    sprintf( writeBuffer, "%d,,%s", -2, "can't find test" );
                    if( WriteFile( writePipe, writeBuffer, 1024, &dwSize, NULL ) == false || dwSize != 1024 )
                        return -1;
                }
            }
            return 0;
        }
    }

    // No --vutpp argument: run everything.
    // NOTE(review): the registry runs with `testResult` while the exit code
    // comes from the untouched `result` -- as written this appears to always
    // return 0; verify whether `result` was meant to be passed to Run().
    VUTPP_Result testResult( 0 );
    TestResultStdErr result;
    TestRegistry::Instance().Run(testResult);
    return (result.FailureCount());
}
/// Run all benchmarking tests. static void RunAllTests() { Benchmarker& instance = Instance(); // Initial output std::cout << std::fixed; std::cout << Console::TextGreen << "[==========]" << Console::TextDefault << " Running " << instance._tests.size() << (instance._tests.size() == 1 ? " benchmark." : " benchmarks.") << std::endl; // Run through all the tests in ascending order. #define PAD(x) std::cout << std::setw(34) << x << std::endl; #define PAD_DEVIATION(description, \ deviated, \ average, \ unit) \ { \ double _d_ = \ double(deviated) - double(average); \ \ PAD(description << \ deviated << " " << unit << " (" << \ (deviated < average ? \ Console::TextRed : \ Console::TextGreen) << \ (deviated > average ? "+" : "") << \ _d_ << " " << unit << " / " << \ (deviated > average ? "+" : "") << \ (_d_ * 100.0 / average) << " %" << \ Console::TextDefault << ")"); \ } #define PAD_DEVIATION_INVERSE(description, \ deviated, \ average, \ unit) \ { \ double _d_ = \ double(deviated) - double(average); \ \ PAD(description << \ deviated << " " << unit << " (" << \ (deviated > average ? \ Console::TextRed : \ Console::TextGreen) << \ (deviated > average ? "+" : "") << \ _d_ << " " << unit << " / " << \ (deviated > average ? "+" : "") << \ (_d_ * 100.0 / average) << " %" << \ Console::TextDefault << ")"); \ } std::size_t index = 0; while (index < instance._tests.size()) { // Get the test descriptor. TestDescriptor* descriptor = instance._tests[index++]; // Describe the beginning of the run. std::cout << Console::TextGreen << "[ RUN ]" << Console::TextDefault << " " << descriptor->FixtureName << "." << descriptor->TestName << " (" << descriptor->Runs << (descriptor->Runs == 1 ? " run, " : " runs, ") << descriptor->Iterations << (descriptor->Iterations == 1 ? " iteration per run)" : " iterations per run)") << std::endl; // Execute each individual run. 
int64_t timeTotal = 0, timeRunMin = std::numeric_limits<int64_t>::max(), timeRunMax = std::numeric_limits<int64_t>::min(); std::size_t run = descriptor->Runs; while (run--) { // Construct a test instance. Test* test = descriptor->Factory->CreateTest(); // Run the test. int64_t time = test->Run(descriptor->Iterations); // Store the test time. timeTotal += time; if (timeRunMin > time) timeRunMin = time; if (timeRunMax < time) timeRunMax = time; // Dispose of the test instance. delete test; } // Calculate different metrics. double timeRunAverage = double(timeTotal) / double(descriptor->Runs); double runsPerSecondAverage = 1000000.0 / timeRunAverage; double runsPerSecondMax = 1000000.0 / double(timeRunMin); double runsPerSecondMin = 1000000.0 / double(timeRunMax); double timeIterationAverage = timeRunAverage / double(descriptor->Iterations); double timeIterationMin = double(timeRunMin) / double(descriptor->Iterations); double timeIterationMax = double(timeRunMax) / double(descriptor->Iterations); double iterationsPerSecondAverage = 1000000.0 / timeIterationAverage; double iterationsPerSecondMax = 1000000.0 / timeIterationMin; double iterationsPerSecondMin = 1000000.0 / timeIterationMax; // Describe the end of the run. std::cout << Console::TextGreen << "[ DONE ]" << Console::TextDefault << " " << descriptor->FixtureName << "." 
<< descriptor->TestName << " (" << (double(timeTotal) / 1000.0) << " ms)" << std::endl; std::cout << Console::TextYellow << "[ RUNS ] " << Console::TextDefault << " Average time: " << timeRunAverage << " us" << std::endl; PAD_DEVIATION_INVERSE("Fastest: ", timeRunMin, timeRunAverage, "us"); PAD_DEVIATION_INVERSE("Slowest: ", timeRunMax, timeRunAverage, "us"); PAD(""); PAD("Average performance: " << runsPerSecondAverage << " runs/s"); PAD_DEVIATION("Best performance: ", runsPerSecondMax, runsPerSecondAverage, "runs/s"); PAD_DEVIATION("Worst performance: ", runsPerSecondMin, runsPerSecondAverage, "runs/s"); std::cout << Console::TextYellow << "[ITERATIONS] " << Console::TextDefault << " Average time: " << timeIterationAverage << " us" << std::endl; PAD_DEVIATION_INVERSE("Fastest: ", timeIterationMin, timeIterationAverage, "us"); PAD_DEVIATION_INVERSE("Slowest: ", timeIterationMax, timeIterationAverage, "us"); PAD(""); PAD("Average performance: " << iterationsPerSecondAverage << " iterations/s"); PAD_DEVIATION("Best performance: ", iterationsPerSecondMax, iterationsPerSecondAverage, "iterations/s"); PAD_DEVIATION("Worst performance: ", iterationsPerSecondMin, iterationsPerSecondAverage, "iterations/s"); } #undef PAD // Final output. std::cout << Console::TextGreen << "[==========]" << Console::TextDefault << " Ran " << instance._tests.size() << (instance._tests.size() == 1 ? " benchmark." : " benchmarks.") << std::endl; }
void iCoreThreadPoolTest() { Test test; test.Run(); }
int main() { Test t; return t.Run(); }