/* Curses-mode test driver.  The first command-line argument selects the
 * suites to register: --test, --atest, or --alltest (both).  With no
 * recognized flag nothing runs.  Returns 0 on success, 1 if the test
 * registry cannot be initialized. */
int main(int argc, char* argv[])
{
    setvbuf(stdout, NULL, _IONBF, 0);   /* unbuffered so progress shows immediately */

    if (argc > 1) {
        BOOL Run = FALSE;

        /* BUG FIX: the original printed this failure message but then fell
         * through and registered/ran tests on an uninitialized registry.
         * Bail out instead. */
        if (initialize_registry()) {
            printf("\nInitialize of test Registry failed.");
            return 1;
        }

        if (!strcmp("--test", argv[1])) {
            Run = TRUE;
            AddTests();
        } else if (!strcmp("--atest", argv[1])) {
            Run = TRUE;
            AddAssertTests();
        } else if (!strcmp("--alltest", argv[1])) {
            Run = TRUE;
            AddTests();
            AddAssertTests();
        }

        if (TRUE == Run) {
            curses_run_tests();
        }
        cleanup_registry();
    }
    return 0;
}
// Convenience overload: registers the null-terminated array of test cases
// under an automatically generated set name (NextTestSetName()).
// @param nullTerminatedArray  array of TestCase entries, terminated by a null entry
void TestSetCont::AddTests ( const TestCase nullTerminatedArray[] ) { AddTests( NextTestSetName(), nullTerminatedArray ); }
/* Initializes the CUnit registry, registers all tests, and runs them in
 * basic verbose mode.  Exits the process if the registry cannot be
 * initialized; otherwise returns the CUnit error code. */
int RunTest()
{
    if (CU_initialize_registry()) {
        fprintf(stderr, " Initialization of Test Registry failed. ");
        exit(EXIT_FAILURE);
    }

    AddTests();

    /* Automated mode (disabled):
     *   CU_set_output_filename("TestMax");
     *   CU_list_tests_to_file();
     *   CU_automated_run_tests();
     */

    /* Basic mode */
    CU_basic_set_mode(CU_BRM_VERBOSE);
    CU_basic_run_tests();

    /* Console mode (disabled):
     *   CU_console_run_tests();
     */

    CU_cleanup_registry();
    return CU_get_error();
}
int RunTest(void) { if (CU_initialize_registry()) { fprintf(stderr, "Initialization of Test Registry failed."); exit(EXIT_FAILURE); } else { AddTests(); // 第一种:直接输出测试结果 CU_basic_set_mode(CU_BRM_VERBOSE); CU_basic_run_tests(); // 第二种:交互式输出测试结果 //CU_console_run_tests(); // 第三种:自动生成xml,xlst等文件 //CU_set_output_filename("TestMax"); //CU_list_tests_to_file(); //CU_automated_run_tests(); CU_cleanup_registry(); return CU_get_error(); } }
int main(int argc, char *argv[]) { int basic; int console; int automated; /* parse the command line options */ while ((c = getopt(argc, argv, "abc")) != -1) { switch (c) { case 'a': /* basic mode */ basic = 1; break; case 'b': /* basic mode */ basic = 1; break; case 'c': /* run from command line */ console = 1; break; default: /* error, print usage */ usage(argv[0]); return -1; } } if (CU_initialize_registry()) { fprintf(stderr, "\nInitialization of Test Registry failed.\n"); exit(EXIT_FAILURE); } else { AddTests(); // set up the run mode and run the tests if (automated) { CU_set_output_filename("TestOutput.xml"); CU_list_tests_to_file(); CU_automated_run_tests(); } else if (basic) { CU_BasicRunMode mode = CU_BRM_VERBOSE; CU_ErrorAction error_action = CUEA_IGNORE; CU_basic_set_mode(mode); CU_set_error_action(error_action); CU_basic_run_tests(); } else if (console) { CU_console_run_tests(); } CU_cleanup_registry(); } return 0; }
int basicTest(int argc, char* argv[]) { CU_BasicRunMode mode = CU_BRM_VERBOSE; CU_ErrorAction error_action = CUEA_IGNORE; int i; setvbuf(stdout, NULL, _IONBF, 0); for (i=1 ; i<argc ; i++) { if (!strcmp("-i", argv[i])) { error_action = CUEA_IGNORE; } else if (!strcmp("-f", argv[i])) { error_action = CUEA_FAIL; } else if (!strcmp("-A", argv[i])) { error_action = CUEA_ABORT; } else if (!strcmp("-s", argv[i])) { mode = CU_BRM_SILENT; } else if (!strcmp("-n", argv[i])) { mode = CU_BRM_NORMAL; } else if (!strcmp("-v", argv[i])) { mode = CU_BRM_VERBOSE; } // else if (!strcmp("-e", argv[i])) { // print_example_results(); // return 0; // } else { printf("\nUsage: BasicTest [options]\n\n" "Options: -i ignore framework errors [default].\n" " -f fail on framework error.\n" " -A abort on framework error.\n\n" " -s silent mode - no output to screen.\n" " -n normal mode - standard output to screen.\n" " -v verbose mode - max output to screen [default].\n\n" // " -e print expected test results and exit.\n" " -h print this message and exit.\n\n"); return 0; } } if (CU_initialize_registry()) { printf("\nInitialization of Test Registry failed."); } else { AddTests(); CU_basic_set_mode(mode); CU_set_error_action(error_action); printf("\nTests completed with return value %d.\n", CU_basic_run_tests()); CU_cleanup_registry(); } return 0; }
/* Automated-mode driver: registers all tests and writes the listing and
 * results using the "Test single" filename root.  Exits the process if
 * the registry cannot be initialized; otherwise returns 0. */
int main( int argc, char *argv[] )
{
    if (CU_initialize_registry()) {
        fprintf(stderr, " Initialization of Test Registry failed. ");
        exit(EXIT_FAILURE);
    }

    AddTests();
    CU_set_output_filename("Test single");
    CU_list_tests_to_file();
    CU_automated_run_tests();
    CU_cleanup_registry();
    return 0;
}
/* Automated-mode driver with a selectable framework error action.
 * With no argument the tests run under CUEA_IGNORE; -i/-f/-A pick an
 * action explicitly; any other argument prints usage and skips the run.
 * Returns 0 always. */
int automatedTest(int argc, char* argv[])
{
    CU_BOOL Run = CU_FALSE;

    setvbuf(stdout, NULL, _IONBF, 0);   /* unbuffered progress output */

    if (argc <= 1) {
        /* no option given: run with the default error action */
        Run = CU_TRUE;
        CU_set_error_action(CUEA_IGNORE);
    } else if (strcmp(argv[1], "-i") == 0) {
        Run = CU_TRUE;
        CU_set_error_action(CUEA_IGNORE);
    } else if (strcmp(argv[1], "-f") == 0) {
        Run = CU_TRUE;
        CU_set_error_action(CUEA_FAIL);
    } else if (strcmp(argv[1], "-A") == 0) {
        Run = CU_TRUE;
        CU_set_error_action(CUEA_ABORT);
    /* } else if (strcmp(argv[1], "-e") == 0) {
     *     print_example_results(); */
    } else {
        printf("\nUsage: AutomatedTest [option]\n\n"
               " Options: -i Run, ignoring framework errors [default].\n"
               " -f Run, failing on framework error.\n"
               " -A Run, aborting on framework error.\n"
               /* " -e Print expected test results and exit.\n" */
               " -h Print this message.\n\n");
    }

    if (CU_TRUE == Run) {
        if (CU_initialize_registry()) {
            printf("\nInitialization of Test Registry failed.");
        } else {
            AddTests();
            CU_set_output_filename("TestAutomated");
            CU_list_tests_to_file();
            CU_automated_run_tests();
            CU_cleanup_registry();
        }
    }
    return 0;
}
/* Runs all registered tests in CUnit automated (XML) mode, logging to
 * log.txt.  Returns 0 always; prints a message and runs nothing if the
 * registry cannot be initialized. */
int main()
{
    if (CU_initialize_registry()) {
        printf("\nInitialization of Test Registry failed.");
    } else {
        LOG_FILE_OPEN("log.txt");
        AddTests();

        /* Automated mode: write the test listing and results under the
         * "TestAutomated" filename root. */
        CU_set_output_filename("TestAutomated");
        CU_list_tests_to_file();
        CU_automated_run_tests();

        /* Alternative run modes, kept for reference:
         *   Basic:   CU_basic_set_mode(CU_BRM_NORMAL / CU_BRM_SILENT /
         *            CU_BRM_VERBOSE); CU_basic_run_tests();
         *   Console: CU_console_run_tests();
         *   Curses:  CU_curses_run_tests();
         */

        CU_cleanup_registry();

        /* BUG FIX: close the log only on the path that opened it; the
         * original called LOG_FILE_CLOSE() even when registry
         * initialization failed and LOG_FILE_OPEN() never ran. */
        LOG_FILE_CLOSE();
    }
    return 0;
}
/* Runs all registered tests in basic verbose mode, failing on framework
 * errors.  Command-line arguments are accepted but ignored.
 * Returns 0 always. */
int main(int args, char *argv[])
{
    CU_BasicRunMode mode = CU_BRM_VERBOSE;
    CU_ErrorAction error_action = CUEA_FAIL;

    setvbuf(stdout, 0, _IONBF, 0);   /* unbuffered so progress shows immediately */

    if (CU_initialize_registry()) {
        printf("\nTest init error\n");
    } else {
        AddTests();
        CU_basic_set_mode(mode);
        CU_set_error_action(error_action);
        printf("\nTests completed with return value %d. \n", CU_basic_run_tests());
        CU_cleanup_registry();
    }
    /* FIX: explicit return value — the original fell off the end of main. */
    return 0;
}
/* Automated-mode driver: registers all tests and writes the listing and
 * results using the "ununpack" filename root.  Exits the process on
 * registry-initialization failure; otherwise returns 0. */
int main( int argc, char *argv[] )
{
    printf("test start\n");

    if (CU_initialize_registry()) {
        fprintf(stderr, "\nInitialization of Test Registry failed.\n");
        exit(EXIT_FAILURE);
    }

    AddTests();
    CU_set_output_filename("ununpack");
    CU_list_tests_to_file();
    CU_automated_run_tests();
    CU_cleanup_registry();

    printf("end\n");
    return 0;
}
int main (int argc, char **argv) { CU_ErrorCode cuError; cuError = CU_initialize_registry(); if (cuError == CUE_NOMEMORY) { DbgLog(__FILE__, "%d CU_initialize_registry failed\n", __LINE__); return 1; } if (AddTests() != 0) { DbgLog(__FILE__, "line:%d AddTests failed\n", __LINE__); return 1; } CU_console_run_tests(); CU_cleanup_registry(); return 0; }
/* Exercises the test-registry helpers AddTests / TraverseTest / FindTests
 * in sequence, printing a pass/fail message for each.  Stops at the first
 * failure.  Returns 0 on success, 1 on any failure.
 * (RemoveTests and PrintTest checks are currently disabled.) */
int main()
{
    char *result = (char *)calloc(1024, sizeof(char));  /* calloc zero-fills; no memset needed */
    bool passed = false;

    /* FIX: guard against allocation failure before writing into result. */
    if (result == NULL) {
        printf("\nOut of memory\n");
        return 1;
    }

    passed = AddTests(result);
    if (passed) {
        printf("\nAdd Tests Passed!");
    } else {
        printf("\nAdd Tests Failed: %s\n", result);
        free(result);   /* FIX: the original leaked 'result' on every path */
        return 1;
    }
    fflush(NULL);

    passed = TraverseTest(result);
    if (passed) {
        printf("\nTraverse Test Passed!");
    } else {
        printf("\nTraverse Test Failed: %s\n", result);
        free(result);
        return 1;
    }
    fflush(NULL);

    passed = FindTests(result);
    if (passed) {
        printf("\nFind Tests Passed!");
    } else {
        printf("\nFind Tests Failed: %s\n", result);
        free(result);
        return 1;
    }
    fflush(NULL);

    printf("\n");
    free(result);
    return 0;
}
// Constructor: registers this suite's test cases, supplied by data__().
nls_test_suite()
{
  // add the tests
  AddTests(data__());
}
// Constructor: default-initializes the tolerance helper and registers the
// suite's test cases from data__().
piecewise_polynomial_creator_test_suite() : tol() { AddTests(data__()); }
// Constructor: default-initializes the tolerance helper and registers the
// suite's test cases from data__().
piecewise_capped_surface_creator_test_suite() : tol() { AddTests(data__()); }
// Constructor: default-initializes the tolerance helper and registers the
// suite's test cases from data__().
piecewise_four_digit_creator_test_suite() : tol() { AddTests(data__()); }
// Constructor: registers this suite's test cases, supplied by data__().
explicit_bezier_curve_test_suite() { AddTests(data__()); }
// Constructor: default-initializes the tolerance helper and registers the
// suite's test cases from data__().
piecewise_point_creator_test_suite() : tol() { AddTests(data__()); }
// Constructor: registers this suite's test cases, supplied by data__().
cst_airfoil_test_suite() { AddTests(data__()); }
// Called by the framework to obtain a list of all unit test objects in this catalog // return value: reference to list<UnitTestBase> object UnitTestList& UnitTestCatalogBase::GetTests() { AddTests(); return m_testsList; }
minimum_distance_point_test_suite() { AddTests(data__()); }