// Runs the low-level test groups (SCGI, YAML parser, queue, util) in sequence.
// NOTE(review): this function is declared to return VALUE but falls off the end
// without a return statement — undefined behavior in C/C++. If VALUE is the
// Ruby C-API type this presumably should end with `return Qnil;` — confirm
// against the caller / binding code before changing.
VALUE run() { test_scgi(); test_yaml_parser(); test_queue(); test_util(); }
/*
 * Assemble the Check test suite for the whole project: one suite named
 * "all_tests" containing the util, types, and metagame test cases.
 * Returns the newly created Suite*, ready to hand to a suite runner.
 */
Suite* all_tests(void)
{
    Suite* suite = suite_create("all_tests");

    suite_add_tcase(suite, test_util());
    suite_add_tcase(suite, test_types());
    suite_add_tcase(suite, test_metagame());

    return suite;
}
/*
 * Test driver: execute each unit-test group in turn, then print the
 * summary via recap(). Always exits 0; failures are reported by recap().
 */
int main()
{
    test_jeu();
    test_util();
    test_matrix();

    recap();
    return 0;
}
/*
 * Unit-test entry point: announce the run, then exercise each parser /
 * utility routine in sequence. Exit code is always 0; individual tests
 * report their own results.
 */
int main()
{
    debug("== Start Unit Testing ==");

    test_examineFile();
    test_getFileName();
    test_cleanLine();
    test_cleanLine2();
    test_commandTypeCheck();
    test_arg1();
    test_arg2();
    test_util();

    return 0;
}
/**
   Main test — entry point for the fish low-level test suite.

   Phases (order matters — each *_init has a matching *_destroy below):
   1. Locale / RNG / program-name setup so wide-char output and any
      randomized tests behave deterministically enough to read.
   2. Initialize every subsystem the tests touch (proc, halloc, events,
      parser, functions, builtins, reader, environment).
   3. Run the individual test groups; each increments err_count on failure.
   4. Report the error total, then tear the subsystems down in roughly
      reverse order of initialization.

   NOTE(review): the function always falls off the end (implicit return 0
   in C++ / C99 main), so err_count is reported but not reflected in the
   exit status — confirm whether callers rely on the exit code.
 */
int main( int argc, char **argv )
{
	/* Locale + RNG + program-name setup (phase 1). */
	setlocale( LC_ALL, "" );
	srand( time( 0 ) );
	program_name=L"(ignore)";

	say( L"Testing low-level functionality");
	say( L"Lines beginning with '(ignore):' are not errors, they are warning messages\ngenerated by the fish parser library when given broken input, and can be\nignored. All actual errors begin with 'Error:'." );

	/* Subsystem initialization (phase 2) — order is significant. */
	proc_init();
	halloc_util_init();
	event_init();
	parser_init();
	function_init();
	builtin_init();
	reader_init();
	env_init();

	/* The test groups themselves (phase 3). */
	test_util();
	test_escape();
	test_convert();
	test_tok();
	test_parser();
	test_expand();
	test_path();

	say( L"Encountered %d errors in low-level tests", err_count );

	/*
	  Skip performance tests for now, since they seem to hang when running
	  from inside make (?)
	*/
	// say( L"Testing performance" );
	// perf_complete();

	/* Teardown (phase 4) — mirrors the init sequence above. */
	env_destroy();
	reader_destroy();
	parser_destroy();
	function_destroy();
	builtin_destroy();
	wutil_destroy();
	event_destroy();
	proc_destroy();

	halloc_util_destroy();
}
int _tmain(int argc, _TCHAR* argv[]) { LOG_F("xTest main(), 测试 \n"); int arg = 1018; switch(arg) { case 1000: { test_util_ex(); }break; case 1: { test_util(); }break; case 2: { test_container(); }break; case 3: { test_thread(); }break; case 4: { test_net(true); }break; case 5: { test_boost(); }break; case 6: { test_kbe(); }break; case 1001: { test_db(); }break; case 1002: { test_math(); }break; case 1003: { test_design(); }break; case 1005: { //test_net(false); test_async(true); }break; case 1006: { test_lua(); }break; case 1007: { test_js(); }break; case 1008: { test_proto(); }break; case 1010: { test_graph(); }break; case 1011: { test_graph_d3d(); }break; case 1013: { test_cv(); }break; case 1015: { test_battle(); }break; case 1016: { test_hacker(); }break; case 1017: { test_yh(); }break; case 1018: { test_ai(); } case 1900: { test_tool(); }break; default: { MainEx(argc, argv); }break; } // { // int n = X::Init(NULL); // LOGD_F(" n = %d", n); // // X_HSER->Init(); // X_HSER->Start(); // // X_HSER->Stop(); // X_HSER->Destroy(); // } // { // // [4/26/2015 Administrator] // HINSTANCE hnst=LoadLibrary(_T("xService.dll")); // FreeLibrary(hnst); // // HINSTANCE hnst2=LoadLibrary(_T("xService.dll")); // FreeLibrary(hnst); // // } LOG_F("xTest main(), end"); while(true) { X::Sleep_f(1); } return 0; }
/*
 * Test-suite entry point for the cortex graph toolkit.
 *
 * Flow: initialize the library and command context, route test output to
 * stdout, run the kmer-size-independent tests, then (only when compiled
 * with MAX_KMER_SIZE == 31) the full battery of graph/path tests. After
 * tearing down the command context it asserts that every allocation was
 * freed, prints a pass/fail summary, and exits 1 if any test failed.
 */
int main(int argc, char **argv)
{
  cortex_init();
  cmd_init(argc, argv);

  /* Silence normal messages; send test output to stdout. */
  ctx_msg_out = NULL;
  ctx_tst_out = stdout;

  test_status("Tests running k=%i..%i...", get_min_kmer_size(), get_max_kmer_size());
  test_status("[version] "VERSION_STATUS_STR"\n");

  // Binary Kmer tests should work for all values of MAXK
  test_bkmer_functions();
  test_hash_table();

#if MAX_KMER_SIZE == 31
  /* These tests were only written for the k=31 build; guard them so other
     MAXK builds still compile and run the kmer-independent subset above. */
  // not kmer dependent
  test_util();
  test_dna_functions();
  test_binary_seq_functions();

  // only written in k=31
  test_db_node();
  test_build_graph();
  test_supernode();
  test_subgraph();
  test_cleaning();
  test_paths();
  // test_path_sets(); // TODO: replace with test_path_subset()
  test_graph_walker();
  test_corrected_aln();
  test_repeat_walker();
  test_graph_crawler();
  test_bubble_caller();
  test_kmer_occur();
  test_infer_edges_tests();
#endif

  cmd_destroy();

  // Check we free'd all our memory
  size_t still_alloced = alloc_get_num_allocs() - alloc_get_num_frees();
  TASSERT2(still_alloced == 0, "%zu not free'd", still_alloced);

  // Finished: format the pass/fail counts and print a summary.
  char num_test_str[100], num_passed_str[100];
  size_t tests_num_passed = tests_num_run - tests_num_failed;
  ulong_to_str(tests_num_run, num_test_str);
  ulong_to_str(tests_num_passed, num_passed_str);

  test_status("Tests passed: %s / %s (%.1f%%)", num_passed_str, num_test_str,
              (100.0*tests_num_passed)/tests_num_run);

  if(tests_num_failed) test_status("%zu tests failed", tests_num_failed);
  else test_status("All tests passed.");

  cortex_destroy();

  // Return 1 if any tests failed, 0 on success
  return tests_num_failed ? 1 : 0;
}
/*
 * Execute every unit-test group in sequence: SCGI handling, the YAML
 * parser, the queue implementation, and the shared utilities.
 */
void run_test()
{
    test_scgi();
    test_yaml_parser();
    test_queue();
    test_util();
}