static const char *filter_bytype1(cmd_parms *cmd, void *CFG,
                                  const char *pname, const char **types)
{
    mod_filter_cfg *cfg = CFG;
    const char *err;
    const char *fname;
    int already_live;

    /* Derive the internal filter name from the provider name. */
    fname = apr_pstrcat(cmd->pool, "BYTYPE:", pname, NULL);

    /* A hit in live_filters means this filter was registered before and is
     * therefore already present in the filter chain.
     */
    already_live = (apr_hash_get(cfg->live_filters, fname,
                                 APR_HASH_KEY_STRING) != NULL);

    err = add_filter(cmd, CFG, fname, pname, NULL, types);

    /* First sighting of this name: hook the new filter into the chain. */
    if (err == NULL && !already_live) {
        err = filter_chain(cmd, CFG, fname);
    }
    return err;
}
// Runs every registered test (subject to the name filter) for
// info.num_iterations iterations, printing a formatted result table plus a
// per-iteration summary. Returns an aggregate result: notfound when no test
// actually ran, failure when any test failed, leaks when any test leaked,
// otherwise success. Throws std::runtime_error if the system requirements
// check fails, and via oThrow if a column width cannot fit its 64-byte
// format buffer.
static result run(framework_services& fw, const info_t& info, const registered* tests, size_t num_tests, const filter_chain::filter_t* filters, size_t num_filters)
{
	services& srv = fw;

	// A failed system-requirements check aborts the run, with context prepended.
	try
	{
		fw.check_system_requirements(info);
	}
	catch (std::exception& e)
	{
		throw std::runtime_error(std::string("unit tests did not meet system requirements:\n ") + e.what());
	}

	// Compile the test-name filter; a malformed filter spec fails the run.
	filter_chain filter;
	try
	{
		// FIX: was `filter = std::move(filter_chain(filters, num_filters));`.
		// std::move on a prvalue temporary is redundant — assignment from a
		// temporary already selects the move-assignment operator — and such
		// moves are flagged by compilers (-Wpessimizing-move). Plain
		// assignment is behaviorally identical.
		filter = filter_chain(filters, num_filters);
	}
	catch (std::exception& e)
	{
		srv.printf(print_type::error, "%s\n", e.what());
		return result::failure;
	}

	// Sort the tests by their registration ordering (operator< on registered).
	std::vector<const registered*> sorted_tests;
	{
		sorted_tests.reserve(num_tests);
		auto t = tests;
		auto e = t + num_tests;
		while (t < e)
			sorted_tests.push_back(t++);
		std::sort(sorted_tests.begin(), sorted_tests.end()
			, [](const registered* a, const registered* b)->bool { return *a < *b; });
	}

	// Prepare the printf formats used to lay out the result table columns.
	// NOTE(review): these two-argument snprintf calls rely on a project
	// overload that deduces the destination size from the array type
	// (standard snprintf requires an explicit size argument) — confirm
	// against the project's string utilities header.
	// NOTE(review): the `64 < n` truncation checks look off by one —
	// snprintf returns the formatted length excluding the NUL, so n == 64
	// also truncates — verify intent before changing.
	char fmtname[64];
	if (64 < snprintf(fmtname, "%%-%us", info.name_col_width))
		oThrow(std::errc::invalid_argument, "name col width too wide");
	char fmtstatus[64];
	if (64 < snprintf(fmtstatus, "%%-%us", info.status_col_width))
		oThrow(std::errc::invalid_argument, "status col width too wide");
	char fmttime[64];
	if (64 < snprintf(fmttime, "%%-%us", info.time_col_width))
		oThrow(std::errc::invalid_argument, "time col width too wide");
	char fmtmsg[64];
	snprintf(fmtmsg, "%%s\n");

	// Totals accumulated across all iterations; counts is reset per iteration.
	uint32_t nsucceeded = 0;
	uint32_t nfailed = 0;
	uint32_t nleaks = 0;
	uint32_t nskipped = 0;
	std::array<uint32_t, (int)result::count> counts;

	fw.pre_iterations(info);
	timer whole_run_timer;
	for (size_t iteration = 0; iteration < info.num_iterations; iteration++)
	{
		counts.fill(0);

		srv.printf(print_type::normal, "========== %s Run %u ==========\n", info.test_suite_name, iteration + 1);

		// table headers
		{
			srv.printf(print_type::heading, fmtname, "TEST NAME");
			separator(srv);
			srv.printf(print_type::heading, fmtstatus, "STATUS");
			separator(srv);
			srv.printf(print_type::heading, fmttime, "TIME");
			separator(srv);
			srv.printf(print_type::heading, fmtmsg, "STATUS MESSAGE");
		}

		timer iter_timer;
		for (auto test : sorted_tests)
		{
			if (!test)
				continue;

			const char* test_name = test->name;
			srv.printf(print_type::normal, fmtname, test_name);
			separator(srv);

			result res = result::filtered;
			double test_run_time = 0.0;
			if (filter.passes(test_name))
			{
				srv.trace("========== Begin %s Run %u ==========", test_name, iteration + 1);

				// restore initial state so each test sees a deterministic RNG
				fw.seed_rand(info.random_seed);

				// clear status
				fw.status("");

				fw.pre_test(info, test_name);

				timer test_timer;
				try
				{
					test->run(srv);
					res = result::success;
				}
				catch (skip_test& e)
				{
					res = result::skipped;
					srv.status(e.what());
				}
				catch (test_not_ready& e)
				{
					res = result::notready;
					srv.status(e.what());
				}
				catch (std::exception& e)
				{
					res = result::failure;
					srv.trace("%s: %s", test_name, e.what());
					srv.status(e.what());
				}
				test_run_time = test_timer.seconds();

				fw.post_test(info);

				// check for leaks; a leak never masks an outright failure
				if (res != result::failure && fw.has_memory_leaks(info))
					res = result::leaks;

				char duration[64];
				format_duration(duration, round(test_run_time));
				srv.trace("========== End %s Run %u %s in %s ==========", test_name, iteration + 1, as_string(res), duration);
			}
			else
			{
				// filtered out: record as skipped with a placeholder status
				res = result::skipped;
				srv.status("---");
			}

			counts[(int)res]++;

			// print the result
			srv.printf(s_result_print_type[(int)res], fmtstatus, as_string(res));
			separator(srv);

			// print the time taken, colored by how slow the test ran
			{
				double runtime = test_run_time;
				print_type type = runtime > info.run_time_very_slow ? print_type::error : ((runtime > info.run_time_slow) ? print_type::caution : print_type::normal);
				char duration[64];
				format_duration(duration, round(runtime), true);
				srv.printf(type, fmttime, duration);
				separator(srv);
			}

			// print the status message
			{
				apply_default_status(srv, res, test->bug);
				srv.printf(print_type::normal, fmtmsg, srv.status());
				::_flushall();
			}
		}

		::_flushall();

		// Summarize results for this iteration and sum total statistics
		const uint32_t iter_nsucceeded = counts[(int)result::success];
		const uint32_t iter_nfailed    = counts[(int)result::failure];
		const uint32_t iter_nleaks     = counts[(int)result::leaks];
		const uint32_t iter_nskipped   = counts[(int)result::skipped] + counts[(int)result::filtered] + counts[(int)result::bugged] + counts[(int)result::notready];

		if ((iter_nsucceeded + iter_nfailed + iter_nleaks) == 0)
		{
			// nothing executed at all: report and stop iterating
			srv.printf(print_type::error, "========== Unit Tests: ERROR NO TESTS RUN ==========\n");
			break;
		}
		else
		{
			char duration[64];
			format_duration(duration, round(iter_timer.seconds()));
			srv.printf(print_type::heading, "========== Unit Tests: %u succeeded, %u failed, %u leaked, %u skipped in %s ==========\n", iter_nsucceeded, iter_nfailed, iter_nleaks, iter_nskipped, duration);
		}

		nsucceeded += iter_nsucceeded;
		nfailed    += iter_nfailed;
		nleaks     += iter_nleaks;
		nskipped   += iter_nskipped;
	}

	// Whole-run summary only when more than one iteration was requested.
	if (info.num_iterations != 1)
	{
		char duration[64];
		format_duration(duration, round(whole_run_timer.seconds()));
		srv.printf(print_type::heading, "========== %u Iterations: %u succeeded, %u failed, %u leaked, %u skipped in %s ==========\n", info.num_iterations, nsucceeded, nfailed, nleaks, nskipped, duration);
	}

	if ((nsucceeded + nfailed + nleaks) == 0)
		return result::notfound;
	if (nfailed > 0)
		return result::failure;
	if (nleaks > 0)
		return result::leaks;
	return result::success;
}