Example #1
#include <errno.h>
#include <stdarg.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

void teardown_test(void);   // provided by the test harness

// print a printf-style failure message plus errno, tear down, and abort
void fail_test(const char * reason, ...)
{
    // capture errno first: the stdio calls below may overwrite it
    int saved_errno = errno;
    va_list args;

    va_start(args, reason);
    vfprintf(stderr, reason, args);
    va_end(args);

    fprintf(stderr, " : errno = %d (%s)\n", saved_errno, strerror(saved_errno));
    teardown_test();
    abort();
}
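A minimal usage sketch for a variadic failure helper like this one, assuming fail_test and a teardown_test implementation are linked in from the snippet above; the fixture file name is hypothetical:

#include <stdio.h>

void fail_test(const char* reason, ...);   // helper from the snippet above

int main(void) {
    // hypothetical fixture open; on failure, report with a printf-style message
    FILE* f = fopen("fixture.dat", "rb");
    if (!f)
        fail_test("could not open %s", "fixture.dat");
    fclose(f);
    return 0;
}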
Example #2
File: corelib.c Project: 8l/awl
void test_corelib_do(void) {
    awlenv* e = setup_test();

    // 'do' propagates errors: an unbound symbol and a division by zero
    TEST_ASSERT_TYPE(e, "(do x)", AWLVAL_ERR);
    TEST_ASSERT_TYPE(e, "(do (/ 1 0))", AWLVAL_ERR);

    // 'let' bindings inside 'do' do not leak into the enclosing environment
    TEST_ASSERT_EQ(e, "(do (let ((x 5) (y 6)) (+ x y)))", "11");
    TEST_ASSERT_TYPE(e, "x", AWLVAL_ERR);
    TEST_ASSERT_TYPE(e, "y", AWLVAL_ERR);

    // 'do' evaluates its arguments in order and returns the last value;
    // 'define' inside 'do' does modify the enclosing environment
    TEST_ASSERT_EQ(e, "(do 1 2 3 4 5)", "5");
    TEST_ASSERT_EQ(e, "(do (define x 5) (define y 6) (+ x y))", "11");
    TEST_ASSERT_EQ(e, "x", "5");
    TEST_ASSERT_EQ(e, "y", "6");

    teardown_test(e);
}
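The TEST_ASSERT_TYPE and TEST_ASSERT_EQ macros are not shown in this snippet. Below is a plausible sketch of how such eval-and-compare macros can be built; eval_string, print_to_string, awlval, and awlval_del are hypothetical stand-ins, not the actual awl API:

#include <assert.h>
#include <stdlib.h>
#include <string.h>

// evaluate source text in env and assert on the resulting value's type
#define TEST_ASSERT_TYPE(env, input, exp_type) do {          \
    awlval* v_ = eval_string((env), (input));                \
    assert(v_->type == (exp_type));                          \
    awlval_del(v_);                                          \
} while (0)

// evaluate source text and assert its printed form matches a string
#define TEST_ASSERT_EQ(env, input, expected) do {            \
    awlval* v_ = eval_string((env), (input));                \
    char* s_ = print_to_string(v_);                          \
    assert(strcmp(s_, (expected)) == 0);                     \
    free(s_);                                                \
    awlval_del(v_);                                          \
} while (0)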
Example #3
#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

static bool go(bool result_only, size_t object_size, size_t binary_size, const char* name) {

    // setup
    if (!result_only) {
        printf("%s: ================\n", name);
        printf("%s: setting up size %i\n", name, (int)object_size);
    }
    if (!setup_test(object_size)) {
        fprintf(stderr, "%s: failed to get setup result.\n", name);
        return false;
    }

    // if this isn't a benchmark (e.g. one of the file-creator tests), there's nothing left to do
    if (!is_benchmark()) {
        teardown_test();
        if (!result_only)
            printf("%s: done\n", name);
        return true;
    }

    // figure out a reasonable number of iterations between checking the time
    int iterations;
    #ifdef __arm__
    iterations = 1;
    #else
    iterations = 32;
    #endif
    // smaller objects run faster, so scale iterations up by 8x per size
    // step below 5 (e.g. object_size 2 on non-ARM: 32 << 9 = 16384)
    for (size_t i = 5; i > object_size; --i)
        iterations <<= 3;

    uint32_t hash_result;

    // warm up
    if (!result_only)
        printf("%s: warming for %.0f seconds \n", name, WARM_TIME);
    double start_time = dtime();
    while (true) {
        for (int i = 0; i < iterations; ++i) {
            hash_result = HASH_INITIAL_VALUE;  // seed as in the measured runs below
            if (!run_wrapper(&hash_result)) {
                fprintf(stderr, "%s: failed to get benchmark result.\n", name);
                return false;
            }
        }
        if (dtime() - start_time > WARM_TIME)
            break;
    }

    // run tests
    if (!result_only)
        printf("%s: running for %.0f seconds\n", name, WORK_TIME);
    int total_iterations = 0;
    start_time = dtime();
    double end_time;
    while (true) {
        for (int i = 0; i < iterations; ++i) {
            hash_result = HASH_INITIAL_VALUE;
            if (!run_wrapper(&hash_result)) {
                fprintf(stderr, "%s: failed to get benchmark result.\n", name);
                return false;
            }
            ++total_iterations;
        }
        end_time = dtime();
        if (end_time - start_time > WORK_TIME)
            break;
    }

    // print results
    double per_time = (end_time - start_time) / (double)total_iterations * (1000.0 * 1000.0);
    if (result_only) {
        printf("%f\n", per_time);
    } else {
        printf("%s: %i iterations took %f seconds\n", name, total_iterations, end_time - start_time);
        printf("%s: %f microseconds per iteration\n", name, per_time);
        printf("%s: hash result of last run: %08x\n", name, hash_result);
    }

    // write score
    if (!result_only) {
        // hoist the conditional out of the call: preprocessor directives
        // inside a function call's argument list are not portable C
        #if BENCHMARK_SIZE_OPTIMIZED
        const int size_optimized = 1;
        #else
        const int size_optimized = 0;
        #endif

        FILE* file = fopen("results.csv", "a");
        if (file) {
            fprintf(file, "\"%s\",\"%s\",\"%s\",\"%s\",\"%s\",%i,%f,%i,%i,\"%08x\"\n",
                    name, test_language(), test_version(), test_filename(), test_format(),
                    (int)object_size, per_time, (int)binary_size,
                    size_optimized, hash_result);
            fclose(file);
        } else {
            fprintf(stderr, "%s: failed to open results.csv for appending.\n", name);
        }
    }

    teardown_test();

    return true;
}
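The timing here relies on a dtime() helper that isn't part of the snippet. A minimal sketch assuming a POSIX system; the project's actual implementation may differ:

#include <time.h>

// seconds as a double from a monotonic clock (POSIX clock_gettime assumed)
static double dtime(void) {
    struct timespec ts;
    clock_gettime(CLOCK_MONOTONIC, &ts);
    return (double)ts.tv_sec + (double)ts.tv_nsec / 1e9;
}

With this, end_time - start_time measures elapsed wall-clock time in seconds, and per_time converts it to microseconds per iteration.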