/**
 * \brief Execute a test suite using the given run seed and execution key.
*
 * The filter string is matched against each suite name (exact comparison) to select a single suite;
 * if no suite matches, it is matched against the test names (exact comparison) to select a single test.
*
 * \param testSuites Suites containing the test cases to run.
* \param userRunSeed Custom run seed provided by user, or NULL to autogenerate one.
* \param userExecKey Custom execution key provided by user, or 0 to autogenerate one.
* \param filter Filter specification. NULL disables. Case sensitive.
* \param testIterations Number of iterations to run each test case.
*
 * \returns Test run result: 0 when all tests passed, 1 if any tests failed, 2 if the harness
 * could not run (seed generation failed or the filter matched nothing).
*/
int SDLTest_RunSuites(SDLTest_TestSuiteReference *testSuites[], const char *userRunSeed, Uint64 userExecKey, const char *filter, int testIterations)
{
    int suiteCounter;
    int testCounter;
    int iterationCounter;
    SDLTest_TestSuiteReference *testSuite;
    SDLTest_TestCaseReference *testCase;
    const char *runSeed = NULL;
    char *currentSuiteName;
    char *currentTestName;
    Uint64 execKey;
    float runStartSeconds;
    float suiteStartSeconds;
    float testStartSeconds;
    float runEndSeconds;
    float suiteEndSeconds;
    float testEndSeconds;
    float runtime;
    int suiteFilter = 0;
    char *suiteFilterName = NULL;
    int testFilter = 0;
    char *testFilterName = NULL;
    int testResult = 0;
    int runResult = 0;
    Uint32 totalTestFailedCount = 0;
    Uint32 totalTestPassedCount = 0;
    Uint32 totalTestSkippedCount = 0;
    Uint32 testFailedCount = 0;
    Uint32 testPassedCount = 0;
    Uint32 testSkippedCount = 0;
    Uint32 countSum = 0;
    char *logFormat = (char *)SDLTest_LogSummaryFormat;

    /* Sanitize test iterations */
    if (testIterations < 1) {
        testIterations = 1;
    }

    /* Generate a run seed if we don't have one already */
    if (userRunSeed == NULL || userRunSeed[0] == '\0') {
        runSeed = SDLTest_GenerateRunSeed(16);
        if (runSeed == NULL) {
            SDLTest_LogError("Generating a random seed failed");
            return 2;
        }
    } else {
        runSeed = userRunSeed;
    }


    /* Reset per-run counters */
    totalTestFailedCount = 0;
    totalTestPassedCount = 0;
    totalTestSkippedCount = 0;

    /* Take time - run start */
    runStartSeconds = GetClock();

    /* Log run with fuzzer parameters */
    SDLTest_Log("::::: Test Run /w seed '%s' started\n", runSeed);

    /* Initialize filtering */
    if (filter != NULL && filter[0] != '\0') {
        /* Loop over all suites to check if we have a filter match */
        suiteCounter = 0;
        while (testSuites[suiteCounter] && suiteFilter == 0) {
            testSuite=(SDLTest_TestSuiteReference *)testSuites[suiteCounter];
            suiteCounter++;
            if (testSuite->name != NULL && SDL_strcmp(filter, testSuite->name) == 0) {
                /* Matched a suite name */
                suiteFilter = 1;
                suiteFilterName = testSuite->name;
                SDLTest_Log("Filtering: running only suite '%s'", suiteFilterName);
                break;
            }

            /* Within each suite, loop over all test cases to check if we have a filter match */
            testCounter = 0;
            while (testSuite->testCases[testCounter] && testFilter == 0)
            {
                testCase=(SDLTest_TestCaseReference *)testSuite->testCases[testCounter];
                testCounter++;
                if (testCase->name != NULL && SDL_strcmp(filter, testCase->name) == 0) {
                    /* Matched a test name */
                    suiteFilter = 1;
                    suiteFilterName = testSuite->name;
                    testFilter = 1;
                    testFilterName = testCase->name;
                    SDLTest_Log("Filtering: running only test '%s' in suite '%s'", testFilterName, suiteFilterName);
                    break;
                }
            }
        }

        if (suiteFilter == 0 && testFilter == 0) {
            SDLTest_LogError("Filter '%s' did not match any test suite/case.", filter);
            SDLTest_Log("Exit code: 2");
            return 2;
        }
    }

    /* Loop over all suites */
    suiteCounter = 0;
    while(testSuites[suiteCounter]) {
        testSuite=(SDLTest_TestSuiteReference *)testSuites[suiteCounter];
        currentSuiteName = (char *)((testSuite->name) ? testSuite->name : SDLTest_InvalidNameFormat);
        suiteCounter++;

        /* Filter suite if flag set and we have a name */
        if (suiteFilter == 1 && suiteFilterName != NULL && testSuite->name != NULL &&
            SDL_strcmp(suiteFilterName, testSuite->name) != 0) {
                /* Skip suite */
                SDLTest_Log("===== Test Suite %i: '%s' skipped\n",
                    suiteCounter,
                    currentSuiteName);
        } else {

            /* Reset per-suite counters */
            testFailedCount = 0;
            testPassedCount = 0;
            testSkippedCount = 0;

            /* Take time - suite start */
            suiteStartSeconds = GetClock();

            /* Log suite started */
            SDLTest_Log("===== Test Suite %i: '%s' started\n",
                suiteCounter,
                currentSuiteName);

            /* Loop over all test cases */
            testCounter = 0;
            while(testSuite->testCases[testCounter])
            {
                testCase=(SDLTest_TestCaseReference *)testSuite->testCases[testCounter];
                currentTestName = (char *)((testCase->name) ? testCase->name : SDLTest_InvalidNameFormat);
                testCounter++;

                /* Filter tests if flag set and we have a name */
                if (testFilter == 1 && testFilterName != NULL && testCase->name != NULL &&
                    SDL_strcmp(testFilterName, testCase->name) != 0) {
                        /* Skip test */
                        SDLTest_Log("===== Test Case %i.%i: '%s' skipped\n",
                            suiteCounter,
                            testCounter,
                            currentTestName);
                } else {
                    /* Override 'disabled' flag if we specified a test filter (i.e. force run for debugging) */
                    if (testFilter == 1 && !testCase->enabled) {
                        SDLTest_Log("Force run of disabled test since test filter was set");
                        testCase->enabled = 1;
                    }

                    /* Take time - test start */
                    testStartSeconds = GetClock();

                    /* Log test started */
                    SDLTest_Log("----- Test Case %i.%i: '%s' started",
                        suiteCounter,
                        testCounter,
                        currentTestName);
                    if (testCase->description != NULL && testCase->description[0] != '\0') {
                        SDLTest_Log("Test Description: '%s'",
                            (testCase->description) ? testCase->description : SDLTest_InvalidNameFormat);
                    }

                    /* Loop over all iterations */
                    iterationCounter = 0;
                    while(iterationCounter < testIterations)
                    {
                        iterationCounter++;

                        if (userExecKey != 0) {
                            execKey = userExecKey;
                        } else {
                            execKey = SDLTest_GenerateExecKey((char *)runSeed, testSuite->name, testCase->name, iterationCounter);
                        }

                        SDLTest_Log("Test Iteration %i: execKey %llu", iterationCounter, execKey);
                        testResult = SDLTest_RunTest(testSuite, testCase, execKey);

                        if (testResult == TEST_RESULT_PASSED) {
                            testPassedCount++;
                            totalTestPassedCount++;
                        } else if (testResult == TEST_RESULT_SKIPPED) {
                            testSkippedCount++;
                            totalTestSkippedCount++;
                        } else {
                            testFailedCount++;
                            totalTestFailedCount++;
                        }
                    }

                    /* Take time - test end */
                    testEndSeconds = GetClock();
                    runtime = testEndSeconds - testStartSeconds;
                    if (runtime < 0.0f) runtime = 0.0f;

                    if (testIterations > 1) {
                        /* Log test runtime */
                        SDLTest_Log("Runtime of %i iterations: %.1f sec", testIterations, runtime);
                        SDLTest_Log("Average Test runtime: %.5f sec", runtime / (float)testIterations);
                    } else {
                        /* Log test runtime */
                        SDLTest_Log("Total Test runtime: %.1f sec", runtime);
                    }

                    /* Log final test result */
                    switch (testResult) {
                    case TEST_RESULT_PASSED:
                        SDLTest_Log((char *)SDLTest_FinalResultFormat, "Test", currentTestName, "Passed");
                        break;
                    case TEST_RESULT_FAILED:
                        SDLTest_LogError((char *)SDLTest_FinalResultFormat, "Test", currentTestName, "Failed");
                        break;
                    case TEST_RESULT_NO_ASSERT:
                        SDLTest_LogError((char *)SDLTest_FinalResultFormat, "Test", currentTestName, "No Asserts");
                        break;
                    }

                }
            }

            /* Take time - suite end */
            suiteEndSeconds = GetClock();
            runtime = suiteEndSeconds - suiteStartSeconds;
            if (runtime < 0.0f) runtime = 0.0f;

            /* Log suite runtime */
            SDLTest_Log("Total Suite runtime: %.1f sec", runtime);

            /* Log summary and final Suite result */
            countSum = testPassedCount + testFailedCount + testSkippedCount;
            if (testFailedCount == 0)
            {
                SDLTest_Log(logFormat, "Suite", countSum, testPassedCount, testFailedCount, testSkippedCount);
                SDLTest_Log((char *)SDLTest_FinalResultFormat, "Suite", currentSuiteName, "Passed");
            }
            else
            {
                SDLTest_LogError(logFormat, "Suite", countSum, testPassedCount, testFailedCount, testSkippedCount);
                SDLTest_LogError((char *)SDLTest_FinalResultFormat, "Suite", currentSuiteName, "Failed");
            }

        }
    }

    /* Take time - run end */
    runEndSeconds = GetClock();
    runtime = runEndSeconds - runStartSeconds;
    if (runtime < 0.0f) runtime = 0.0f;

    /* Log total runtime */
    SDLTest_Log("Total Run runtime: %.1f sec", runtime);

    /* Log summary and final run result */
    countSum = totalTestPassedCount + totalTestFailedCount + totalTestSkippedCount;
    if (totalTestFailedCount == 0)
    {
        runResult = 0;
        SDLTest_Log(logFormat, "Run", countSum, totalTestPassedCount, totalTestFailedCount, totalTestSkippedCount);
        SDLTest_Log((char *)SDLTest_FinalResultFormat, "Run /w seed", runSeed, "Passed");
    }
    else
    {
        runResult = 1;
        SDLTest_LogError(logFormat, "Run", countSum, totalTestPassedCount, totalTestFailedCount, totalTestSkippedCount);
        SDLTest_LogError((char *)SDLTest_FinalResultFormat, "Run /w seed", runSeed, "Failed");
    }

    SDLTest_Log("Exit code: %d", runResult);
    return runResult;
}
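
A minimal harness sketch showing one way SDLTest_RunSuites might be driven. The suite table, test case, and callback below are illustrative placeholders, not part of SDL, and the initializers assume the SDL2 SDL_test_harness.h struct layouts:

#include "SDL_test.h"

/* Hypothetical test case callback (placeholder name). It records one
   passing assert and returns TEST_COMPLETED; the harness combines that
   with the assert counts to report TEST_RESULT_PASSED. */
static int dummy_test(void *arg)
{
    (void)arg;
    SDLTest_AssertPass("dummy check executed");
    return TEST_COMPLETED;
}

/* Placeholder test case and suite tables; both lists are NULL-terminated. */
static const SDLTest_TestCaseReference dummyTestCase = {
    dummy_test, "dummy_test", "Always passes", TEST_ENABLED
};
static const SDLTest_TestCaseReference *dummyTestCases[] = { &dummyTestCase, NULL };
static SDLTest_TestSuiteReference dummySuite = { "Dummy", NULL, dummyTestCases, NULL };
static SDLTest_TestSuiteReference *suites[] = { &dummySuite, NULL };

int main(int argc, char *argv[])
{
    /* NULL seed and 0 exec key ask the harness to autogenerate both;
       NULL filter runs every suite, one iteration per test case. */
    return SDLTest_RunSuites(suites, NULL, 0, NULL, 1);
}
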
Example #2

A later revision of the same function: it counts the tests up front, records references to failed test cases, and prints harness repro parameters at the end of the run; it also passes a forceTestRun flag to SDLTest_RunTest instead of mutating testCase->enabled.

/**
* \brief Execute a test suite using the given run seed and execution key.
*
* The filter string is matched against each suite name (exact comparison) to select a single suite;
* if no suite matches, it is matched against the test names (exact comparison) to select a single test.
*
* \param testSuites Suites containing the test cases to run.
* \param userRunSeed Custom run seed provided by user, or NULL to autogenerate one.
* \param userExecKey Custom execution key provided by user, or 0 to autogenerate one.
* \param filter Filter specification. NULL disables. Case sensitive.
* \param testIterations Number of iterations to run each test case.
*
* \returns Test run result: 0 when all tests passed, 1 if any tests failed, 2 if the harness
* could not run (seed generation failed or the filter matched nothing), -1 if the failed-test
* cache could not be allocated.
*/
int SDLTest_RunSuites(SDLTest_TestSuiteReference *testSuites[], const char *userRunSeed, Uint64 userExecKey, const char *filter, int testIterations)
{
    int totalNumberOfTests = 0;
    int failedNumberOfTests = 0;
    int suiteCounter;
    int testCounter;
    int iterationCounter;
    SDLTest_TestSuiteReference *testSuite;
    const SDLTest_TestCaseReference *testCase;
    const char *runSeed = NULL;
    char *currentSuiteName;
    char *currentTestName;
    Uint64 execKey;
    float runStartSeconds;
    float suiteStartSeconds;
    float testStartSeconds;
    float runEndSeconds;
    float suiteEndSeconds;
    float testEndSeconds;
    float runtime;
    int suiteFilter = 0;
    char *suiteFilterName = NULL;
    int testFilter = 0;
    char *testFilterName = NULL;
    SDL_bool forceTestRun = SDL_FALSE;
    int testResult = 0;
    int runResult = 0;
    Uint32 totalTestFailedCount = 0;
    Uint32 totalTestPassedCount = 0;
    Uint32 totalTestSkippedCount = 0;
    Uint32 testFailedCount = 0;
    Uint32 testPassedCount = 0;
    Uint32 testSkippedCount = 0;
    Uint32 countSum = 0;
    const SDLTest_TestCaseReference **failedTests;

    /* Sanitize test iterations */
    if (testIterations < 1) {
        testIterations = 1;
    }

    /* Generate a run seed if we don't have one already */
    if (userRunSeed == NULL || userRunSeed[0] == '\0') {
        runSeed = SDLTest_GenerateRunSeed(16);
        if (runSeed == NULL) {
            SDLTest_LogError("Generating a random seed failed");
            return 2;
        }
    } else {
        runSeed = userRunSeed;
    }


    /* Reset per-run counters */
    totalTestFailedCount = 0;
    totalTestPassedCount = 0;
    totalTestSkippedCount = 0;

    /* Take time - run start */
    runStartSeconds = GetClock();

    /* Log run with fuzzer parameters */
    SDLTest_Log("::::: Test Run /w seed '%s' started\n", runSeed);

    /* Count the total number of tests */
    suiteCounter = 0;
    while (testSuites[suiteCounter]) {
        testSuite=(SDLTest_TestSuiteReference *)testSuites[suiteCounter];
        suiteCounter++;
        testCounter = 0;
        while (testSuite->testCases[testCounter])
        {
            testCounter++;
            totalNumberOfTests++;
        }
    }

    /* Pre-allocate an array for tracking failed tests (potentially all test cases) */
    failedTests = (const SDLTest_TestCaseReference **)SDL_malloc(totalNumberOfTests * sizeof(SDLTest_TestCaseReference *));
    if (failedTests == NULL) {
        SDLTest_LogError("Unable to allocate cache for failed tests");
        SDL_Error(SDL_ENOMEM);
        return -1;
    }

    /* Initialize filtering */
    if (filter != NULL && filter[0] != '\0') {
        /* Loop over all suites to check if we have a filter match */
        suiteCounter = 0;
        while (testSuites[suiteCounter] && suiteFilter == 0) {
            testSuite=(SDLTest_TestSuiteReference *)testSuites[suiteCounter];
            suiteCounter++;
            if (testSuite->name != NULL && SDL_strcmp(filter, testSuite->name) == 0) {
                /* Matched a suite name */
                suiteFilter = 1;
                suiteFilterName = testSuite->name;
                SDLTest_Log("Filtering: running only suite '%s'", suiteFilterName);
                break;
            }

            /* Within each suite, loop over all test cases to check if we have a filter match */
            testCounter = 0;
            while (testSuite->testCases[testCounter] && testFilter == 0)
            {
                testCase = testSuite->testCases[testCounter];
                testCounter++;
                if (testCase->name != NULL && SDL_strcmp(filter, testCase->name) == 0) {
                    /* Matched a test name */
                    suiteFilter = 1;
                    suiteFilterName = testSuite->name;
                    testFilter = 1;
                    testFilterName = testCase->name;
                    SDLTest_Log("Filtering: running only test '%s' in suite '%s'", testFilterName, suiteFilterName);
                    break;
                }
            }
        }

        if (suiteFilter == 0 && testFilter == 0) {
            SDLTest_LogError("Filter '%s' did not match any test suite/case.", filter);
            SDLTest_Log("Exit code: 2");
            SDL_free((void *) failedTests);
            return 2;
        }
    }

    /* Loop over all suites */
    suiteCounter = 0;
    while(testSuites[suiteCounter]) {
        testSuite=(SDLTest_TestSuiteReference *)testSuites[suiteCounter];
        currentSuiteName = (char *)((testSuite->name) ? testSuite->name : SDLTEST_INVALID_NAME_FORMAT);
        suiteCounter++;

        /* Filter suite if flag set and we have a name */
        if (suiteFilter == 1 && suiteFilterName != NULL && testSuite->name != NULL &&
            SDL_strcmp(suiteFilterName, testSuite->name) != 0) {
                /* Skip suite */
                SDLTest_Log("===== Test Suite %i: '%s' skipped\n",
                    suiteCounter,
                    currentSuiteName);
        } else {

            /* Reset per-suite counters */
            testFailedCount = 0;
            testPassedCount = 0;
            testSkippedCount = 0;

            /* Take time - suite start */
            suiteStartSeconds = GetClock();

            /* Log suite started */
            SDLTest_Log("===== Test Suite %i: '%s' started\n",
                suiteCounter,
                currentSuiteName);

            /* Loop over all test cases */
            testCounter = 0;
            while(testSuite->testCases[testCounter])
            {
                testCase = testSuite->testCases[testCounter];
                currentTestName = (char *)((testCase->name) ? testCase->name : SDLTEST_INVALID_NAME_FORMAT);
                testCounter++;

                /* Filter tests if flag set and we have a name */
                if (testFilter == 1 && testFilterName != NULL && testCase->name != NULL &&
                    SDL_strcmp(testFilterName, testCase->name) != 0) {
                        /* Skip test */
                        SDLTest_Log("===== Test Case %i.%i: '%s' skipped\n",
                            suiteCounter,
                            testCounter,
                            currentTestName);
                } else {
                    /* Override 'disabled' flag if we specified a test filter (i.e. force run for debugging) */
                    if (testFilter == 1 && !testCase->enabled) {
                        SDLTest_Log("Force run of disabled test since test filter was set");
                        forceTestRun = SDL_TRUE;
                    }

                    /* Take time - test start */
                    testStartSeconds = GetClock();

                    /* Log test started */
                    SDLTest_Log("----- Test Case %i.%i: '%s' started",
                        suiteCounter,
                        testCounter,
                        currentTestName);
                    if (testCase->description != NULL && testCase->description[0] != '\0') {
                        SDLTest_Log("Test Description: '%s'",
                            (testCase->description) ? testCase->description : SDLTEST_INVALID_NAME_FORMAT);
                    }

                    /* Loop over all iterations */
                    iterationCounter = 0;
                    while(iterationCounter < testIterations)
                    {
                        iterationCounter++;

                        if (userExecKey != 0) {
                            execKey = userExecKey;
                        } else {
                            execKey = SDLTest_GenerateExecKey(runSeed, testSuite->name, testCase->name, iterationCounter);
                        }

                        SDLTest_Log("Test Iteration %i: execKey %" SDL_PRIu64, iterationCounter, execKey);
                        testResult = SDLTest_RunTest(testSuite, testCase, execKey, forceTestRun);

                        if (testResult == TEST_RESULT_PASSED) {
                            testPassedCount++;
                            totalTestPassedCount++;
                        } else if (testResult == TEST_RESULT_SKIPPED) {
                            testSkippedCount++;
                            totalTestSkippedCount++;
                        } else {
                            testFailedCount++;
                            totalTestFailedCount++;
                        }
                    }

                    /* Take time - test end */
                    testEndSeconds = GetClock();
                    runtime = testEndSeconds - testStartSeconds;
                    if (runtime < 0.0f) runtime = 0.0f;

                    if (testIterations > 1) {
                        /* Log test runtime */
                        SDLTest_Log("Runtime of %i iterations: %.1f sec", testIterations, runtime);
                        SDLTest_Log("Average Test runtime: %.5f sec", runtime / (float)testIterations);
                    } else {
                        /* Log test runtime */
                        SDLTest_Log("Total Test runtime: %.1f sec", runtime);
                    }

                    /* Log final test result */
                    switch (testResult) {
                    case TEST_RESULT_PASSED:
                        SDLTest_Log(SDLTEST_FINAL_RESULT_FORMAT, "Test", currentTestName, "Passed");
                        break;
                    case TEST_RESULT_FAILED:
                        SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Test", currentTestName, "Failed");
                        break;
                    case TEST_RESULT_NO_ASSERT:
                        SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Test", currentTestName, "No Asserts");
                        break;
                    }

                    /* Collect failed test case references for repro-step display */
                    if (testResult == TEST_RESULT_FAILED) {
                        failedTests[failedNumberOfTests] = testCase;
                        failedNumberOfTests++;
                    }
                }
            }

            /* Take time - suite end */
            suiteEndSeconds = GetClock();
            runtime = suiteEndSeconds - suiteStartSeconds;
            if (runtime < 0.0f) runtime = 0.0f;

            /* Log suite runtime */
            SDLTest_Log("Total Suite runtime: %.1f sec", runtime);

            /* Log summary and final Suite result */
            countSum = testPassedCount + testFailedCount + testSkippedCount;
            if (testFailedCount == 0)
            {
                SDLTest_Log(SDLTEST_LOG_SUMMARY_FORMAT, "Suite", countSum, testPassedCount, testFailedCount, testSkippedCount);
                SDLTest_Log(SDLTEST_FINAL_RESULT_FORMAT, "Suite", currentSuiteName, "Passed");
            }
            else
            {
                SDLTest_LogError(SDLTEST_LOG_SUMMARY_FORMAT, "Suite", countSum, testPassedCount, testFailedCount, testSkippedCount);
                SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Suite", currentSuiteName, "Failed");
            }

        }
    }

    /* Take time - run end */
    runEndSeconds = GetClock();
    runtime = runEndSeconds - runStartSeconds;
    if (runtime < 0.0f) runtime = 0.0f;

    /* Log total runtime */
    SDLTest_Log("Total Run runtime: %.1f sec", runtime);

    /* Log summary and final run result */
    countSum = totalTestPassedCount + totalTestFailedCount + totalTestSkippedCount;
    if (totalTestFailedCount == 0)
    {
        runResult = 0;
        SDLTest_Log(SDLTEST_LOG_SUMMARY_FORMAT, "Run", countSum, totalTestPassedCount, totalTestFailedCount, totalTestSkippedCount);
        SDLTest_Log(SDLTEST_FINAL_RESULT_FORMAT, "Run /w seed", runSeed, "Passed");
    }
    else
    {
        runResult = 1;
        SDLTest_LogError(SDLTEST_LOG_SUMMARY_FORMAT, "Run", countSum, totalTestPassedCount, totalTestFailedCount, totalTestSkippedCount);
        SDLTest_LogError(SDLTEST_FINAL_RESULT_FORMAT, "Run /w seed", runSeed, "Failed");
    }

    /* Print repro steps for failed tests */
    if (failedNumberOfTests > 0) {
        SDLTest_Log("Harness input to repro failures:");
        for (testCounter = 0; testCounter < failedNumberOfTests; testCounter++) {
          SDLTest_Log(" --seed %s --filter %s", runSeed, failedTests[testCounter]->name);
        }
    }
    SDL_free((void *) failedTests);

    SDLTest_Log("Exit code: %d", runResult);
    return runResult;
}
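
The repro lines logged above take the form " --seed %s --filter %s", so a caller can feed those values straight back into the harness. A hypothetical helper along these lines (rerun_failed_test is not an SDL function, just a sketch):

/* Hypothetical helper, not SDL API: re-run one failed test from the
   seed and test name recovered from a logged repro line. With the same
   run seed, suite name, test name and iteration, SDLTest_GenerateExecKey
   should derive the same exec key, so the rerun sees the same fuzzer
   inputs as the failing run. */
static int rerun_failed_test(SDLTest_TestSuiteReference *suites[],
                             const char *seed, const char *testName)
{
    /* 0 exec key lets the harness re-derive it; one iteration suffices. */
    return SDLTest_RunSuites(suites, seed, 0, testName, 1);
}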