Example No. 1
int UnitTest::RunAllInGroup( const char* group )
{
  printf( "GROUP %s\n", group );
  int error_count = 0;
  const char dashes[] = "------------------------------";
  
  // Because of the (naughty) reliance on static constructors to register tests, the order in the list is out of
  // my control. A little extra code makes them run in alphabetical order.
  
  UnitTest* test = FindNext( group, NULL );
  while( test )
  {
    size_t offset = strlen( test->m_Name );
    if( offset >= sizeof( dashes ) )
    {
      offset = sizeof( dashes ) - 1;
    }
    printf( "  %s %s ", test->m_Name, dashes + offset );
    test->m_ErrorCount = 0;
    test->Test();
    if( test->m_ErrorCount == 0 )
    {
      printf( "pass\n" );
    }
    error_count += test->m_ErrorCount;
    test->m_Done = true;
    test = FindNext( group, test );
  }
  return error_count;
}
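For context, RunAllInGroup leans on FindNext to return the alphabetically first test in the group that has not run yet. A minimal, self-contained sketch of that idea follows; the list head s_first, the link m_NextInList and the m_Group member are assumptions for illustration, not the original class.

#include <cstring>

struct UnitTest
{
    const char* m_Group;
    const char* m_Name;
    bool        m_Done;
    UnitTest*   m_NextInList;        // registration order, set by static constructors
    static UnitTest* s_first;        // head of the registration list

    // Return the not-yet-run test in 'group' whose name sorts lowest.
    // The previous test is not needed here because m_Done already excludes it.
    static UnitTest* FindNext( const char* group, UnitTest* /*prev*/ )
    {
        UnitTest* best = NULL;
        for( UnitTest* t = s_first; t; t = t->m_NextInList )
        {
            if( t->m_Done || std::strcmp( t->m_Group, group ) != 0 )
            {
                continue;
            }
            if( !best || std::strcmp( t->m_Name, best->m_Name ) < 0 )
            {
                best = t;
            }
        }
        return best;
    }
};

UnitTest* UnitTest::s_first = NULL;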
Example No. 2
int main (int argc, char** argv)
{
  UnitTest t (16);

  // Ensure environment has no influence.
  unsetenv ("TASKDATA");
  unsetenv ("TASKRC");

  t.is (mk_wcwidth ('a'),    1, "U+0061 --> 1");

  t.is (mk_wcwidth (0x5149), 2, "U+5149 --> 2");
  t.is (mk_wcwidth (0x9a8c), 2, "U+9a8c --> 2");
  t.is (mk_wcwidth (0x4e70), 2, "U+4e70 --> 2");
  t.is (mk_wcwidth (0x94b1), 2, "U+94b1 --> 2");
  t.is (mk_wcwidth (0x5305), 2, "U+5305 --> 2");
  t.is (mk_wcwidth (0x91cd), 2, "U+91cd --> 2");
  t.is (mk_wcwidth (0x65b0), 2, "U+65b0 --> 2");
  t.is (mk_wcwidth (0x8bbe), 2, "U+8bbe --> 2");
  t.is (mk_wcwidth (0x8ba1), 2, "U+8ba1 --> 2");
  t.is (mk_wcwidth (0x5411), 2, "U+5411 --> 2");
  t.is (mk_wcwidth (0x4e0a), 2, "U+4e0a --> 2");
  t.is (mk_wcwidth (0x4e0b), 2, "U+4e0b --> 2");
  t.is (mk_wcwidth (0x7bad), 2, "U+7bad --> 2");
  t.is (mk_wcwidth (0x5934), 2, "U+5934 --> 2");
  t.is (mk_wcwidth (0xff0c), 2, "U+ff0c --> 2"); // comma

  return 0;
}
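mk_wcwidth reports how many terminal columns a code point occupies. A heavily simplified sketch of the idea behind the expectations above (the real function is table-driven, handles zero-width combining marks, and covers far more ranges than the two illustrative ones used here):

int sketch_wcwidth (wchar_t ucs)
{
  if (ucs == 0)
    return 0;                                  // NUL takes no columns

  // Illustrative double-width ranges only (a tiny subset of the real tables).
  if ((ucs >= 0x4e00 && ucs <= 0x9fff) ||      // CJK Unified Ideographs
      (ucs >= 0xff00 && ucs <= 0xff60))        // Fullwidth Forms (e.g. U+FF0C)
    return 2;

  return 1;                                    // everything else printable: one column
}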
Example No. 3
void test_tialloc_size10(UnitTest &utf) {
    vector<void *> all_allocations;
    vector<int   > all_allocations_size;

    for(size_t n=0; n<10000; n++) {
        int allocation_size = 10;

        unsigned char *allocation = (unsigned char *) tialloc::instance()->alloc(allocation_size);

        // fill allocated memory
        for(size_t i=0; i<allocation_size; i++) {
            allocation[i] = (i*n)%256;
        }

        all_allocations.push_back(allocation);
        all_allocations_size.push_back(allocation_size);
    }

    // check allocations
    for(size_t n=0; n<all_allocations.size(); n++) {
        for(size_t i=0; i<all_allocations_size[n]; i++) {
            unsigned char v = ((unsigned char *) all_allocations[n])[i];
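            // 121 presumably marks tialloc's small-block threshold: blocks at or
            // below it should report is_tiallocated() == true (an assumption based
            // on this check; allocation_size is always 10 here, so it always holds).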
            if(all_allocations_size[n] <= 121) {
                utf.test_equality(true,tialloc::instance()->is_tiallocated(all_allocations[n]));
            }
            utf.test_equality((int)v,(int) (i*n)%256);
        }
        tialloc::instance()->free(all_allocations[n]);
    }
}
Example No. 4
XP_EXPORT int XP_RunUnitTests(PRFileDesc *fd)
{
    int passed = 0;
    int failed = 0;

    PR_fprintf(fd, "Running unit tests\n");

    UnitTest *test = tests;
    while (test) {
        PR_fprintf(fd, "Running %s from %s:%d\n", test->name, test->filename, test->line);
        XPStatus rv = test->fn(fd);
        if (rv == XP_SUCCESS) {
            PR_fprintf(fd, "PASS: %s passed\n", test->name);
            passed++;
        } else {
            PR_fprintf(fd, "FAILURE: %s failed\n", test->name);
            failed++;
        }
        test = test->next;
    }

    PR_fprintf(fd, "%d test(s) passed\n", passed);
    PR_fprintf(fd, "%d test(s) failed\n", failed);

    int total = passed + failed;
    PR_fprintf(fd, "%.0f%% pass rate\n", total ? passed * 100.0 / total : 100.0);

    if (failed) {
        PR_fprintf(fd, "FAILURE: Some tests failed\n");
    } else {
        PR_fprintf(fd, "SUCCESS: All tests passed\n");
    }

    return failed;
}
Example No. 5
void TestSuite::RunUnitTests(ITestCallback* callback)
{
	callback->BeginTestSuite(this);

	UnitTests::iterator it = mUnitTests.begin();
	UnitTests::iterator end = mUnitTests.end();
	for(;it != end; ++it) {
		UnitTest* test = *it;
		callback->BeginUnitTest(this, test);
		try {
			test->Run(this);
		} catch(const TestException& e) {
			callback->TestFailure(this, test, e.File.c_str(), e.Message.c_str(), e.Line);
		}/* catch(glmock::IValidationException e) {
			for(unsigned int i = 0; i < e.Count; ++i) {
				char tmp[512];
				sprintf(tmp, "'%s' when calling '%s'", e.Errors[i].Error, e.Errors[i].Command);
				callback->TestFailure(this, test, test->File.c_str(), tmp, test->Line);
			}
		}*/
		callback->EndUnitTest(this, test);
	}

	callback->EndTestSuite(this);
}
Example No. 6
// Called before each iteration of tests starts.
void TersePrinter::OnTestIterationStart(const UnitTest& unit_test, int /*iteration*/)
{
	ColoredPrintf(COLOR_GREEN, "%s", "Using TersePrinter.\n");
	ColoredPrintf(COLOR_GREEN, "%s", "[==========] ");
	printf("Running %s from %d test cases.\n",
	       FormatTestCount(unit_test.test_to_run_count()).c_str(),
	       unit_test.test_case_to_run_count());
	fflush(stdout);
}
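TersePrinter overrides Google Test's event-listener hooks. For context, a listener like this is normally installed by swapping out the default result printer; a minimal sketch, assuming TersePrinter derives from ::testing::EmptyTestEventListener and is declared in a hypothetical TersePrinter.h:

#include <gtest/gtest.h>
#include "TersePrinter.h"   // hypothetical header declaring TersePrinter

int main(int argc, char** argv)
{
	::testing::InitGoogleTest(&argc, argv);

	// Replace the default printer with the terse one; gtest takes ownership
	// of the appended listener.
	::testing::TestEventListeners& listeners =
	    ::testing::UnitTest::GetInstance()->listeners();
	delete listeners.Release(listeners.default_result_printer());
	listeners.Append(new TersePrinter);

	return RUN_ALL_TESTS();
}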
Example No. 7
	std::vector<mxArray*> TestCommand::handle()
	{
		UnitTest ut;
		bool b = ut.test(t.val, u.val, f.val, deltaF.val, g.val, deltaG.val);

		std::vector<mxArray*> r(1);
		r[0] = mxCreateLogicalScalar(b);
		return r;
	}
Example No. 8
int test_tialloc(UnitTest &utf) {

    utf.begin_test_set("tialloc tests");

    test_tialloc_size10(utf);
    test_tialloc_size5to10(utf);
    test_tialloc_random(utf);

    utf.end_test_set();
    // The function is declared int, so give it a defined return value
    // (presumably unused by callers).
    return 0;
}
Example No. 9
void AllTests()
{
	UnitTest tests;
	ArrayTests(tests);
	DictionaryTests(tests);
	ListTests(tests);
	StringTests(tests);
	WriteLine(tests.Execute());
	ReadLine();
}
Example No. 10
int _tmain(int argc, _TCHAR* argv[])
{
	UnitTest unittest;
	unittest.add(new BasicTestA());
	unittest.add(new BasicTestB());
	unittest.add(new BasicTestC());
	unittest.add(new BasicTestD());
	unittest.add(new BasicTestE());
	unittest.add(new BasicTestF());
	unittest.add(new BasicTestG());
	unittest.add(new BasicTestH());
	unittest.add(new BasicTestI());
	std::cout << unittest.execute() << std::endl;
	std::cout << unittest.percentageOKString() << std::endl;
	std::cout << "Test sucess: " << unittest.percentageOK() << "%" << std::endl;
	std::cout << "Press any key to continue...";
	std::cin.get();
	/*
	std::cout << ASSERT_IS_EQUAL(0,0) << std::endl;
	std::cout << ASSERT_IS_EQUAL(0,'h') << std::endl;
	std::cout << ASSERT_IS_EQUAL(0,1) << std::endl;
	std::cout << ASSERT_IS_EQUAL("hola","hola") << std::endl;
	std::cout << std::endl;
	std::cout << ASSERT_IS_NOT_EQUAL(0,0) << std::endl;
	std::cout << ASSERT_IS_NOT_EQUAL(0,'h') << std::endl;
	std::cout << ASSERT_IS_NOT_EQUAL(0,1) << std::endl;
	std::cout << "Press any key to continue...";
	std::cin.get();
	*/
	return 0;
}
Example No. 11
int main (int argc, char** argv)
{
  UnitTest t (8);

  std::vector <std::string> options;
  options.push_back ("abc");
  options.push_back ("abcd");
  options.push_back ("abcde");
  options.push_back ("bcdef");
  options.push_back ("cdefg");

  std::vector <std::string> matches;
  int result = autoComplete ("", options, matches);
  t.is (result, 0, "no match on empty string");

  result = autoComplete ("x", options, matches);
  t.is (result, 0, "no match on wrong string");

  result = autoComplete ("abcd", options, matches);
  t.is (result, 1, "exact match on 'abcd'");
  t.is (matches[0], "abcd", "exact match on 'abcd'");

  result = autoComplete ("ab", options, matches);
  t.is (result, 3, "partial match on 'ab'");
  t.is (matches[0], "abc", "partial match on 'abc'");
  t.is (matches[1], "abcd", "partial match on 'abcd'");
  t.is (matches[2], "abcde", "partial match on 'abcde'");

  return 0;
}
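For reference, here is a hedged sketch of an autoComplete consistent with every expectation above (not necessarily the project's implementation): an exact match wins outright, otherwise every option beginning with the partial string is collected.

#include <string>
#include <vector>

int autoComplete (
  const std::string& partial,
  const std::vector <std::string>& options,
  std::vector <std::string>& matches)
{
  matches.clear ();
  if (partial.empty ())
    return 0;                            // empty input never matches

  for (size_t i = 0; i < options.size (); ++i)
  {
    if (options[i] == partial)
    {
      matches.clear ();                  // an exact match overrides partial matches
      matches.push_back (options[i]);
      return 1;
    }

    if (options[i].compare (0, partial.length (), partial) == 0)
      matches.push_back (options[i]);    // prefix match
  }

  return static_cast <int> (matches.size ());
}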
Example No. 12
int main (int argc, char** argv)
{
  UnitTest t (12);

  // std::string formatBytes (size_t);
  t.is (formatBytes (0), "0 B", "0 -> 0 B");

  t.is (formatBytes (994),  "994 B", "994 -> 994 B");
  t.is (formatBytes (995),  "1.0 KiB", "995 -> 1.0 KiB");
  t.is (formatBytes (999),  "1.0 KiB", "999 -> 1.0 KiB");
  t.is (formatBytes (1000), "1.0 KiB", "1000 -> 1.0 KiB");
  t.is (formatBytes (1001), "1.0 KiB", "1001 -> 1.0 KiB");

  t.is (formatBytes (999999),  "1.0 MiB", "999999 -> 1.0 MiB");
  t.is (formatBytes (1000000), "1.0 MiB", "1000000 -> 1.0 MiB");
  t.is (formatBytes (1000001), "1.0 MiB", "1000001 -> 1.0 MiB");

  t.is (formatBytes (999999999),  "1.0 GiB", "999999999 -> 1.0 GiB");
  t.is (formatBytes (1000000000), "1.0 GiB", "1000000000 -> 1.0 GiB");
  t.is (formatBytes (1000000001), "1.0 GiB", "1000000001 -> 1.0 GiB");

  // TODO const std::string uuid ();

  // TODO const std::string encode (const std::string& value);
  // TODO const std::string decode (const std::string& value);

  return 0;
}
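Taken together, these expectations pin down the rule: one decimal place, units that switch as soon as the displayed value would round to 1.0 (at 995, 995,000 and 995,000,000 bytes), and decimal thousands paired with binary-prefix labels. A hedged sketch that satisfies all twelve assertions (not necessarily the project's formatBytes):

#include <cstdio>
#include <string>

std::string formatBytesSketch (size_t bytes)
{
  char formatted[24];

  if      (bytes >= 995000000) snprintf (formatted, sizeof formatted, "%.1f GiB", bytes / 1000000000.0);
  else if (bytes >=    995000) snprintf (formatted, sizeof formatted, "%.1f MiB", bytes /    1000000.0);
  else if (bytes >=       995) snprintf (formatted, sizeof formatted, "%.1f KiB", bytes /       1000.0);
  else                         snprintf (formatted, sizeof formatted, "%u B",     (unsigned) bytes);

  return formatted;
}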
Example No. 13
int	main(int ac, char *argv[])
{
  UnitTest	unit;

  try {
    unit.init(ac, argv);
    unit.launch();
  } catch (const std::exception &e) {
    std::cerr << e.what() << std::endl;
  }
}
Example No. 14
void test_tialloc_random(UnitTest &utf) {


    vector<void *> all_allocations;
    vector<int   > all_allocations_size;

    for(size_t n=0; n<1000; n++) {
        int allocation_size = (rand()%500) + 1;

        unsigned char *allocation = (unsigned char *) tialloc::instance()->alloc(allocation_size);

        // fill allocated memory
        for(size_t i=0; i<allocation_size; i++) {
            allocation[i] = i%256;
        }

        all_allocations.push_back(allocation);
        all_allocations_size.push_back(allocation_size);
    }

    // check allocations
    for(size_t n=0; n<all_allocations.size(); n++) {
        for(size_t i=0; i<all_allocations_size[n]; i++) {
            unsigned char v = ((unsigned char *) all_allocations[n])[i];
            if(all_allocations_size[n] <= 121) {
                utf.test_equality(true,tialloc::instance()->is_tiallocated(all_allocations[n]));
            }
            utf.test_equality((int)v,(int) i%256);
        }
    }

    // try reallocs
    for(size_t n=0; n<all_allocations.size(); n++) {
        all_allocations[n] = tialloc::instance()->realloc(all_allocations[n],all_allocations_size[n]+1);
    }

    // check allocations again!
    for(size_t n=0; n<all_allocations.size(); n++) {
        for(size_t i=0; i<all_allocations_size[n]; i++) {
            unsigned char v = ((unsigned char *) all_allocations[n])[i];
            utf.test_equality((int)v,(int) i%256);
        }
        tialloc::instance()->free(all_allocations[n]);
    }

    // alloc size tests
    for(size_t n=1; n<1000; n++) {
        uint32_t *i = (uint32_t *) tialloc::instance()->alloc(n);
        utf.test_equality(tialloc::instance()->alloc_size(i),n);
    }


}
Example No. 15
void UnitTest::DoAll(const char *test_name) {
    UnitTest *test = First;
    Time_struct Time;
    int n = 0;
    while(test) {
        if(!test_name ||
           ToLower(test->Test.Name.c_str()).find(ToLower(test_name)) != string::npos) {
            test->Run();
            n++;
        }
        test = test->Next;
    }
    cout << "Tests done " << n << " test time " << Time << "\n";
}
Example No. 16
void test_adaptivethreshold(UnitTest &ut) {

  ut.begin_test_set("AdaptiveThreshold");
/*
  SwiftImage<uint16> img("./Images/tiny5dot.tif");
  AdaptiveThreshold<uint16> at(1,0.7,AdaptiveThreshold<uint16>::mask_type_circle);
  SwiftImage<uint16> imgat = at.process(img);
  ut.test(imgat(0,0),static_cast<uint16>(0));
  ut.test(imgat(1,0),static_cast<uint16>(0));
  ut.test(imgat(2,0),static_cast<uint16>(0));
  ut.test(imgat(3,0),static_cast<uint16>(0));
  ut.test(imgat(4,0),static_cast<uint16>(0));

  ut.test(imgat(0,1),static_cast<uint16>(0));
  ut.test(imgat(1,1),static_cast<uint16>(65534));
  ut.test(imgat(2,1),static_cast<uint16>(65534));
  ut.test(imgat(3,1),static_cast<uint16>(65534));
  ut.test(imgat(4,1),static_cast<uint16>(0));

  ut.test(imgat(0,2),static_cast<uint16>(0));
  ut.test(imgat(1,2),static_cast<uint16>(65534));
  ut.test(imgat(2,2),static_cast<uint16>(0));
  ut.test(imgat(3,2),static_cast<uint16>(65534));
  ut.test(imgat(4,2),static_cast<uint16>(0));

  ut.test(imgat(0,3),static_cast<uint16>(0));
  ut.test(imgat(1,3),static_cast<uint16>(65534));
  ut.test(imgat(2,3),static_cast<uint16>(65534));
  ut.test(imgat(3,3),static_cast<uint16>(65534));
  ut.test(imgat(4,3),static_cast<uint16>(0));

  ut.test(imgat(0,4),static_cast<uint16>(0));
  ut.test(imgat(1,4),static_cast<uint16>(0));
  ut.test(imgat(2,4),static_cast<uint16>(0));
  ut.test(imgat(3,4),static_cast<uint16>(0));
  ut.test(imgat(4,4),static_cast<uint16>(0));

  SwiftImage<uint16> i2("./Images/run475_lane1tile1/C2.1/s_1_1_a.tif");
  MorphologicalOpening<uint16> mo(4);
  SwiftImage<uint16> i4 = mo.process(i2);
 
  i2 = i2 - i4;
  SwiftImage<uint16> i3 = at.process(i2);
  

  i3.save("./adaptive_morph.tif");
*/
  ut.end_test_set();
}
Example No. 17
void test_runlengthencode(UnitTest &ut) {

  ut.begin_test_set("RunLengthEncode");

  SwiftImage<uint16> img("./Images/tinyline.tif");
  RunLengthEncode<uint16> rle;

  vector<RLERun<> > runs = rle.process(img);
 
  ut.test(static_cast<int>(runs.size())   ,1);
  ut.test(runs[0].pos.x ,2);
  ut.test(runs[0].pos.y ,1);
  ut.test(runs[0].length,9);

  ut.end_test_set();
}
Example No. 18
// Called after each iteration of tests finishes.
void TersePrinter::OnTestIterationEnd(const UnitTest& unit_test, int /*iteration*/)
{
	g_test_to_run = unit_test.test_to_run_count();
	ColoredPrintf(COLOR_GREEN, "%s", "[==========] ");
	printf("%s from %d test cases ran.",
	       FormatTestCount(unit_test.test_to_run_count()).c_str(),
	       unit_test.test_case_to_run_count());
	float time_in_ms = static_cast<float>(unit_test.elapsed_time());
	printf(" (%1.2f seconds total)\n", time_in_ms / 1000);
	// Print total passed.
	ColoredPrintf(COLOR_GREEN, "%s", "[  PASSED  ] ");
	printf("%s.\n", FormatTestCount(unit_test.successful_test_count()).c_str());
	// Print total failed.
	int num_failures = unit_test.failed_test_count();
	if (num_failures)
	{
		ColoredPrintf(COLOR_RED, "%s", "[  FAILED  ] ");
		printf("%s, listed below:\n", FormatTestCount(num_failures).c_str());
		PrintFailedTestsList(unit_test);
		printf("\n%d FAILED %s\n", num_failures,
		       num_failures == 1 ? "TEST" : "TESTS");
	}
	// Print total disabled.
	int num_disabled = unit_test.disabled_test_count();
	if (num_disabled)
	{
		if (!num_failures)
			printf("\n");  // Add a spacer if no FAILURE banner is displayed.
		ColoredPrintf(COLOR_YELLOW, "  YOU HAVE %d DISABLED TESTS\n", num_disabled);
	}
	// Ensure that Google Test output is printed before, e.g., heapchecker output.
	fflush(stdout);
}
Example No. 19
void test_reporting(UnitTest &ut) {

  ut.begin_test_set("Reporting");

  Cluster<double> c1;
  
  c1.add_signal("RAW");
  c1.signal("RAW").push_back(ReadIntensity<double>(1000 ,0,0,0   ));
  c1.signal("RAW").push_back(ReadIntensity<double>(500  ,0,0,2000));
  c1.signal("RAW").push_back(ReadIntensity<double>(250  ,0,0,2000));
  c1.signal("RAW").push_back(ReadIntensity<double>(125  ,0,0,2000));
  c1.signal("RAW").push_back(ReadIntensity<double>(62.5 ,0,0,2000));
  c1.signal("RAW").push_back(ReadIntensity<double>(31.25,0,0,2000));
  
  c1.add_signal("FINAL_CORRECTED");
  c1.signal("FINAL_CORRECTED").push_back(ReadIntensity<double>(1000 ,0,0,0   ));
  c1.signal("FINAL_CORRECTED").push_back(ReadIntensity<double>(500  ,0,0,2000));
  c1.signal("FINAL_CORRECTED").push_back(ReadIntensity<double>(250  ,0,0,2000));
  c1.signal("FINAL_CORRECTED").push_back(ReadIntensity<double>(125  ,0,0,2000));
  c1.signal("FINAL_CORRECTED").push_back(ReadIntensity<double>(62.5 ,0,0,2000));
  c1.signal("FINAL_CORRECTED").push_back(ReadIntensity<double>(31.25,0,0,2000));

  c1.add_sequence("BASECALL_FINAL");
  //TODO: I'm really unclear as to why I need this cast, for some reason gcc can't see base_type from here...
  c1.sequence("BASECALL_FINAL").sequence().push_back(static_cast<int>(ScoredSequence<>::base_a));
  c1.sequence("BASECALL_FINAL").sequence().push_back(static_cast<int>(ScoredSequence<>::base_t));
  c1.sequence("BASECALL_FINAL").sequence().push_back(static_cast<int>(ScoredSequence<>::base_t));
  c1.sequence("BASECALL_FINAL").sequence().push_back(static_cast<int>(ScoredSequence<>::base_t));
  c1.sequence("BASECALL_FINAL").sequence().push_back(static_cast<int>(ScoredSequence<>::base_t));
  c1.sequence("BASECALL_FINAL").sequence().push_back(static_cast<int>(ScoredSequence<>::base_t));
  
  vector<Cluster<double> > clusters;
  clusters.push_back(c1);
  clusters.push_back(c1);
  clusters.push_back(c1);
  clusters.push_back(c1);
  clusters.push_back(c1);
  clusters.push_back(c1);

  Reporting<double> m_reporting(clusters,true,"./phi_plus_SNPs.fa");

  m_reporting.write_report_file("report");

  ut.end_test_set();
}
Example No. 20
void TestSuite::runUnitTests(ITestCallback* callback) {
	callback->beginTestSuite(this);

	UnitTests::iterator it = mUnitTests.begin();
	UnitTests::iterator end = mUnitTests.end();
	for(;it != end; ++it) {
		UnitTest* test = *it;
		callback->beginUnitTest(this, test);
		try {
			test->Run(this);
		} catch(const TestException& e) {
			callback->testFailure(this, test, e.File.c_str(), e.Message.c_str(), e.Line);
		}
		callback->endUnitTest(this, test);
	}

	callback->endTestSuite(this);
}
Example No. 21
// Called before environment tear-down for each iteration of tests starts.
void TersePrinter::OnEnvironmentsTearDownStart(const UnitTest& unit_test)
{
	// need a linefeed if a test case end is not printed
	if ((useTerseOutput && unit_test.failed_test_count() != 0)
	        || !GTEST_FLAG(print_time))
		printf("\n");
	ColoredPrintf(COLOR_GREEN, "%s", "[----------] ");
	printf("Global test environment tear-down.\n");
	fflush(stdout);
}
Example No. 22
int main (int argc, char** argv)
{
#if defined(HAVE_LIBLUA)
  UnitTest t (7);
#else
  UnitTest t (6);
#endif

  try
  {
    // Prime the pump.
    context.a3.capture ("task");

    // TODO dom.get rc.name
    DOM dom;
    t.is (dom.get ("system.version"),     VERSION,     "DOM system.version -> VERSION");
#ifdef HAVE_LIBLUA
    t.is (dom.get ("system.lua.version"), LUA_RELEASE, "DOM system.lua.version -> LUA_RELEASE");
#endif
    t.ok (dom.get ("system.os") != "<unknown>",        "DOM system.os -> != Unknown");
    t.is (dom.get ("context.program"),    "task",      "DOM context.program -> 'task'");
    t.is (dom.get ("context.args"),       "task",      "DOM context.args -> 'task'");
    t.is (dom.get ("context.width"),      "0",         "DOM context.width -> '0'");
    t.is (dom.get ("context.height"),     "0",         "DOM context.height -> '0'");

    // TODO dom.set rc.name
  }

  catch (std::string& error)
  {
    t.diag (error);
    return -1;
  }

  catch (...)
  {
    t.diag ("Unknown error.");
    return -2;
  }

  return 0;
}
Example No. 23
// Internal helper for printing the list of failed tests at end of run.
void TersePrinter::PrintFailedTestsList(const UnitTest& unit_test) const
{
	const int failed_test_count = unit_test.failed_test_count();
	if (failed_test_count == 0)
		return;
	for (int i = 0; i < unit_test.total_test_case_count(); ++i)
	{
		const TestCase& test_case = *unit_test.GetTestCase(i);
		if (!test_case.should_run() || (test_case.failed_test_count() == 0))
			continue;
		for (int j = 0; j < test_case.total_test_count(); ++j)
		{
			const TestInfo& test_info = *test_case.GetTestInfo(j);
			if (!test_info.should_run() || test_info.result()->Passed())
				continue;
			ColoredPrintf(COLOR_RED, "%s", "[  FAILED  ] ");
			printf("%s.%s\n", test_case.name(), test_info.name());
		}
	}
}
Example No. 24
void TestRun::test(TestRegistry* reg)
{
   Con::printf("-- Testing: %s %s",reg->getName(), reg->isInteractive() ? "(interactive)" : "" );

   UnitMargin::Push(_Margin[0]);

   // Run the test.
   UnitTest* test = reg->newTest();
   test->run();

   UnitMargin::Pop();

   // Update stats.
   _failureCount += test->getFailureCount();
   _subCount += test->getTestCount();
   _warningCount += test->getWarningCount();
   _testCount++;

   // Don't forget to delete the test!
   delete test;
}
Example No. 25
		void testBasic()
		{
			Jamoma::Gain			my_gain;
			Jamoma::SampleBundle	in_samples(2, 8);
			
			my_gain.gain = 0.5;
			
			in_samples.fill(1.0);
			auto out_samples = my_gain(in_samples);
			
			my_gain.gain = 0.25;
			in_samples = out_samples;
			out_samples = my_gain(in_samples);
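			// At this point in_samples holds the first pass's output (1.0 * 0.5 = 0.5),
			// while the second pass wrote 0.5 * 0.25 = 0.125 into out_samples; the
			// assertion below reads in_samples back and therefore expects 0.5.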
						
			auto bar = in_samples[0][0];
			mTest->TEST_ASSERT("in_sample casting operator", mTest->compare(bar, 0.5));
			
			in_samples[0][0] = 2.0;
			auto foo = in_samples[0][0];
			mTest->TEST_ASSERT("setting and getting", mTest->compare(foo, 2.0));
		}
Example No. 26
void test_phasingcorrection(UnitTest &ut) {

  ut.begin_test_set("PhasingCorrection");

  Cluster<double> c1;
  c1.add_signal("RAW");
  c1.signal("RAW").push_back(ReadIntensity<double>(1000 ,0,0,0   ));
  c1.signal("RAW").push_back(ReadIntensity<double>(500  ,0,0,2000));
  c1.signal("RAW").push_back(ReadIntensity<double>(250  ,0,0,2000));
  c1.signal("RAW").push_back(ReadIntensity<double>(125  ,0,0,2000));
  c1.signal("RAW").push_back(ReadIntensity<double>(62.5 ,0,0,2000));
  c1.signal("RAW").push_back(ReadIntensity<double>(31.25,0,0,2000));

  vector<Cluster<double> > clusters;
  clusters.push_back(c1);

  PhasingCorrection<double> m_phasing_correction(0,0.5,0.6,"RAW","PHASE_CORRECTED");
  m_phasing_correction.process(clusters);
  
  ut.test(clusters[0].signal("PHASE_CORRECTED")[0],ReadIntensity<double>(1968.75,0,0,0));
  ut.test(clusters[0].signal("PHASE_CORRECTED")[1],ReadIntensity<double>(0      ,0,0,2000));
  ut.test(clusters[0].signal("PHASE_CORRECTED")[2],ReadIntensity<double>(0      ,0,0,2000));
  ut.test(clusters[0].signal("PHASE_CORRECTED")[3],ReadIntensity<double>(0      ,0,0,2000));
  ut.test(clusters[0].signal("PHASE_CORRECTED")[4],ReadIntensity<double>(0      ,0,0,2000));
  ut.test(clusters[0].signal("PHASE_CORRECTED")[5],ReadIntensity<double>(0      ,0,0,2000));

  ut.end_test_set();
}
Example No. 27
void TestsDirectory(UnitTest& tester)
{
  tester.AddTest("Filesystem", "Creating a directory", []() -> string
  {
    if (Directory::MakeDir("testdir"))
      return ("");
    return ("Directory::MakeDir returned false");
  });
  tester.AddTest("Filesystem", "Removing a directory", []() -> string
  {
    if (Directory::RemoveDir("testdir"))
      return ("");
    return ("Directory::RemoveDir returned false");
  });
  tester.AddTest("Filesystem", "Copying a file", []() -> string
  {
    if (!(Directory::MakeDir("testdir")))
      return ("Can't perform test without a working MakeDir");
    if (Filesystem::FileCopy("data/perks.json", "testdir/perks.json"))
    {
      DataTree* tree = DataTree::Factory::JSON("testdir/perks.json");

      if (tree)
        return ("");
      return ("Failed to properly copy file");
    }
    return ("Filesystem::FileCopy returned false");
  });
  tester.AddTest("Filesystem", "Not removing a directory containing files", []() -> string
  {
    string to_ret;

    if (Directory::RemoveDir("testdir"))
      to_ret = "Directory::RemoveDir returned true";
    remove("testdir/perks.json");
    Directory::RemoveDir("testdir");
    return (to_ret);
  });
}
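These filesystem tests assume a harness in which AddTest registers a named check that returns an empty string on success and an error message on failure. A hypothetical minimal harness matching that convention (class shape, names and reporting are illustrative only, not the project's UnitTest):

#include <functional>
#include <iostream>
#include <string>
#include <vector>

class UnitTest
{
public:
  void AddTest(const std::string& group, const std::string& name,
               std::function<std::string()> test)
  {
    Entry entry = { group, name, test };
    tests.push_back(entry);
  }

  // Runs every registered check; a non-empty return value counts as a failure.
  int RunAll()
  {
    int failures = 0;
    for (size_t i = 0; i < tests.size(); ++i)
    {
      std::string error = tests[i].run();
      if (!error.empty())
      {
        std::cerr << "[" << tests[i].group << "] " << tests[i].name
                  << ": " << error << std::endl;
        ++failures;
      }
    }
    return failures;
  }

private:
  struct Entry
  {
    std::string group;
    std::string name;
    std::function<std::string()> run;
  };
  std::vector<Entry> tests;
};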
Example No. 28
// Fired before each iteration of tests starts.
void SDKUnitTestListener::OnTestIterationStart(const UnitTest& unit_test, int iteration) {
	if (GTEST_FLAG(repeat) != 1)
		PushResult( UTIL_VarArgs("\nRepeating all tests (iteration %d) . . .\n\n", iteration + 1) );

	const char* const filter = GTEST_FLAG(filter).c_str();

	// Prints the filter if it's not *.  This reminds the user that some
	// tests may be skipped.
	if (!internal::String::CStringEquals(filter, kUniversalFilter)) {
		PushResult( UTIL_VarArgs( "Note: %s filter = %s\n", GTEST_NAME_, filter ) );
	}

	if (GTEST_FLAG(shuffle)) {
		PushResult( UTIL_VarArgs( "Note: Randomizing tests' orders with a seed of %d .\n",
			unit_test.random_seed()) );
	}

	PushResult( "[==========] " );
	PushResult( UTIL_VarArgs("Running %s from %s.\n",
		FormatTestCount(unit_test.test_to_run_count()).c_str(),
		FormatTestCaseCount(unit_test.test_case_to_run_count()).c_str()) );
}
Example No. 29
// Internal helper for printing the list of failed tests.
void SDKUnitTestListener::PrintFailedTests(const UnitTest& unit_test) {
	const int failed_test_count = unit_test.failed_test_count();
	if (failed_test_count == 0) {
		return;
	}

	for (int i = 0; i < unit_test.total_test_case_count(); ++i) {
		const TestCase& test_case = *unit_test.GetTestCase(i);
		if (!test_case.should_run() || (test_case.failed_test_count() == 0)) {
			continue;
		}
		for (int j = 0; j < test_case.total_test_count(); ++j) {
			const TestInfo& test_info = *test_case.GetTestInfo(j);
			if (!test_info.should_run() || test_info.result()->Passed()) {
				continue;
			}
			Msg( "[  FAILED  ] ");
			Msg("%s.%s", test_case.name(), test_info.name());
			Msg("\n");
		}
	}
}
Example No. 30
//
// Program starts here
//
int main()
{
	//
	// Note: Order is important here. You need to create the WindowCanvas before
	// you setup the skin - because the renderer won't be properly set up
	// if you try to do it before (The window canvas initializes the renderer)
	//
	// Create the skin and renderer
	//
	//gwen::Renderer::GDIPlusBuffered		renderer;
	//gwen::Renderer::Direct2D				renderer;
	//gwen::Renderer::DirectX9				renderer;
	gwen::Renderer::OpenGL_DebugFont		renderer;
	gwen::Skin::TexturedBase			skin( &renderer );
	//
	// The window canvas is a cross between a window and a canvas
	// It's cool because it takes care of creating an OS specific
	// window - so we don't have to bother with all that crap.
	//
	//
	gwen::Controls::WindowCanvas window_canvas( -1, -1, 700, 500, &skin, "gwen's Cross Platform Example" );
	//
	// Now it's safe to set up the skin
	//
	skin.Init( "DefaultSkin.png" );
	//
	// Create our unittest control
	//
	UnitTest* pUnit = new UnitTest( &window_canvas );
	pUnit->SetPos( 10, 10 );

	while ( !window_canvas.WantsQuit() )
	{
		window_canvas.DoThink();
	}

	// Everything should be automatically released
	// pUnit is a child of Canvas - which releases all of its children
}