// Round-trips the mnemonic test through every supported wordlist.
// A failure in any language is reported by name before failing the test.
TEST(mnemonics, all_languages)
{
  srand(time(NULL));
  std::vector<Language::Base*> languages({
    Language::Singleton<Language::Chinese_Simplified>::instance(),
    Language::Singleton<Language::English>::instance(),
    Language::Singleton<Language::Spanish>::instance(),
    Language::Singleton<Language::Portuguese>::instance(),
    Language::Singleton<Language::Japanese>::instance(),
    Language::Singleton<Language::German>::instance(),
    Language::Singleton<Language::Italian>::instance(),
    Language::Singleton<Language::Russian>::instance(),
    Language::Singleton<Language::French>::instance(),
    Language::Singleton<Language::Dutch>::instance()
  });
  for (Language::Base* language : languages)
  {
    try
    {
      test_language(*language);
    }
    catch (const std::exception &e)
    {
      // Name the offending wordlist so a failure is actionable.
      std::cout << "Error testing " << language->get_language_name() << " language: " << e.what() << std::endl;
      ASSERT_TRUE(false);
    }
  }
}
// Entry point for the AlchemyAPI integration checks.
// argv[1] is the AlchemyAPI access key; it is handed to the client along
// with the service endpoint, then each test helper is run in sequence.
// Returns 0 on completion, 1 on missing command-line argument.
int main(int argc, char** argv)
{
  // Guard the key argument: the original read argv[1] unconditionally,
  // which is a null/garbage dereference when the program is run bare.
  if (argc < 2)
  {
    fprintf(stderr, "usage: %s <alchemy_api_key>\n", argv[0]);
    return 1;
  }
  quotek::nlp::alchemy ac(argv[1], "http://access.alchemyapi.com");
  test_sentiment(ac);
  test_language(ac);
  test_relations(ac);
  test_raw(ac);
  test_taxonomy(ac);
  return 0;  // equivalent to the original exit(0), but lets main unwind normally
}
// Round-trips the mnemonic test through a subset of supported wordlists.
TEST(mnemonics, all_languages)
{
  srand(time(NULL));
  std::vector<Language::Base*> languages({
    Language::Singleton<Language::English>::instance(),
    Language::Singleton<Language::Spanish>::instance(),
    Language::Singleton<Language::Portuguese>::instance(),
    Language::Singleton<Language::Japanese>::instance(),
  });
  for (std::vector<Language::Base*>::iterator it = languages.begin(); it != languages.end(); ++it)
  {
    // Report the failing wordlist by name instead of letting an exception
    // escape the test body anonymously (matches the fuller variant of this
    // test, which wraps test_language in the same try/catch).
    try
    {
      test_language(*(*it));
    }
    catch (const std::exception &e)
    {
      std::cout << "Error testing " << (*it)->get_language_name() << " language: " << e.what() << std::endl;
      ASSERT_TRUE(false);
    }
  }
}
static bool go(bool result_only, size_t object_size, size_t binary_size, const char* name) { // setup if (!result_only) { printf("%s: ================\n", name); printf("%s: setting up size %i\n", name, (int)object_size); } if (!setup_test(object_size)) { fprintf(stderr, "%s: failed to get setup result.\n", name); return false; } // if this isn't a benchmark (the file creators), nothing left to do if (!is_benchmark()) { teardown_test(); if (!result_only) printf("%s: done\n", name); return true; } // figure out a reasonable number of iterations between checking the time int iterations; #ifdef __arm__ iterations = 1; #else iterations = 32; #endif for (size_t i = 5; i > object_size; --i) iterations <<= 3; uint32_t hash_result; // warm up if (!result_only) printf("%s: warming for %.0f seconds \n", name, WARM_TIME); double start_time = dtime(); while (true) { for (int i = 0; i < iterations; ++i) { hash_result = 0; if (!run_wrapper(&hash_result)) { fprintf(stderr, "%s: failed to get benchmark result.\n", name); return false; } } if (dtime() - start_time > WARM_TIME) break; } // run tests if (!result_only) printf("%s: running for %.0f seconds\n", name, WORK_TIME); int total_iterations = 0; start_time = dtime(); double end_time; while (true) { for (int i = 0; i < iterations; ++i) { hash_result = HASH_INITIAL_VALUE; if (!run_wrapper(&hash_result)) { fprintf(stderr, "%s: failed to get benchmark result.\n", name); return false; } ++total_iterations; } end_time = dtime(); if (end_time - start_time > WORK_TIME) break; } // print results double per_time = (end_time - start_time) / (double)total_iterations * (1000.0 * 1000.0); if (result_only) { printf("%f\n", per_time); } else { printf("%s: %i iterations took %f seconds\n", name, total_iterations, end_time - start_time); printf("%s: %f microseconds per iteration\n", name, per_time); printf("%s: hash result of last run: %08x\n", name, hash_result); } // write score if (!result_only) { FILE* file = fopen("results.csv", "a"); 
fprintf(file, "\"%s\",\"%s\",\"%s\",\"%s\",\"%s\",%i,%f,%i,%i,\"%08x\"\n", name, test_language(), test_version(), test_filename(), test_format(), (int)object_size, per_time, (int)binary_size, #if BENCHMARK_SIZE_OPTIMIZED 1, #else 0, #endif hash_result); fclose(file); } teardown_test(); return true; }