Example 1
int main()
{
    verbose = getenv("PEGASUS_TEST_VERBOSE") != 0;

    // Generate the source instance to test
    CIMInstance inst = buildCIMInstanceSourceObject(verbose);
    // generate the corresponding FQLInstancePropertySource
    FQLInstancePropertySource src(inst);

    // The following is a compilable bypass that allows testing a single
    // testQuery. To run it, set the flag below to true and put the
    // query call(s) before the cout statement.
    Boolean executeSingleTest = false;
    if (executeSingleTest)
    {

//      testQuery(src,"ANY DoubleArrayProp1 = 1011.04", true);
        // Generates an evaluation error, not a parse error
//  testQuery(src,"strScal1 NOT LIKE \'.*\'", false);
//      testQuery(src, "IntArrayProp1[20] = 7", false);
//      testQuery(src,"IntScal1 = reallybad", false, true);
        cout << "+++++ passed short tests" << endl;
        return 0;
    }
    else
    {
        executeTests(src);
    }

    cout << "+++++ passed all tests" << endl;
    return 0;
}
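
The commented-out calls suggest that testQuery takes the FQLInstancePropertySource, an FQL query string, and the expected boolean outcome, with an optional trailing flag for queries expected to fail. A minimal sketch of the bypass in action, under that assumed signature and with a hypothetical query:

    // Hypothetical: flip the flag and exercise exactly one query.
    Boolean executeSingleTest = true;
    if (executeSingleTest)
    {
        // Assumed signature: testQuery(source, query, expectedResult)
        testQuery(src, "IntScal1 = 5", true);
        cout << "+++++ passed short tests" << endl;
        return 0;
    }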
Example 2
bool atWrapper::runAutoTests()
{
    //SVG needs this widget...
    QWidget dummy;

    bool haveBaseline = false;

    if (!initTests(&haveBaseline))
        return false;
    executeTests();

    if ( !haveBaseline )
    {
        qDebug( " First run! Creating baseline..." );
        createBaseline();
    }
    else
    {
        qDebug( " Comparing results..." );
        compare();
    }
    return true;
}
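
The structure above is the golden-file ("baseline") testing pattern: initTests reports through its out-parameter whether a stored baseline already exists, the very first run records one via createBaseline, and every later run renders fresh output and diffs it against the baseline via compare. Deleting the stored baseline would presumably force a re-record on the next run after an intentional rendering change.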
Example 3
int Test::main( int argc, char *argv[] ) noexcept {

#ifdef AIDKIT_GCC
	set_terminate( __gnu_cxx::__verbose_terminate_handler );
#endif
	QApplication application( argc, argv );
	QStringList arguments = application.arguments();

	// If no command-line parameters were given, default to 'silent' mode:
	// http://doc.qt.io/qt-5/qtest-overview.html#qt-test-command-line-arguments

	if ( arguments.length() == 1 ) {
		arguments.append( "-silent" );
	}

	// Execute the tests and print which have failed:

	QVector< Test * > failedTests = executeTests( arguments );
	for ( Test *test : failedTests ) {
		cerr << "Test failed: '" << test->name() << "'!" << endl;
	}
	return failedTests.size();
}
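
executeTests is not shown in this example. A minimal sketch of what it might look like, assuming each Test is a QObject-derived fixture and that a registry such as Test::allTests() exists (both are hypothetical names, not the project's actual API):

#include <QTest>

// Sketch only: Test::allTests() is a hypothetical registry of test fixtures;
// the real project may organize test discovery differently.
QVector< Test * > executeTests( const QStringList &arguments ) {
	QVector< Test * > failedTests;
	for ( Test *test : Test::allTests() ) {
		// QTest::qExec returns non-zero when at least one test function failed.
		if ( QTest::qExec( test, arguments ) != 0 ) {
			failedTests.append( test );
		}
	}
	return failedTests;
}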
Example 4
/**
 * Main method (duh ;) ). Runs the whole simulation; the main loop lives in this method.
 *
 * @param argc number of command line arguments
 * @param argsv[] command line parameters. Several config file parameters can be overridden by setting them here. "--visualize" is especially cool, as it shows an OpenGL live view.
 *
 * @return 0 if successful
 */
int main(int argc, char* argsv[]) {
	if (argc > 1 && (!strcmp(argsv[1], "-?") || !strcmp(argsv[1], "help") || !strcmp(argsv[1], "--help"))) {
		std::cout << "This is the NUKULAR Simulator" << std::endl;
		std::cout << "Authors: " << std::endl;
		std::cout << "\tLeonhard Rannabauer" << std::endl;
		std::cout << "\tJakob Weiss" << std::endl;
		std::cout << "\tAlexander Winkler" << std::endl;
		std::cout << std::endl;
		std::cout << "By default, configuration will be loaded from a configuration file." << std::endl;
		std::cout << "The CWD will be searched for \"config.cfg\". Override this by specifying the" << std::endl;
		std::cout << "\t-configFile command line parameter with the path to your own configuration" << std::endl;
		std::cout << "\t(either .xml or .cfg format)" << std::endl;
		std::cout << std::endl;
		std::cout << "EXAMPLE: ./MolSim -configFile simulationConfig.xml -outputFilePrefix MD_sim -endTime 10" << std::endl;
		std::cout << std::endl;
		std::cout << "\t will load Settings from simulationConfig.xml, simulate the world for 10 seconds" << std::endl;
		std::cout << "\t and output the files with a prefix of \"MD_sim\", which will lead to files like" << std::endl;
		std::cout << "\t\"MD_sim_0010.vtu\"" << std::endl;
		return 0;
	}

	std::cout << "Initializing the logger..." << std::endl << std::flush;
	//Initialize the logging stuff
	initializeLogger();


	Settings::initSettings(argc, argsv);

	LOG4CXX_TRACE(rootLogger, "Settings initialized!");

#ifdef _OPENMP
	if(Settings::numThreads > 0) {
		LOG4CXX_INFO(rootLogger, "Setting OpenMP Threads to " << Settings::numThreads);
		omp_set_num_threads(Settings::numThreads);
	}
	else {
		LOG4CXX_INFO(rootLogger, "Running on " << omp_get_max_threads() << " threads");
		Settings::numThreads = omp_get_max_threads();
	}
#else
	Settings::numThreads = 1;
	LOG4CXX_INFO(rootLogger, "Running serial version!");
#endif

#ifdef PAPI_BENCH
	for(int i=0; i < Settings::numThreads; i++) {
		char fileName[200];
		sprintf(fileName, "CalcF #%i.txt", i);
		papiCalcFCounters[i] = new PapiEnv(fileName);
		sprintf(fileName, "CalcX #%i.txt", i);
		papiCalcXCounters[i] = new PapiEnv(fileName);
	}	
#endif
	//Check if we should be executing some unit tests
	if(!Settings::testCase.empty()) {
		return executeTests();
	}

	LOG4CXX_TRACE(rootLogger, "Creating Simulator instance...");
	Simulator *sim = new Simulator();
#ifndef NOGLVISUALIZER
	outputWriter::theSimulator = sim;
#endif
	//Check if we should initialize from an old state file
	if(Settings::inputFile.size() != 0) {
		std::cout << "state found" << std::endl;
	}

	double current_time = Settings::startTime;

	int iteration = 0;

	int benchmarkStartTime = getMilliCount();
	double timeForOneIteration = 0;

	// For this loop, we assume: current x, current f and current v are known
	int maxIterations = (Settings::endTime - Settings::startTime) / Settings::deltaT;
	int nextProgressBarDraw = 1;
	int iterationsPerPercent = (maxIterations/100) + 1;

	LOG4CXX_INFO(rootLogger, "Will calculate " <<  maxIterations << " iterations and output " << maxIterations/Settings::outputFrequency << " frames ");


	while (current_time < Settings::endTime) {
		if (iteration % Settings::outputFrequency == 0) {
			sim->plotParticles(iteration + Settings::outputFileIterationOffset);
		}
		sim->nextTimeStep();
		iteration++;
		if(iteration == nextProgressBarDraw) {
			nextProgressBarDraw+=iterationsPerPercent;
			printProgressBar(100*iteration/maxIterations, getMilliCount() - benchmarkStartTime);
		}
		LOG4CXX_TRACE(rootLogger, "Iteration " << iteration << " finished.");
		current_time += Settings::deltaT;
		timeForOneIteration = ((double)(getMilliCount() - benchmarkStartTime))/iteration;

#ifndef NOGLVISUALIZER
		while(outputWriter::renderingPaused) {
			usleep(2000);
		}
#endif

#ifdef PAPI_BENCH
		for(int i=0; i < Settings::numThreads; i++) {
			papiCalcFCounters[i]->printResults();
			papiCalcXCounters[i]->printResults();
			papiCalcFCounters[i]->reset();
			papiCalcXCounters[i]->reset();
		}
#endif
	}


	int benchmarkEndTime = getMilliCount();

	if(Settings::saveLastState)
		sim->exportPhaseSpace();
	if(Settings::printStatistics)
		sim->exportStatistics();

	LOG4CXX_INFO(rootLogger, "Simulation finished. Took " << (benchmarkEndTime - benchmarkStartTime)/1000.0 << " seconds");

	delete sim;


	LOG4CXX_DEBUG(rootLogger, "Created " << Particle::createdInstances << " Particle instances (" << Particle::createdByCopy << " by copy)");
	LOG4CXX_DEBUG(rootLogger, "Destroyed " << Particle::destroyedInstances << " Particle instances");

#ifdef PAPI_BENCH
	for(int i=0; i < Settings::numThreads; i++) {
		std::cout << "Writing PAPI output for thread " << i << std::endl;
		papiCalcFCounters[i]->createResultFile();
		papiCalcXCounters[i]->createResultFile();
		delete papiCalcFCounters[i];
		delete papiCalcXCounters[i];
	}
#endif
	// 10 is arbitrarily chosen; there will always be some stray particles
	// because of static instances that are destroyed only at program exit.
#ifndef NOGLVISUALIZER
	if(Particle::createdInstances - Particle::destroyedInstances - outputWriter::render3dParticles.size() > 10) {
#else
	if(Particle::createdInstances - Particle::destroyedInstances > 10) {
#endif
		LOG4CXX_WARN(rootLogger, "Significant mismatch between created and destroyed particle instances. This can be a memory leak! " << (Particle::createdInstances - Particle::destroyedInstances));
	}

	LOG4CXX_DEBUG(rootLogger, "output written. Terminating...");
	return 0;
}
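
The getMilliCount helper used for the benchmark timing above is not shown. A minimal sketch, assuming the widely used ftime-based idiom that returns a millisecond wall-clock stamp (the project's actual implementation may differ):

#include <sys/timeb.h>

// Hypothetical stand-in for the timing helper used above: a millisecond
// counter derived from ftime(). Only the difference between two calls is
// meaningful, which is exactly how Example 4 uses it.
int getMilliCount() {
	timeb tb;
	ftime(&tb);
	return tb.millitm + (tb.time & 0xfffff) * 1000;
}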


int executeTests() {
	std::cout << "Running tests..." << std::endl;

	CppUnit::TextUi::TestRunner runner;

	bool all = (Settings::testCase == "all");

	if(all || Settings::testCase == "ParticleContainer")
		runner.addTest(ParticleContainerTests::suite());

	if(all || Settings::testCase == "ParticleGenerator")
		runner.addTest(ParticleGeneratorTests::suite());

//	if(all || Settings::testCase == "Settings")
//		runner.addTest(SettingsXsdTest::suite());

	if(all || Settings::testCase == "Matrix")
		runner.addTest(MatrixTests::suite());

	runner.setOutputter(new CppUnit::CompilerOutputter(&runner.result(), std::cerr));
	// Run the tests.
	bool wasSuccessful = runner.run();

	// Return error code 1 if one of the tests failed.
	if(wasSuccessful) {
	  std::cout << "Tests ok!" << std::endl;
	}
	else {
		std::cout << "Some tests failed!" << std::endl;
	}

	return wasSuccessful ? 0 : 1;
}
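
This runner is presumably reached through a command-line override such as ./MolSim -testCase all (the exact flag spelling is an assumption here; Settings::initSettings parses argv, and main returns executeTests() whenever Settings::testCase is non-empty). The process exit code then reflects the CppUnit result, which makes the test run directly usable from CI scripts.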