Example #1
#include <stdio.h>
#include <mpi.h>
// Charm++ headers for MPI interoperation and the collision library
// (header names assumed from the Charm++ distribution):
#include "mpi-interoperate.h"   // CharmLibInit(), CharmLibExit()
#include "collidecharm.h"       // CollisionList, bbox3d, CkVector3d

int main(int argc, char **argv){
  int peid, numpes;
  MPI_Comm newComm;

  //basic MPI initialization
  MPI_Init(&argc, &argv);
  MPI_Comm_rank(MPI_COMM_WORLD, &peid);
  MPI_Comm_size(MPI_COMM_WORLD, &numpes);


  //initialize the Charm++ runtime on this communicator
  CharmLibInit(MPI_COMM_WORLD, argc, argv);
  MPI_Barrier(MPI_COMM_WORLD);
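  // Build a strip of small axis-aligned bounding boxes on this rank,
  // offset by the rank id along x and by the box index along y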
  CollisionList *colls;
  CkVector3d o(-6.8,7.9,8.0), x(4.0,0,0), y(0,0.3,0);
  CkVector3d boxSize(0.2,0.2,0.2);
  int nBoxes=1000;
  bbox3d *box=new bbox3d[nBoxes];
  for (int i=0;i<nBoxes;i++) {
    CkVector3d c(o+x*peid+y*i);
    CkVector3d c2(c+boxSize);
    box[i].empty();
    box[i].add(c); box[i].add(c2);
  }
  // first box stretches over into next object:
  box[0].add(o+x*(peid+1.5)+y*2);
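  // Run parallel collision detection; detectCollision() is provided by the
  // Charm++ collision module started via CharmLibInit() and is expected to
  // allocate and fill colls with the detected intersection pairs.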
  detectCollision(colls,nBoxes, box, NULL);
  int numColls=colls->length();
  for (int c=0;c<numColls;c++) {
    printf("%d:%d hits %d:%d\n",
           (*colls)[c].A.chunk,(*colls)[c].A.number,
           (*colls)[c].B.chunk,(*colls)[c].B.number);
  }

  delete[] box;  // box was allocated with new[]
  MPI_Barrier(MPI_COMM_WORLD);
  CharmLibExit();

  //final synchronization
  MPI_Barrier(MPI_COMM_WORLD);

  MPI_Finalize();
  return 0;  
}
Example #2
//! \brief UnitTest main()
//! \details UnitTest has its own main() function so that tests can call MPI
//!   functions. We therefore use Charm++'s MPI-interoperation capability,
//!   just as one would when interoperating with an MPI library. This is
//!   necessary because MPI_Init() insists on capturing the resources it
//!   wants, and hence it must be called before Charm++ is initialized.
//! \author J. Bakosi
int main( int argc, char **argv ) {

  int peid, numpes;

  // Initialize MPI
  MPI_Init( &argc, &argv );
  MPI_Comm_rank( MPI_COMM_WORLD, &peid );
  MPI_Comm_size( MPI_COMM_WORLD, &numpes );

  // Run the serial and Charm++ unit test suites inside the Charm++ runtime
  CharmLibInit( MPI_COMM_WORLD, argc, argv );
  CharmLibExit();

  // Run MPI test suite
  try {

    tk::Print print;    // quiet output by default using print, see ctor
    unittest::ctr::CmdLine cmdline;
    bool helped;
    unittest::CmdLineParser cmdParser( argc, argv, print, cmdline, helped );

    // Print out help on all command-line arguments if help was requested
    const auto helpcmd = cmdline.get< tag::help >();
    if (peid == 0 && helpcmd)
      print.help< tk::QUIET >( UNITTEST_EXECUTABLE,
                               cmdline.get< tag::cmdinfo >(),
                               "Command-line Parameters:", "-" );

    // Print out verbose help for a single keyword if requested
    const auto helpkw = cmdline.get< tag::helpkw >();
    if (peid == 0 && !helpkw.keyword.empty())
      print.helpkw< tk::QUIET >( UNITTEST_EXECUTABLE, helpkw );

    // Immediately exit if any help was output
    if (helpcmd || !helpkw.keyword.empty()) {
      MPI_Finalize();
      return tk::ErrCode::SUCCESS;
    }

    unittest::UnitTestPrint
      uprint( cmdline.get< tag::verbose >() ? std::cout : std::clog );

    const auto& groups = unittest::g_runner.get().list_groups();

    // Get group name string passed in by -g
    const auto grp = cmdline.get< tag::group >();

    // If only select groups are to be run, check whether any MPI test group matches
    bool work = false;
    if (grp.empty())
      work = true;
    else
      for (const auto& g : groups)
        if ( g.find("MPI") != std::string::npos &&  // only consider MPI groups
             g.find(grp) != std::string::npos )
          work = true;

    // Quit if there is no work to be done
    if (!work) {
      if (peid == 0)
        uprint.note( "\nNo MPI test groups to be executed because no test "
                     "group names match '" + grp + "'.\n" );
      MPI_Finalize();
      return tk::ErrCode::SUCCESS;
    }

    if (peid == 0) {
      uprint.endpart();
      uprint.part( "MPI unit test suite" );
      uprint.unithead( "Unit tests computed", cmdline.get< tag::group >() );
    }

    std::size_t nrun=0, ncomplete=0, nwarn=0, nskip=0, nexcp=0, nfail=0;
    tk::Timer timer;  // start new timer measuring the MPI-suite runtime

    // Lambda to fire up all tests in a test group
    auto spawngrp = [&]( const std::string& g ) {
      for (int t=1; t<=unittest::g_maxTestsInGroup; ++t) {
        tut::test_result tr;
        unittest::g_runner.get().run_test( g, t, tr );
        if (peid == 0) {
          ++nrun;
          std::vector< std::string > status
            { tr.group, tr.name, std::to_string(tr.result), tr.message,
              tr.exception_typeid };
          unittest::evaluate( status, ncomplete, nwarn, nskip, nexcp, nfail );
          uprint.test( ncomplete, nfail, status );
        }
      }
    };

    // Fire up all tests in all test groups that exercise MPI; results are
    // evaluated and reported on rank 0
    for (const auto& g : groups)
      if (g.find("MPI") != std::string::npos) { // only start MPI test groups
        if (grp.empty()) {                        // consider all test groups
          spawngrp( g );
        } else if (g.find(grp) != std::string::npos) {
          // spawn only the groups whose name matches the string given via -g
          spawngrp( g );
        }
      }

    if (peid == 0) {
      unittest::assess( uprint, "MPI", nfail, nwarn, nskip, nexcp, ncomplete );
      std::vector< std::pair< std::string, tk::Timer::Watch > > timestamp;
      timestamp.emplace_back( "MPI tests runtime", timer.hms() );
      uprint.time( "MPI test suite timers (h:m:s)", timestamp );
    }

  } catch (...) { tk::processExceptionMPI(); }

  // Finalize MPI
  MPI_Finalize();

  return tk::ErrCode::SUCCESS;
}