int testFromSeparateDataFiles(
  const RCP<const Teuchos::Comm<int> > &comm,
  int numParts,
  float imbalance,
  std::string fname,
  std::string pqParts,
  std::string pfname,
  int k,
  int migration_check_option,
  int migration_all_to_all_type,
  scalar_t migration_imbalance_cut_off,
  int migration_processor_assignment_type,
  int migration_doMigration_type
)
{
  //std::string fname("simple");
  //cout << "running " << fname << endl;

  // Each rank reads its own coordinate file, named <fname>_<rank>.mtx.
  int mR = comm->getRank();
  if (mR == 0) cout << "size of scalar_t:" << sizeof(scalar_t) << endl;

  string tFile = fname + "_" + Zoltan2::toString<int>(mR) + ".mtx";

  scalar_t **double_coords;
  lno_t numLocal = 0;
  int dim = 0;
  getCoords<scalar_t, lno_t>(double_coords, numLocal, dim, tFile);
  //UserInputForTests uinput(testDataFilePath, fname, comm, true);

  Teuchos::Array<Teuchos::ArrayView<const scalar_t> > coordView(dim);
  for (int i = 0; i < dim; i++){
    if (numLocal > 0){
      Teuchos::ArrayView<const scalar_t> a(double_coords[i], numLocal);
      coordView[i] = a;
    }
    else {
      Teuchos::ArrayView<const scalar_t> a;
      coordView[i] = a;
    }
  }

  // Sum the local counts to get the global number of coordinates.
  gno_t numGlobal;
  gno_t nL = numLocal;
  Teuchos::Comm<int> *tcomm = (Teuchos::Comm<int> *)comm.getRawPtr();
  reduceAll<int, gno_t>(*tcomm, Teuchos::REDUCE_SUM, 1, &nL, &numGlobal);

  RCP<Tpetra::Map<lno_t, gno_t, node_t> > mp = rcp(
      new Tpetra::Map<lno_t, gno_t, node_t>(numGlobal, numLocal, 0, comm));

  RCP<Tpetra::MultiVector<scalar_t, lno_t, gno_t, node_t> > coords =
    RCP<Tpetra::MultiVector<scalar_t, lno_t, gno_t, node_t> >(
      new Tpetra::MultiVector<scalar_t, lno_t, gno_t, node_t>(
        mp, coordView.view(0, dim), dim));

  RCP<const tMVector_t> coordsConst = rcp_const_cast<const tMVector_t>(coords);

  typedef Zoltan2::XpetraMultiVectorInput<tMVector_t> inputAdapter_t;
  inputAdapter_t ia(coordsConst);

  // Start from an XML parameter file if one was given, otherwise an empty list.
  Teuchos::RCP<Teuchos::ParameterList> params;
  //Teuchos::ParameterList params("test params");
  if (pfname != ""){
    params = Teuchos::getParametersFromXmlFile(pfname);
  }
  else {
    params = RCP<Teuchos::ParameterList>(new Teuchos::ParameterList, true);
  }

  //params->set("timer_output_stream" , "std::cout");
  params->set("compute_metrics", "true");
  params->set("algorithm", "multijagged");
  if (imbalance > 1){
    params->set("imbalance_tolerance", double(imbalance));
  }
  if (pqParts != ""){
    params->set("pqParts", pqParts);
  }
  if (numParts > 0){
    params->set("num_global_parts", numParts);
  }
  if (k > 0){
    params->set("parallel_part_calculation_count", k);
  }
  if (migration_processor_assignment_type >= 0){
    params->set("migration_processor_assignment_type",
        migration_processor_assignment_type);
  }
  if (migration_check_option >= 0){
    params->set("migration_check_option", migration_check_option);
  }
  if (migration_all_to_all_type >= 0){
    params->set("migration_all_to_all_type", migration_all_to_all_type);
  }
  if (migration_imbalance_cut_off >= 0){
    params->set("migration_imbalance_cut_off",
        double(migration_imbalance_cut_off));
  }
  if (migration_doMigration_type >= 0){
    params->set("migration_doMigration_type", int(migration_doMigration_type));
  }

  Zoltan2::PartitioningProblem<inputAdapter_t> *problem;
  try {
#ifdef HAVE_ZOLTAN2_MPI
    problem = new Zoltan2::PartitioningProblem<inputAdapter_t>(&ia,
        params.getRawPtr(), MPI_COMM_WORLD);
#else
    problem = new Zoltan2::PartitioningProblem<inputAdapter_t>(&ia,
        params.getRawPtr());
#endif
  }
  CATCH_EXCEPTIONS("PartitioningProblem()")

  try {
    problem->solve();
  }
  CATCH_EXCEPTIONS("solve()")

  // For small problems, print each global id and its assigned part.
  if (coordsConst->getGlobalLength() < 40) {
    int len = coordsConst->getLocalLength();
    const inputAdapter_t::part_t *zparts =
        problem->getSolution().getPartList();
    const gno_t *zgids = problem->getSolution().getIdList();
    for (int i = 0; i < len; i++)
      cout << comm->getRank()
           << " gid " << zgids[i]
           << " part " << zparts[i] << endl;
  }

  if (comm->getRank() == 0){
    problem->printMetrics(cout);
    cout << "testFromSeparateDataFiles is done " << endl;
  }

  problem->printTimers();
  delete problem;
  return 0;
}
int testFromDataFile(
  const RCP<const Teuchos::Comm<int> > &comm,
  int numParts,
  float imbalance,
  std::string fname,
  std::string pqParts,
  std::string pfname,
  int k,
  int migration_check_option,
  int migration_all_to_all_type,
  scalar_t migration_imbalance_cut_off,
  int migration_processor_assignment_type,
  int migration_doMigration_type
)
{
  //std::string fname("simple");
  //cout << "running " << fname << endl;

  // Read the coordinates from the shared test-data directory.
  UserInputForTests uinput(testDataFilePath, fname, comm, true);
  RCP<tMVector_t> coords = uinput.getUICoordinates();
  RCP<const tMVector_t> coordsConst = rcp_const_cast<const tMVector_t>(coords);

  typedef Zoltan2::XpetraMultiVectorAdapter<tMVector_t> inputAdapter_t;
  inputAdapter_t ia(coordsConst);

  // Start from an XML parameter file if one was given, otherwise an empty list.
  Teuchos::RCP<Teuchos::ParameterList> params;
  //Teuchos::ParameterList params("test params");
  if (pfname != ""){
    params = Teuchos::getParametersFromXmlFile(pfname);
  }
  else {
    params = RCP<Teuchos::ParameterList>(new Teuchos::ParameterList, true);
  }

  //params->set("timer_output_stream" , "std::cout");
  params->set("compute_metrics", "true");
  params->set("algorithm", "multijagged");
  if (imbalance > 1){
    params->set("imbalance_tolerance", double(imbalance));
  }
  if (pqParts != ""){
    params->set("mj_parts", pqParts);
  }
  if (numParts > 0){
    params->set("num_global_parts", numParts);
  }
  if (k > 0){
    params->set("mj_concurrent_part_count", k);
  }
  if (migration_check_option >= 0){
    params->set("mj_migration_option", migration_check_option);
  }
  if (migration_imbalance_cut_off >= 0){
    params->set("mj_minimum_migration_imbalance",
        double(migration_imbalance_cut_off));
  }

  Zoltan2::PartitioningProblem<inputAdapter_t> *problem;
  try {
#ifdef HAVE_ZOLTAN2_MPI
    problem = new Zoltan2::PartitioningProblem<inputAdapter_t>(&ia,
        params.getRawPtr(), MPI_COMM_WORLD);
#else
    problem = new Zoltan2::PartitioningProblem<inputAdapter_t>(&ia,
        params.getRawPtr());
#endif
  }
  CATCH_EXCEPTIONS("PartitioningProblem()")

  try {
    problem->solve();
  }
  CATCH_EXCEPTIONS("solve()")

  // For small problems, print each global id and its assigned part.
  if (coordsConst->getGlobalLength() < 40) {
    int len = coordsConst->getLocalLength();
    const inputAdapter_t::part_t *zparts =
        problem->getSolution().getPartList();
    const gno_t *zgids = problem->getSolution().getIdList();
    for (int i = 0; i < len; i++)
      cout << comm->getRank()
           << " gid " << zgids[i]
           << " part " << zparts[i] << endl;
  }

  if (comm->getRank() == 0){
    problem->printMetrics(cout);
    cout << "testFromDataFile is done " << endl;
  }

  problem->printTimers();
  delete problem;
  return 0;
}
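// The tests above accept an optional XML parameter file (pfname) read with
// Teuchos::getParametersFromXmlFile.  As a hedged sketch (not part of the
// original tests), an equivalent list can be built directly in code using the
// same parameter names set above; the helper name and the literal values
// below are illustrative assumptions only.
static Teuchos::RCP<Teuchos::ParameterList> makeExampleMJParams()
{
  Teuchos::RCP<Teuchos::ParameterList> p =
    Teuchos::rcp(new Teuchos::ParameterList("example mj params"));
  p->set("algorithm", "multijagged");     // use the MJ partitioner
  p->set("compute_metrics", "true");      // report imbalance metrics
  p->set("imbalance_tolerance", 1.1);     // allow 10% imbalance
  p->set("num_global_parts", 16);         // request 16 parts
  p->set("mj_concurrent_part_count", 2);  // partition 2 parts concurrently
  return p;
}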
int main(int argc, char *argv[])
{
#ifdef HAVE_ZOLTAN2_MPI
  MPI_Init(&argc, &argv);
  int rank, nprocs;
  MPI_Comm_size(MPI_COMM_WORLD, &nprocs);
  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
#else
  int rank=0, nprocs=1;
#endif

  ///////////////////////////////////////////////////////////////////////
  // Generate some input data: rank r owns 40*(r+1) consecutive global ids.

  size_t localCount = 40*(rank+1);
  globalId_t *globalIds = new globalId_t [localCount];

  if (rank == 0)
    for (int i=0, num=40; i < nprocs; i++, num+=40)
      cout << "Rank " << i << " has " << num << " ids." << endl;

  globalId_t offset = 0;
  for (int i=1; i <= rank; i++)
    offset += 40*i;

  for (size_t i=0; i < localCount; i++)
    globalIds[i] = offset++;

  ///////////////////////////////////////////////////////////////////////
  // Create a Zoltan2 input adapter with no weights

  // BasicUserTypes bundles the scalar, global id, local id, and global
  // ordinal types that the adapter and problem will use.
  typedef Zoltan2::BasicUserTypes<scalar_t, globalId_t, localId_t, globalId_t>
    myTypes;

  // BasicIdentifierAdapter presents a flat list of ids, with optional
  // per-id weights, to Zoltan2.
  typedef Zoltan2::BasicIdentifierAdapter<myTypes> inputAdapter_t;

  std::vector<const scalar_t *> noWeights;
  std::vector<int> noStrides;

  inputAdapter_t ia(localCount, globalIds, noWeights, noStrides);

  ///////////////////////////////////////////////////////////////////////
  // Create parameters for a Block problem

  Teuchos::ParameterList params("test params");
  params.set("debug_level", "basic_status");
  params.set("debug_procs", "0");
  params.set("error_check_level", "debug_mode_assertions");

  params.set("algorithm", "block");
  params.set("imbalance_tolerance", 1.1);
  params.set("num_global_parts", nprocs);

  ///////////////////////////////////////////////////////////////////////
  // Create a Zoltan2 partitioning problem

#ifdef HAVE_ZOLTAN2_MPI
  Zoltan2::PartitioningProblem<inputAdapter_t> *problem =
    new Zoltan2::PartitioningProblem<inputAdapter_t>(&ia, &params,
                                                     MPI_COMM_WORLD);
#else
  Zoltan2::PartitioningProblem<inputAdapter_t> *problem =
    new Zoltan2::PartitioningProblem<inputAdapter_t>(&ia, &params);
#endif

  ///////////////////////////////////////////////////////////////////////
  // Solve the problem

  problem->solve();

  ///////////////////////////////////////////////////////////////////////
  // Check the solution.

  if (rank == 0)
    problem->printMetrics(cout);

  if (rank == 0)
    cout << "PASS" << endl;

  delete [] globalIds;
  delete problem;

#ifdef HAVE_ZOLTAN2_MPI
  MPI_Finalize();
#endif

  return 0;
}
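// The example above passes empty weight and stride vectors, so every id has
// equal weight.  Below is a hedged sketch of the weighted variant, mirroring
// the constructor call used above; the function and array names are
// illustrative assumptions, not part of the original example.
static void buildWeightedIdentifierAdapterSketch(size_t localCount,
                                                 globalId_t *globalIds,
                                                 const scalar_t *idWeights)
{
  typedef Zoltan2::BasicUserTypes<scalar_t, globalId_t, localId_t, globalId_t>
    myTypes;
  typedef Zoltan2::BasicIdentifierAdapter<myTypes> adapter_t;

  // One weight per id, stored contiguously (stride 1).
  std::vector<const scalar_t *> weights(1, idWeights);
  std::vector<int> strides(1, 1);

  adapter_t ia(localCount, globalIds, weights, strides);
  (void) ia;  // the adapter would be handed to a PartitioningProblem as above
}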
int GeometricGenInterface(
  const RCP<const Teuchos::Comm<int> > &comm,
  int numParts,
  float imbalance,
  std::string paramFile,
  std::string pqParts,
  std::string pfname,
  int k,
  int migration_check_option,
  int migration_all_to_all_type,
  scalar_t migration_imbalance_cut_off,
  int migration_processor_assignment_type,
  int migration_doMigration_type
)
{
  // Generate synthetic coordinates (and optional weights) as described by
  // the geometric-generator parameter file.
  Teuchos::ParameterList geoparams("geo params");
  readGeoGenParams(paramFile, geoparams, comm);
  GeometricGen::GeometricGenerator<scalar_t, lno_t, gno_t, node_t> *gg =
    new GeometricGen::GeometricGenerator<scalar_t, lno_t, gno_t, node_t>(
      geoparams, comm);

  int coord_dim = gg->getCoordinateDimension();
  int numWeightsPerCoord = gg->getNumWeights();
  lno_t numLocalPoints = gg->getNumLocalCoords();
  gno_t numGlobalPoints = gg->getNumGlobalCoords();

  scalar_t **coords = new scalar_t * [coord_dim];
  for (int i = 0; i < coord_dim; ++i){
    coords[i] = new scalar_t[numLocalPoints];
  }
  gg->getLocalCoordinatesCopy(coords);

  scalar_t **weight = NULL;
  if (numWeightsPerCoord){
    weight = new scalar_t * [numWeightsPerCoord];
    for (int i = 0; i < numWeightsPerCoord; ++i){
      weight[i] = new scalar_t[numLocalPoints];
    }
    gg->getLocalWeightsCopy(weight);
  }
  delete gg;

  // Wrap the coordinates in a Tpetra::MultiVector.
  RCP<Tpetra::Map<lno_t, gno_t, node_t> > mp = rcp(
      new Tpetra::Map<lno_t, gno_t, node_t>(numGlobalPoints, numLocalPoints,
                                            0, comm));

  Teuchos::Array<Teuchos::ArrayView<const scalar_t> > coordView(coord_dim);
  for (int i = 0; i < coord_dim; i++){
    if (numLocalPoints > 0){
      Teuchos::ArrayView<const scalar_t> a(coords[i], numLocalPoints);
      coordView[i] = a;
    }
    else {
      Teuchos::ArrayView<const scalar_t> a;
      coordView[i] = a;
    }
  }

  RCP<Tpetra::MultiVector<scalar_t, lno_t, gno_t, node_t> > tmVector =
    RCP<Tpetra::MultiVector<scalar_t, lno_t, gno_t, node_t> >(
      new Tpetra::MultiVector<scalar_t, lno_t, gno_t, node_t>(
        mp, coordView.view(0, coord_dim), coord_dim));

  RCP<const tMVector_t> coordsConst =
    Teuchos::rcp_const_cast<const tMVector_t>(tmVector);

  vector<const scalar_t *> weights;
  if (numWeightsPerCoord){
    for (int i = 0; i < numWeightsPerCoord; ++i){
      weights.push_back(weight[i]);
    }
  }
  vector<int> stride;

  typedef Zoltan2::XpetraMultiVectorAdapter<tMVector_t> inputAdapter_t;
  //inputAdapter_t ia(coordsConst);
  inputAdapter_t ia(coordsConst, weights, stride);

  // Start from an XML parameter file if one was given, otherwise an empty list.
  Teuchos::RCP<Teuchos::ParameterList> params;
  //Teuchos::ParameterList params("test params");
  if (pfname != ""){
    params = Teuchos::getParametersFromXmlFile(pfname);
  }
  else {
    params = RCP<Teuchos::ParameterList>(new Teuchos::ParameterList, true);
  }

  /*
  params->set("memory_output_stream" , "std::cout");
  params->set("memory_procs" , 0);
  */
  params->set("timer_output_stream", "std::cout");

  params->set("algorithm", "multijagged");
  params->set("compute_metrics", "true");
  if (imbalance > 1){
    params->set("imbalance_tolerance", double(imbalance));
  }
  if (pqParts != ""){
    params->set("mj_parts", pqParts);
  }
  if (numParts > 0){
    params->set("num_global_parts", numParts);
  }
  if (k > 0){
    params->set("mj_concurrent_part_count", k);
  }
  if (migration_check_option >= 0){
    params->set("mj_migration_option", migration_check_option);
  }
  if (migration_imbalance_cut_off >= 0){
    params->set("mj_minimum_migration_imbalance",
        double(migration_imbalance_cut_off));
  }

  Zoltan2::PartitioningProblem<inputAdapter_t> *problem;
  try {
#ifdef HAVE_ZOLTAN2_MPI
    problem = new Zoltan2::PartitioningProblem<inputAdapter_t>(&ia,
        params.getRawPtr(), MPI_COMM_WORLD);
#else
    problem = new Zoltan2::PartitioningProblem<inputAdapter_t>(&ia,
        params.getRawPtr());
#endif
  }
  CATCH_EXCEPTIONS("PartitioningProblem()")

  try {
    problem->solve();
  }
  CATCH_EXCEPTIONS("solve()")

  if (comm->getRank() == 0){
    problem->printMetrics(cout);
  }
  problem->printTimers();

  // Free the raw coordinate and weight buffers.
  if (numWeightsPerCoord){
    for (int i = 0; i < numWeightsPerCoord; ++i)
      delete [] weight[i];
    delete [] weight;
  }
  if (coord_dim){
    for (int i = 0; i < coord_dim; ++i)
      delete [] coords[i];
    delete [] coords;
  }
  delete problem;
  return 0;
}
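// run() below registers Zoltan query callbacks (znumobj, zobjlist, znumgeom,
// zgeom) whose definitions are not shown here.  The following is only a
// hedged sketch of callbacks with Zoltan's documented query-function
// signatures, operating on the coordinate MultiVector that run() passes as
// the data pointer; the names are hypothetical and these sketches are not
// used by the tests in this file.
static int sketchNumObj(void *data, int *ierr)
{
  *ierr = ZOLTAN_OK;
  tMVector_t *vec = (tMVector_t *) data;
  return int(vec->getLocalLength());   // number of locally owned coordinates
}

static void sketchGeomMulti(void *data, int numGid, int numLid, int numObj,
                            ZOLTAN_ID_PTR gids, ZOLTAN_ID_PTR lids,
                            int numDim, double *coordVec, int *ierr)
{
  (void) numGid; (void) numLid; (void) gids;
  *ierr = ZOLTAN_OK;
  tMVector_t *vec = (tMVector_t *) data;
  for (int d = 0; d < numDim; d++) {
    Teuchos::ArrayRCP<const scalar_t> vals = vec->getData(d);
    for (int i = 0; i < numObj; i++)
      coordVec[i*numDim + d] = double(vals[lids[i]]);  // interleaved layout
  }
}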
int run(
  const RCP<const Comm<int> > &comm,
  int numGlobalParts,
  int testCnt,
  std::string *thisTest
)
{
#ifdef HAVE_ZOLTAN2_MPI
  // Zoltan needs an MPI comm
  const Teuchos::MpiComm<int> *tmpicomm =
    dynamic_cast<const Teuchos::MpiComm<int> *>(comm.getRawPtr());
  MPI_Comm mpiComm = *(tmpicomm->getRawMpiComm());
#endif

  int me = comm->getRank();
  int np = comm->getSize();
  double tolerance = 1.05;

  //////////////////////////////////////////////
  // Read test data from Zoltan's test directory
  //////////////////////////////////////////////

  UserInputForTests *uinput;
  try{
    uinput = new UserInputForTests(zoltanTestDirectory,
                                   thisTest[TESTNAMEOFFSET],
                                   comm, true);
  }
  catch(std::exception &e){
    if (me == 0)
      cout << "Test " << testCnt << ": FAIL: UserInputForTests "
           << e.what() << endl;
    return 1;
  }

  RCP<tMatrix_t> matrix;
  try{
    matrix = uinput->getUITpetraCrsMatrix();
  }
  catch(std::exception &e){
    if (me == 0)
      cout << "Test " << testCnt << ": FAIL: get matrix "
           << e.what() << endl;
    return 1;
  }
  RCP<const tMatrix_t> matrixConst = rcp_const_cast<const tMatrix_t>(matrix);

  RCP<tMVector_t> coords;
  try{
    coords = uinput->getUICoordinates();
  }
  catch(std::exception &e){
    if (me == 0)
      cout << "Test " << testCnt << ": FAIL: get coordinates "
           << e.what() << endl;
    return 1;
  }

  RCP<tMVector_t> weights;
  try{
    weights = uinput->getUIWeights();
  }
  catch(std::exception &e){
    if (me == 0)
      cout << "Test " << testCnt << ": FAIL: get weights "
           << e.what() << endl;
    return 1;
  }

  int nWeights = atoi(thisTest[TESTOBJWGTOFFSET].c_str());

  if (me == 0) {
    cout << "Test " << testCnt << " filename = "
         << thisTest[TESTNAMEOFFSET] << endl;
    cout << "Test " << testCnt << " num processors = " << np << endl;
    cout << "Test " << testCnt << " zoltan method = "
         << thisTest[TESTMETHODOFFSET] << endl;
    cout << "Test " << testCnt << " num_global_parts = "
         << numGlobalParts << endl;
    cout << "Test " << testCnt << " imbalance_tolerance = "
         << tolerance << endl;
    cout << "Test " << testCnt << " num weights per ID = "
         << nWeights << endl;
  }

  /////////////////////////////////////////
  // PARTITION USING ZOLTAN DIRECTLY
  /////////////////////////////////////////

  if (me == 0) cout << "Calling Zoltan directly" << endl;

# ifdef HAVE_ZOLTAN2_MPI
  Zoltan zz(mpiComm);
# else
  Zoltan zz;
# endif

  char tmp[56];
  zz.Set_Param("LB_METHOD", thisTest[TESTMETHODOFFSET]);

  sprintf(tmp, "%d", numGlobalParts);
  zz.Set_Param("NUM_GLOBAL_PARTS", tmp);
  sprintf(tmp, "%d", nWeights);
  zz.Set_Param("OBJ_WEIGHT_DIM", tmp);
  sprintf(tmp, "%f", tolerance);
  zz.Set_Param("IMBALANCE_TOL", tmp);
  zz.Set_Param("RETURN_LISTS", "PART");
  zz.Set_Param("FINAL_OUTPUT", "1");

  zz.Set_Num_Obj_Fn(znumobj, (void *) coords.getRawPtr());
  if (nWeights)
    zz.Set_Obj_List_Fn(zobjlist, (void *) weights.getRawPtr());
  else
    zz.Set_Obj_List_Fn(zobjlist, (void *) coords.getRawPtr());
  zz.Set_Num_Geom_Fn(znumgeom, (void *) coords.getRawPtr());
  zz.Set_Geom_Multi_Fn(zgeom, (void *) coords.getRawPtr());

  int changes, ngid, nlid;
  int numd, nump;
  ZOLTAN_ID_PTR dgid = NULL, dlid = NULL, pgid = NULL, plid = NULL;
  int *dproc = NULL, *dpart = NULL, *pproc = NULL, *ppart = NULL;

  int ierr = zz.LB_Partition(changes, ngid, nlid,
                             numd, dgid, dlid, dproc, dpart,
                             nump, pgid, plid, pproc, ppart);
  if (ierr != ZOLTAN_OK && ierr != ZOLTAN_WARN) {
    if (me == 0)
      cout << "Test " << testCnt << ": FAIL: direct Zoltan call" << endl;
    zz.LB_Free_Part(&pgid, &plid, &pproc, &ppart);
    return 1;
  }

  /////////////////////////////////////////
  // PARTITION USING ZOLTAN THROUGH ZOLTAN2
  /////////////////////////////////////////

  if (me == 0) cout << "Calling Zoltan through Zoltan2" << endl;

  matrixAdapter_t *ia;
  try{
    ia = new matrixAdapter_t(matrixConst, nWeights);
  }
  catch(std::exception &e){
    if (me == 0)
      cout << "Test " << testCnt << ": FAIL: matrix adapter "
           << e.what() << endl;
    return 1;
  }

  for (int idx=0; idx < nWeights; idx++)
    ia->setRowWeights(weights->getData(idx).getRawPtr(), 1, idx);

  vectorAdapter_t *ca = NULL;
  try{
    ca = new vectorAdapter_t(coords);
  }
  catch(std::exception &e){
    if (me == 0)
      cout << "Test " << testCnt << ": FAIL: vector adapter "
           << e.what() << endl;
    return 1;
  }
  ia->setCoordinateInput(ca);

  Teuchos::ParameterList params;
  params.set("timer_output_stream", "std::cout");
  params.set("compute_metrics", "true");
  // params.set("debug_level", "verbose_detailed_status");

  params.set("algorithm", "zoltan");
  params.set("imbalance_tolerance", tolerance);
  params.set("num_global_parts", numGlobalParts);

  if (thisTest[TESTMETHODOFFSET] != "default") {
    // "default" tests the case of no Zoltan parameter sublist
    Teuchos::ParameterList &zparams = params.sublist("zoltan_parameters", false);
    zparams.set("LB_METHOD", thisTest[TESTMETHODOFFSET]);
  }

  Zoltan2::PartitioningProblem<matrixAdapter_t> *problem;
# ifdef HAVE_ZOLTAN2_MPI
  try{
    problem = new Zoltan2::PartitioningProblem<matrixAdapter_t>(ia, &params,
                                                                mpiComm);
  }
# else
  try{
    problem = new Zoltan2::PartitioningProblem<matrixAdapter_t>(ia, &params);
  }
# endif
  catch(std::exception &e){
    cout << "Test " << testCnt << " FAIL: problem " << e.what() << endl;
    return 1;
  }

  try {
    problem->solve();
  }
  catch(std::exception &e){
    cout << "Test " << testCnt << " FAIL: solve " << e.what() << endl;
    return 1;
  }

  if (me == 0){
    problem->printMetrics(cout);
  }
  problem->printTimers();

  /////////////////////////////////////////
  // COMPARE RESULTS
  /////////////////////////////////////////

  size_t nObj = coords->getLocalLength();
  const int *z2parts = problem->getSolution().getPartListView();
  int diffcnt = 0, gdiffcnt = 0;
  for (size_t i = 0; i < nObj; i++) {
    if (z2parts[plid[i]] != ppart[i]) {
      diffcnt++;
      cout << me << " DIFF for " << i << " ("
           << coords->getMap()->getGlobalElement(i) << "): "
           << "Z2 = " << z2parts[plid[i]] << "; Z1 = " << ppart[i] << endl;
    }
  }

  /////////////////////////////////////////
  // CLEAN UP
  /////////////////////////////////////////

  zz.LB_Free_Part(&pgid, &plid, &pproc, &ppart);
  delete ia;
  delete ca;
  delete problem;
  delete uinput;

  Teuchos::reduceAll(*comm, Teuchos::REDUCE_SUM, 1, &diffcnt, &gdiffcnt);
  if (gdiffcnt > 0) {
    if (me == 0)
      cout << "Test " << testCnt << " "
           << thisTest[TESTNAMEOFFSET] << " "
           << thisTest[TESTMETHODOFFSET] << " "
           << thisTest[TESTOBJWGTOFFSET] << " "
           << " FAIL: comparison " << endl;
    return 1;
  }
  return 0;
}