/*!
      \brief Apply the preconditioner.

      \copydoc Preconditioner::apply(X&,const Y&)
    */
    virtual void apply (Domain& v, const Range& d)
    {
        // copyOwnerToAll overwrites the copy (ghost) entries of d with
        // the corresponding owner values, hence the const_cast.
        Range& md = const_cast<Range&>(d);
        comm_.copyOwnerToAll(md,md);
        // Forward sweep: solve L y = d, where the ILU factor L has a
        // unit diagonal, so no diagonal division is needed.
        auto endrow = ilu_.end();
        for ( auto row = ilu_.begin(); row != endrow; ++row )
        {
            auto rhs(d[row.index()]);
            for ( auto col = row->begin(); col.index() < row.index(); ++col )
            {
                col->mmv(v[col.index()],rhs);
            }
            v[row.index()] = rhs;
        }
        comm_.copyOwnerToAll(v, v);
        // Backward sweep: solve U v = y in reverse row order; the inner
        // loop stops at each row's diagonal entry.
        auto rendrow = ilu_.beforeBegin();
        for( auto row = ilu_.beforeEnd(); row != rendrow; --row)
        {
            auto rhs(v[row.index()]);
            auto col = row->beforeEnd();
            for( ; col.index() > row.index(); --col)
            {
                col->mmv(v[col.index()], rhs);
            }
            // The diagonal block is stored inverted, so multiply rather
            // than divide: v_i = D_ii^{-1} * rhs.
            v[row.index()] = 0;
            col->umv(rhs, v[row.index()]);
        }
        comm_.copyOwnerToAll(v, v);
        v *= w_; // scale by the relaxation factor
    }
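
To make the two-sweep structure explicit, here is a minimal sequential sketch of the same ILU0 application on dense factors. The names and storage layout (unit lower triangle L, already-inverted diagonal Dinv, strict upper triangle U) are illustrative assumptions, not the class above; the parallel version differs only in the copyOwnerToAll calls that refresh ghost rows between sweeps.

#include <cstddef>
#include <vector>

// Sequential sketch of the two sweeps above: forward-substitute with a
// unit lower triangle, then back-substitute multiplying by the stored
// inverse diagonal. Dense storage is used purely for clarity.
void ilu0_apply(const std::vector<std::vector<double>>& L,   // unit lower, diagonal implicit
                const std::vector<double>& Dinv,             // inverted diagonal of U
                const std::vector<std::vector<double>>& U,   // strict upper
                const std::vector<double>& d,
                std::vector<double>& v)
{
    const std::size_t n = d.size();
    // Forward sweep: v = L^{-1} d. Unit diagonal, so no division.
    for (std::size_t i = 0; i < n; ++i) {
        double rhs = d[i];
        for (std::size_t j = 0; j < i; ++j)
            rhs -= L[i][j] * v[j];
        v[i] = rhs;
    }
    // Backward sweep: v = U^{-1} v, multiplying by the stored inverse
    // diagonal instead of dividing (mirrors col->umv above).
    for (std::size_t i = n; i-- > 0; ) {
        double rhs = v[i];
        for (std::size_t j = i + 1; j < n; ++j)
            rhs -= U[i][j] * v[j];
        v[i] = Dinv[i] * rhs;
    }
}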
Example #2
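The listing begins at main; the headers and helpers it relies on are not shown. A plausible preamble is sketched below, assuming the standard Trilinos headers; the ispatest utilities and the run_test overloads are declared from their usage in main and are assumptions, not the original declarations.

#include <iostream>
#include <string>

#ifdef HAVE_MPI
#include <mpi.h>
#include <Epetra_MpiComm.h>
#else
#include <Epetra_SerialComm.h>
#endif
#include <Epetra_MultiVector.h>
#include <Epetra_BlockMap.h>
#include <Epetra_Import.h>
#include <Teuchos_CommandLineProcessor.hpp>
#include <Teuchos_ParameterList.hpp>
#include <Teuchos_RCP.hpp>
// ... plus the ispatest utility headers from the Isorropia test suite.

// Assumed signatures, inferred from the calls in main below:
int run_test(Teuchos::RCP<const Epetra_MultiVector> coords,
             Teuchos::RCP<const Epetra_MultiVector> weights,
             const Teuchos::ParameterList& params);
int run_test(Teuchos::RCP<const Epetra_MultiVector> coords,
             const Teuchos::ParameterList& params);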
int main(int argc, char** argv) {

  int fail = 0, dim = 0;
  int localProc = 0, numProcs = 1;  // MPI rank and size; defaults cover the serial build

#ifdef HAVE_MPI
  MPI_Init(&argc, &argv);
  MPI_Comm_rank(MPI_COMM_WORLD, &localProc);
  MPI_Comm_size(MPI_COMM_WORLD, &numProcs);
  const Epetra_MpiComm Comm(MPI_COMM_WORLD);
#else
  const Epetra_SerialComm Comm;
#endif

  // =============================================================
  // get command line options
  // =============================================================

  Teuchos::CommandLineProcessor clp(false,true);

  std::string inputFile("simple.coords");
  bool verbose = false;

  clp.setOption( "f", inputFile, "Name of coordinate input file");

  clp.setOption( "v", "q", &verbose,
		"Display coordinates and weights before and after partitioning.");

  Teuchos::CommandLineProcessor::EParseCommandLineReturn parse_return =
    clp.parse(argc,argv);

  if( parse_return == Teuchos::CommandLineProcessor::PARSE_HELP_PRINTED){
#ifdef HAVE_MPI
    MPI_Finalize();
#endif
    return 0;
  }
  if( parse_return != Teuchos::CommandLineProcessor::PARSE_SUCCESSFUL ) {
#ifdef HAVE_MPI
    MPI_Finalize();
#endif
    return 1;
  }

  // =============================================================
  // Open file of coordinates and distribute them across processes
  // so they are unbalanced.
  // =============================================================

  Epetra_MultiVector *mv = ispatest::file2multivector(Comm, inputFile);

  if (!mv || ((dim = mv->NumVectors()) < 1)){
    if (localProc == 0)
      std::cerr << "Invalid input file " << *inputFile << std::endl;
    exit(1);
  }

  if (localProc == 0){
    std::cerr << "Found input file " << *inputFile << ", " ;
    std::cerr << dim << " dimensional coordinates" << std::endl;
  }

  int base = mv->Map().IndexBase();
  int globalSize = mv->GlobalLength();
  int myShare = 0;
  int n = numProcs - 1;

  // Deliberately unbalanced: all coordinates go to the first numProcs-1
  // processes; the last process starts with none.
  if (n){
    if (localProc < n){
      int oneShare = globalSize / n;
      int leftOver = globalSize - (n * oneShare);
      myShare = oneShare + ((localProc < leftOver) ? 1 : 0);
    }
  }
  else{
    myShare = globalSize;
  }

  Epetra_BlockMap unbalancedMap(globalSize, myShare, 1, base, mv->Map().Comm());
  Epetra_Import importer(unbalancedMap, mv->Map());
  Epetra_MultiVector umv(unbalancedMap, dim);
  umv.Import(*mv, importer, Insert);

  delete mv;

  Teuchos::RCP<const Epetra_MultiVector> coords =
    Teuchos::rcp(new const Epetra_MultiVector(umv));

  // =============================================================
  // Create some different coordinate weight vectors
  // =============================================================

  Epetra_MultiVector *unitWgts = ispatest::makeWeights(coords->Map(), &ispatest::unitWeights);
  Epetra_MultiVector *veeWgts = ispatest::makeWeights(coords->Map(), &ispatest::veeWeights);
  Epetra_MultiVector *altWgts = ispatest::makeWeights(coords->Map(), &ispatest::alternateWeights);

  Teuchos::RCP<const Epetra_MultiVector> unit_weights_rcp = Teuchos::rcp(unitWgts);
  Teuchos::RCP<const Epetra_MultiVector> vee_weights_rcp = Teuchos::rcp(veeWgts);
  Teuchos::RCP<const Epetra_MultiVector> alt_weights_rcp = Teuchos::rcp(altWgts);

  if (localProc == 0){
    std::cerr << "Unit weights: Each object has weight 1.0" << std::endl;
    std::cerr << "V weights: Low and high GIDs have high weights, center GIDs have low weights" << std::endl;
    std::cerr << "Alternate weights: Objects on even rank processes have one weight, on odd another weight" << std::endl;
    std::cerr << std::endl;
  }

  // ======================================================================
  //  Create a parameter list for Zoltan, and one for internal partitioning
  // ======================================================================

  Teuchos::ParameterList internalParams;

  // Parameters for Isorropia's built-in partitioner; not passed to any
  // of the Zoltan test runs below.
  internalParams.set("PARTITIONING_METHOD", "SIMPLE_LINEAR");

  Teuchos::ParameterList zoltanParams;

  // sublist() returns a reference; bind it as one so that settings made
  // through sublist (e.g. the debug options below) land in zoltanParams.
  Teuchos::ParameterList& sublist = zoltanParams.sublist("ZOLTAN");

  //sublist.set("DEBUG_LEVEL", "1"); // Zoltan will print out parameters
  //sublist.set("DEBUG_LEVEL", "5");   // proc 0 will trace Zoltan calls
  //sublist.set("DEBUG_MEMORY", "2");  // Zoltan will trace alloc & free

  // =============================================================
  // Run some tests
  // =============================================================
  zoltanParams.set("PARTITIONING METHOD", "RCB");

  if (localProc == 0){
    std::cerr << "RCB - unit weights" << std::endl;
  }

  fail = run_test(coords, unit_weights_rcp, zoltanParams);

  if (fail) goto failure;

  if (localProc == 0){
    std::cerr << "PASS" << std::endl << std::endl;
  }
  // *************************************************************

  if (localProc == 0){
    std::cerr << "HSFC - V weights" << std::endl;
  }


  zoltanParams.set("PARTITIONING METHOD", "HSFC");
  fail = run_test(coords, vee_weights_rcp, zoltanParams);

  if (fail) goto failure;

  if (localProc == 0){
    std::cerr << "PASS" << std::endl << std::endl;
  }

  // *************************************************************

  if (localProc == 0){
    std::cerr << "RIB - alternate weights" << std::endl;
  }
  zoltanParams.set("PARTITIONING METHOD", "RIB");

  fail = run_test(coords, alt_weights_rcp, zoltanParams);

  if (fail) goto failure;

  if (localProc == 0){
    std::cerr << "PASS" << std::endl << std::endl;
  }

  // *************************************************************

  if (localProc == 0){
    std::cerr << "RIB - no weights supplied" << std::endl;
  }
  zoltanParams.set("PARTITIONING METHOD", "RIB");

  fail = run_test(coords, zoltanParams);

  if (fail) goto failure;

  if (localProc == 0){
    std::cerr << "PASS" << std::endl << std::endl;
  }

  // *************************************************************

  goto done;


failure:

  if (localProc == 0){
    std::cerr << "FAIL: test failed" << std::endl;
  }

done:

#ifdef HAVE_MPI
  MPI_Finalize();
#endif

  return fail;
}
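
A typical invocation, with a hypothetical executable name (the --f and --v/--q option names come from the CommandLineProcessor setup above):

  mpirun -np 4 ./coords_part_test --f=simple.coords --v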