int main(int argc, char *argv[]) { #ifdef HAVE_MPI MPI_Init(&argc,&argv); Epetra_MpiComm Comm (MPI_COMM_WORLD); #else Epetra_SerialComm Comm; #endif int MyPID = Comm.MyPID(); // matrix downloaded from MatrixMarket char FileName[] = "../HBMatrices/fidap005.rua"; Epetra_Map * readMap; // Pointers because of Trilinos_Util_ReadHb2Epetra Epetra_CrsMatrix * readA; Epetra_Vector * readx; Epetra_Vector * readb; Epetra_Vector * readxexact; // Call routine to read in HB problem Trilinos_Util_ReadHb2Epetra(FileName, Comm, readMap, readA, readx, readb, readxexact); int NumGlobalElements = readMap->NumGlobalElements(); // Create uniform distributed map Epetra_Map map(NumGlobalElements, 0, Comm); // Create Exporter to distribute read-in matrix and vectors Epetra_Export exporter(*readMap, map); Epetra_CrsMatrix A(Copy, map, 0); Epetra_Vector x(map); Epetra_Vector b(map); Epetra_Vector xexact(map); Epetra_Time FillTimer(Comm); x.Export(*readx, exporter, Add); b.Export(*readb, exporter, Add); xexact.Export(*readxexact, exporter, Add); Comm.Barrier(); double vectorRedistributeTime = FillTimer.ElapsedTime(); A.Export(*readA, exporter, Add); Comm.Barrier(); double matrixRedistributeTime = FillTimer.ElapsedTime() - vectorRedistributeTime; A.FillComplete(); Comm.Barrier(); double fillCompleteTime = FillTimer.ElapsedTime() - matrixRedistributeTime; if( MyPID==0 ) { cout << "Vector redistribute time (sec) = " << vectorRedistributeTime<< endl; cout << "Matrix redistribute time (sec) = " << matrixRedistributeTime << endl; cout << "Transform to Local time (sec) = " << fillCompleteTime << endl<< endl; } delete readA; delete readx; delete readb; delete readxexact; delete readMap; #ifdef HAVE_MPI MPI_Finalize() ; #endif return(EXIT_SUCCESS); }
int main(int argc, char *argv[]) { #ifdef HAVE_MPI MPI_Init(&argc,&argv); Epetra_MpiComm Comm (MPI_COMM_WORLD); #else Epetra_SerialComm Comm; #endif int MyPID = Comm.MyPID(); bool verbose = false; if (MyPID==0) verbose = true; // matrix downloaded from MatrixMarket char FileName[] = "../HBMatrices/fidap005.rua"; Epetra_Map * readMap; // Pointers because of Trilinos_Util_ReadHb2Epetra Epetra_CrsMatrix * readA; Epetra_Vector * readx; Epetra_Vector * readb; Epetra_Vector * readxexact; // Call routine to read in HB problem Trilinos_Util_ReadHb2Epetra(FileName, Comm, readMap, readA, readx, readb, readxexact); int NumGlobalElements = readMap->NumGlobalElements(); // Create uniform distributed map Epetra_Map map(NumGlobalElements, 0, Comm); // Create Exporter to distribute read-in matrix and vectors Epetra_Export exporter(*readMap, map); Epetra_CrsMatrix A(Copy, map, 0); Epetra_Vector x(map); Epetra_Vector b(map); Epetra_Vector xexact(map); Epetra_Time FillTimer(Comm); A.Export(*readA, exporter, Add); x.Export(*readx, exporter, Add); b.Export(*readb, exporter, Add); xexact.Export(*readxexact, exporter, Add); A.FillComplete(); delete readA; delete readx; delete readb; delete readxexact; delete readMap; // ============================ // // Construct ILU preconditioner // // ---------------------------- // // modify those parameters int LevelFill = 1; double DropTol = 0.0; double Condest; Ifpack_CrsIct * ICT = NULL; ICT = new Ifpack_CrsIct(A,DropTol,LevelFill); // Init values from A ICT->InitValues(A); // compute the factors ICT->Factor(); // and now estimate the condition number ICT->Condest(false,Condest); cout << Condest << endl; if( Comm.MyPID() == 0 ) { cout << "Condition number estimate (level-of-fill = " << LevelFill << ") = " << Condest << endl; } // Define label for printing out during the solve phase string label = "Ifpack_CrsIct Preconditioner: LevelFill = " + toString(LevelFill) + " Overlap = 0"; ICT->SetLabel(label.c_str()); // Here we create an AztecOO object 
AztecOO solver; solver.SetUserMatrix(&A); solver.SetLHS(&x); solver.SetRHS(&b); solver.SetAztecOption(AZ_solver,AZ_cg); // Here we set the IFPACK preconditioner and specify few parameters solver.SetPrecOperator(ICT); int Niters = 1200; solver.SetAztecOption(AZ_kspace, Niters); solver.SetAztecOption(AZ_output, 20); solver.Iterate(Niters, 5.0e-5); if (ICT!=0) delete ICT; #ifdef HAVE_MPI MPI_Finalize() ; #endif return 0 ; }
//
//  Amesos_TestMultiSolver.cpp reads in a matrix in Harwell-Boeing format,
//  calls one of the sparse direct solvers, using blocked right hand sides
//  and computes the error and residual.
//
//  TestSolver ignores the Harwell-Boeing right hand sides, creating
//  random right hand sides instead.
//
//  Amesos_TestMultiSolver can test either A x = b or A^T x = b.
//  This can be a bit confusing because sparse direct solvers
//  use compressed column storage - the transpose of Trilinos'
//  sparse row storage.
//
//  Matrices:
//    readA         - Serial.  As read from the file.
//    transposeA    - Serial.  The transpose of readA.
//    serialA       - if (transpose) then transposeA else readA
//    distributedA  - readA distributed to all processes
//    passA         - if ( distributed ) then distributedA else serialA
//
//  Parameters:
//    Comm         - communicator for the run
//    matrix_file  - path to the matrix; the extension (.triU/.triS/.mtx/other)
//                   selects the reader
//    numsolves    - number of right-hand-side columns (blocked RHS)
//    SparseSolver - which direct solver to exercise
//    transpose    - if true, solve A^T x = b
//    special      - solver-specific knob; the timing loop runs special+1 times
//    matrix_type  - AMESOS_Distributed redistributes A across all processes
//  Returns 0; results are reported through SparseDirectTimingVars::SS_Result.
//
int Amesos_TestMultiSolver( Epetra_Comm &Comm, char *matrix_file, int numsolves,
	      SparseSolverType SparseSolver, bool transpose,
	      int special, AMESOS_MatrixType matrix_type )
{

  int iam = Comm.MyPID() ;

  //  int hatever;
  //  if ( iam == 0 )  std::cin >> hatever ;
  Comm.Barrier();

  // Objects allocated by the reader routines below; freed at the end.
  Epetra_Map * readMap;
  Epetra_CrsMatrix * readA;
  Epetra_Vector * readx;
  Epetra_Vector * readb;
  Epetra_Vector * readxexact;

  // Pick the reader based on the file-name suffix.
  std::string FileName = matrix_file ;
  int FN_Size = FileName.size() ;
  std::string LastFiveBytes = FileName.substr( EPETRA_MAX(0,FN_Size-5), FN_Size );
  std::string LastFourBytes = FileName.substr( EPETRA_MAX(0,FN_Size-4), FN_Size );
  bool NonContiguousMap = false;

  if ( LastFiveBytes == ".triU" ) {
    NonContiguousMap = true;
    // Call routine to read in unsymmetric Triplet matrix
    EPETRA_CHK_ERR( Trilinos_Util_ReadTriples2Epetra( matrix_file, false, Comm, readMap, readA, readx,
						      readb, readxexact, NonContiguousMap ) );
  } else {
    if ( LastFiveBytes == ".triS" ) {
      NonContiguousMap = true;
      // Call routine to read in symmetric Triplet matrix
      EPETRA_CHK_ERR( Trilinos_Util_ReadTriples2Epetra( matrix_file, true, Comm, readMap, readA, readx,
							readb, readxexact, NonContiguousMap ) );
    } else {
      if ( LastFourBytes == ".mtx" ) {
	EPETRA_CHK_ERR( Trilinos_Util_ReadMatrixMarket2Epetra( matrix_file, Comm, readMap,
							       readA, readx, readb, readxexact) );
      } else {
	// Call routine to read in HB problem
	Trilinos_Util_ReadHb2Epetra( matrix_file, Comm, readMap, readA, readx,
				     readb, readxexact) ;
      }
    }
  }

  // serialA points at either the matrix as read or its explicit transpose.
  Epetra_CrsMatrix transposeA(Copy, *readMap, 0);
  Epetra_CrsMatrix *serialA ;

  if ( transpose ) {
    assert( CrsMatrixTranspose( readA, &transposeA ) == 0 );
    serialA = &transposeA ;
  } else {
    serialA = readA ;
  }

  // Create uniform distributed map
  Epetra_Map map(readMap->NumGlobalElements(), 0, Comm);

  // map_ preserves the (possibly non-contiguous) global IDs of readMap while
  // taking the per-process element counts from the uniform map.
  Epetra_Map* map_;

  if( NonContiguousMap ) {
    //
    //  map gives us NumMyElements and MyFirstElement;
    //
    int NumGlobalElements =  readMap->NumGlobalElements();
    int NumMyElements = map.NumMyElements();
    int MyFirstElement = map.MinMyGID();
    std::vector<int> MapMap_( NumGlobalElements );
    readMap->MyGlobalElements( &MapMap_[0] ) ;
    // NOTE(review): assumes rank 0 owns all elements of readMap so its GID
    // list can be broadcast to everyone — TODO confirm against the readers.
    Comm.Broadcast( &MapMap_[0], NumGlobalElements, 0 ) ;
    map_ = new Epetra_Map( NumGlobalElements, NumMyElements, &MapMap_[MyFirstElement], 0, Comm);
  } else {
    map_ = new Epetra_Map( map ) ;
  }

  // Create Exporter to distribute read-in matrix and vectors
  Epetra_Export exporter(*readMap, *map_);
  Epetra_CrsMatrix A(Copy, *map_, 0);

  // pass* point at either the distributed or the serial copies, depending on
  // matrix_type; the solvers only see the pass* aliases.
  Epetra_RowMatrix * passA = 0;
  Epetra_MultiVector * passx = 0;
  Epetra_MultiVector * passb = 0;
  Epetra_MultiVector * passxexact = 0;
  Epetra_MultiVector * passresid = 0;
  Epetra_MultiVector * passtmp = 0;

  Epetra_MultiVector x(*map_,numsolves);
  Epetra_MultiVector b(*map_,numsolves);
  Epetra_MultiVector xexact(*map_,numsolves);
  Epetra_MultiVector resid(*map_,numsolves);
  Epetra_MultiVector tmp(*map_,numsolves);

  Epetra_MultiVector serialx(*readMap,numsolves);
  Epetra_MultiVector serialb(*readMap,numsolves);
  Epetra_MultiVector serialxexact(*readMap,numsolves);
  Epetra_MultiVector serialresid(*readMap,numsolves);
  Epetra_MultiVector serialtmp(*readMap,numsolves);

  bool distribute_matrix = ( matrix_type == AMESOS_Distributed ) ;
  if ( distribute_matrix ) {
    //
    //  Initialize x, b and xexact to the values read in from the file
    //
    A.Export(*serialA, exporter, Add);
    Comm.Barrier();

    assert(A.FillComplete()==0);
    Comm.Barrier();

    passA = &A;
    passx = &x;
    passb = &b;
    passxexact = &xexact;
    passresid = &resid;
    passtmp = &tmp;
  } else {
    passA = serialA;
    passx = &serialx;
    passb = &serialb;
    passxexact = &serialxexact;
    passresid = &serialresid;
    passtmp = &serialtmp;
  }

  // Random exact solution (fixed seeds for reproducibility), b = op(A)*xexact,
  // and a random initial guess for x.
  passxexact->SetSeed(131) ;
  passxexact->Random();
  passx->SetSeed(11231) ;
  passx->Random();

  passb->PutScalar( 0.0 );
  passA->Multiply( transpose, *passxexact, *passb ) ;

  Epetra_MultiVector CopyB( *passb ) ;

  double Anorm = passA->NormInf() ;
  SparseDirectTimingVars::SS_Result.Set_Anorm(Anorm) ;

  Epetra_LinearProblem Problem(  (Epetra_RowMatrix *) passA,
				 (Epetra_MultiVector *) passx,
				 (Epetra_MultiVector *) passb );

  double max_resid = 0.0;
  // Run the solve special+1 times; each compiled-in solver is a branch below.
  for ( int j = 0 ; j < special+1 ; j++ ) {

    Epetra_Time TotalTime( Comm ) ;
    // The leading "if ( false )" anchors the else-if chain so that every
    // solver branch can live inside its own #ifdef.
    if ( false ) {
#ifdef TEST_UMFPACK
      unused code
    } else if ( SparseSolver == UMFPACK ) {
      UmfpackOO umfpack( (Epetra_RowMatrix *) passA,
			 (Epetra_MultiVector *) passx,
			 (Epetra_MultiVector *) passb ) ;
      umfpack.SetTrans( transpose ) ;
      umfpack.Solve() ;
#endif
#ifdef TEST_SUPERLU
    } else if ( SparseSolver == SuperLU ) {
      SuperluserialOO superluserial( (Epetra_RowMatrix *) passA,
				     (Epetra_MultiVector *) passx,
				     (Epetra_MultiVector *) passb ) ;
      superluserial.SetPermc( SuperLU_permc ) ;
      superluserial.SetTrans( transpose ) ;
      superluserial.SetUseDGSSV( special == 0 ) ;
      superluserial.Solve() ;
#endif
#ifdef HAVE_AMESOS_SLUD
    } else if ( SparseSolver == SuperLUdist ) {
      SuperludistOO superludist( Problem ) ;
      superludist.SetTrans( transpose ) ;
      EPETRA_CHK_ERR( superludist.Solve( true ) ) ;
#endif
#ifdef HAVE_AMESOS_SLUD2
    } else if ( SparseSolver == SuperLUdist2 ) {
      Superludist2_OO superludist2( Problem ) ;
      superludist2.SetTrans( transpose ) ;
      EPETRA_CHK_ERR( superludist2.Solve( true ) ) ;
#endif
#ifdef TEST_SPOOLES
    } else if ( SparseSolver == SPOOLES ) {
      SpoolesOO spooles( (Epetra_RowMatrix *) passA,
			 (Epetra_MultiVector *) passx,
			 (Epetra_MultiVector *) passb ) ;
      spooles.SetTrans( transpose ) ;
      spooles.Solve() ;
#endif
#ifdef HAVE_AMESOS_DSCPACK
    } else if ( SparseSolver == DSCPACK ) {
      Teuchos::ParameterList ParamList ;
      Amesos_Dscpack dscpack( Problem ) ;
      ParamList.set( "MaxProcs", -3 );
      EPETRA_CHK_ERR( dscpack.SetParameters( ParamList ) );
      EPETRA_CHK_ERR( dscpack.Solve( ) );
#endif
#ifdef HAVE_AMESOS_UMFPACK
    } else if ( SparseSolver == UMFPACK ) {
      Teuchos::ParameterList ParamList ;
      Amesos_Umfpack umfpack( Problem ) ;
      ParamList.set( "MaxProcs", -3 );
      EPETRA_CHK_ERR( umfpack.SetParameters( ParamList ) );
      EPETRA_CHK_ERR( umfpack.SetUseTranspose( transpose ) );
      EPETRA_CHK_ERR( umfpack.Solve( ) );
#endif
#ifdef HAVE_AMESOS_KLU
    } else if ( SparseSolver == KLU ) {
      Teuchos::ParameterList ParamList ;
      Amesos_Klu klu( Problem ) ;
      ParamList.set( "MaxProcs", -3 );
      EPETRA_CHK_ERR( klu.SetParameters( ParamList ) );
      EPETRA_CHK_ERR( klu.SetUseTranspose( transpose ) );
      EPETRA_CHK_ERR( klu.SymbolicFactorization( ) );
      EPETRA_CHK_ERR( klu.NumericFactorization( ) );
      EPETRA_CHK_ERR( klu.Solve( ) );
#endif
#ifdef HAVE_AMESOS_PARAKLETE
    } else if ( SparseSolver == PARAKLETE ) {
      Teuchos::ParameterList ParamList ;
      Amesos_Paraklete paraklete( Problem ) ;
      ParamList.set( "MaxProcs", -3 );
      EPETRA_CHK_ERR( paraklete.SetParameters( ParamList ) );
      EPETRA_CHK_ERR( paraklete.SetUseTranspose( transpose ) );
      EPETRA_CHK_ERR( paraklete.SymbolicFactorization( ) );
      EPETRA_CHK_ERR( paraklete.NumericFactorization( ) );
      EPETRA_CHK_ERR( paraklete.Solve( ) );
#endif
#ifdef HAVE_AMESOS_SLUS
    } else if ( SparseSolver == SuperLU ) {
      Epetra_SLU superluserial( &Problem ) ;
      EPETRA_CHK_ERR( superluserial.SetUseTranspose( transpose ) );
      EPETRA_CHK_ERR( superluserial.SymbolicFactorization( ) );
      EPETRA_CHK_ERR( superluserial.NumericFactorization( ) );
      EPETRA_CHK_ERR( superluserial.Solve( ) );
#endif
#ifdef HAVE_AMESOS_LAPACK
    } else if ( SparseSolver == LAPACK ) {
      Teuchos::ParameterList ParamList ;
      ParamList.set( "MaxProcs", -3 );
      Amesos_Lapack lapack( Problem ) ;
      EPETRA_CHK_ERR( lapack.SetUseTranspose( transpose ) );
      EPETRA_CHK_ERR( lapack.SymbolicFactorization( ) );
      EPETRA_CHK_ERR( lapack.NumericFactorization( ) );
      EPETRA_CHK_ERR( lapack.Solve( ) );
#endif
#ifdef HAVE_AMESOS_TAUCS
    } else if ( SparseSolver == TAUCS ) {
      Teuchos::ParameterList ParamList ;
      Amesos_Taucs taucs( Problem ) ;
      ParamList.set( "MaxProcs", -3 );
      EPETRA_CHK_ERR( taucs.SetParameters( ParamList ) );
      EPETRA_CHK_ERR( taucs.SetUseTranspose( transpose ) );
      EPETRA_CHK_ERR( taucs.SymbolicFactorization( ) );
      EPETRA_CHK_ERR( taucs.NumericFactorization( ) );
      EPETRA_CHK_ERR( taucs.Solve( ) );
#endif
#ifdef HAVE_AMESOS_PARDISO
    } else if ( SparseSolver == PARDISO ) {
      Teuchos::ParameterList ParamList ;
      Amesos_Pardiso pardiso( Problem ) ;
      ParamList.set( "MaxProcs", -3 );
      EPETRA_CHK_ERR( pardiso.SetParameters( ParamList ) );
      EPETRA_CHK_ERR( pardiso.SetUseTranspose( transpose ) );
      EPETRA_CHK_ERR( pardiso.SymbolicFactorization( ) );
      EPETRA_CHK_ERR( pardiso.NumericFactorization( ) );
      EPETRA_CHK_ERR( pardiso.Solve( ) );
#endif
#ifdef HAVE_AMESOS_PARKLETE
    } else if ( SparseSolver == PARKLETE ) {
      Teuchos::ParameterList ParamList ;
      Amesos_Parklete parklete( Problem ) ;
      ParamList.set( "MaxProcs", -3 );
      EPETRA_CHK_ERR( parklete.SetParameters( ParamList ) );
      EPETRA_CHK_ERR( parklete.SetUseTranspose( transpose ) );
      EPETRA_CHK_ERR( parklete.SymbolicFactorization( ) );
      EPETRA_CHK_ERR( parklete.NumericFactorization( ) );
      EPETRA_CHK_ERR( parklete.Solve( ) );
#endif
#ifdef HAVE_AMESOS_MUMPS
    } else if ( SparseSolver == MUMPS ) {
      Teuchos::ParameterList ParamList ;
      Amesos_Mumps mumps( Problem ) ;
      ParamList.set( "MaxProcs", -3 );
      EPETRA_CHK_ERR( mumps.SetParameters( ParamList ) );
      EPETRA_CHK_ERR( mumps.SetUseTranspose( transpose ) );
      EPETRA_CHK_ERR( mumps.SymbolicFactorization( ) );
      EPETRA_CHK_ERR( mumps.NumericFactorization( ) );
      EPETRA_CHK_ERR( mumps.Solve( ) );
#endif
#ifdef HAVE_AMESOS_SCALAPACK
    } else if ( SparseSolver == SCALAPACK ) {
      Teuchos::ParameterList ParamList ;
      Amesos_Scalapack scalapack( Problem ) ;
      ParamList.set( "MaxProcs", -3 );
      EPETRA_CHK_ERR( scalapack.SetParameters( ParamList ) );
      EPETRA_CHK_ERR( scalapack.SetUseTranspose( transpose ) );
      EPETRA_CHK_ERR( scalapack.SymbolicFactorization( ) );
      EPETRA_CHK_ERR( scalapack.NumericFactorization( ) );
      EPETRA_CHK_ERR( scalapack.Solve( ) );
#endif
#ifdef HAVE_AMESOS_SUPERLUDIST
    } else if ( SparseSolver == SUPERLUDIST ) {
      Teuchos::ParameterList ParamList ;
      Amesos_Superludist superludist( Problem ) ;
      ParamList.set( "MaxProcs", -3 );
      EPETRA_CHK_ERR( superludist.SetParameters( ParamList ) );
      EPETRA_CHK_ERR( superludist.SetUseTranspose( transpose ) );
      EPETRA_CHK_ERR( superludist.SymbolicFactorization( ) );
      EPETRA_CHK_ERR( superludist.NumericFactorization( ) );
      EPETRA_CHK_ERR( superludist.Solve( ) );
#endif
#ifdef HAVE_AMESOS_SUPERLU
    } else if ( SparseSolver == SUPERLU ) {
      Teuchos::ParameterList ParamList ;
      Amesos_Superlu superlu( Problem ) ;
      ParamList.set( "MaxProcs", -3 );
      EPETRA_CHK_ERR( superlu.SetParameters( ParamList ) );
      EPETRA_CHK_ERR( superlu.SetUseTranspose( transpose ) );
      EPETRA_CHK_ERR( superlu.SymbolicFactorization( ) );
      EPETRA_CHK_ERR( superlu.NumericFactorization( ) );
      EPETRA_CHK_ERR( superlu.Solve( ) );
#endif
#ifdef TEST_SPOOLESSERIAL
    } else if ( SparseSolver == SPOOLESSERIAL ) {
      SpoolesserialOO spoolesserial( (Epetra_RowMatrix *) passA,
				     (Epetra_MultiVector *) passx,
				     (Epetra_MultiVector *) passb ) ;
      spoolesserial.Solve() ;
#endif
    } else {
      SparseDirectTimingVars::log_file << "Solver not implemented yet" << std::endl ;
      std::cerr << "\n\n####################  Requested solver not available (Or not tested with blocked RHS) on this platform #####################\n" << std::endl ;
    }

    SparseDirectTimingVars::SS_Result.Set_Total_Time( TotalTime.ElapsedTime() );
    //    SparseDirectTimingVars::SS_Result.Set_First_Time( 0.0 );
    //    SparseDirectTimingVars::SS_Result.Set_Middle_Time( 0.0 );
    //    SparseDirectTimingVars::SS_Result.Set_Last_Time( 0.0 );

    //
    //  Compute the error = norm(xcomp - xexact )
    //
    std::vector <double> error(numsolves) ;
    double max_error = 0.0;

    passresid->Update(1.0, *passx, -1.0, *passxexact, 0.0);

    passresid->Norm2(&error[0]);
    for ( int i = 0 ; i< numsolves; i++ )
      if ( error[i] > max_error ) max_error = error[i] ;
    SparseDirectTimingVars::SS_Result.Set_Error(max_error) ;

    //  passxexact->Norm2(&error[0] ) ;
    //  passx->Norm2(&error ) ;

    //
    //  Compute the residual = norm(Ax - b)
    //
    std::vector <double> residual(numsolves) ;

    passtmp->PutScalar(0.0);
    passA->Multiply( transpose, *passx, *passtmp);
    passresid->Update(1.0, *passtmp, -1.0, *passb, 0.0);
    //    passresid->Update(1.0, *passtmp, -1.0, CopyB, 0.0);
    passresid->Norm2(&residual[0]);

    for ( int i = 0 ; i< numsolves; i++ )
      if ( residual[i] > max_resid ) max_resid = residual[i] ;
    SparseDirectTimingVars::SS_Result.Set_Residual(max_resid) ;

    std::vector <double> bnorm(numsolves);
    passb->Norm2( &bnorm[0] ) ;
    SparseDirectTimingVars::SS_Result.Set_Bnorm(bnorm[0]) ;

    std::vector <double> xnorm(numsolves);
    passx->Norm2( &xnorm[0] ) ;
    SparseDirectTimingVars::SS_Result.Set_Xnorm(xnorm[0]) ;

    // Disabled per-solve diagnostic dump (flip "false" to enable).
    if ( false && iam == 0 ) {
      std::cout << " Amesos_TestMutliSolver.cpp " << std::endl ;
      for ( int i = 0 ; i< numsolves && i < 10 ; i++ ) {
	std::cout << "i=" << i
	     << " error = " << error[i]
	     << " xnorm = " << xnorm[i]
	     << " residual = " << residual[i]
	     << " bnorm = " << bnorm[i]
	     << std::endl ;
      }
      std::cout << std::endl << " max_resid = " << max_resid ;
      std::cout << " max_error = " << max_error << std::endl ;
      std::cout << " Get_residual() again = " << SparseDirectTimingVars::SS_Result.Get_Residual() << std::endl ;
    }
  }

  delete readA;
  delete readx;
  delete readb;
  delete readxexact;
  delete readMap;
  delete map_;

  Comm.Barrier();

  return 0 ;
}
int main(int argc, char *argv[]) { #ifdef EPETRA_MPI MPI_Init(&argc,&argv); Epetra_MpiComm Comm (MPI_COMM_WORLD); #else Epetra_SerialComm Comm; #endif cout << Comm << endl; int MyPID = Comm.MyPID(); bool verbose = false; if (MyPID==0) verbose = true; if(argc < 2 && verbose) { cerr << "Usage: " << argv[0] << " HB_filename [level_fill [level_overlap [absolute_threshold [ relative_threshold]]]]" << endl << "where:" << endl << "HB_filename - filename and path of a Harwell-Boeing data set" << endl << "level_fill - The amount of fill to use for ILU(k) preconditioner (default 0)" << endl << "level_overlap - The amount of overlap used for overlapping Schwarz subdomains (default 0)" << endl << "absolute_threshold - The minimum value to place on the diagonal prior to factorization (default 0.0)" << endl << "relative_threshold - The relative amount to perturb the diagonal prior to factorization (default 1.0)" << endl << endl << "To specify a non-default value for one of these parameters, you must specify all" << endl << " preceding values but not any subsequent parameters. 
Example:" << endl << "ifpackHbSerialMsr.exe mymatrix.hb 1 - loads mymatrix.hb, uses level fill of one, all other values are defaults" << endl << endl; return(1); } // Uncomment the next three lines to debug in mpi mode //int tmp; //if (MyPID==0) cin >> tmp; //Comm.Barrier(); Epetra_Map * readMap; Epetra_CrsMatrix * readA; Epetra_Vector * readx; Epetra_Vector * readb; Epetra_Vector * readxexact; // Call routine to read in HB problem Trilinos_Util_ReadHb2Epetra(argv[1], Comm, readMap, readA, readx, readb, readxexact); // Create uniform distributed map Epetra_Map map(readMap->NumGlobalElements(), 0, Comm); // Create Exporter to distribute read-in matrix and vectors Epetra_Export exporter(*readMap, map); Epetra_CrsMatrix A(Copy, map, 0); Epetra_Vector x(map); Epetra_Vector b(map); Epetra_Vector xexact(map); Epetra_Time FillTimer(Comm); x.Export(*readx, exporter, Add); b.Export(*readb, exporter, Add); xexact.Export(*readxexact, exporter, Add); Comm.Barrier(); double vectorRedistributeTime = FillTimer.ElapsedTime(); A.Export(*readA, exporter, Add); Comm.Barrier(); double matrixRedistributeTime = FillTimer.ElapsedTime() - vectorRedistributeTime; assert(A.FillComplete()==0); Comm.Barrier(); double fillCompleteTime = FillTimer.ElapsedTime() - matrixRedistributeTime; if (Comm.MyPID()==0) { cout << "\n\n****************************************************" << endl; cout << "\n Vector redistribute time (sec) = " << vectorRedistributeTime<< endl; cout << " Matrix redistribute time (sec) = " << matrixRedistributeTime << endl; cout << " Transform to Local time (sec) = " << fillCompleteTime << endl<< endl; } Epetra_Vector tmp1(*readMap); Epetra_Vector tmp2(map); readA->Multiply(false, *readxexact, tmp1); A.Multiply(false, xexact, tmp2); double residual; tmp1.Norm2(&residual); if (verbose) cout << "Norm of Ax from file = " << residual << endl; tmp2.Norm2(&residual); if (verbose) cout << "Norm of Ax after redistribution = " << residual << endl << endl << endl; //cout << "A from file 
= " << *readA << endl << endl << endl; //cout << "A after dist = " << A << endl << endl << endl; delete readA; delete readx; delete readb; delete readxexact; delete readMap; Comm.Barrier(); // Construct ILU preconditioner double elapsed_time, total_flops, MFLOPs; Epetra_Time timer(Comm); int LevelFill = 0; if (argc > 2) LevelFill = atoi(argv[2]); if (verbose) cout << "Using Level Fill = " << LevelFill << endl; int Overlap = 0; if (argc > 3) Overlap = atoi(argv[3]); if (verbose) cout << "Using Level Overlap = " << Overlap << endl; double Athresh = 0.0; if (argc > 4) Athresh = atof(argv[4]); if (verbose) cout << "Using Absolute Threshold Value of = " << Athresh << endl; double Rthresh = 1.0; if (argc > 5) Rthresh = atof(argv[5]); if (verbose) cout << "Using Relative Threshold Value of = " << Rthresh << endl; Ifpack_IlukGraph * IlukGraph = 0; Ifpack_CrsRiluk * ILUK = 0; if (LevelFill>-1) { elapsed_time = timer.ElapsedTime(); IlukGraph = new Ifpack_IlukGraph(A.Graph(), LevelFill, Overlap); assert(IlukGraph->ConstructFilledGraph()==0); elapsed_time = timer.ElapsedTime() - elapsed_time; if (verbose) cout << "Time to construct ILUK graph = " << elapsed_time << endl; Epetra_Flops fact_counter; elapsed_time = timer.ElapsedTime(); ILUK = new Ifpack_CrsRiluk(*IlukGraph); ILUK->SetFlopCounter(fact_counter); ILUK->SetAbsoluteThreshold(Athresh); ILUK->SetRelativeThreshold(Rthresh); //assert(ILUK->InitValues()==0); int initerr = ILUK->InitValues(A); if (initerr!=0) cout << Comm << "InitValues error = " << initerr; assert(ILUK->Factor()==0); elapsed_time = timer.ElapsedTime() - elapsed_time; total_flops = ILUK->Flops(); MFLOPs = total_flops/elapsed_time/1000000.0; if (verbose) cout << "Time to compute preconditioner values = " << elapsed_time << endl << "MFLOPS for Factorization = " << MFLOPs << endl; //cout << *ILUK << endl; } double Condest; ILUK->Condest(false, Condest); if (verbose) cout << "Condition number estimate for this preconditioner = " << Condest << endl; int Maxiter 
= 500; double Tolerance = 1.0E-14; Epetra_Vector xcomp(map); Epetra_Vector resid(map); Epetra_Flops counter; A.SetFlopCounter(counter); xcomp.SetFlopCounter(A); b.SetFlopCounter(A); resid.SetFlopCounter(A); ILUK->SetFlopCounter(A); elapsed_time = timer.ElapsedTime(); BiCGSTAB(A, xcomp, b, ILUK, Maxiter, Tolerance, &residual, verbose); elapsed_time = timer.ElapsedTime() - elapsed_time; total_flops = counter.Flops(); MFLOPs = total_flops/elapsed_time/1000000.0; if (verbose) cout << "Time to compute solution = " << elapsed_time << endl << "Number of operations in solve = " << total_flops << endl << "MFLOPS for Solve = " << MFLOPs<< endl << endl; resid.Update(1.0, xcomp, -1.0, xexact, 0.0); // resid = xcomp - xexact resid.Norm2(&residual); if (verbose) cout << "Norm of the difference between exact and computed solutions = " << residual << endl; if (ILUK!=0) delete ILUK; if (IlukGraph!=0) delete IlukGraph; #ifdef EPETRA_MPI MPI_Finalize() ; #endif return 0 ; }
// ************************************************************* // main program - This benchmark code reads a Harwell-Boeing data // set and finds the minimal eigenvalue of the matrix // using inverse iteration. // ************************************************************* int main(int argc, char *argv[]) { #ifdef EPETRA_MPI MPI_Init(&argc,&argv); Epetra_MpiComm Comm (MPI_COMM_WORLD); #else Epetra_SerialComm Comm; #endif cout << Comm << endl; int MyPID = Comm.MyPID(); bool verbose = false; if (MyPID==0) verbose = true; // Print out detailed results (turn off for best performance) if(argc != 2) { if (verbose) cerr << "Usage: " << argv[0] << " HB_data_file" << endl; exit(1); // Error } // Define pointers that will be set by HB read function Epetra_Map * readMap; Epetra_CrsMatrix * readA; Epetra_Vector * readx; Epetra_Vector * readb; Epetra_Vector * readxexact; // Call function to read in HB problem Trilinos_Util_ReadHb2Epetra(argv[1], Comm, readMap, readA, readx, readb, readxexact); // Not interested in x, b or xexact for an eigenvalue problem delete readx; delete readb; delete readxexact; #ifdef EPETRA_MPI // If running in parallel, we need to distribute matrix across all PEs. 
// Create uniform distributed map Epetra_Map map(readMap->NumGlobalElements(), 0, Comm); // Create Exporter to distribute read-in matrix and vectors Epetra_Export exporter(*readMap, map); Epetra_CrsMatrix A(Copy, map, 0); A.Export(*readA, exporter, Add); assert(A.FillComplete()==0); delete readA; delete readMap; #else // If not running in parallel, we do not need to distribute the matrix Epetra_CrsMatrix & A = *readA; #endif // Create flop counter to collect all FLOPS Epetra_Flops counter; A.SetFlopCounter(counter); double lambda = 0; // Minimal eigenvalue returned here // Call inverse iteration solver Epetra_Time timer(Comm); invIteration(A, lambda, verbose); double elapsedTime = timer.ElapsedTime(); double totalFlops = counter.Flops(); double MFLOPS = totalFlops/elapsedTime/1000000.0; cout << endl << "*************************************************" << endl << " Approximate smallest eigenvalue = " << lambda << endl << " Total Time = " << elapsedTime << endl << " Total FLOPS = " << totalFlops << endl << " Total MFLOPS = " << MFLOPS << endl << "*************************************************" << endl; // All done #ifdef EPETRA_MPI MPI_Finalize(); #else delete readA; delete readMap; #endif return (0); }
void EpetraExt::readEpetraLinearSystem( const std::string &fileName ,const Epetra_Comm &comm ,Teuchos::RCP<Epetra_CrsMatrix> *A ,Teuchos::RCP<Epetra_Map> *map ,Teuchos::RCP<Epetra_Vector> *x ,Teuchos::RCP<Epetra_Vector> *b ,Teuchos::RCP<Epetra_Vector> *xExact ) { Epetra_Map *readMap; Epetra_CrsMatrix *readA; Epetra_Vector *readx; Epetra_Vector *readb; Epetra_Vector *readxexact; const std::string::size_type ext_dot = fileName.rfind("."); TEUCHOS_TEST_FOR_EXCEPT( ext_dot == std::string::npos ); std::string ext = fileName.substr(ext_dot+1); //std::cout << "\nfileName = " << fileName << "\next = " << ext << std::endl; char *hacked_file_str = const_cast<char*>(fileName.c_str()); if ( ext == "triU" ) { const bool NonContiguousMap = true; TEUCHOS_TEST_FOR_EXCEPT( 0!=Trilinos_Util_ReadTriples2Epetra( hacked_file_str, false, comm, readMap, readA, readx, readb, readxexact, NonContiguousMap ) ); } else if ( ext == "triS" ) { const bool NonContiguousMap = true; TEUCHOS_TEST_FOR_EXCEPT( 0!=Trilinos_Util_ReadTriples2Epetra( hacked_file_str, true, comm, readMap, readA, readx, readb, readxexact, NonContiguousMap ) ); } else if( ext == "mtx" ) { TEUCHOS_TEST_FOR_EXCEPT( 0!=Trilinos_Util_ReadMatrixMarket2Epetra( hacked_file_str, comm, readMap, readA, readx, readb, readxexact ) ); } else if ( ext == "hb" ) { Trilinos_Util_ReadHb2Epetra( hacked_file_str, comm, readMap, readA, readx, readb, readxexact ); // No error return??? } else { TEUCHOS_TEST_FOR_EXCEPTION( true, std::logic_error ,"Error, the file = \'"<<hacked_file_str<<"\' has the extension " "\'*."<<ext<<"\' is not \'*.triU\', \'*.triS\', \'*.mtx\', or \'*.hb\'!" 
); } Teuchos::RCP<Epetra_CrsMatrix> loc_A = Teuchos::rcp(readA); Teuchos::RCP<Epetra_Map> loc_map = Teuchos::rcp(readMap); Teuchos::RCP<Epetra_Vector> loc_x = Teuchos::rcp(readx); Teuchos::RCP<Epetra_Vector> loc_b = Teuchos::rcp(readb); Teuchos::RCP<Epetra_Vector> loc_xExact = Teuchos::rcp(readxexact); if(A) *A = loc_A; if(map) *map = loc_map; if(x) *x = loc_x; if(b) *b = loc_b; if(xExact) *xExact = loc_xExact; }
int main(int argc, char *argv[]) { #ifdef EPETRA_MPI MPI_Init(&argc,&argv); Epetra_MpiComm Comm (MPI_COMM_WORLD); #else Epetra_SerialComm Comm; #endif cout << Comm << endl; int MyPID = Comm.MyPID(); bool verbose = false; bool verbose1 = true; if (MyPID==0) verbose = true; if(argc < 2 && verbose) { cerr << "Usage: " << argv[0] << " HB_filename [level_fill [level_overlap [absolute_threshold [ relative_threshold]]]]" << endl << "where:" << endl << "HB_filename - filename and path of a Harwell-Boeing data set" << endl << "level_fill - The amount of fill to use for ILU(k) preconditioner (default 0)" << endl << "level_overlap - The amount of overlap used for overlapping Schwarz subdomains (default 0)" << endl << "absolute_threshold - The minimum value to place on the diagonal prior to factorization (default 0.0)" << endl << "relative_threshold - The relative amount to perturb the diagonal prior to factorization (default 1.0)" << endl << endl << "To specify a non-default value for one of these parameters, you must specify all" << endl << " preceding values but not any subsequent parameters. 
Example:" << endl << "ifpackHpcSerialMsr.exe mymatrix.hpc 1 - loads mymatrix.hpc, uses level fill of one, all other values are defaults" << endl << endl; return(1); } // Uncomment the next three lines to debug in mpi mode //int tmp; //if (MyPID==0) cin >> tmp; //Comm.Barrier(); Epetra_Map * readMap; Epetra_CrsMatrix * readA; Epetra_Vector * readx; Epetra_Vector * readb; Epetra_Vector * readxexact; // Call routine to read in HB problem Trilinos_Util_ReadHb2Epetra(argv[1], Comm, readMap, readA, readx, readb, readxexact); // Create uniform distributed map Epetra_Map map(readMap->NumGlobalElements(), 0, Comm); // Create Exporter to distribute read-in matrix and vectors Epetra_Export exporter(*readMap, map); Epetra_CrsMatrix A(Copy, map, 0); Epetra_Vector x(map); Epetra_Vector b(map); Epetra_Vector xexact(map); Epetra_Time FillTimer(Comm); x.Export(*readx, exporter, Add); b.Export(*readb, exporter, Add); xexact.Export(*readxexact, exporter, Add); Comm.Barrier(); double vectorRedistributeTime = FillTimer.ElapsedTime(); A.Export(*readA, exporter, Add); Comm.Barrier(); double matrixRedistributeTime = FillTimer.ElapsedTime() - vectorRedistributeTime; assert(A.FillComplete()==0); Comm.Barrier(); double fillCompleteTime = FillTimer.ElapsedTime() - matrixRedistributeTime; if (Comm.MyPID()==0) { cout << "\n\n****************************************************" << endl; cout << "\n Vector redistribute time (sec) = " << vectorRedistributeTime<< endl; cout << " Matrix redistribute time (sec) = " << matrixRedistributeTime << endl; cout << " Transform to Local time (sec) = " << fillCompleteTime << endl<< endl; } Epetra_Vector tmp1(*readMap); Epetra_Vector tmp2(map); readA->Multiply(false, *readxexact, tmp1); A.Multiply(false, xexact, tmp2); double residual; tmp1.Norm2(&residual); if (verbose) cout << "Norm of Ax from file = " << residual << endl; tmp2.Norm2(&residual); if (verbose) cout << "Norm of Ax after redistribution = " << residual << endl << endl << endl; //cout << "A from 
file = " << *readA << endl << endl << endl; //cout << "A after dist = " << A << endl << endl << endl; delete readA; delete readx; delete readb; delete readxexact; delete readMap; Comm.Barrier(); bool smallProblem = false; if (A.RowMap().NumGlobalElements()<100) smallProblem = true; if (smallProblem) cout << "Original Matrix = " << endl << A << endl; x.PutScalar(0.0); Epetra_LinearProblem FullProblem(&A, &x, &b); double normb, norma; b.NormInf(&normb); norma = A.NormInf(); if (verbose) cout << "Inf norm of Original Matrix = " << norma << endl << "Inf norm of Original RHS = " << normb << endl; Epetra_Time ReductionTimer(Comm); Epetra_CrsSingletonFilter SingletonFilter; Comm.Barrier(); double reduceInitTime = ReductionTimer.ElapsedTime(); SingletonFilter.Analyze(&A); Comm.Barrier(); double reduceAnalyzeTime = ReductionTimer.ElapsedTime() - reduceInitTime; if (SingletonFilter.SingletonsDetected()) cout << "Singletons found" << endl; else { cout << "Singletons not found" << endl; exit(1); } SingletonFilter.ConstructReducedProblem(&FullProblem); Comm.Barrier(); double reduceConstructTime = ReductionTimer.ElapsedTime() - reduceInitTime; double totalReduceTime = ReductionTimer.ElapsedTime(); if (verbose) cout << "\n\n****************************************************" << endl << " Reduction init time (sec) = " << reduceInitTime<< endl << " Reduction Analyze time (sec) = " << reduceAnalyzeTime << endl << " Construct Reduced Problem time (sec) = " << reduceConstructTime << endl << " Reduction Total time (sec) = " << totalReduceTime << endl<< endl; Statistics(SingletonFilter); Epetra_LinearProblem * ReducedProblem = SingletonFilter.ReducedProblem(); Epetra_CrsMatrix * Ap = dynamic_cast<Epetra_CrsMatrix *>(ReducedProblem->GetMatrix()); Epetra_Vector * bp = (*ReducedProblem->GetRHS())(0); Epetra_Vector * xp = (*ReducedProblem->GetLHS())(0); if (smallProblem) cout << " Reduced Matrix = " << endl << *Ap << endl << " LHS before sol = " << endl << *xp << endl << " RHS = " << endl 
<< *bp << endl; // Construct ILU preconditioner double elapsed_time, total_flops, MFLOPs; Epetra_Time timer(Comm); int LevelFill = 0; if (argc > 2) LevelFill = atoi(argv[2]); if (verbose) cout << "Using Level Fill = " << LevelFill << endl; int Overlap = 0; if (argc > 3) Overlap = atoi(argv[3]); if (verbose) cout << "Using Level Overlap = " << Overlap << endl; double Athresh = 0.0; if (argc > 4) Athresh = atof(argv[4]); if (verbose) cout << "Using Absolute Threshold Value of = " << Athresh << endl; double Rthresh = 1.0; if (argc > 5) Rthresh = atof(argv[5]); if (verbose) cout << "Using Relative Threshold Value of = " << Rthresh << endl; Ifpack_IlukGraph * IlukGraph = 0; Ifpack_CrsRiluk * ILUK = 0; if (LevelFill>-1) { elapsed_time = timer.ElapsedTime(); IlukGraph = new Ifpack_IlukGraph(Ap->Graph(), LevelFill, Overlap); assert(IlukGraph->ConstructFilledGraph()==0); elapsed_time = timer.ElapsedTime() - elapsed_time; if (verbose) cout << "Time to construct ILUK graph = " << elapsed_time << endl; Epetra_Flops fact_counter; elapsed_time = timer.ElapsedTime(); ILUK = new Ifpack_CrsRiluk(*IlukGraph); ILUK->SetFlopCounter(fact_counter); ILUK->SetAbsoluteThreshold(Athresh); ILUK->SetRelativeThreshold(Rthresh); //assert(ILUK->InitValues()==0); int initerr = ILUK->InitValues(*Ap); if (initerr!=0) { cout << endl << Comm << endl << " InitValues error = " << initerr; if (initerr==1) cout << " Zero diagonal found, warning error only"; cout << endl << endl; } assert(ILUK->Factor()==0); elapsed_time = timer.ElapsedTime() - elapsed_time; total_flops = ILUK->Flops(); MFLOPs = total_flops/elapsed_time/1000000.0; if (verbose) cout << "Time to compute preconditioner values = " << elapsed_time << endl << "MFLOPS for Factorization = " << MFLOPs << endl; //cout << *ILUK << endl; double Condest; ILUK->Condest(false, Condest); if (verbose) cout << "Condition number estimate for this preconditioner = " << Condest << endl; } int Maxiter = 100; double Tolerance = 1.0E-8; Epetra_Flops counter; 
Ap->SetFlopCounter(counter); xp->SetFlopCounter(*Ap); bp->SetFlopCounter(*Ap); if (ILUK!=0) ILUK->SetFlopCounter(*Ap); elapsed_time = timer.ElapsedTime(); double normreducedb, normreduceda; bp->NormInf(&normreducedb); normreduceda = Ap->NormInf(); if (verbose) cout << "Inf norm of Reduced Matrix = " << normreduceda << endl << "Inf norm of Reduced RHS = " << normreducedb << endl; BiCGSTAB(*Ap, *xp, *bp, ILUK, Maxiter, Tolerance, &residual, verbose); elapsed_time = timer.ElapsedTime() - elapsed_time; total_flops = counter.Flops(); MFLOPs = total_flops/elapsed_time/1000000.0; if (verbose) cout << "Time to compute solution = " << elapsed_time << endl << "Number of operations in solve = " << total_flops << endl << "MFLOPS for Solve = " << MFLOPs<< endl << endl; SingletonFilter.ComputeFullSolution(); if (smallProblem) cout << " Reduced LHS after sol = " << endl << *xp << endl << " Full LHS after sol = " << endl << x << endl << " Full Exact LHS = " << endl << xexact << endl; Epetra_Vector resid(x); resid.Update(1.0, x, -1.0, xexact, 0.0); // resid = xcomp - xexact resid.Norm2(&residual); double normx, normxexact; x.Norm2(&normx); xexact.Norm2(&normxexact); if (verbose) cout << "2-norm of computed solution = " << normx << endl << "2-norm of exact solution = " << normxexact << endl << "2-norm of difference between computed and exact solution = " << residual << endl; if (verbose1 && residual>1.0e-5) { if (verbose) cout << "Difference between computed and exact solution appears large..." << endl << "Computing norm of A times this difference. 
If this norm is small, then matrix is singular" << endl; Epetra_Vector bdiff(b); assert(A.Multiply(false, resid, bdiff)==0); assert(bdiff.Norm2(&residual)==0); if (verbose) cout << "2-norm of A times difference between computed and exact solution = " << residual << endl; } if (verbose) cout << "********************************************************" << endl << " Solving again with 2*Ax=2*b" << endl << "********************************************************" << endl; A.Scale(1.0); // A = 2*A b.Scale(1.0); // b = 2*b x.PutScalar(0.0); b.NormInf(&normb); norma = A.NormInf(); if (verbose) cout << "Inf norm of Original Matrix = " << norma << endl << "Inf norm of Original RHS = " << normb << endl; double updateReducedProblemTime = ReductionTimer.ElapsedTime(); SingletonFilter.UpdateReducedProblem(&FullProblem); Comm.Barrier(); updateReducedProblemTime = ReductionTimer.ElapsedTime() - updateReducedProblemTime; if (verbose) cout << "\n\n****************************************************" << endl << " Update Reduced Problem time (sec) = " << updateReducedProblemTime<< endl << "****************************************************" << endl; Statistics(SingletonFilter); if (LevelFill>-1) { Epetra_Flops fact_counter; elapsed_time = timer.ElapsedTime(); int initerr = ILUK->InitValues(*Ap); if (initerr!=0) { cout << endl << Comm << endl << " InitValues error = " << initerr; if (initerr==1) cout << " Zero diagonal found, warning error only"; cout << endl << endl; } assert(ILUK->Factor()==0); elapsed_time = timer.ElapsedTime() - elapsed_time; total_flops = ILUK->Flops(); MFLOPs = total_flops/elapsed_time/1000000.0; if (verbose) cout << "Time to compute preconditioner values = " << elapsed_time << endl << "MFLOPS for Factorization = " << MFLOPs << endl; double Condest; ILUK->Condest(false, Condest); if (verbose) cout << "Condition number estimate for this preconditioner = " << Condest << endl; } bp->NormInf(&normreducedb); normreduceda = Ap->NormInf(); if (verbose) cout << 
"Inf norm of Reduced Matrix = " << normreduceda << endl << "Inf norm of Reduced RHS = " << normreducedb << endl; BiCGSTAB(*Ap, *xp, *bp, ILUK, Maxiter, Tolerance, &residual, verbose); elapsed_time = timer.ElapsedTime() - elapsed_time; total_flops = counter.Flops(); MFLOPs = total_flops/elapsed_time/1000000.0; if (verbose) cout << "Time to compute solution = " << elapsed_time << endl << "Number of operations in solve = " << total_flops << endl << "MFLOPS for Solve = " << MFLOPs<< endl << endl; SingletonFilter.ComputeFullSolution(); if (smallProblem) cout << " Reduced LHS after sol = " << endl << *xp << endl << " Full LHS after sol = " << endl << x << endl << " Full Exact LHS = " << endl << xexact << endl; resid.Update(1.0, x, -1.0, xexact, 0.0); // resid = xcomp - xexact resid.Norm2(&residual); x.Norm2(&normx); xexact.Norm2(&normxexact); if (verbose) cout << "2-norm of computed solution = " << normx << endl << "2-norm of exact solution = " << normxexact << endl << "2-norm of difference between computed and exact solution = " << residual << endl; if (verbose1 && residual>1.0e-5) { if (verbose) cout << "Difference between computed and exact solution appears large..." << endl << "Computing norm of A times this difference. If this norm is small, then matrix is singular" << endl; Epetra_Vector bdiff(b); assert(A.Multiply(false, resid, bdiff)==0); assert(bdiff.Norm2(&residual)==0); if (verbose) cout << "2-norm of A times difference between computed and exact solution = " << residual << endl; } if (ILUK!=0) delete ILUK; if (IlukGraph!=0) delete IlukGraph; #ifdef EPETRA_MPI MPI_Finalize() ; #endif return 0 ; }
int main(int argc, char *argv[]) { #ifdef EPETRA_MPI MPI_Init(&argc,&argv); Epetra_MpiComm Comm (MPI_COMM_WORLD); #else Epetra_SerialComm Comm; #endif int MyPID = Comm.MyPID(); bool verbose = true; if (MyPID==0) verbose = true; if (verbose) cout << EpetraExt::EpetraExt_Version() << endl << endl; cout << Comm << endl; if(argc < 2 && verbose) { cerr << "Usage: " << argv[0] << " HB_filename" << endl; return(1); } // Uncomment the next three lines to debug in mpi mode //int tmp; //if (MyPID==0) cin >> tmp; //Comm.Barrier(); Epetra_Map * readMap; Epetra_CrsMatrix * readA; Epetra_Vector * readx; Epetra_Vector * readb; Epetra_Vector * readxexact; // Call routine to read in HB problem Trilinos_Util_ReadHb2Epetra(argv[1], Comm, readMap, readA, readx, readb, readxexact); // Create uniform distributed map Epetra_Map map(readMap->NumGlobalElements(), 0, Comm); // Create Exporter to distribute read-in matrix and vectors Epetra_Export exporter(*readMap, map); Epetra_CrsMatrix A(Copy, map, 0); Epetra_Vector x(map); Epetra_Vector b(map); Epetra_Vector xexact(map); Epetra_Time FillTimer(Comm); x.Export(*readx, exporter, Add); b.Export(*readb, exporter, Add); xexact.Export(*readxexact, exporter, Add); Comm.Barrier(); double vectorRedistributeTime = FillTimer.ElapsedTime(); A.Export(*readA, exporter, Add); Comm.Barrier(); double matrixRedistributeTime = FillTimer.ElapsedTime() - vectorRedistributeTime; assert(A.FillComplete()==0); Comm.Barrier(); double fillCompleteTime = FillTimer.ElapsedTime() - matrixRedistributeTime; if (Comm.MyPID()==0) { cout << "\n\n****************************************************" << endl; cout << "\n Vector redistribute time (sec) = " << vectorRedistributeTime<< endl; cout << " Matrix redistribute time (sec) = " << matrixRedistributeTime << endl; cout << " Transform to Local time (sec) = " << fillCompleteTime << endl<< endl; } Epetra_Vector tmp1(*readMap); Epetra_Vector tmp2(map); readA->Multiply(false, *readxexact, tmp1); A.Multiply(false, xexact, 
tmp2); double residual; tmp1.Norm2(&residual); if (verbose) cout << "Norm of Ax from file = " << residual << endl; tmp2.Norm2(&residual); if (verbose) cout << "Norm of Ax after redistribution = " << residual << endl << endl << endl; //cout << "A from file = " << *readA << endl << endl << endl; //cout << "A after dist = " << A << endl << endl << endl; delete readA; delete readx; delete readb; delete readxexact; delete readMap; Comm.Barrier(); EpetraExt::RowMatrixToMatrixMarketFile("test.mm", A, "test matrix", "This is a test matrix"); #ifdef EPETRA_MPI MPI_Finalize() ; #endif return 0 ; }