//============================================================================
int Ifpack_PrintResidual(char* Label, const Epetra_RowMatrix& A,
                         const Epetra_MultiVector& X, const Epetra_MultiVector& Y)
{
  if (X.Comm().MyPID() == 0) {
    cout << "***** " << Label << endl;
  }
  Ifpack_PrintResidual(0, A, X, Y);

  return(0);
}
void Hdf5MVOutputFile::write(const Epetra_MultiVector &mv)
{
#ifdef HAVE_EPETRAEXT_HDF5
  const Epetra_Comm &fileComm = mv.Comm();
  EpetraExt::HDF5 hdf5Output(fileComm);

  hdf5Output.Create(path()); // Truncate existing file if necessary

  TEUCHOS_TEST_FOR_EXCEPTION(!hdf5Output.IsOpen(),
                             std::runtime_error,
                             "Cannot create output file: " + path());

  hdf5Output.Write(groupName_, mv);

  hdf5Output.Close();
#else /* HAVE_EPETRAEXT_HDF5 */
  throw std::logic_error("HDF5 support disabled");
#endif /* HAVE_EPETRAEXT_HDF5 */
}
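// Usage sketch (illustrative, not part of this class): the same write can be
// driven directly through EpetraExt's HDF5 interface, mirroring the calls
// above. The file name "mv.h5" and group name "multivector" are made-up
// placeholders.
//
//   #include "Epetra_Map.h"
//   #include "Epetra_MultiVector.h"
//   #include "EpetraExt_HDF5.h"
//
//   void writeExample(const Epetra_Comm &comm)
//   {
//     Epetra_Map map(100, 0, comm);     // 100 global rows
//     Epetra_MultiVector mv(map, 3);    // three vectors
//     mv.Random();
//
//     EpetraExt::HDF5 file(comm);
//     file.Create("mv.h5");             // truncates any existing file
//     file.Write("multivector", mv);    // group name is arbitrary
//     file.Close();
//   }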
//============================================================================
int Ifpack_PrintResidual(const int iter, const Epetra_RowMatrix& A,
                         const Epetra_MultiVector& X, const Epetra_MultiVector& Y)
{
  Epetra_MultiVector RHS(X);
  std::vector<double> Norm2;
  Norm2.resize(X.NumVectors());

  IFPACK_CHK_ERR(A.Multiply(false, X, RHS));
  RHS.Update(1.0, Y, -1.0);

  RHS.Norm2(&Norm2[0]);
  if (X.Comm().MyPID() == 0) {
    cout << "***** iter: " << iter << ": ||Ax - b||_2 = "
         << Norm2[0] << endl;
  }

  return(0);
}
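// Usage sketch (illustrative): monitoring a hand-rolled iteration. A, X, B,
// and maxIters are placeholders for a previously assembled Epetra_RowMatrix,
// Epetra_MultiVectors with compatible maps, and an iteration limit.
//
//   Ifpack_PrintResidual((char*)"my iteration", A, X, B); // header + iter 0
//   for (int iter = 1; iter <= maxIters; ++iter) {
//     // ... update X ...
//     Ifpack_PrintResidual(iter, A, X, B);
//   }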
LOCA::Epetra::Interface::MultiPoint::
MultiPoint(
    const Teuchos::RCP<LOCA::Epetra::Interface::Required> &iReq_,
    const Teuchos::RCP<NOX::Epetra::Interface::Jacobian> &iJac_,
    const Epetra_MultiVector &splitMultiVec_,
    const Teuchos::RCP<Epetra_RowMatrix> &splitJac_,
    const Teuchos::RCP<EpetraExt::MultiComm> &globalComm_) :
  iReq(iReq_),
  iJac(iJac_),
  splitJac(splitJac_),
  globalComm(globalComm_),
  splitVec(*(splitMultiVec_(0))),
  splitRes(*(splitMultiVec_(0))),
  jacobian(0),
  solution(0),
  solutionOverlap(0),
  overlapImporter(0),
  timeStepsOnTimeDomain(splitMultiVec_.NumVectors()),
  numTimeDomains(globalComm_->NumSubDomains()),
  timeDomain(globalComm_->SubDomainRank()),
  conStep(0),
  rowStencil(0),
  rowIndex(0)
{
  if (globalComm->MyPID() == 0) {
    // TODO: pass in globalData and use output stream
    cout << "----------MultiPoint Partition Info------------"
         << "\n\tNumProcs = " << globalComm->NumProc()
         << "\n\tSpatial Decomposition = " << splitMultiVec_.Comm().NumProc()
         << "\n\tNumber of Domains = " << numTimeDomains
         << "\n\tSteps on Domain 0 = " << timeStepsOnTimeDomain
         << "\n\tTotal Number of Steps = " << globalComm->NumTimeSteps();
    cout << "\n-----------------------------------------------" << endl;
  }

  // Construct global block matrix graph from split jacobian and stencil,
  // which is just diagonal in this case
  rowStencil = new std::vector< std::vector<int> >(timeStepsOnTimeDomain);
  rowIndex = new std::vector<int>;
  for (int i=0; i < timeStepsOnTimeDomain; i++) {
    (*rowStencil)[i].push_back(0);
    (*rowIndex).push_back(i + globalComm->FirstTimeStepOnDomain());
  }
  jacobian = new EpetraExt::BlockCrsMatrix(*splitJac, *rowStencil,
                                           *rowIndex, *globalComm);

  // Construct global solution vector, the overlap vector,
  // and importer between them
  solution = new EpetraExt::BlockVector(splitJac->RowMatrixRowMap(),
                                        jacobian->RowMap());
  solutionOverlap = new EpetraExt::BlockVector(splitJac->RowMatrixRowMap(),
                                               jacobian->ColMap());
  overlapImporter = new Epetra_Import(solutionOverlap->Map(), solution->Map());

  // Load initial guess into block solution vector
  for (int i=0; i < timeStepsOnTimeDomain; i++)
    solution->LoadBlockValues(*(splitMultiVec_(i)), (*rowIndex)[i]);
}
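// Construction sketch (illustrative; all names are placeholders): iReq and
// iJac wrap the user's residual and Jacobian fills, initGuess is an
// Epetra_MultiVector with one column per step owned by this domain,
// spatialJac is the split (single-step) Jacobian, and globalComm is an
// EpetraExt::MultiComm that partitions the processors into time domains.
//
//   Teuchos::RCP<LOCA::Epetra::Interface::MultiPoint> multipoint =
//     Teuchos::rcp(new LOCA::Epetra::Interface::MultiPoint(
//       iReq, iJac, initGuess, spatialJac, globalComm));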
LOCA::Epetra::Interface::xyzt::
xyzt(
    const Teuchos::RCP<LOCA::Epetra::Interface::TimeDependent> &interface_,
    const Epetra_MultiVector &splitMultiVec_,
    const Teuchos::RCP<Epetra_RowMatrix> &splitJac_,
    const Teuchos::RCP<EpetraExt::MultiComm> &globalComm_,
    const Epetra_Vector &initialCondVec_,
    double dt_,
    Teuchos::ParameterList *precPrintParams_,
    Teuchos::ParameterList *precLSParams_) :
  interface(interface_),
  splitJac(splitJac_),
  globalComm(globalComm_),
  splitVec(*(splitMultiVec_(0))),
  splitRes(*(splitMultiVec_(0))),
  splitVecOld(*(splitMultiVec_(0))),
  initialCondVec(initialCondVec_),
  jacobian(0),
  solution(0),
  solutionOverlap(0),
  overlapImporter(0),
  timeStepsOnTimeDomain(splitMultiVec_.NumVectors()),
  numTimeDomains(globalComm_->NumSubDomains()),
  timeDomain(globalComm_->SubDomainRank()),
  conStep(0),
  rowStencil(0),
  rowIndex(0),
  precPrintParams(precPrintParams_),
  precLSParams(precLSParams_),
  splitJacCrs(NULL),
  savedSplitMassForFloquet(0),
  isCrsMatrix(true),
  floquetFillFlag(false),
  dt(dt_)
{
  if (precLSParams)
    isPeriodic = precLSParams_->get("Periodic", false);
  else
    isPeriodic = false;

  if (globalComm->MyPID() == 0) {
    // TODO: pass in globalData and use output stream
    std::cout << "--------------XYZT Partition Info---------------"
              << "\n\tNumProcs = " << globalComm->NumProc()
              << "\n\tSpatial Decomposition = " << splitMultiVec_.Comm().NumProc()
              << "\n\tNumber of Time Domains = " << numTimeDomains
              << "\n\tTime Steps on Domain 0 = " << timeStepsOnTimeDomain
              << "\n\tNumber of Time Steps = " << globalComm->NumTimeSteps();
    if (isPeriodic)
      std::cout << "\n\t-->Solving for a Periodic Orbit!";
    std::cout << "\n-----------------------------------------------" << std::endl;
  }

  // Construct global block matrix graph from split jacobian and stencil.
  // Each block has identical sparsity, and the mass matrix's sparsity is
  // assumed to be a subset of the Jacobian's.
  rowStencil = new std::vector< std::vector<int> >(timeStepsOnTimeDomain);
  rowIndex = new std::vector<int>;
  for (int i=0; i < timeStepsOnTimeDomain; i++) {
    if (timeDomain != 0 || i != 0)
      (*rowStencil)[i].push_back(-1);
    else if (isPeriodic)
      (*rowStencil)[i].push_back(globalComm->NumTimeSteps()-1);
    (*rowStencil)[i].push_back(0);
    (*rowIndex).push_back(i + globalComm->FirstTimeStepOnDomain());
  }
  jacobian = new EpetraExt::BlockCrsMatrix(*splitJac, *rowStencil,
                                           *rowIndex, *globalComm);

  // Construct global solution vector, the overlap vector,
  // and importer between them
  solution = new EpetraExt::BlockVector(splitJac->RowMatrixRowMap(),
                                        jacobian->RowMap());
  solutionOverlap = new EpetraExt::BlockVector(splitJac->RowMatrixRowMap(),
                                               jacobian->ColMap());
  overlapImporter = new Epetra_Import(solutionOverlap->Map(), solution->Map());

  // Load initial guess into block solution vector
  for (int i=0; i < timeStepsOnTimeDomain; i++)
    solution->LoadBlockValues(*(splitMultiVec_(i)), (*rowIndex)[i]);

  // Create preconditioner
  if (precLSParams != 0) {
    // Preconditioner needs a CrsMatrix; must convert VBR or others
    splitJacCrs = dynamic_cast<Epetra_CrsMatrix *>(splitJac.get());
    if (splitJacCrs == NULL) {
      isCrsMatrix = false;
      std::cout << "Cast of splitJac to Epetra_CrsMatrix failed; "
                << "constructing a CRS matrix" << std::endl;
      std::vector< std::vector<int> > row(1);
      row[0].push_back(0);
      std::vector<int> col;
      col.push_back(0);
      splitJacCrs = (Epetra_CrsMatrix *)
        new EpetraExt::BlockCrsMatrix(*splitJac, row, col, splitJac->Comm());
    }
    preconditioner =
      Teuchos::rcp(new LOCA::Epetra::xyztPrec(*jacobian, *splitJacCrs,
                                              *solution, *solutionOverlap,
                                              *overlapImporter,
                                              *precPrintParams, *precLSParams,
                                              globalComm));
  }
}
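// Stencil sketch (derived from the loop above): with four total time steps on
// a single time domain, the column offsets per block row are
//
//   non-periodic:  row 0 -> {0}      rows 1-3 -> {-1, 0}
//   periodic:      row 0 -> {3, 0}   rows 1-3 -> {-1, 0}
//
// i.e., each step couples to the previous one (the -1 offset), and a periodic
// orbit closes the loop by coupling step 0 back to the last step.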
template <>
int shylu_dist_solve<Epetra_CrsMatrix,Epetra_MultiVector>(
    shylu_symbolic<Epetra_CrsMatrix,Epetra_MultiVector> *ssym,
    shylu_data<Epetra_CrsMatrix,Epetra_MultiVector> *data,
    shylu_config<Epetra_CrsMatrix,Epetra_MultiVector> *config,
    const Epetra_MultiVector& X,
    Epetra_MultiVector& Y)
{
  int err;
  AztecOO *solver = 0;
  assert(X.Map().SameAs(Y.Map()));
  //assert(X.Map().SameAs(A_->RowMap()));
  const Epetra_MultiVector *newX;
  newX = &X;
  //rd_->redistribute(X, newX);

  int nvectors = newX->NumVectors();

  // May have to use importer/exporter
  Epetra_Map BsMap(-1, data->Snr, data->SRowElems, 0, X.Comm());
  Epetra_Map BdMap(-1, data->Dnr, data->DRowElems, 0, X.Comm());

  Epetra_MultiVector Bs(BsMap, nvectors);
  Epetra_Import BsImporter(BsMap, newX->Map());

  assert(BsImporter.SourceMap().SameAs(newX->Map()));
  assert((newX->Map()).SameAs(BsImporter.SourceMap()));

  Bs.Import(*newX, BsImporter, Insert);
  Epetra_MultiVector Xs(BsMap, nvectors);

  Epetra_SerialComm LComm;        // Use Serial Comm for the local vectors.
  Epetra_Map LocalBdMap(-1, data->Dnr, data->DRowElems, 0, LComm);
  Epetra_MultiVector localrhs(LocalBdMap, nvectors);
  Epetra_MultiVector locallhs(LocalBdMap, nvectors);

  Epetra_MultiVector Z(BdMap, nvectors);

  Epetra_MultiVector Bd(BdMap, nvectors);
  Epetra_Import BdImporter(BdMap, newX->Map());
  assert(BdImporter.SourceMap().SameAs(newX->Map()));
  assert((newX->Map()).SameAs(BdImporter.SourceMap()));
  Bd.Import(*newX, BdImporter, Insert);

  int lda;
  double *values;
  err = Bd.ExtractView(&values, &lda);
  assert (err == 0);
  int nrows = ssym->C->RowMap().NumMyElements();

  // Copy to local vector //TODO: OMP ?
  assert(lda == nrows);
  for (int v = 0; v < nvectors; v++) {
    for (int i = 0; i < nrows; i++) {
      err = localrhs.ReplaceMyValue(i, v, values[i+v*lda]);
      assert (err == 0);
    }
  }

  // TODO : Do we need to reset the lhs and rhs here ?
  if (config->amesosForDiagonal) {
    ssym->LP->SetRHS(&localrhs);
    ssym->LP->SetLHS(&locallhs);
    ssym->Solver->Solve();
  }
  else {
    ssym->ifSolver->ApplyInverse(localrhs, locallhs);
  }

  err = locallhs.ExtractView(&values, &lda);
  assert (err == 0);

  // Copy to distributed vector //TODO: OMP ?
  assert(lda == nrows);
  for (int v = 0; v < nvectors; v++) {
    for (int i = 0; i < nrows; i++) {
      err = Z.ReplaceMyValue(i, v, values[i+v*lda]);
      assert (err == 0);
    }
  }

  Epetra_MultiVector temp1(BsMap, nvectors);
  ssym->R->Multiply(false, Z, temp1);
  Bs.Update(-1.0, temp1, 1.0);

  Xs.PutScalar(0.0);

  Epetra_LinearProblem Problem(data->Sbar.get(), &Xs, &Bs);
  if (config->schurSolver == "Amesos") {
    Amesos_BaseSolver *solver2 = data->dsolver;
    data->LP2->SetLHS(&Xs);
    data->LP2->SetRHS(&Bs);
    //cout << "Calling solve *****************************" << endl;
    solver2->Solve();
    //cout << "Out of solve *****************************" << endl;
  }
  else {
    if (config->libName == "Belos") {
      solver = data->innersolver;
      solver->SetLHS(&Xs);
      solver->SetRHS(&Bs);
    }
    else {
      // See the comment above on why we are not able to reuse the solver
      // when the outer solve is AztecOO as well.
      solver = new AztecOO();
      //solver.SetPrecOperator(precop_);
      solver->SetAztecOption(AZ_solver, AZ_gmres);
      // Do not use AZ_none
      solver->SetAztecOption(AZ_precond, AZ_dom_decomp);
      //solver->SetAztecOption(AZ_precond, AZ_none);
      //solver->SetAztecOption(AZ_precond, AZ_Jacobi);
      ////solver->SetAztecOption(AZ_precond, AZ_Neumann);
      //solver->SetAztecOption(AZ_overlap, 3);
      //solver->SetAztecOption(AZ_subdomain_solve, AZ_ilu);
      //solver->SetAztecOption(AZ_output, AZ_all);
      //solver->SetAztecOption(AZ_diagnostics, AZ_all);
      solver->SetProblem(Problem);
    }

    // TODO: What is a good inner_tolerance?
    solver->Iterate(config->inner_maxiters, config->inner_tolerance);
  }

  Epetra_MultiVector temp(BdMap, nvectors);
  ssym->C->Multiply(false, Xs, temp);
  temp.Update(1.0, Bd, -1.0);

  // localrhs, locallhs, values, lda, and nrows are reused from the
  // diagonal solve above.
  err = temp.ExtractView(&values, &lda);
  assert (err == 0);

  // Copy to local vector //TODO: OMP ?
  assert(lda == nrows);
  for (int v = 0; v < nvectors; v++) {
    for (int i = 0; i < nrows; i++) {
      err = localrhs.ReplaceMyValue(i, v, values[i+v*lda]);
      assert (err == 0);
    }
  }

  if (config->amesosForDiagonal) {
    ssym->LP->SetRHS(&localrhs);
    ssym->LP->SetLHS(&locallhs);
    ssym->Solver->Solve();
  }
  else {
    ssym->ifSolver->ApplyInverse(localrhs, locallhs);
  }

  err = locallhs.ExtractView(&values, &lda);
  assert (err == 0);

  // Copy to distributed vector //TODO: OMP ?
  assert(lda == nrows);
  for (int v = 0; v < nvectors; v++) {
    for (int i = 0; i < nrows; i++) {
      err = temp.ReplaceMyValue(i, v, values[i+v*lda]);
      assert (err == 0);
    }
  }

  // For checking faults
  //if (NumApplyInverse_ == 5)  temp.ReplaceMyValue(0, 0, 0.0);

  Epetra_Export XdExporter(BdMap, Y.Map());
  Y.Export(temp, XdExporter, Insert);

  Epetra_Export XsExporter(BsMap, Y.Map());
  Y.Export(Xs, XsExporter, Insert);

  if (config->libName == "Belos" || config->schurSolver == "Amesos") {
    // clean up
  }
  else {
    delete solver;
  }
  return 0;
} // end shylu_dist_solve<Epetra_CrsMatrix,Epetra_MultiVector>
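// Algebra sketch (restating what the routine above computes): with the
// preordered system partitioned as
//
//   [ D  C ] [ x_d ]   [ b_d ]
//   [ R  S ] [ x_s ] = [ b_s ]
//
// the solve is block elimination:
//   1. z = D^{-1} b_d                (diagonal solve on Bd)
//   2. Sbar x_s = b_s - R z          (Schur complement solve for Xs)
//   3. x_d = D^{-1} (b_d - C x_s)    (second diagonal solve)
// after which x_d and x_s are exported back into Y.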