/* PostSetSubKSP - Optional user-defined routine that reset SubKSP options when hierarchical bjacobi PC is used e.g, mpiexec -n 8 ./ex3 -nox -n 10000 -ksp_type fgmres -pc_type bjacobi -pc_bjacobi_blocks 4 -sub_ksp_type gmres -sub_ksp_max_it 3 -post_setsubksp -sub_ksp_rtol 1.e-16 Set by SNESLineSearchSetPostCheck(). Input Parameters: linesearch - the LineSearch context xcurrent - current solution y - search direction and length x - the new candidate iterate Output Parameters: y - proposed step (search direction and length) (possibly changed) x - current iterate (possibly modified) */ PetscErrorCode PostSetSubKSP(SNESLineSearch linesearch,Vec xcurrent,Vec y,Vec x,PetscBool *changed_y,PetscBool *changed_x, void * ctx) { PetscErrorCode ierr; SetSubKSPCtx *check; PetscInt iter,its,sub_its,maxit; KSP ksp,sub_ksp,*sub_ksps; PC pc; PetscReal ksp_ratio; SNES snes; PetscFunctionBeginUser; ierr = SNESLineSearchGetSNES(linesearch, &snes);CHKERRQ(ierr); check = (SetSubKSPCtx*)ctx; ierr = SNESGetIterationNumber(snes,&iter);CHKERRQ(ierr); ierr = SNESGetKSP(snes,&ksp);CHKERRQ(ierr); ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr); ierr = PCBJacobiGetSubKSP(pc,NULL,NULL,&sub_ksps);CHKERRQ(ierr); sub_ksp = sub_ksps[0]; ierr = KSPGetIterationNumber(ksp,&its);CHKERRQ(ierr); /* outer KSP iteration number */ ierr = KSPGetIterationNumber(sub_ksp,&sub_its);CHKERRQ(ierr); /* inner KSP iteration number */ if (iter) { ierr = PetscPrintf(PETSC_COMM_WORLD," ...PostCheck snes iteration %D, ksp_it %d %d, subksp_it %d\n",iter,check->its0,its,sub_its);CHKERRQ(ierr); ksp_ratio = ((PetscReal)(its))/check->its0; maxit = (PetscInt)(ksp_ratio*sub_its + 0.5); if (maxit < 2) maxit = 2; ierr = KSPSetTolerances(sub_ksp,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,maxit);CHKERRQ(ierr); ierr = PetscPrintf(PETSC_COMM_WORLD," ...ksp_ratio %g, new maxit %d\n\n",ksp_ratio,maxit);CHKERRQ(ierr); } check->its0 = its; /* save current outer KSP iteration number */ PetscFunctionReturn(0); }
/*@C
   KSPMonitorDynamicTolerance - Recompute the inner tolerance in every outer
   iteration in an adaptive way.

   Collective on KSP

   Input Parameters:
+  ksp   - iterative context
.  its   - iteration number (not used)
.  fnorm - the current residual norm
-  dummy - context as a C struct (KSPDynTolCtx). Fields:
           coef: a scaling coefficient, default 1.0; can be passed through
                 -sub_ksp_dynamic_tolerance_param
           bnrm: norm of the right-hand side; stored to avoid repeated
                 calculation (negative value means "not computed yet")

   Notes:
   This may be useful for a flexible preconditioned Krylov method to control
   the accuracy of the inner solves needed to guarantee the convergence of
   the outer iterations.

   Level: advanced

.keywords: KSP, inner tolerance

.seealso: KSPMonitorDynamicToleranceDestroy()
@*/
PetscErrorCode KSPMonitorDynamicTolerance(KSP ksp,PetscInt its,PetscReal fnorm,void *dummy)
{
  PetscErrorCode ierr;
  PC             pc;
  PetscReal      outer_rtol, outer_abstol, outer_dtol, inner_rtol;
  PetscInt       outer_maxits,nksp,first,i;
  KSPDynTolCtx   *scale    = (KSPDynTolCtx*)dummy;
  KSP            kspinner  = NULL, *subksp = NULL;

  PetscFunctionBegin;
  ierr = KSPGetPC(ksp, &pc); CHKERRQ(ierr);

  /* compute inner_rtol: lazily compute and cache ||b|| on the first call
     (bnrm < 0 acts as the "uninitialized" sentinel) */
  if (scale->bnrm < 0.0) {
    Vec b;
    ierr = KSPGetRhs(ksp, &b); CHKERRQ(ierr);
    ierr = VecNorm(b, NORM_2, &(scale->bnrm)); CHKERRQ(ierr);
  }
  ierr = KSPGetTolerances(ksp, &outer_rtol, &outer_abstol, &outer_dtol, &outer_maxits); CHKERRQ(ierr);
  /* tighten the inner tolerance as the outer residual shrinks, capped below 1 */
  inner_rtol = PetscMin(scale->coef * scale->bnrm * outer_rtol / fnorm, 0.999);
  /*ierr = PetscPrintf(PETSC_COMM_WORLD, " Inner rtol = %g\n", (double)inner_rtol);CHKERRQ(ierr);*/

  /* if pc is ksp: push the new tolerance to the single inner KSP */
  ierr = PCKSPGetKSP(pc, &kspinner); CHKERRQ(ierr);
  if (kspinner) {
    ierr = KSPSetTolerances(kspinner, inner_rtol, outer_abstol, outer_dtol, outer_maxits); CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* if pc is bjacobi: push the new tolerance to every local block solver */
  ierr = PCBJacobiGetSubKSP(pc, &nksp, &first, &subksp); CHKERRQ(ierr);
  if (subksp) {
    for (i=0; i<nksp; i++) {
      ierr = KSPSetTolerances(subksp[i], inner_rtol, outer_abstol, outer_dtol, outer_maxits); CHKERRQ(ierr);
    }
    PetscFunctionReturn(0);
  }

  /* todo: dynamic tolerance may apply to other types of pc too */
  PetscFunctionReturn(0);
}
// Sets the given PC type on every local block (sub-KSP) of a block-Jacobi
// preconditioner.
//
// NOTE(review): the stray "#endif" right after the signature closes a
// preprocessor conditional (presumably selecting a PETSc-version-dependent
// signature) whose "#if" lies above this chunk -- confirm against the full file.
void PetscPreconditioner<T>::set_petsc_subpreconditioner_type(const PCType type, PC& pc)
#endif
{
  // For catching PETSc error return codes
  int ierr = 0;

  // get the communicator from the PETSc object
  Parallel::communicator comm;
  PetscObjectGetComm((PetscObject)pc, &comm);
  Parallel::Communicator communicator(comm);

  // All docs say must call KSPSetUp or PCSetUp before calling PCBJacobiGetSubKSP.
  // You must call PCSetUp after the preconditioner operators have been set, otherwise you get the:
  //
  // "Object is in wrong state!"
  // "Matrix must be set first."
  //
  // error messages...
  ierr = PCSetUp(pc);
  CHKERRABORT(comm,ierr);

  // To store array of local KSP contexts on this processor
  KSP* subksps;

  // the number of blocks on this processor
  PetscInt n_local;

  // The global number of the first block on this processor.
  // This is not used, so we just pass PETSC_NULL instead.
  // int first_local;

  // Fill array of local KSP contexts
  ierr = PCBJacobiGetSubKSP(pc, &n_local, PETSC_NULL, &subksps);
  CHKERRABORT(comm,ierr);

  // Loop over sub-ksp objects, set ILU preconditioner
  for (PetscInt i=0; i<n_local; ++i)
    {
      // Get pointer to sub KSP object's PC
      PC subpc;
      ierr = KSPGetPC(subksps[i], &subpc);
      CHKERRABORT(comm,ierr);

      // Set requested type on the sub PC
      ierr = PCSetType(subpc, type);
      CHKERRABORT(comm,ierr);
    }
}
/*
   main - Assembles a 2D 5-point Laplacian-style system Ax = b with a known
   exact solution (u = 1), solves it with KSP using a block Jacobi
   preconditioner whose block sizes and per-block solvers are set
   programmatically, and reports the error norm.
*/
int main(int argc,char **args)
{
  Vec            x,b,u;      /* approx solution, RHS, exact solution */
  Mat            A;            /* linear system matrix */
  KSP            ksp;          /* KSP context */
  KSP            *subksp;      /* array of local KSP contexts on this processor */
  PC             pc;           /* PC context */
  PC             subpc;        /* PC context for subdomain */
  PetscReal      norm;         /* norm of solution error */
  PetscErrorCode ierr;
  PetscInt       i,j,Ii,J,*blks,m = 4,n;
  PetscMPIInt    rank,size;
  PetscInt       its,nlocal,first,Istart,Iend;
  PetscScalar    v,one = 1.0,none = -1.0;
  PetscBool      isbjacobi;

  PetscInitialize(&argc,&args,(char*)0,help);
  ierr = PetscOptionsGetInt(NULL,"-m",&m,NULL);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);CHKERRQ(ierr);
  n    = m+2; /* grid is m x n; n derived from m */

  /* -------------------------------------------------------------------
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     ------------------------------------------------------------------- */

  /* Create and assemble parallel matrix */
  ierr = MatCreate(PETSC_COMM_WORLD,&A);CHKERRQ(ierr);
  ierr = MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n);CHKERRQ(ierr);
  ierr = MatSetFromOptions(A);CHKERRQ(ierr);
  /* 5 nonzeros per row (5-point stencil); both preallocation calls are made
     so the code works for either a sequential or a parallel AIJ matrix */
  ierr = MatMPIAIJSetPreallocation(A,5,NULL,5,NULL);CHKERRQ(ierr);
  ierr = MatSeqAIJSetPreallocation(A,5,NULL);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&Istart,&Iend);CHKERRQ(ierr);
  /* each rank fills only its locally owned rows: off-diagonal -1 couplings
     to the N/S/E/W grid neighbors, +4 on the diagonal */
  for (Ii=Istart; Ii<Iend; Ii++) {
    v = -1.0; i = Ii/n; j = Ii - i*n;
    if (i>0)   {J = Ii - n; ierr = MatSetValues(A,1,&Ii,1,&J,&v,ADD_VALUES);CHKERRQ(ierr);}
    if (i<m-1) {J = Ii + n; ierr = MatSetValues(A,1,&Ii,1,&J,&v,ADD_VALUES);CHKERRQ(ierr);}
    if (j>0)   {J = Ii - 1; ierr = MatSetValues(A,1,&Ii,1,&J,&v,ADD_VALUES);CHKERRQ(ierr);}
    if (j<n-1) {J = Ii + 1; ierr = MatSetValues(A,1,&Ii,1,&J,&v,ADD_VALUES);CHKERRQ(ierr);}
    v = 4.0; ierr = MatSetValues(A,1,&Ii,1,&Ii,&v,ADD_VALUES);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  /* Create parallel vectors */
  ierr = VecCreate(PETSC_COMM_WORLD,&u);CHKERRQ(ierr);
  ierr = VecSetSizes(u,PETSC_DECIDE,m*n);CHKERRQ(ierr);
  ierr = VecSetFromOptions(u);CHKERRQ(ierr);
  ierr = VecDuplicate(u,&b);CHKERRQ(ierr);
  ierr = VecDuplicate(b,&x);CHKERRQ(ierr);

  /* Set exact solution; then compute right-hand-side vector (b = A*u). */
  ierr = VecSet(u,one);CHKERRQ(ierr);
  ierr = MatMult(A,u,b);CHKERRQ(ierr);

  /* Create linear solver context */
  ierr = KSPCreate(PETSC_COMM_WORLD,&ksp);CHKERRQ(ierr);

  /* Set operators. Here the matrix that defines the linear system
     also serves as the preconditioning matrix. */
  ierr = KSPSetOperators(ksp,A,A);CHKERRQ(ierr);

  /* Set default preconditioner for this program to be block Jacobi.
     This choice can be overridden at runtime with the option -pc_type <type> */
  ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
  ierr = PCSetType(pc,PCBJACOBI);CHKERRQ(ierr);

  /* -------------------------------------------------------------------
                   Define the problem decomposition
     ------------------------------------------------------------------- */

  /* Call PCBJacobiSetTotalBlocks() to set individually the size of
     each block in the preconditioner.  This could also be done with
     the runtime option -pc_bjacobi_blocks <blocks>.  Also, see the
     command PCBJacobiSetLocalBlocks() to set the local blocks.

     Note: The default decomposition is 1 block per processor. */
  ierr = PetscMalloc1(m,&blks);CHKERRQ(ierr);
  for (i=0; i<m; i++) blks[i] = n; /* m blocks of n rows each */
  ierr = PCBJacobiSetTotalBlocks(pc,m,blks);CHKERRQ(ierr);
  ierr = PetscFree(blks);CHKERRQ(ierr);

  /* -------------------------------------------------------------------
                 Set the linear solvers for the subblocks
     ------------------------------------------------------------------- */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Basic method, should be sufficient for the needs of most users.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     By default, the block Jacobi method uses the same solver on each
     block of the problem.  To set the same solver options on all blocks,
     use the prefix -sub before the usual PC and KSP options, e.g.,
       -sub_pc_type <pc> -sub_ksp_type <ksp> -sub_ksp_rtol 1.e-4
  */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        Advanced method, setting different solvers for various blocks.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Note that each block's KSP context is completely independent of
     the others, and the full range of uniprocessor KSP options is
     available for each block. The following section of code is intended
     to be a simple illustration of setting different linear solvers for
     the individual blocks.  These choices are obviously not recommended
     for solving this particular problem. */
  ierr = PetscObjectTypeCompare((PetscObject)pc,PCBJACOBI,&isbjacobi);CHKERRQ(ierr);
  if (isbjacobi) {
    /* Call KSPSetUp() to set the block Jacobi data structures (including
       creation of an internal KSP context for each block).
       Note: KSPSetUp() MUST be called before PCBJacobiGetSubKSP(). */
    ierr = KSPSetUp(ksp);CHKERRQ(ierr);

    /* Extract the array of KSP contexts for the local blocks */
    ierr = PCBJacobiGetSubKSP(pc,&nlocal,&first,&subksp);CHKERRQ(ierr);

    /* Loop over the local blocks, setting various KSP options for each block. */
    for (i=0; i<nlocal; i++) {
      ierr = KSPGetPC(subksp[i],&subpc);CHKERRQ(ierr);
      if (!rank) {
        /* rank 0: alternate ILU and (no PC + BiCGStab) across its blocks */
        if (i%2) {
          ierr = PCSetType(subpc,PCILU);CHKERRQ(ierr);
        } else {
          ierr = PCSetType(subpc,PCNONE);CHKERRQ(ierr);
          ierr = KSPSetType(subksp[i],KSPBCGS);CHKERRQ(ierr);
          ierr = KSPSetTolerances(subksp[i],1.e-6,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);CHKERRQ(ierr);
        }
      } else {
        /* all other ranks: Jacobi-preconditioned GMRES on every block */
        ierr = PCSetType(subpc,PCJACOBI);CHKERRQ(ierr);
        ierr = KSPSetType(subksp[i],KSPGMRES);CHKERRQ(ierr);
        ierr = KSPSetTolerances(subksp[i],1.e-6,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);CHKERRQ(ierr);
      }
    }
  }

  /* -------------------------------------------------------------------
                      Solve the linear system
     ------------------------------------------------------------------- */

  /* Set runtime options */
  ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);

  /* Solve the linear system */
  ierr = KSPSolve(ksp,b,x);CHKERRQ(ierr);

  /* -------------------------------------------------------------------
                      Check solution and clean up
     ------------------------------------------------------------------- */

  /* Check the error: x <- x - u, then report ||x - u||_2 */
  ierr = VecAXPY(x,none,u);CHKERRQ(ierr);
  ierr = VecNorm(x,NORM_2,&norm);CHKERRQ(ierr);
  ierr = KSPGetIterationNumber(ksp,&its);CHKERRQ(ierr);
  ierr = PetscPrintf(PETSC_COMM_WORLD,"Norm of error %g iterations %D\n",(double)norm,its);CHKERRQ(ierr);

  /* Free work space.  All PETSc objects should be destroyed when they
     are no longer needed. */
  ierr = KSPDestroy(&ksp);CHKERRQ(ierr);
  ierr = VecDestroy(&u);CHKERRQ(ierr);
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  ierr = VecDestroy(&b);CHKERRQ(ierr);
  ierr = MatDestroy(&A);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return 0;
}
/*! \brief solve complex use Krylov solver in combination with Block-Jacobi + Nested * * * Solve the system using block-jacobi preconditioner + nested solver for each block * */ void solve_complex(Mat & A_, const Vec & b_, Vec & x_) { // We get the size of the Matrix A PetscInt row; PetscInt col; PetscInt row_loc; PetscInt col_loc; PetscInt * blks; PetscInt nlocal; PetscInt first; KSP *subksp; PC subpc; PETSC_SAFE_CALL(MatGetSize(A_,&row,&col)); PETSC_SAFE_CALL(MatGetLocalSize(A_,&row_loc,&col_loc)); // Set the preconditioner to Block-jacobi PC pc; KSPGetPC(ksp,&pc); PCSetType(pc,PCBJACOBI); PETSC_SAFE_CALL(KSPSetType(ksp,KSPGMRES)); PetscInt m = 1; PetscMalloc1(m,&blks); for (size_t i = 0 ; i < m ; i++) blks[i] = row_loc; PCBJacobiSetLocalBlocks(pc,m,blks); PetscFree(blks); KSPSetUp(ksp); PCSetUp(pc); PCBJacobiGetSubKSP(pc,&nlocal,&first,&subksp); for (size_t i = 0; i < nlocal; i++) { KSPGetPC(subksp[i],&subpc); PCSetType(subpc,PCLU); // PCSetType(subpc,PCJACOBI); KSPSetType(subksp[i],KSPPREONLY); // PETSC_SAFE_CALL(KSPSetConvergenceTest(ksp,KSPConvergedDefault,NULL,NULL)); // KSPSetTolerances(subksp[i],1.e-6,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT); /* if (!rank) { if (i%2) { PCSetType(subpc,PCILU); } else { PCSetType(subpc,PCNONE); KSPSetType(subksp[i],KSPBCGS); KSPSetTolerances(subksp[i],1.e-6,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT); } } else { PCSetType(subpc,PCJACOBI); KSPSetType(subksp[i],KSPGMRES); KSPSetTolerances(subksp[i],1.e-6,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT); }*/ } KSPSolve(ksp,b_,x_); auto & v_cl = create_vcluster(); if (try_solve == true) { // calculate error statistic about the solution solError err = statSolutionError(A_,b_,x_); if (v_cl.getProcessUnitID() == 0) { std::cout << "Method: " << s_type << " " << " pre-conditoner: " << PCJACOBI << " iterations: " << err.its << std::endl; std::cout << "Norm of error: " << err.err_norm << " Norm infinity: " << err.err_inf << std::endl; } } }