// ------------------------------------------------------------- // MatCreateDenseGA // ------------------------------------------------------------- PetscErrorCode MatCreateDenseGA(MPI_Comm comm, PetscInt m,PetscInt n,PetscInt M,PetscInt N, Mat *A) { PetscErrorCode ierr = 0; struct MatGACtx *ctx; int lrows, grows, lcols, gcols; ierr = PetscMalloc(sizeof(struct MatGACtx), &ctx); CHKERRQ(ierr); ierr = MPIComm2GApgroup(comm, &(ctx->gaGroup)); CHKERRQ(ierr); lrows = m; lcols = n; grows = M; gcols = N; if (lrows == PETSC_DECIDE || lrows == PETSC_DETERMINE || grows == PETSC_DECIDE || grows == PETSC_DETERMINE) { ierr = PetscSplitOwnership(comm, &lrows, &grows); CHKERRXX(ierr); } if (lcols == PETSC_DECIDE || lcols == PETSC_DETERMINE || gcols == PETSC_DECIDE || gcols == PETSC_DETERMINE) { ierr = PetscSplitOwnership(comm, &lcols, &gcols); CHKERRXX(ierr); } ierr = CreateMatGA(ctx->gaGroup, lrows, lcols, grows, gcols, &(ctx->ga)); CHKERRQ(ierr); ierr = MatCreateShell(comm, lrows, lcols, grows, gcols, ctx, A); CHKERRQ(ierr); ierr = MatSetOperations_DenseGA(*A); return ierr; }
// Force the solution vector to the region's prescribed potential at every
// inner-region node that is not on the domain edge.
//
// Fixes vs. original:
//  - the global-index vector is std::vector<PetscInt> (not std::vector<int>),
//    matching the element type VecSetValues expects and the sibling
//    set_rhs_at_nodes_occupied_by_objects();
//  - VecAssemblyBegin/End are collective, so they are now executed on every
//    process, including ranks that had no values to set (otherwise ranks
//    with zero occupied nodes could deadlock the assembly).
void Field_solver::set_solution_at_nodes_of_inner_regions(
    Spatial_mesh &spat_mesh,
    Inner_region &inner_region )
{
    int nx = spat_mesh.x_n_nodes;
    int ny = spat_mesh.y_n_nodes;
    int nz = spat_mesh.z_n_nodes;

    std::vector<PetscInt> occupied_nodes_global_indices =
        list_of_nodes_global_indices_in_matrix(
            inner_region.inner_nodes_not_at_domain_edge, nx, ny, nz );

    PetscErrorCode ierr;
    PetscInt num_of_elements_to_set = occupied_nodes_global_indices.size();
    if( num_of_elements_to_set != 0 ){
        // Every occupied node gets the same value: the region's potential.
        std::vector<PetscScalar> phi_inside_region( num_of_elements_to_set,
                                                    inner_region.potential );
        ierr = VecSetValues( phi_vec,
                             num_of_elements_to_set,
                             &occupied_nodes_global_indices[0],
                             &phi_inside_region[0],
                             INSERT_VALUES );
        CHKERRXX( ierr );
    }
    // Collective calls: all ranks must participate.
    ierr = VecAssemblyBegin( phi_vec ); CHKERRXX( ierr );
    ierr = VecAssemblyEnd( phi_vec ); CHKERRXX( ierr );
}
void Field_solver::construct_d2dy2_in_3d( Mat *d2dy2_3d, int nx, int ny, int nz, PetscInt rstart, PetscInt rend ) { PetscErrorCode ierr; //int nrow = ( nx - 2 ) * ( ny - 2 ) * ( nz - 2 ); //int ncol = nrow; int at_boundary_pattern_size = 2; int no_boundaries_pattern_size = 3; PetscScalar at_top_boundary_pattern[at_boundary_pattern_size]; PetscScalar no_boundaries_pattern[no_boundaries_pattern_size]; PetscScalar at_bottom_boundary_pattern[at_boundary_pattern_size]; PetscInt cols[ no_boundaries_pattern_size ]; at_bottom_boundary_pattern[0] = -2.0; at_bottom_boundary_pattern[1] = 1.0; no_boundaries_pattern[0] = 1.0; no_boundaries_pattern[1] = -2.0; no_boundaries_pattern[2] = 1.0; at_top_boundary_pattern[0] = 1.0; at_top_boundary_pattern[1] = -2.0; int i, j, k; for( int row_idx = rstart; row_idx < rend; row_idx++ ) { global_index_in_matrix_to_node_ijk( row_idx, &i, &j, &k, nx, ny, nz ); if ( j == 1 ) { // bottom boundary cols[0] = row_idx; cols[1] = row_idx + ( nx - 2 ); ierr = MatSetValues( *d2dy2_3d, 1, &row_idx, at_boundary_pattern_size, cols, at_bottom_boundary_pattern, INSERT_VALUES ); CHKERRXX( ierr ); } else if ( j == ny - 2 ) { // top boundary cols[0] = row_idx - ( nx - 2 ); cols[1] = row_idx; ierr = MatSetValues( *d2dy2_3d, 1, &row_idx, at_boundary_pattern_size, cols, at_top_boundary_pattern, INSERT_VALUES ); CHKERRXX( ierr ); } else { // center cols[0] = row_idx - ( nx - 2 ); cols[1] = row_idx; cols[2] = row_idx + ( nx - 2 ); ierr = MatSetValues( *d2dy2_3d, 1, &row_idx, no_boundaries_pattern_size, cols, no_boundaries_pattern, INSERT_VALUES ); CHKERRXX( ierr ); } //printf( "d2dx2 loop: i = %d \n", i ); } ierr = MatAssemblyBegin( *d2dy2_3d, MAT_FINAL_ASSEMBLY ); CHKERRXX( ierr ); ierr = MatAssemblyEnd( *d2dy2_3d, MAT_FINAL_ASSEMBLY ); CHKERRXX( ierr ); return; }
/// Solve again w/ the specified RHS, put result in specified vector (specialized) void p_resolveImpl(const VectorType& b, VectorType& x) const { PetscErrorCode ierr(0); int me(this->processor_rank()); try { const Vec *bvec(PETScVector(b)); Vec *xvec(PETScVector(x)); ierr = KSPSolve(p_KSP, *bvec, *xvec); CHKERRXX(ierr); int its; KSPConvergedReason reason; PetscReal rnorm; ierr = KSPGetIterationNumber(p_KSP, &its); CHKERRXX(ierr); ierr = KSPGetConvergedReason(p_KSP, &reason); CHKERRXX(ierr); ierr = KSPGetResidualNorm(p_KSP, &rnorm); CHKERRXX(ierr); std::string msg; if (reason < 0) { msg = boost::str(boost::format("%d: PETSc KSP diverged after %d iterations, reason: %d") % me % its % reason); throw Exception(msg); } else if (me == 0) { msg = boost::str(boost::format("%d: PETSc KSP converged after %d iterations, reason: %d") % me % its % reason); std::cerr << msg << std::endl; } } catch (const PETSC_EXCEPTION_TYPE& e) { throw PETScException(ierr, e); } catch (const Exception& e) { throw e; } }
void Field_solver::modify_equation_near_object_boundaries( Mat *A, int nx, int ny, int nz, double dx, double dy, double dz, Inner_region &inner_region ) { PetscErrorCode ierr; int max_possible_neighbours = 6; // in 3d case; todo: make max_possible_nbr a property of Node_reference std::vector<PetscScalar> zeroes( max_possible_neighbours, 0.0 ); for( auto &node : inner_region.near_boundary_nodes_not_at_domain_edge ){ PetscInt modify_single_row = 1; PetscInt row_to_modify = node_global_index_in_matrix( node, nx, ny, nz ); std::vector<PetscInt> cols_to_modify = adjacent_nodes_not_at_domain_edge_and_inside_inner_region( node, inner_region, nx, ny, nz, dx, dy, dz ); PetscInt n_of_cols_to_modify = cols_to_modify.size(); if( n_of_cols_to_modify != 0 ){ PetscInt *col_indices = &cols_to_modify[0]; ierr = MatSetValues( *A, modify_single_row, &row_to_modify, n_of_cols_to_modify, col_indices, &zeroes[0], INSERT_VALUES ); CHKERRXX( ierr ); } } ierr = MatAssemblyBegin( *A, MAT_FINAL_ASSEMBLY ); CHKERRXX( ierr ); ierr = MatAssemblyEnd( *A, MAT_FINAL_ASSEMBLY ); CHKERRXX( ierr ); }
/// Solve w/ the specified RHS and estimate (result in x) void p_solveImpl(MatrixType& A, const VectorType& b, VectorType& x) const { PetscErrorCode ierr(0); try { Mat *Amat(PETScMatrix(A)); if (p_matrixSet && this->p_constSerialMatrix) { // KSPSetOperators can be skipped } else { #if PETSC_VERSION_LT(3,5,0) ierr = KSPSetOperators(p_KSP, *Amat, *Amat, SAME_NONZERO_PATTERN); CHKERRXX(ierr); #else ierr = KSPSetOperators(p_KSP, *Amat, *Amat); CHKERRXX(ierr); #endif p_matrixSet = true; } this->p_resolveImpl(b, x); } catch (const PETSC_EXCEPTION_TYPE& e) { throw PETScException(ierr, e); } catch (const Exception& e) { throw e; } }
// ------------------------------------------------------------- // PetscNonlinearSolverImplementation::p_solve // ------------------------------------------------------------- void PetscNonlinearSolverImplementation::p_solve(void) { PetscErrorCode ierr(0); p_petsc_X = PETScVector(*p_X); int me(this->processor_rank()); try { ierr = SNESSolve(p_snes, NULL, *p_petsc_X); CHKERRXX(ierr); SNESConvergedReason reason; PetscInt iter; ierr = SNESGetConvergedReason(p_snes, &reason); CHKERRXX(ierr); ierr = SNESGetIterationNumber(p_snes, &iter); CHKERRXX(ierr); std::string msg; if (reason < 0) { msg = boost::str(boost::format("%d: PETSc SNES diverged after %d iterations, reason: %d") % me % iter % reason); throw Exception(msg); } else if (me == 0) { msg = boost::str(boost::format("%d: PETSc SNES converged after %d iterations, reason: %d") % me % iter % reason); std::cerr << msg << std::endl; } } catch (const PETSC_EXCEPTION_TYPE& e) { throw PETScException(ierr, e); } catch (const Exception& e) { throw e; } }
// Assemble the full-domain 7-point finite-difference operator:
//   A = dy^2*dz^2 * d2dx2 + dx^2*dz^2 * d2dy2 + dx^2*dy^2 * d2dz2
// over the (nx-2)*(ny-2)*(nz-2) interior nodes. Each 1-d second-difference
// operator is built separately; d2dy2 and d2dz2 are accumulated into A with
// MatAXPY and then destroyed. Rows [rstart, rend) belong to this process.
void Field_solver::construct_equation_matrix_in_full_domain( Mat *A,
                                                             int nx, int ny, int nz,
                                                             double dx, double dy, double dz,
                                                             PetscInt nlocal,
                                                             PetscInt rstart, PetscInt rend )
{
    PetscErrorCode ierr;
    Mat d2dy2, d2dz2;
    int nrow = ( nx - 2 ) * ( ny - 2 ) * ( nz - 2 );
    int ncol = nrow;
    PetscInt nonzero_per_row = 7; // approx

    // d2dx2 goes straight into A, scaled by dy^2 * dz^2.
    construct_d2dx2_in_3d( A, nx, ny, nz, rstart, rend );
    ierr = MatScale( *A, dy * dy * dz * dz ); CHKERRXX( ierr );

    // A += dx^2 * dz^2 * d2dy2
    alloc_petsc_matrix( &d2dy2, nlocal, nlocal, nrow, ncol, nonzero_per_row );
    construct_d2dy2_in_3d( &d2dy2, nx, ny, nz, rstart, rend );
    ierr = MatAXPY( *A, dx * dx * dz * dz, d2dy2, DIFFERENT_NONZERO_PATTERN ); CHKERRXX( ierr );
    ierr = MatDestroy( &d2dy2 ); CHKERRXX( ierr );

    // A += dx^2 * dy^2 * d2dz2
    alloc_petsc_matrix( &d2dz2, nlocal, nlocal, nrow, ncol, nonzero_per_row );
    construct_d2dz2_in_3d( &d2dz2, nx, ny, nz, rstart, rend );
    ierr = MatAXPY( *A, dx * dx * dy * dy, d2dz2, DIFFERENT_NONZERO_PATTERN ); CHKERRXX( ierr );
    ierr = MatDestroy( &d2dz2 ); CHKERRXX( ierr );
    return;
}
// Query PETSc for this process's ownership range [rstart, rend) and the
// number of locally owned elements of the vector *x.
void Field_solver::get_vector_ownership_range_and_local_size_for_each_process(
    Vec *x,
    PetscInt *rstart, PetscInt *rend, PetscInt *nlocal )
{
    PetscErrorCode status = VecGetOwnershipRange( *x, rstart, rend );
    CHKERRXX( status );
    status = VecGetLocalSize( *x, nlocal );
    CHKERRXX( status );
}
// Release all PETSc objects owned by the solver.
// Destructors must not throw: CHKERRXX raises a C++ exception on failure,
// which would call std::terminate if it escaped a destructor. Swallow any
// PETSc errors during teardown, matching the policy of the other PETSc
// wrapper destructors in this code base.
Field_solver::~Field_solver()
{
    try {
        PetscErrorCode ierr;
        ierr = VecDestroy( &phi_vec ); CHKERRXX( ierr );
        ierr = VecDestroy( &rhs ); CHKERRXX( ierr );
        ierr = MatDestroy( &A ); CHKERRXX( ierr );
        ierr = KSPDestroy( &ksp ); CHKERRXX( ierr );
    } catch ( ... ) {
        // just eat it: nothing sensible can be done at destruction time
    }
}
// Allocate a sequential (single-process) AIJ sparse matrix of size
// nrow x ncol, preallocating nonzero_per_row entries in every row.
void Field_solver::alloc_petsc_matrix_seqaij( Mat *A,
                                              PetscInt nrow, PetscInt ncol,
                                              PetscInt nonzero_per_row )
{
    PetscErrorCode status;
    status = MatCreateSeqAIJ( PETSC_COMM_SELF, nrow, ncol,
                              nonzero_per_row, NULL, A );
    CHKERRXX( status );
    status = MatSetUp( *A );
    CHKERRXX( status );
}
// Create a named PETSc vector of the given global size on PETSC_COMM_WORLD;
// the local distribution and vector type are left to PETSc / the options
// database.
void Field_solver::alloc_petsc_vector( Vec *x, int size, const char *name )
{
    PetscErrorCode status = VecCreate( PETSC_COMM_WORLD, x );
    CHKERRXX( status );
    status = PetscObjectSetName( (PetscObject) *x, name );
    CHKERRXX( status );
    status = VecSetSizes( *x, PETSC_DECIDE, size );
    CHKERRXX( status );
    status = VecSetFromOptions( *x );
    CHKERRXX( status );
}
// Assemble the 1-d second-difference operator along z, embedded in the 3-d
// interior-node matrix. Consecutive z-layers are (nx-2)*(ny-2) rows apart,
// so the [1 -2 1] stencil couples row i with i +/- (nx-2)*(ny-2); at the
// near/far z-boundaries the out-of-range neighbour is dropped.
// Rows in [rstart, rend) are filled by this process.
void Field_solver::construct_d2dz2_in_3d( Mat *d2dz2_3d,
                                          int nx, int ny, int nz,
                                          PetscInt rstart, PetscInt rend )
{
    PetscErrorCode ierr;
    //int nrow = ( nx - 2 ) * ( ny - 2 ) * ( nz - 2 );
    //int ncol = nrow;
    const int at_boundary_pattern_size = 2;
    const int no_boundaries_pattern_size = 3;
    PetscScalar at_near_boundary_pattern[at_boundary_pattern_size];
    PetscScalar no_boundaries_pattern[no_boundaries_pattern_size];
    PetscScalar at_far_boundary_pattern[at_boundary_pattern_size];
    PetscInt cols[ no_boundaries_pattern_size ];

    at_near_boundary_pattern[0] = -2.0;
    at_near_boundary_pattern[1] = 1.0;

    no_boundaries_pattern[0] = 1.0;
    no_boundaries_pattern[1] = -2.0;
    no_boundaries_pattern[2] = 1.0;

    at_far_boundary_pattern[0] = 1.0;
    at_far_boundary_pattern[1] = -2.0;

    // NOTE(review): MatSetValues expects const PetscInt* row indices; '&i'
    // is an int*, which only matches when PETSc is built with 32-bit
    // indices — TODO confirm the build configuration.
    for( int i = rstart; i < rend; i++ ) {
        if ( i < ( nx - 2 ) * ( ny - 2 ) ) {
            // near boundary: first z-layer, no i - (nx-2)*(ny-2) neighbour
            cols[0] = i;
            cols[1] = i + ( nx - 2 ) * ( ny - 2 );
            ierr = MatSetValues( *d2dz2_3d,
                                 1, &i,
                                 at_boundary_pattern_size, cols,
                                 at_near_boundary_pattern,
                                 INSERT_VALUES );
            CHKERRXX( ierr );
        } else if ( i >= ( nx - 2 ) * ( ny - 2 ) * ( nz - 3 ) ) {
            // far boundary: last z-layer, no i + (nx-2)*(ny-2) neighbour
            cols[0] = i - ( nx - 2 ) * ( ny - 2 );
            cols[1] = i;
            ierr = MatSetValues( *d2dz2_3d,
                                 1, &i,
                                 at_boundary_pattern_size, cols,
                                 at_far_boundary_pattern,
                                 INSERT_VALUES );
            CHKERRXX( ierr );
        } else {
            // center: full three-point stencil
            cols[0] = i - ( nx - 2 ) * ( ny - 2 );
            cols[1] = i;
            cols[2] = i + ( nx - 2 ) * ( ny - 2 );
            ierr = MatSetValues( *d2dz2_3d,
                                 1, &i,
                                 no_boundaries_pattern_size, cols,
                                 no_boundaries_pattern,
                                 INSERT_VALUES );
            CHKERRXX( ierr );
        }
    }
    ierr = MatAssemblyBegin( *d2dz2_3d, MAT_FINAL_ASSEMBLY ); CHKERRXX( ierr );
    ierr = MatAssemblyEnd( *d2dz2_3d, MAT_FINAL_ASSEMBLY ); CHKERRXX( ierr );
    return;
}
// ------------------------------------------------------------- // Initialize // ------------------------------------------------------------- /// Does whatever is necessary to start up the PETSc library void Initialize(void) { if (Initialized()) return; PetscErrorCode ierr(0); PetscBool flg; #if USE_PROGRESS_RANKS gridpack::parallel::Communicator comm; MPI_Comm world = GA_MPI_Comm(); PETSC_COMM_WORLD = world; #endif try { // Turn this on to enable PETSc logging. #if 0 int argc = 2; char **args; args = new char*[2]; args[0] = new char[32]; args[1] = new char[32]; sprintf(args[0],"powerflow.x"); sprintf(args[1],"-log_summary"); ierr = PetscInitialize(&argc,&args,NULL,NULL); CHKERRXX(ierr); delete [] args[1]; delete [] args[0]; delete [] args; #else ierr = PetscInitializeNoArguments(); CHKERRXX(ierr); #endif PetscOptionsHasName( #if PETSC_VERSION_GE(3,7,0) NULL, #endif NULL, "-log_summary", &flg); ierr = PetscOptionsInsertFile(PETSC_COMM_WORLD, #if PETSC_VERSION_GE(3,7,0) NULL, #endif "gridpack.petscrc", PETSC_FALSE); CHKERRXX(ierr); } catch (const PETSC_EXCEPTION_TYPE& e) { throw PETScException(ierr, e); } // Print out some information on processor configuration int mpi_err, me, nproc; mpi_err = MPI_Comm_rank(PETSC_COMM_WORLD, &me); mpi_err = MPI_Comm_size(PETSC_COMM_WORLD, &nproc); if (mpi_err > 0) { throw gridpack::Exception("MPI initialization failed"); } if (me == 0) { printf("\nGridPACK math module configured on %d processors\n",nproc); } }
// Destroy the SNES context, but only while PETSc is still initialized;
// any errors raised during teardown are deliberately ignored, since a
// destructor must not throw.
PetscNonlinearSolverImplementation::~PetscNonlinearSolverImplementation(void)
{
  try {
    PetscBool petsc_up;
    PetscErrorCode ierr = PetscInitialized(&petsc_up);
    CHKERRXX(ierr);
    if (petsc_up) {
      ierr = SNESDestroy(&p_snes);
      CHKERRXX(ierr);
    }
  } catch (...) {
    // just eat it
  }
}
/// Do what is necessary to build this instance void p_build(const std::string& option_prefix) { PetscErrorCode ierr; try { parallel::Communicator comm(this->communicator()); if (this->p_doSerial) { comm = this->communicator().self(); } ierr = KSPCreate(comm, &p_KSP); CHKERRXX(ierr); if (!this->p_guessZero) { ierr = KSPSetInitialGuessNonzero(p_KSP,PETSC_TRUE); CHKERRXX(ierr); } else { ierr = KSPSetInitialGuessNonzero(p_KSP,PETSC_FALSE); CHKERRXX(ierr); } ierr = KSPSetOptionsPrefix(p_KSP, option_prefix.c_str()); CHKERRXX(ierr); PC pc; ierr = KSPGetPC(p_KSP, &pc); CHKERRXX(ierr); ierr = PCSetOptionsPrefix(pc, option_prefix.c_str()); CHKERRXX(ierr); ierr = KSPSetTolerances(p_KSP, LinearSolverImplementation<T, I>::p_relativeTolerance, LinearSolverImplementation<T, I>::p_solutionTolerance, PETSC_DEFAULT, LinearSolverImplementation<T, I>::p_maxIterations); CHKERRXX(ierr); ierr = KSPSetFromOptions(p_KSP);CHKERRXX(ierr); } catch (const PETSC_EXCEPTION_TYPE& e) { throw PETScException(ierr, e); } }
// Fill the RHS of the discretized Poisson equation over all interior nodes:
//   rhs = -4*pi*rho * dx^2*dy^2*dz^2
// minus the known Dirichlet boundary potentials moved over from the matrix
// side (the kronecker_delta terms are nonzero only for nodes adjacent to a
// domain boundary).
void Field_solver::init_rhs_vector_in_full_domain( Spatial_mesh &spat_mesh )
{
    PetscErrorCode ierr;
    int nx = spat_mesh.x_n_nodes;
    int ny = spat_mesh.y_n_nodes;
    int nz = spat_mesh.z_n_nodes;
    //int nrow = (nx-2)*(ny-2);
    double dx = spat_mesh.x_cell_size;
    double dy = spat_mesh.y_cell_size;
    double dz = spat_mesh.z_cell_size;
    double rhs_at_node;

    // todo: split into separate functions
    for ( int k = 1; k <= nz-2; k++ ) {
        for ( int j = 1; j <= ny-2; j++ ) {
            for ( int i = 1; i <= nx-2; i++ ) {
                // - 4 * pi * rho * dx^2 * dy^2 * dz^2
                rhs_at_node = -4.0 * M_PI * spat_mesh.charge_density[i][j][k];
                rhs_at_node = rhs_at_node * dx * dx * dy * dy * dz * dz;
                // left and right boundary
                rhs_at_node = rhs_at_node
                    - dy * dy * dz * dz *
                    ( kronecker_delta(i,1) * spat_mesh.potential[0][j][k] +
                      kronecker_delta(i,nx-2) * spat_mesh.potential[nx-1][j][k] );
                // top and bottom boundary
                rhs_at_node = rhs_at_node
                    - dx * dx * dz * dz *
                    ( kronecker_delta(j,1) * spat_mesh.potential[i][0][k] +
                      kronecker_delta(j,ny-2) * spat_mesh.potential[i][ny-1][k] );
                // near and far boundary
                rhs_at_node = rhs_at_node
                    - dx * dx * dy * dy *
                    ( kronecker_delta(k,1) * spat_mesh.potential[i][j][0] +
                      kronecker_delta(k,nz-2) * spat_mesh.potential[i][j][nz-1] );
                // set rhs vector values
                ierr = VecSetValue( rhs,
                                    node_ijk_to_global_index_in_matrix( i, j, k, nx, ny, nz ),
                                    rhs_at_node, INSERT_VALUES );
                CHKERRXX( ierr );
            }
        }
    }
    ierr = VecAssemblyBegin( rhs ); CHKERRXX( ierr );
    ierr = VecAssemblyEnd( rhs ); CHKERRXX( ierr );
    return;
}
// Set up the PETSc machinery for the Poisson solve on the interior nodes:
// solution and RHS vectors, the equation matrix (adjusted for any inner
// regions), and the KSP solver with its preconditioner. The member
// variables rstart/rend/nlocal record this process's ownership range,
// taken from the solution vector's layout and reused for all other
// PETSc objects.
Field_solver::Field_solver( Spatial_mesh &spat_mesh,
                            Inner_regions_manager &inner_regions )
{
    int nx = spat_mesh.x_n_nodes;
    int ny = spat_mesh.y_n_nodes;
    int nz = spat_mesh.z_n_nodes;
    // One matrix row per interior (non-boundary) node.
    PetscInt nrows = (nx-2)*(ny-2)*(nz-2);
    PetscInt ncols = nrows;
    PetscErrorCode ierr;
    PetscInt A_approx_nonzero_per_row = 7;
    int mpi_n_of_proc, mpi_process_rank;
    MPI_Comm_size( PETSC_COMM_WORLD, &mpi_n_of_proc );
    MPI_Comm_rank( PETSC_COMM_WORLD, &mpi_process_rank );

    alloc_petsc_vector( &phi_vec, nrows, "Solution" );
    ierr = VecSet( phi_vec, 0.0 ); CHKERRXX( ierr );
    get_vector_ownership_range_and_local_size_for_each_process(
        &phi_vec, &rstart, &rend, &nlocal );
    alloc_petsc_vector( &rhs, nrows, "RHS" );
    alloc_petsc_matrix( &A, nlocal, nlocal, nrows, ncols, A_approx_nonzero_per_row );

    construct_equation_matrix( &A, spat_mesh, inner_regions, nlocal, rstart, rend );
    create_solver_and_preconditioner( &ksp, &pc, &A );
}
// ------------------------------------------------------------- // PetscNonlinearSolverImplementation::p_build // ------------------------------------------------------------- void PetscNonlinearSolverImplementation::p_build(const std::string& option_prefix) { PetscErrorCode ierr(0); try { ierr = SNESCreate(this->communicator(), &p_snes); CHKERRXX(ierr); p_petsc_F = PETScVector(*p_F); if (!p_function.empty()) { ierr = SNESSetFunction(p_snes, *p_petsc_F, FormFunction, static_cast<void *>(this)); CHKERRXX(ierr); } p_petsc_J = PETScMatrix(*p_J); if (!p_jacobian.empty()) { ierr = SNESSetJacobian(p_snes, *p_petsc_J, *p_petsc_J, FormJacobian, static_cast<void *>(this)); CHKERRXX(ierr); } // set the ierr = SNESSetOptionsPrefix(p_snes, option_prefix.c_str()); CHKERRXX(ierr); KSP ksp; ierr = SNESGetKSP(p_snes, &ksp); CHKERRXX(ierr); ierr = KSPSetOptionsPrefix(ksp, option_prefix.c_str()); CHKERRXX(ierr); PC pc; ierr = KSPGetPC(ksp, &pc); CHKERRXX(ierr); ierr = PCSetOptionsPrefix(pc, option_prefix.c_str()); CHKERRXX(ierr); ierr = SNESMonitorSet(p_snes, MonitorNorms, PETSC_NULL, PETSC_NULL); CHKERRXX(ierr); ierr = SNESSetTolerances(p_snes, p_functionTolerance, PETSC_DEFAULT, p_solutionTolerance, p_maxIterations, PETSC_DEFAULT); ierr = SNESSetFromOptions(p_snes); CHKERRXX(ierr); } catch (const PETSC_EXCEPTION_TYPE& e) { throw PETScException(ierr, e); } }
// ------------------------------------------------------------- // MatConvertToDenseGA // ------------------------------------------------------------- PetscErrorCode MatConvertToDenseGA(Mat A, Mat *B) { PetscErrorCode ierr = 0; MPI_Comm comm; int lrows, grows, lcols, gcols; ierr = PetscObjectGetComm((PetscObject)A, &comm); CHKERRQ(ierr); ierr = MatGetSize(A, &grows, &gcols); CHKERRQ(ierr); ierr = MatGetLocalSize(A, &lrows, &lcols); CHKERRQ(ierr); ierr = MatCreateDenseGA(comm, lrows, lcols, grows, gcols, B); CHKERRXX(ierr); ierr = MatCopy(A, *B, SAME_NONZERO_PATTERN); CHKERRXX(ierr); return ierr; }
void Field_solver::deallocate_phi_array( double *local_phi_values, int proc, int mpi_process_rank ) { PetscErrorCode ierr; if( proc == mpi_process_rank ){ ierr = VecRestoreArray( phi_vec, &local_phi_values ); CHKERRXX( ierr ); } else { delete[] local_phi_values; } }
// ------------------------------------------------------------- // Finalize // ------------------------------------------------------------- /// Does whatever is necessary to shut down the PETSc library void Finalize(void) { if (!Initialized()) return; PetscErrorCode ierr(0); try { ierr = PetscFinalize(); CHKERRXX(ierr); } catch (const PETSC_EXCEPTION_TYPE& e) { throw PETScException(ierr, e); } }
void Field_solver::allocate_and_populate_phi_array( double **local_phi_values, int recieved_nlocal, int proc, int mpi_process_rank ) { PetscErrorCode ierr; if( proc == mpi_process_rank ){ ierr = VecGetArray( phi_vec, local_phi_values ); CHKERRXX( ierr ); } else { *local_phi_values = new double [recieved_nlocal]; } MPI_Bcast( *local_phi_values, recieved_nlocal, MPI_DOUBLE, proc, PETSC_COMM_WORLD ); }
// Fill the locally-owned diagonal block of A with an increasing sequence
// of values (0, 1, 2, ...) so a later copy can be verified element-wise.
static PetscErrorCode fill_pattern(Mat A, InsertMode addv)
{
  PetscErrorCode ierr(0);
  PetscScalar x(0.0);
  PetscInt lo, hi;
  ierr = MatGetOwnershipRange(A, &lo, &hi); CHKERRXX(ierr);
  // PetscInt loop indices: MatSetValues takes const PetscInt* index
  // arrays, which is not an int* when PETSc is built with 64-bit indices.
  for (PetscInt i = lo; i < hi; ++i) {
    for (PetscInt j = lo; j < hi; ++j) {
      ierr = MatSetValues(A, 1, &i, 1, &j, &x, addv); CHKERRXX(ierr);
      x += 1.0;
    }
  }
  ierr = MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY); CHKERRXX(ierr);
  ierr = MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY); CHKERRXX(ierr);
  return ierr;
}
// Allocate a distributed AIJ matrix with the given local and global sizes.
// Preallocation uses the same rough per-row estimate for both the diagonal
// and off-diagonal blocks.
void Field_solver::alloc_petsc_matrix( Mat *A,
                                       PetscInt nrow_local, PetscInt ncol_local,
                                       PetscInt nrow, PetscInt ncol,
                                       PetscInt nonzero_per_row )
{
    PetscErrorCode ierr;
    // PetscInt approx_nonzero_per_row = 7;
    ierr = MatCreate( PETSC_COMM_WORLD, A ); CHKERRXX( ierr );
    ierr = MatSetSizes( *A, nrow_local, ncol_local, nrow, ncol ); CHKERRXX( ierr );
    ierr = MatSetFromOptions( *A ); CHKERRXX( ierr );
    ierr = MatSetType( *A, MATAIJ ); CHKERRXX( ierr );
    // redo; set nonzero_per_row more accurately
    // if nlocal >= (nx-2)*(ny-2): max_diag_nonzero_per_row = 7, max_offdiag_nonzer_per_row = 3
    // (nx-2) <= nlocal < (nx-2)*(ny-2) : max_diag_nonzero_per_row = 5, max_offdiag_nonzer_per_row = 4
    // probably.
    ierr = MatMPIAIJSetPreallocation( *A, nonzero_per_row, NULL, nonzero_per_row, NULL); CHKERRXX( ierr );
    ierr = MatSetUp( *A ); CHKERRXX( ierr );
    return;
}
// ------------------------------------------------------------- // Initialized // ------------------------------------------------------------- bool Initialized(void) { PetscErrorCode ierr(0); PetscBool result; try { ierr = PetscInitialized(&result); CHKERRXX(ierr); } catch (const PETSC_EXCEPTION_TYPE& e) { throw PETScException(ierr, e); } return result; }
// Convert A to a GA-backed dense matrix and verify the copy element-wise
// over this process's ownership range.
static void convert_and_check(Mat A)
{
  PetscErrorCode ierr(0);
  PetscInt lo, hi;
  Mat B;
  ierr = MatConvertToDenseGA(A, &B); CHKERRXX(ierr);
  ierr = MatGetOwnershipRange(B, &lo, &hi); CHKERRXX(ierr);
  // PetscInt loop indices: MatGetValues takes const PetscInt* index
  // arrays, which is not an int* when PETSc is built with 64-bit indices.
  for (PetscInt i = lo; i < hi; ++i) {
    for (PetscInt j = lo; j < hi; ++j) {
      PetscScalar x;
      PetscScalar y;
      ierr = MatGetValues(A, 1, &i, 1, &j, &x); CHKERRXX(ierr);
      ierr = MatGetValues(B, 1, &i, 1, &j, &y); CHKERRXX(ierr);
      BOOST_CHECK_EQUAL(x, y);
    }
  }
  ierr = MatDestroy(&B); CHKERRXX(ierr);
}
/// Destructor ~PETScLinearSolverImplementation(void) { PetscErrorCode ierr(0); try { PetscBool ok; ierr = PetscInitialized(&ok); if (ok) { ierr = KSPDestroy(&p_KSP); CHKERRXX(ierr); } } catch (...) { // just eat it } }
// Add boundary-condition corrections to the RHS at nodes adjacent to the
// inner-region boundary (the matching matrix rows are adjusted elsewhere).
//
// Fixes vs. original: a duplicated CHKERRXX(ierr) call was removed, and
// the collective VecAssemblyBegin/End pair was moved out of the
// 'number_of_elements != 0' branch — collective calls skipped on ranks
// with no values to add can deadlock the assembly.
void Field_solver::modify_rhs_near_object_boundaries(
    Spatial_mesh &spat_mesh,
    Inner_region &inner_region )
{
    int nx = spat_mesh.x_n_nodes;
    int ny = spat_mesh.y_n_nodes;
    int nz = spat_mesh.z_n_nodes;
    double dx = spat_mesh.x_cell_size;
    double dy = spat_mesh.y_cell_size;
    double dz = spat_mesh.z_cell_size;
    PetscErrorCode ierr;

    std::vector<PetscInt> indices_of_nodes_near_boundaries;
    std::vector<PetscScalar> rhs_modification_for_nodes_near_boundaries;

    indicies_of_near_boundary_nodes_and_rhs_modifications(
        indices_of_nodes_near_boundaries,
        rhs_modification_for_nodes_near_boundaries,
        nx, ny, nz, dx, dy, dz,
        inner_region );

    PetscInt number_of_elements = indices_of_nodes_near_boundaries.size();
    if( number_of_elements != 0 ){
        PetscInt *indices = &indices_of_nodes_near_boundaries[0];
        PetscScalar *values = &rhs_modification_for_nodes_near_boundaries[0];
        // ADD_VALUES gathers values from all processes.
        // Therefore, only a single process
        // should be responsible for calculation of rhs_modification
        // for a given node.
        ierr = VecSetValues( rhs, number_of_elements, indices, values,
                             ADD_VALUES );
        CHKERRXX( ierr );
    }
    // Collective calls: all ranks must participate.
    ierr = VecAssemblyBegin( rhs ); CHKERRXX( ierr );
    ierr = VecAssemblyEnd( rhs ); CHKERRXX( ierr );
}
// Zero the RHS at every node occupied by the inner region (off the domain
// edge); the corresponding equation rows are handled separately.
//
// Fix vs. original: VecAssemblyBegin/End are collective, so they are now
// executed on every process — ranks with zero occupied nodes previously
// skipped them, which can deadlock the assembly.
void Field_solver::set_rhs_at_nodes_occupied_by_objects(
    Spatial_mesh &spat_mesh,
    Inner_region &inner_region )
{
    int nx = spat_mesh.x_n_nodes;
    int ny = spat_mesh.y_n_nodes;
    int nz = spat_mesh.z_n_nodes;

    std::vector<PetscInt> indices_of_inner_nodes_not_at_domain_edge;
    indices_of_inner_nodes_not_at_domain_edge =
        list_of_nodes_global_indices_in_matrix(
            inner_region.inner_nodes_not_at_domain_edge, nx, ny, nz );

    PetscErrorCode ierr;
    PetscInt num_of_elements = indices_of_inner_nodes_not_at_domain_edge.size();
    if( num_of_elements != 0 ){
        PetscInt *global_indices = &indices_of_inner_nodes_not_at_domain_edge[0];
        std::vector<PetscScalar> zeroes( num_of_elements, 0.0 );
        ierr = VecSetValues( rhs, num_of_elements, global_indices,
                             &zeroes[0], INSERT_VALUES );
        CHKERRXX( ierr );
    }
    // Collective calls: all ranks must participate.
    ierr = VecAssemblyBegin( rhs ); CHKERRXX( ierr );
    ierr = VecAssemblyEnd( rhs ); CHKERRXX( ierr );
}