/*
  TaoLineSearchSetType - Sets the algorithm used by the line search object.

  Destroys any implementation-specific data of the previous type, restores
  default tolerances/counters, clears the ops table, then invokes the
  constructor registered for the requested type.
*/
PetscErrorCode TaoLineSearchSetType(TaoLineSearch ls, const TaoLineSearchType type)
{
  PetscErrorCode ierr;
  PetscErrorCode (*create)(TaoLineSearch);
  PetscBool      issame;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ls,TAOLINESEARCH_CLASSID,1);
  PetscValidCharPointer(type,2);

  /* No-op when the requested type is already set */
  ierr = PetscObjectTypeCompare((PetscObject)ls,type,&issame);CHKERRQ(ierr);
  if (issame) PetscFunctionReturn(0);

  ierr = PetscFunctionListFind(TaoLineSearchList,type,(void (**)(void))&create);CHKERRQ(ierr);
  if (!create) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested TaoLineSearch type %s",type);

  /* Tear down the old implementation before switching */
  if (ls->ops->destroy) {
    ierr = (*(ls)->ops->destroy)(ls);CHKERRQ(ierr);
  }

  /* Reset default parameters and evaluation counters */
  ls->max_funcs = 30;
  ls->ftol      = 0.0001;
  ls->gtol      = 0.9;
#if defined(PETSC_USE_REAL_SINGLE)
  ls->rtol = 1.0e-5;
#else
  ls->rtol = 1.0e-10;
#endif
  ls->stepmin = 1.0e-20;
  ls->stepmax = 1.0e+20;
  ls->nfeval  = 0;
  ls->ngeval  = 0;
  ls->nfgeval = 0;

  /* Clear the ops table; the type constructor fills in what it implements */
  ls->ops->setup          = 0;
  ls->ops->apply          = 0;
  ls->ops->view           = 0;
  ls->ops->setfromoptions = 0;
  ls->ops->destroy        = 0;
  ls->setupcalled = PETSC_FALSE;

  ierr = (*create)(ls);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)ls,type);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/*@
   PetscDrawSplitViewPort - Splits a window shared by several processes into
   smaller view ports, one per process.

   Collective on PetscDraw

   Input Parameter:
.  draw - the drawing context

   Level: advanced

   Concepts: drawing^in subset of window

.seealso: PetscDrawDivideViewPort(), PetscDrawSetViewPort()
@*/
PetscErrorCode PetscDrawSplitViewPort(PetscDraw draw)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank,size;
  PetscInt       n;
  PetscBool      isnull;
  PetscReal      xl,xr,yl,yr,h;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(draw,PETSC_DRAW_CLASSID,1);
  ierr = PetscObjectTypeCompare((PetscObject)draw,PETSC_DRAW_NULL,&isnull);CHKERRQ(ierr);
  if (isnull) PetscFunctionReturn(0);

  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)draw),&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)draw),&size);CHKERRQ(ierr);

  /* Smallest n with n*n >= size: processes are laid out on an n x n grid */
  n = (PetscInt)(.1 + PetscSqrtReal((PetscReal)size));
  while (n*n < size) n++;

  h  = 1.0/n;
  xl = (rank % n)*h;
  xr = xl + h;
  yl = (rank/n)*h;
  yr = yl + h;

  /* Outline this process's cell */
  ierr = PetscDrawLine(draw,xl,yl,xl,yr,PETSC_DRAW_BLACK);CHKERRQ(ierr);
  ierr = PetscDrawLine(draw,xl,yr,xr,yr,PETSC_DRAW_BLACK);CHKERRQ(ierr);
  ierr = PetscDrawLine(draw,xr,yr,xr,yl,PETSC_DRAW_BLACK);CHKERRQ(ierr);
  ierr = PetscDrawLine(draw,xr,yl,xl,yl,PETSC_DRAW_BLACK);CHKERRQ(ierr);
  ierr = PetscDrawSynchronizedFlush(draw);CHKERRQ(ierr);

  /* Inset the usable port slightly inside the drawn border */
  draw->port_xl = xl + .1*h;
  draw->port_xr = xr - .1*h;
  draw->port_yl = yl + .1*h;
  draw->port_yr = yr - .1*h;

  if (draw->ops->setviewport) {
    ierr = (*draw->ops->setviewport)(draw,xl,yl,xr,yr);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
/* Report whether I-node routines are in use for a SeqAIJ matrix (ASCII info formats only) */
PetscErrorCode MatView_SeqAIJ_Inode(Mat A,PetscViewer viewer)
{
  Mat_SeqAIJ        *a = (Mat_SeqAIJ*)A->data;
  PetscErrorCode    ierr;
  PetscBool         iascii;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
    if (format == PETSC_VIEWER_ASCII_INFO_DETAIL || format == PETSC_VIEWER_ASCII_INFO) {
      if (a->inode.size) {
        ierr = PetscViewerASCIIPrintf(viewer,"using I-node routines: found %D nodes, limit used is %D\n",a->inode.node_count,a->inode.limit);CHKERRQ(ierr);
      } else {
        ierr = PetscViewerASCIIPrintf(viewer,"not using I-node routines\n");CHKERRQ(ierr);
      }
    }
  }
  PetscFunctionReturn(0);
}
/*
  AOView_Mapping - Prints the application-to-PETSc index mapping on rank 0
  of the AO's communicator when the viewer is ASCII.

  Fix: the PetscViewerASCIIPrintf() return codes were previously ignored;
  they are now checked with CHKERRQ like every other call in this file.
*/
PetscErrorCode AOView_Mapping(AO ao, PetscViewer viewer)
{
  AO_Mapping     *aomap = (AO_Mapping*) ao->data;
  PetscMPIInt    rank;
  PetscInt       i;
  PetscBool      iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Only the first process prints; the mapping is replicated */
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)ao), &rank);CHKERRQ(ierr);
  if (rank) PetscFunctionReturn(0);
  ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERASCII, &iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerASCIIPrintf(viewer, "Number of elements in ordering %D\n", aomap->N);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer, " App. PETSc\n");CHKERRQ(ierr);
    for (i = 0; i < aomap->N; i++) {
      ierr = PetscViewerASCIIPrintf(viewer, "%D %D %D\n", i, aomap->app[i], aomap->petsc[aomap->appPerm[i]]);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
/*@ VecGhostUpdateEnd - End the vector scatter to update the vector from local representation to global or global representation to local. Neighbor-wise Collective on Vec Input Parameters: + g - the vector (obtained with VecCreateGhost() or VecDuplicate()) . insertmode - one of ADD_VALUES or INSERT_VALUES - scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE Notes: Use the following to update the ghost regions with correct values from the owning process .vb VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD); VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD); .ve Use the following to accumulate the ghost region values onto the owning processors .vb VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE); VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE); .ve To accumulate the ghost region values onto the owning processors and then update the ghost regions correctly, call the later followed by the former, i.e., .vb VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE); VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE); VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD); VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD); .ve Level: advanced .seealso: VecCreateGhost(), VecGhostUpdateBegin(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),VecCreateGhostWithArray() @*/ PetscErrorCode VecGhostUpdateEnd(Vec g,InsertMode insertmode,ScatterMode scattermode) { Vec_MPI *v; PetscErrorCode ierr; PetscBool ismpi; PetscFunctionBegin; PetscValidHeaderSpecific(g,VEC_CLASSID,1); ierr = PetscObjectTypeCompare((PetscObject)g,VECMPI,&ismpi);CHKERRQ(ierr); if (ismpi) { v = (Vec_MPI*)g->data; if (!v->localrep) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Vector is not ghosted"); if (!v->localupdate) PetscFunctionReturn(0); if (scattermode == SCATTER_REVERSE) { ierr = VecScatterEnd(v->localupdate,v->localrep,g,insertmode,scattermode);CHKERRQ(ierr); } else { ierr = VecScatterEnd(v->localupdate,g,v->localrep,insertmode,scattermode);CHKERRQ(ierr); } } PetscFunctionReturn(0); }
/*
  MatLMVMSetPrev - Stores the previous iterate and gradient in the LMVM
  context of a MATSHELL matrix M (or seeds the history via MatLMVMUpdate
  when no updates have occurred yet).

  Fix: M is now validated with PetscValidHeaderSpecific before use, matching
  the validation already performed on x and g.
*/
extern PetscErrorCode MatLMVMSetPrev(Mat M, Vec x, Vec g)
{
  MatLMVMCtx     *ctx;
  PetscErrorCode ierr;
  PetscBool      same;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(M,MAT_CLASSID,1);
  PetscValidHeaderSpecific(x,VEC_CLASSID,2);
  PetscValidHeaderSpecific(g,VEC_CLASSID,3);
  ierr = PetscObjectTypeCompare((PetscObject)M,MATSHELL,&same);CHKERRQ(ierr);
  if (!same) SETERRQ(PETSC_COMM_SELF,1,"Matrix M is not type MatLMVM");
  ierr = MatShellGetContext(M,(void**)&ctx);CHKERRQ(ierr);
  if (ctx->nupdates == 0) {
    /* First call: initialize the LMVM history with this point */
    ierr = MatLMVMUpdate(M,x,g);CHKERRQ(ierr);
  } else {
    ierr = VecCopy(x,ctx->Xprev);CHKERRQ(ierr);
    ierr = VecCopy(g,ctx->Gprev);CHKERRQ(ierr);
    /* TODO scaling specific terms */
  }
  PetscFunctionReturn(0);
}
/* Print the SPAI preconditioner parameters to an ASCII viewer */
static PetscErrorCode PCView_SPAI(PC pc,PetscViewer viewer)
{
  PC_SPAI        *ispai = (PC_SPAI*)pc->data;
  PetscErrorCode ierr;
  PetscBool      iascii;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (!iascii) PetscFunctionReturn(0);

  ierr = PetscViewerASCIIPrintf(viewer," SPAI preconditioner\n");CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer," epsilon %g\n", (double)ispai->epsilon);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer," nbsteps %d\n", ispai->nbsteps);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer," max %d\n", ispai->max);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer," maxnew %d\n", ispai->maxnew);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer," block_size %d\n",ispai->block_size);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer," cache_size %d\n",ispai->cache_size);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer," verbose %d\n", ispai->verbose);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer," sp %d\n", ispai->sp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/* View ILU-specific options, then defer to the common factored-PC viewer */
static PetscErrorCode PCView_ILU(PC pc,PetscViewer viewer)
{
  PC_ILU         *ilu = (PC_ILU*)pc->data;
  PetscErrorCode ierr;
  PetscBool      iascii;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    if (ilu->inplace) {
      ierr = PetscViewerASCIIPrintf(viewer," ILU: in-place factorization\n");CHKERRQ(ierr);
    } else {
      ierr = PetscViewerASCIIPrintf(viewer," ILU: out-of-place factorization\n");CHKERRQ(ierr);
    }
    if (ilu->reusefill) {
      ierr = PetscViewerASCIIPrintf(viewer," ILU: Reusing fill from past factorization\n");CHKERRQ(ierr);
    }
    if (ilu->reuseordering) {
      ierr = PetscViewerASCIIPrintf(viewer," ILU: Reusing reordering from past factorization\n");CHKERRQ(ierr);
    }
  }
  /* Common output shared by all factorization-based preconditioners */
  ierr = PCView_Factor(pc,viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/*@C
   KSPGMRESMonitorKrylov - Calls VecView() for each direction in the GMRES
   accumulated Krylov space.

   Collective on KSP

   Input Parameters:
+  ksp - the KSP context
.  its - iteration number
.  fgnorm - 2-norm of residual (or gradient)
-  dummy - a viewers object created with PetscViewersCreate()

   Level: intermediate

.keywords: KSP, nonlinear, vector, monitor, view, Krylov space

.seealso: KSPMonitorSet(), KSPMonitorDefault(), VecView(), PetscViewersCreate(), PetscViewersDestroy()
@*/
PetscErrorCode KSPGMRESMonitorKrylov(KSP ksp,PetscInt its,PetscReal fgnorm,void *dummy)
{
  PetscViewers   viewers = (PetscViewers)dummy;
  KSP_GMRES      *gmres  = (KSP_GMRES*)ksp->data;
  PetscErrorCode ierr;
  Vec            x;
  PetscViewer    viewer;
  PetscBool      isdraw;

  PetscFunctionBegin;
  ierr = PetscViewersGetViewer(viewers,gmres->it+1,&viewer);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
  if (!isdraw) {
    /* Lazily turn the viewer into a draw viewer the first time it is used */
    ierr = PetscViewerSetType(viewer,PETSCVIEWERDRAW);CHKERRQ(ierr);
    ierr = PetscViewerDrawSetInfo(viewer,NULL,"Krylov GMRES Monitor",PETSC_DECIDE,PETSC_DECIDE,300,300);CHKERRQ(ierr);
  }
  x    = VEC_VV(gmres->it+1);
  ierr = VecView(x,viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/*
  DMLibMeshGetVariables - Returns the number of variables in the DMLIBMESH
  and, optionally, a newly allocated array of their names (caller frees the
  array and each string).

  Fix: the return code of PetscObjectTypeCompare() was assigned to ierr but
  never checked; it is now checked with CHKERRQ.
*/
PetscErrorCode DMLibMeshGetVariables(DM dm, PetscInt *n, char*** varnames)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscBool islibmesh;
  PetscInt i;
  ierr = PetscObjectTypeCompare((PetscObject)dm, DMLIBMESH,&islibmesh);CHKERRQ(ierr);
  if(!islibmesh) SETERRQ2(((PetscObject)dm)->comm, PETSC_ERR_ARG_WRONG, "Got DM oftype %s, not of type %s", ((PetscObject)dm)->type_name, DMLIBMESH);
  DM_libMesh *dlm = (DM_libMesh *)(dm->data);
  PetscValidPointer(n,2);
  *n = dlm->varids->size();
  /* Names are optional; stop here if the caller only wants the count */
  if(!varnames) PetscFunctionReturn(0);
  ierr = PetscMalloc(*n*sizeof(char*), varnames); CHKERRQ(ierr);
  i = 0;
  /* varids maps variable name -> id; copy each name in map order */
  for(std::map<std::string, unsigned int>::const_iterator it = dlm->varids->begin(); it != dlm->varids->end(); ++it){
    ierr = PetscStrallocpy(it->first.c_str(), *varnames+i); CHKERRQ(ierr);
    ++i;
  }
  PetscFunctionReturn(0);
}
/* View the BV's backing Mat; in MATLAB format also emit assignment commands */
PetscErrorCode BVView_Mat(BV bv,PetscViewer viewer)
{
  PetscErrorCode    ierr;
  BV_MAT            *ctx = (BV_MAT*)bv->data;
  PetscViewerFormat format;
  PetscBool         isascii;

  PetscFunctionBegin;
  ierr = MatView(ctx->A,viewer);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&isascii);CHKERRQ(ierr);
  if (isascii) {
    ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
    if (format == PETSC_VIEWER_ASCII_MATLAB) {
      ierr = PetscViewerASCIIPrintf(viewer,"%s=%s;clear %s\n",((PetscObject)bv)->name,((PetscObject)ctx->A)->name,((PetscObject)ctx->A)->name);CHKERRQ(ierr);
      if (bv->nc) {
        /* Skip the leading constraint columns in the MATLAB variable */
        ierr = PetscViewerASCIIPrintf(viewer,"%s=%s(:,%D:end);\n",((PetscObject)bv)->name,((PetscObject)bv)->name,bv->nc+1);CHKERRQ(ierr);
      }
    }
  }
  PetscFunctionReturn(0);
}
/*@
   MatMeshToCellGraph - Uses the ParMETIS package to convert a Mat that represents
   a mesh into a Mat that represents the graph of the coupling between cells (the
   "dual" graph), suitable for partitioning with the MatPartitioning object. Use
   this to partition cells of a mesh.

   Collective on Mat

   Input Parameters:
+  mesh - the graph that represents the mesh
-  ncommonnodes - mesh elements that share this number of common nodes are considered
                  neighbors; use 2 for triangles and quadrilaterals, 3 for tetrahedra
                  and 4 for hexahedra

   Output Parameter:
.  dual - the dual graph

   Notes:
     Currently requires ParMetis to be installed and uses ParMETIS_V3_Mesh2Dual()

     The columns of each row of the Mat mesh are the global vertex numbers of the
     vertices of that row's cell. The number of rows in mesh is the number of cells;
     the number of columns is the number of vertices.

   Level: advanced

.seealso: MatMeshToVertexGraph(), MatCreateMPIAdj(), MatPartitioningCreate()
@*/
PetscErrorCode MatMeshToCellGraph(Mat mesh,PetscInt ncommonnodes,Mat *dual)
{
  PetscErrorCode ierr;
  PetscInt       *newxadj,*newadjncy;
  PetscInt       numflag = 0;
  Mat_MPIAdj     *adj    = (Mat_MPIAdj *)mesh->data,*newadj;
  PetscBool      flg;
  int            status;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)mesh,MATMPIADJ,&flg);CHKERRQ(ierr);
  if (!flg) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Must use MPIAdj matrix type");

  CHKMEMQ;
  status = ParMETIS_V3_Mesh2Dual(mesh->rmap->range,adj->i,adj->j,&numflag,&ncommonnodes,&newxadj,&newadjncy,&((PetscObject)mesh)->comm);CHKERRQPARMETIS(status);
  CHKMEMQ;
  ierr = MatCreateMPIAdj(((PetscObject)mesh)->comm,mesh->rmap->n,mesh->rmap->N,newxadj,newadjncy,PETSC_NULL,dual);CHKERRQ(ierr);

  /* ParMETIS allocated the arrays with system malloc, so free them with system free */
  newadj = (Mat_MPIAdj *)(*dual)->data;
  newadj->freeaijwithfree = PETSC_TRUE;
  PetscFunctionReturn(0);
}
/* Print the RK tableau information; also views the attached adaptor */
static PetscErrorCode TSView_RK(TS ts,PetscViewer viewer)
{
  TS_RK          *rk = (TS_RK*)ts->data;
  PetscBool      iascii;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    RKTableau tab = rk->tableau;
    TSRKType  rktype;
    char      buf[512];

    ierr = TSRKGetType(ts,&rktype);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer," RK %s\n",rktype);CHKERRQ(ierr);
    ierr = PetscFormatRealArray(buf,sizeof(buf),"% 8.6f",tab->s,tab->c);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer," Abscissa c = %s\n",buf);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"FSAL: %s\n",tab->FSAL ? "yes" : "no");CHKERRQ(ierr);
  }
  if (ts->adapt) {
    ierr = TSAdaptView(ts->adapt,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
PetscErrorCode FormJacobian(SNES snes,Vec X,Mat *J,Mat *B,MatStructure *flag,void *ptr) { AppCtx *user = (AppCtx *) ptr; PetscErrorCode ierr; KSP ksp; PC pc; PetscBool ismg; *flag = SAME_NONZERO_PATTERN; ierr = FormJacobian_Grid(user,&user->fine,X,J,B);CHKERRQ(ierr); /* create coarse grid jacobian for preconditioner */ ierr = SNESGetKSP(snes,&ksp);CHKERRQ(ierr); ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr); ierr = PetscObjectTypeCompare((PetscObject)pc,PCMG,&ismg);CHKERRQ(ierr); if (ismg) { ierr = KSPSetOperators(user->ksp_fine,user->fine.J,user->fine.J,SAME_NONZERO_PATTERN);CHKERRQ(ierr); /* restrict X to coarse grid */ ierr = MatMult(user->R,X,user->coarse.x);CHKERRQ(ierr); ierr = VecPointwiseMult(user->coarse.x,user->coarse.x,user->Rscale);CHKERRQ(ierr); /* form Jacobian on coarse grid */ if (user->redundant_build) { /* get copy of coarse X onto each processor */ ierr = VecScatterBegin(user->tolocalall,user->coarse.x,user->localall,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); ierr = VecScatterEnd(user->tolocalall,user->coarse.x,user->localall,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); ierr = FormJacobian_Coarse(user,&user->coarse,user->localall,&user->coarse.J,&user->coarse.J);CHKERRQ(ierr); } else { /* coarse grid Jacobian computed in parallel */ ierr = FormJacobian_Grid(user,&user->coarse,user->coarse.x,&user->coarse.J,&user->coarse.J);CHKERRQ(ierr); } ierr = KSPSetOperators(user->ksp_coarse,user->coarse.J,user->coarse.J,SAME_NONZERO_PATTERN);CHKERRQ(ierr); } return 0; }
int main(int argc,char **argv) { PetscInt i,n,start,stride; const PetscInt *ii; IS is; PetscBool flg; PetscErrorCode ierr; ierr = PetscInitialize(&argc,&argv,(char*)0,help);CHKERRQ(ierr); /* Test IS of size 0 */ ierr = ISCreateStride(PETSC_COMM_SELF,0,0,2,&is);CHKERRQ(ierr); ierr = ISGetSize(is,&n);CHKERRQ(ierr); if (n != 0) SETERRQ(PETSC_COMM_SELF,1,"ISCreateStride"); ierr = ISStrideGetInfo(is,&start,&stride);CHKERRQ(ierr); if (start != 0) SETERRQ(PETSC_COMM_SELF,1,"ISStrideGetInfo"); if (stride != 2) SETERRQ(PETSC_COMM_SELF,1,"ISStrideGetInfo"); ierr = PetscObjectTypeCompare((PetscObject)is,ISSTRIDE,&flg);CHKERRQ(ierr); if (!flg) SETERRQ(PETSC_COMM_SELF,1,"ISStride"); ierr = ISGetIndices(is,&ii);CHKERRQ(ierr); ierr = ISRestoreIndices(is,&ii);CHKERRQ(ierr); ierr = ISDestroy(&is);CHKERRQ(ierr); /* Test ISGetIndices() */ ierr = ISCreateStride(PETSC_COMM_SELF,10000,-8,3,&is);CHKERRQ(ierr); ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr); ierr = ISGetIndices(is,&ii);CHKERRQ(ierr); for (i=0; i<10000; i++) { if (ii[i] != -8 + 3*i) SETERRQ(PETSC_COMM_SELF,1,"ISGetIndices"); } ierr = ISRestoreIndices(is,&ii);CHKERRQ(ierr); ierr = ISDestroy(&is);CHKERRQ(ierr); ierr = PetscFinalize(); return 0; }
/*
  MatDestroy_SuperLU_DIST - Releases SuperLU_DIST factorization storage, the
  process grid and the duplicated communicator, then dispatches to the
  destroy routine of the underlying AIJ type.
*/
PetscErrorCode MatDestroy_SuperLU_DIST(Mat A)
{
  PetscErrorCode   ierr;
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->spptr;
  PetscBool        flg;

  PetscFunctionBegin;
  if (lu && lu->CleanUpSuperLU_Dist) {
    /* Deallocate SuperLU_DIST storage */
    if (lu->MatInputMode == GLOBAL) {
      Destroy_CompCol_Matrix_dist(&lu->A_sup);
    } else {
      Destroy_CompRowLoc_Matrix_dist(&lu->A_sup);
      if ( lu->options.SolveInitialized ) {
#if defined(PETSC_USE_COMPLEX)
        zSolveFinalize(&lu->options, &lu->SOLVEstruct);
#else
        dSolveFinalize(&lu->options, &lu->SOLVEstruct);
#endif
      }
    }
    Destroy_LU(A->cmap->N, &lu->grid, &lu->LUstruct);
    ScalePermstructFree(&lu->ScalePermstruct);
    LUstructFree(&lu->LUstruct);

    /* Release the SuperLU_DIST process grid. */
    superlu_gridexit(&lu->grid);
    ierr = MPI_Comm_free(&(lu->comm_superlu));CHKERRQ(ierr);
  }
  ierr = PetscFree(A->spptr);CHKERRQ(ierr);

  /* Fall through to the destroy of the base (Seq or MPI) AIJ matrix */
  ierr = PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = MatDestroy_SeqAIJ(A);CHKERRQ(ierr);
  } else {
    ierr = MatDestroy_MPIAIJ(A);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
/*
  ISL2GCompose - Compose an IS with an ISLocalToGlobalMapping to map from the
  IS source indices to global indices. Takes a fast block path when the IS is
  an ISBLOCK with the same block size as the mapping; otherwise maps entry by
  entry (or copies verbatim when no mapping is given).
*/
static PetscErrorCode ISL2GCompose(IS is,ISLocalToGlobalMapping ltog,ISLocalToGlobalMapping *cltog)
{
  PetscErrorCode ierr;
  const PetscInt *idx;
  PetscInt       m,*idxm;
  PetscBool      isblock;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(is,IS_CLASSID,1);
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,2);
  PetscValidPointer(cltog,3);
  ierr = PetscObjectTypeCompare((PetscObject)is,ISBLOCK,&isblock);CHKERRQ(ierr);
  if (isblock) {
    PetscInt bs,lbs;

    ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetBlockSize(ltog,&lbs);CHKERRQ(ierr);
    if (bs == lbs) {
      /* Block sizes agree: map whole blocks at once */
      ierr = ISGetLocalSize(is,&m);CHKERRQ(ierr);
      m    = m/bs;
      ierr = ISBlockGetIndices(is,&idx);CHKERRQ(ierr);
      ierr = PetscMalloc1(m,&idxm);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingApplyBlock(ltog,m,idx,idxm);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)is),bs,m,idxm,PETSC_OWN_POINTER,cltog);CHKERRQ(ierr);
      ierr = ISBlockRestoreIndices(is,&idx);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }
  }
  /* General path: map (or copy) scalar indices */
  ierr = ISGetLocalSize(is,&m);CHKERRQ(ierr);
  ierr = ISGetIndices(is,&idx);CHKERRQ(ierr);
  ierr = PetscMalloc1(m,&idxm);CHKERRQ(ierr);
  if (ltog) {
    ierr = ISLocalToGlobalMappingApply(ltog,m,idx,idxm);CHKERRQ(ierr);
  } else {
    ierr = PetscMemcpy(idxm,idx,m*sizeof(PetscInt));CHKERRQ(ierr);
  }
  ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)is),1,m,idxm,PETSC_OWN_POINTER,cltog);CHKERRQ(ierr);
  ierr = ISRestoreIndices(is,&idx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/*@
   MatMeshToCellGraph - Uses the ParMETIS package to convert a Mat that represents
   a mesh into a Mat that represents the graph of the coupling between cells (the
   "dual" graph), suitable for partitioning with the MatPartitioning object. Use
   this to partition cells of a mesh.

   Collective on Mat

   Input Parameters:
+  mesh - the graph that represents the mesh
-  ncommonnodes - mesh elements that share this number of common nodes are considered
                  neighbors; use 2 for triangles and quadrilaterals, 3 for tetrahedra
                  and 4 for hexahedra

   Output Parameter:
.  dual - the dual graph

   Notes:
     Currently requires ParMetis to be installed and uses ParMETIS_V3_Mesh2Dual()

$     Each row of the mesh object represents a single cell in the mesh. For triangles it has 3 entries, quadrilaterals 4 entries,
$     tetrahedra 4 entries and hexahedra 8 entries. You can mix triangles and quadrilaterals in the same mesh, but cannot
$     mix tetrahedra and hexahedra
$     The columns of each row of the Mat mesh are the global vertex numbers of the vertices of that row's cell.
$     The number of rows in mesh is number of cells, the number of columns is the number of vertices.

   Level: advanced

.seealso: MatMeshToVertexGraph(), MatCreateMPIAdj(), MatPartitioningCreate()
@*/
PetscErrorCode MatMeshToCellGraph(Mat mesh,PetscInt ncommonnodes,Mat *dual)
{
  PetscErrorCode ierr;
  PetscInt       *newxadj,*newadjncy;
  PetscInt       numflag = 0;
  Mat_MPIAdj     *adj    = (Mat_MPIAdj*)mesh->data,*newadj;
  PetscBool      flg;
  int            status;
  MPI_Comm       comm;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)mesh,MATMPIADJ,&flg);CHKERRQ(ierr);
  if (!flg) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Must use MPIAdj matrix type");

  ierr = PetscObjectGetComm((PetscObject)mesh,&comm);CHKERRQ(ierr);
  PetscStackCallParmetis(ParMETIS_V3_Mesh2Dual,((idx_t*)mesh->rmap->range,(idx_t*)adj->i,(idx_t*)adj->j,(idx_t*)&numflag,(idx_t*)&ncommonnodes,(idx_t**)&newxadj,(idx_t**)&newadjncy,&comm));
  ierr = MatCreateMPIAdj(PetscObjectComm((PetscObject)mesh),mesh->rmap->n,mesh->rmap->N,newxadj,newadjncy,NULL,dual);CHKERRQ(ierr);

  /* ParMETIS allocated the arrays with system malloc, so free them with system free */
  newadj = (Mat_MPIAdj*)(*dual)->data;
  newadj->freeaijwithfree = PETSC_TRUE;
  PetscFunctionReturn(0);
}
/* Set up the Jacobi-Davidson eigensolver: common Davidson setup plus KSP defaults */
PetscErrorCode EPSSetUp_JD(EPS eps)
{
  PetscErrorCode ierr;
  PetscBool      ispreonly;
  KSP            ksp;

  PetscFunctionBegin;
  /* Setup common for all davidson solvers */
  ierr = EPSSetUp_XD(eps);CHKERRQ(ierr);

  /* Set the default options of the KSP (only if the user did not pick a type) */
  ierr = STGetKSP(eps->st,&ksp);CHKERRQ(ierr);
  if (!((PetscObject)ksp)->type_name) {
    ierr = KSPSetType(ksp,KSPBCGSL);CHKERRQ(ierr);
    ierr = KSPSetTolerances(ksp,1e-4,PETSC_DEFAULT,PETSC_DEFAULT,90);CHKERRQ(ierr);
  }

  /* Check some constraints */
  ierr = PetscObjectTypeCompare((PetscObject)ksp,KSPPREONLY,&ispreonly);CHKERRQ(ierr);
  if (ispreonly) SETERRQ(PetscObjectComm((PetscObject)eps),PETSC_ERR_SUP,"EPSJD does not work with KSPPREONLY");
  PetscFunctionReturn(0);
}
/*
   SNESMatrixFreeView2_Private - Views matrix-free parameters.
*/
PetscErrorCode SNESMatrixFreeView2_Private(Mat J,PetscViewer viewer)
{
  PetscErrorCode ierr;
  MFCtx_Private  *ctx;
  PetscBool      iascii;

  PetscFunctionBegin;
  ierr = MatShellGetContext(J,(void**)&ctx);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (!iascii) PetscFunctionReturn(0);

  ierr = PetscViewerASCIIPrintf(viewer," SNES matrix-free approximation:\n");CHKERRQ(ierr);
  if (ctx->jorge) {
    ierr = PetscViewerASCIIPrintf(viewer," using Jorge's method of determining differencing parameter\n");CHKERRQ(ierr);
  }
  ierr = PetscViewerASCIIPrintf(viewer," err=%g (relative error in function evaluation)\n",(double)ctx->error_rel);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer," umin=%g (minimum iterate parameter)\n",(double)ctx->umin);CHKERRQ(ierr);
  if (ctx->compute_err) {
    ierr = PetscViewerASCIIPrintf(viewer," freq_err=%D (frequency for computing err)\n",ctx->compute_err_freq);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
/*@C
  ISSetType - Builds an index set, for a particular implementation.

  Collective on IS

  Input Parameters:
+ is     - The index set object
- method - The name of the index set type

  Options Database Key:
. -is_type <type> - Sets the index set type; use -help for a list of available types

  Notes:
  See "petsc/include/petscis.h" for available istor types (for instance, ISGENERAL,
  ISSTRIDE, or ISBLOCK).

  Use ISDuplicate() to make a duplicate

  Level: intermediate

.seealso: ISGetType(), ISCreate()
@*/
PetscErrorCode ISSetType(IS is, ISType method)
{
  PetscErrorCode (*r)(IS);
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(is, IS_CLASSID,1);
  ierr = PetscObjectTypeCompare((PetscObject) is, method, &match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0);

  /* Make sure all built-in types have been registered */
  if (!ISRegisterAllCalled) {
    ierr = ISRegisterAll(PETSC_NULL);CHKERRQ(ierr);
  }
  ierr = PetscFunctionListFind( ((PetscObject)is)->comm,ISList, method,PETSC_TRUE,(void (**)(void)) &r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown IS type: %s", method);

  /* Destroy implementation data of the old type before constructing the new one */
  if (is->ops->destroy) {
    ierr = (*is->ops->destroy)(is);CHKERRQ(ierr);
    is->ops->destroy = PETSC_NULL;
  }
  ierr = (*r)(is);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)is,method);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/*@
   ISAllGather - Given an index set (IS) on each processor, generates a large
   index set (same on each processor) by concatenating together each
   processor's index set.

   Collective on IS

   Input Parameter:
.  is - the distributed index set

   Output Parameter:
.  isout - the concatenated index set (same on all processors)

   Notes:
   ISAllGather() is clearly not scalable for large index sets.

   The IS created on each processor must be created with a common
   communicator (e.g., PETSC_COMM_WORLD). If the index sets were created
   with PETSC_COMM_SELF, this routine will not work as expected, since
   each process will generate its own new IS that consists only of
   itself.

   The communicator for this new IS is PETSC_COMM_SELF

   Level: intermediate

   Concepts: gather^index sets
   Concepts: index sets^gathering to all processors
   Concepts: IS^gathering to all processors

.seealso: ISCreateGeneral(), ISCreateStride(), ISCreateBlock()
@*/
PetscErrorCode ISAllGather(IS is,IS *isout)
{
  PetscErrorCode ierr;
  PetscInt       *indices,n,i,N,step,first;
  const PetscInt *lindices;
  MPI_Comm       comm;
  PetscMPIInt    size,*sizes = NULL,*offsets = NULL,nn;
  PetscBool      stride;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(is,IS_CLASSID,1);
  PetscValidPointer(isout,2);

  ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)is,ISSTRIDE,&stride);CHKERRQ(ierr);
  if (size == 1 && stride) { /* should handle parallel ISStride also */
    /* Sequential stride IS: just clone it without communication */
    ierr = ISStrideGetInfo(is,&first,&step);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,n,first,step,isout);CHKERRQ(ierr);
  } else {
    /* Gather local sizes, compute displacements, then gather the indices */
    ierr = PetscMalloc2(size,&sizes,size,&offsets);CHKERRQ(ierr);
    ierr = PetscMPIIntCast(n,&nn);CHKERRQ(ierr);
    ierr = MPI_Allgather(&nn,1,MPI_INT,sizes,1,MPI_INT,comm);CHKERRQ(ierr);
    offsets[0] = 0;
    for (i=1; i<size; i++) offsets[i] = offsets[i-1] + sizes[i-1];
    N = offsets[size-1] + sizes[size-1];

    ierr = PetscMalloc1(N,&indices);CHKERRQ(ierr);
    ierr = ISGetIndices(is,&lindices);CHKERRQ(ierr);
    ierr = MPI_Allgatherv((void*)lindices,nn,MPIU_INT,indices,sizes,offsets,MPIU_INT,comm);CHKERRQ(ierr);
    ierr = ISRestoreIndices(is,&lindices);CHKERRQ(ierr);
    ierr = PetscFree2(sizes,offsets);CHKERRQ(ierr);

    /* The new IS owns the gathered index array */
    ierr = ISCreateGeneral(PETSC_COMM_SELF,N,indices,PETSC_OWN_POINTER,isout);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
/*@C
  AOSetType - Builds an application ordering for a particular implementation.

  Collective on AO

  Input Parameters:
+ ao     - The AO object
- method - The name of the AO type

  Options Database Key:
. -ao_type <type> - Sets the AO type; use -help for a list of available types

  Notes:
  See "petsc/include/petscao.h" for available AO types (for instance, AOBASIC
  and AOMEMORYSCALABLE).

  Level: intermediate

.keywords: ao, set, type

.seealso: AOGetType(), AOCreate()
@*/
PetscErrorCode AOSetType(AO ao, AOType method)
{
  PetscErrorCode (*r)(AO);
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ao, AO_CLASSID,1);
  ierr = PetscObjectTypeCompare((PetscObject)ao, method, &match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0);

  ierr = AORegisterAll();CHKERRQ(ierr);
  ierr = PetscFunctionListFind(AOList,method,&r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown AO type: %s", method);

  /* Destroy implementation data of the old type, then build the new one */
  if (ao->ops->destroy) {
    ierr = (*ao->ops->destroy)(ao);CHKERRQ(ierr);
    ao->ops->destroy = NULL;
  }
  ierr = (*r)(ao);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/*
  MatView_PaStiX - Prints PaStiX run parameters in ASCII "info" format.

  Fix: the relative-error line was printed with PetscPrintf(PETSC_COMM_SELF,...)
  straight to stdout, bypassing the viewer (and any file/indentation the viewer
  controls); it now goes through PetscViewerASCIIPrintf like the other lines.
*/
PetscErrorCode MatView_PaStiX(Mat A,PetscViewer viewer)
{
  PetscErrorCode    ierr;
  PetscBool         iascii;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
    if (format == PETSC_VIEWER_ASCII_INFO) {
      Mat_Pastix *lu = (Mat_Pastix*)A->spptr;

      ierr = PetscViewerASCIIPrintf(viewer,"PaStiX run parameters:\n");CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer," Matrix type : %s \n",((lu->iparm[IPARM_SYM] == API_SYM_YES) ? "Symmetric" : "Unsymmetric"));CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer," Level of printing (0,1,2): %d \n",lu->iparm[IPARM_VERBOSE]);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer," Number of refinements iterations : %d \n",lu->iparm[IPARM_NBITER]);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer," Error : %g \n",lu->dparm[DPARM_RELATIVE_ERROR]);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
/*@
   MatCompositeSetType - Indicates if the matrix is defined as the sum of a set
   of matrices or the product.

   Collective on MPI_Comm

   Input Parameters:
.  mat - the composite matrix

   Level: advanced

   Notes:
     The MatType of the resulting matrix will be the same as the MatType of the
     FIRST matrix in the composite matrix.

.seealso: MatDestroy(), MatMult(), MatCompositeAddMat(), MatCreateComposite(), MATCOMPOSITE
@*/
PetscErrorCode MatCompositeSetType(Mat mat,MatCompositeType type)
{
  Mat_Composite  *b = (Mat_Composite*)mat->data;
  PetscBool      flg;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)mat,MATCOMPOSITE,&flg);CHKERRQ(ierr);
  if (!flg) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Can only use with composite matrix");

  if (type == MAT_COMPOSITE_MULTIPLICATIVE) {
    /* Product of matrices: no meaningful diagonal */
    mat->ops->getdiagonal   = 0;
    mat->ops->mult          = MatMult_Composite_Multiplicative;
    mat->ops->multtranspose = MatMultTranspose_Composite_Multiplicative;
    b->type                 = MAT_COMPOSITE_MULTIPLICATIVE;
  } else {
    /* Sum of matrices (the default) */
    mat->ops->getdiagonal   = MatGetDiagonal_Composite;
    mat->ops->mult          = MatMult_Composite;
    mat->ops->multtranspose = MatMultTranspose_Composite;
    b->type                 = MAT_COMPOSITE_ADDITIVE;
  }
  PetscFunctionReturn(0);
}
EXTERN_C_END #undef __FUNCT__ #define __FUNCT__ "PetscViewerStringSetString" /*@C PetscViewerStringSetString - sets the string that a string viewer will print to Logically Collective on PetscViewer Input Parameters: + viewer - string viewer you wish to attach string to . string - the string to print data into - len - the length of the string Level: advanced .seealso: PetscViewerStringOpen() @*/ PetscErrorCode PetscViewerStringSetString(PetscViewer viewer,char string[],PetscInt len) { PetscViewer_String *vstr = (PetscViewer_String*)viewer->data; PetscErrorCode ierr; PetscBool isstring; PetscFunctionBegin; PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,1); PetscValidCharPointer(string,2); ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr); if (!isstring) PetscFunctionReturn(0); if (len <= 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"String must have length at least 2"); ierr = PetscMemzero(string,len*sizeof(char));CHKERRQ(ierr); vstr->string = string; vstr->head = string; vstr->curlen = 0; vstr->maxlen = len; PetscFunctionReturn(0); }
/*@C
  DMPlexVTKWriteAll - Write a file containing all the fields that have been
  provided to the viewer

  Collective

  Input Arguments:
+ odm    - The DMPlex specifying the mesh, passed as a PetscObject
- viewer - viewer of type VTK

  Level: developer

  Note:
  This function is a callback used by the VTK viewer to actually write the
  file. The reason for this odd model is that the VTK file format does not
  provide any way to write one field at a time. Instead, metadata for the
  entire file needs to be available up-front before you can start writing
  the file.

.seealso: PETSCVIEWERVTK
@*/
PetscErrorCode DMPlexVTKWriteAll(PetscObject odm, PetscViewer viewer)
{
  DM             dm = (DM) odm;
  PetscBool      isvtk;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  ierr = PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERVTK, &isvtk);CHKERRQ(ierr);
  if (!isvtk) SETERRQ1(PetscObjectComm((PetscObject)viewer), PETSC_ERR_ARG_INCOMP, "Cannot use viewer type %s", ((PetscObject)viewer)->type_name);

  /* Dispatch on the concrete VTK flavor requested by the viewer's format */
  switch (viewer->format) {
  case PETSC_VIEWER_ASCII_VTK:
    ierr = DMPlexVTKWriteAll_ASCII(dm, viewer);CHKERRQ(ierr);
    break;
  case PETSC_VIEWER_VTK_VTU:
    ierr = DMPlexVTKWriteAll_VTU(dm, viewer);CHKERRQ(ierr);
    break;
  default:
    SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "No support for format '%s'", PetscViewerFormats[viewer->format]);
  }
  PetscFunctionReturn(0);
}
/*@C
   MatCoarsenView - Prints the coarsen data structure.

   Collective on MatCoarsen

   Input Parameters:
.  agg - the coarsen context
.  viewer - optional visualization context

   Level: intermediate

   Note:
   The available visualization contexts include
+     PETSC_VIEWER_STDOUT_SELF - standard output (default)
-     PETSC_VIEWER_STDOUT_WORLD - synchronized standard
         output where only the first processor opens
         the file.  All other processors send their
         data to the first processor to print.

   The user can open alternative visualization contexts with
.     PetscViewerASCIIOpen() - output to a specified file

.keywords: Coarsen, view

.seealso: PetscViewerASCIIOpen()
@*/
PetscErrorCode MatCoarsenView(MatCoarsen agg,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      iascii;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(agg,MAT_COARSEN_CLASSID,1);
  /* Default to stdout on the coarsener's communicator when no viewer given */
  if (!viewer) {
    ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)agg),&viewer);CHKERRQ(ierr);
  }
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  PetscCheckSameComm(agg,1,viewer,2);

  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectPrintClassNamePrefixType((PetscObject)agg,viewer);CHKERRQ(ierr);
  if (agg->ops->view) {
    ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr);
    ierr = (*agg->ops->view)(agg,viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
/*@C
   RGSetType - Selects the type for the RG object.

   Logically Collective on RG

   Input Parameter:
+  rg   - the region context
-  type - a known type

   Level: intermediate

.seealso: RGGetType()
@*/
PetscErrorCode RGSetType(RG rg,RGType type)
{
  PetscErrorCode ierr,(*r)(RG);
  PetscBool      match;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(rg,RG_CLASSID,1);
  PetscValidCharPointer(type,2);

  ierr = PetscObjectTypeCompare((PetscObject)rg,type,&match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0);

  ierr = PetscFunctionListFind(RGList,type,&r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PetscObjectComm((PetscObject)rg),PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested RG type %s",type);

  /* Destroy the previous implementation and start from a clean ops table */
  if (rg->ops->destroy) {
    ierr = (*rg->ops->destroy)(rg);CHKERRQ(ierr);
  }
  ierr = PetscMemzero(rg->ops,sizeof(struct _RGOps));CHKERRQ(ierr);

  ierr = PetscObjectChangeTypeName((PetscObject)rg,type);CHKERRQ(ierr);
  ierr = (*r)(rg);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/* Print a block IS: block size, block count, and each block's first index */
static PetscErrorCode ISView_Block(IS is, PetscViewer viewer)
{
  IS_Block       *sub = (IS_Block*)is->data;
  PetscErrorCode ierr;
  PetscInt       i,bs,n,*idx = sub->idx;
  PetscBool      iascii;

  PetscFunctionBegin;
  ierr = PetscLayoutGetBlockSize(is->map, &bs);CHKERRQ(ierr);
  ierr = PetscLayoutGetLocalSize(is->map, &n);CHKERRQ(ierr);
  n   /= bs; /* number of blocks, not entries */
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
    if (is->isperm) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Block Index set is permutation\n");CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Block size %D\n",bs);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Number of block indices in set %D\n",n);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedPrintf(viewer,"The first indices of each block are\n");CHKERRQ(ierr);
    for (i=0; i<n; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Block %D Index %D\n",i,idx[i]);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}