/*@
   MatMeshToCellGraph - Uses the ParMETIS package to convert a Mat that represents a mesh to a Mat that represents
   the graph of the coupling between cells (the "dual" graph), suitable for partitioning with the MatPartitioning
   object. Use this to partition cells of a mesh.

   Collective on Mat

   Input Parameters:
+  mesh - the graph that represents the mesh
-  ncommonnodes - mesh elements that share this number of common nodes are considered neighbors; use 2 for triangles
                  and quadrilaterals, 3 for tetrahedra, and 4 for hexahedra

   Output Parameter:
.  dual - the dual graph

   Notes:
   Currently requires ParMETIS to be installed and uses ParMETIS_V3_Mesh2Dual().

   The columns of each row of the Mat mesh are the global vertex numbers of the vertices of that row's cell.
   The number of rows in mesh is the number of cells; the number of columns is the number of vertices.

   Level: advanced

.seealso: MatMeshToVertexGraph(), MatCreateMPIAdj(), MatPartitioningCreate()
@*/
PetscErrorCode MatMeshToCellGraph(Mat mesh,PetscInt ncommonnodes,Mat *dual)
{
  PetscErrorCode ierr;
  PetscInt       *newxadj,*newadjncy;
  PetscInt       numflag = 0;
  Mat_MPIAdj     *adj    = (Mat_MPIAdj*)mesh->data,*newadj;
  PetscBool      flg;
  int            status;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)mesh,MATMPIADJ,&flg);CHKERRQ(ierr);
  if (!flg) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Must use MPIAdj matrix type");

  CHKMEMQ;
  status = ParMETIS_V3_Mesh2Dual(mesh->rmap->range,adj->i,adj->j,&numflag,&ncommonnodes,&newxadj,&newadjncy,&((PetscObject)mesh)->comm);CHKERRQPARMETIS(status);
  CHKMEMQ;
  ierr   = MatCreateMPIAdj(((PetscObject)mesh)->comm,mesh->rmap->n,mesh->rmap->N,newxadj,newadjncy,PETSC_NULL,dual);CHKERRQ(ierr);
  newadj = (Mat_MPIAdj*)(*dual)->data;

  newadj->freeaijwithfree = PETSC_TRUE; /* signal the matrix should be freed with system free since space was allocated by ParMETIS */
  PetscFunctionReturn(0);
}
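/*
   A minimal usage sketch (not part of the library source; error checking abbreviated): assuming "mesh" was created
   with MatCreateMPIAdj() with one row per cell listing that cell's global vertex numbers, the dual graph returned
   by MatMeshToCellGraph() can be handed to a MatPartitioning object to partition the cells. The helper name
   PartitionMeshCellsSketch is hypothetical.
*/
#include <petscmat.h>

PetscErrorCode PartitionMeshCellsSketch(Mat mesh,PetscInt ncommonnodes,IS *cellpart)
{
  PetscErrorCode  ierr;
  Mat             dual;
  MatPartitioning mp;

  PetscFunctionBegin;
  ierr = MatMeshToCellGraph(mesh,ncommonnodes,&dual);CHKERRQ(ierr);          /* cell-to-cell coupling graph */
  ierr = MatPartitioningCreate(((PetscObject)mesh)->comm,&mp);CHKERRQ(ierr);
  ierr = MatPartitioningSetAdjacency(mp,dual);CHKERRQ(ierr);                 /* partition the dual graph */
  ierr = MatPartitioningSetType(mp,MATPARTITIONINGPARMETIS);CHKERRQ(ierr);
  ierr = MatPartitioningApply(mp,cellpart);CHKERRQ(ierr);                    /* entry i of the IS is the target rank of cell i */
  ierr = MatPartitioningDestroy(&mp);CHKERRQ(ierr);
  ierr = MatDestroy(&dual);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}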
int main(int argc, char *argv[])
{
  PetscErrorCode ierr;
  PetscBool      flg;
  PetscMPIInt    rank, size;
  int            i, status;
  idx_t          ni,isize,*vtxdist, *xadj, *adjncy, *vwgt, *part;
  idx_t          wgtflag=0, numflag=0, ncon=1, ndims=3, edgecut=0;
  idx_t          options[5];
  real_t         *xyz, *tpwgts, ubvec[1];
  MPI_Comm       comm;
  FILE           *fp;
  char           fname[PETSC_MAX_PATH_LEN],prefix[PETSC_MAX_PATH_LEN] = "";

  PetscInitialize(&argc,&argv,NULL,help);
#if defined(PETSC_USE_64BIT_INDICES)
  ierr = PetscPrintf(PETSC_COMM_WORLD,"This example only works with 32 bit indices\n");
  PetscFinalize();
  return 0;
#endif
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  MPI_Comm_size(PETSC_COMM_WORLD,&size);

  ierr = PetscOptionsBegin(PETSC_COMM_WORLD,NULL,"Parmetis test options","");CHKERRQ(ierr);
  ierr = PetscOptionsString("-prefix","Path and prefix of test file","",prefix,prefix,sizeof(prefix),&flg);CHKERRQ(ierr);
  if (!flg) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_USER,"Must specify -prefix");
  ierr = PetscOptionsEnd();CHKERRQ(ierr);

  /* Read this process's piece of the distributed graph in CSR form */
  ierr = PetscMalloc1(size+1,&vtxdist);CHKERRQ(ierr);
  ierr = PetscSNPrintf(fname,sizeof(fname),"%s.%d.graph",prefix,rank);CHKERRQ(ierr);
  ierr = PetscFOpen(PETSC_COMM_SELF,fname,"r",&fp);CHKERRQ(ierr);
  fread(vtxdist, sizeof(idx_t), size+1, fp);
  ni   = vtxdist[rank+1]-vtxdist[rank];
  ierr = PetscMalloc1(ni+1,&xadj);CHKERRQ(ierr);
  fread(xadj, sizeof(idx_t), ni+1, fp);
  ierr = PetscMalloc1(xadj[ni],&adjncy);CHKERRQ(ierr);
  for (i=0; i<ni; i++) fread(&adjncy[xadj[i]], sizeof(idx_t), xadj[i+1]-xadj[i], fp);
  ierr = PetscFClose(PETSC_COMM_SELF,fp);CHKERRQ(ierr);

  /* Read the vertex coordinates used by the geometric partitioner */
  ierr = PetscSNPrintf(fname,sizeof(fname),"%s.%d.graph.xyz",prefix,rank);CHKERRQ(ierr);
  ierr = PetscFOpen(PETSC_COMM_SELF,fname,"r",&fp);CHKERRQ(ierr);
  ierr = PetscMalloc3(ni*ndims,&xyz,ni,&part,size,&tpwgts);CHKERRQ(ierr);
  fread(xyz, sizeof(real_t), ndims*ni, fp);
  ierr = PetscFClose(PETSC_COMM_SELF,fp);CHKERRQ(ierr);

  vwgt = NULL;
  for (i = 0; i < size; i++) tpwgts[i] = 1. / size;
  isize      = size;
  ubvec[0]   = 1.05;
  options[0] = 1;
  options[1] = 2;
  options[2] = 15;
  options[3] = 0;
  options[4] = 0;

  /* Duplicate the communicator so ParMETIS attribute caching does not interfere with PETSc */
  ierr   = MPI_Comm_dup(MPI_COMM_WORLD, &comm);CHKERRQ(ierr);
  status = ParMETIS_V3_PartGeomKway(vtxdist, xadj, adjncy, vwgt, NULL, &wgtflag, &numflag, &ndims, xyz, &ncon, &isize, tpwgts, ubvec, options, &edgecut, part, &comm);CHKERRQPARMETIS(status);
  ierr   = MPI_Comm_free(&comm);CHKERRQ(ierr);

  ierr = PetscFree(vtxdist);CHKERRQ(ierr);
  ierr = PetscFree(xadj);CHKERRQ(ierr);
  ierr = PetscFree(adjncy);CHKERRQ(ierr);
  ierr = PetscFree3(xyz,part,tpwgts);CHKERRQ(ierr);
  PetscFinalize();
  return 0;
}
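/*
   Typical invocation of the test above (a sketch, not taken from the source): it assumes per-rank binary input
   files <prefix>.<rank>.graph and <prefix>.<rank>.graph.xyz produced elsewhere, a 32-bit-index build of PETSc,
   and an executable named ./ex (hypothetical name):

       mpiexec -n 2 ./ex -prefix /path/to/testfiles/mesh
*/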
static PetscErrorCode MatPartitioningApply_Parmetis(MatPartitioning part,IS *partitioning)
{
  MatPartitioning_Parmetis *parmetis = (MatPartitioning_Parmetis*)part->data;
  PetscErrorCode           ierr;
  PetscInt                 *locals = PETSC_NULL;
  Mat                      mat     = part->adj,amat,pmat;
  PetscBool                flg;
  PetscInt                 bs = 1;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)mat,MATMPIADJ,&flg);CHKERRQ(ierr);
  if (flg) {
    amat = mat;
    ierr = PetscObjectReference((PetscObject)amat);CHKERRQ(ierr);
  } else {
    /* bs indicates if the converted matrix is "reduced" from the original and hence the
       resulting partition results need to be stretched to match the original matrix */
    ierr = MatConvert(mat,MATMPIADJ,MAT_INITIAL_MATRIX,&amat);CHKERRQ(ierr);
    if (amat->rmap->n > 0) bs = mat->rmap->n/amat->rmap->n;
  }
  ierr = MatMPIAdjCreateNonemptySubcommMat(amat,&pmat);CHKERRQ(ierr);
  ierr = MPI_Barrier(((PetscObject)part)->comm);CHKERRQ(ierr);

  if (pmat) {
    MPI_Comm   pcomm    = ((PetscObject)pmat)->comm,comm_pmetis;
    Mat_MPIAdj *adj     = (Mat_MPIAdj*)pmat->data;
    PetscInt   *vtxdist = pmat->rmap->range;
    PetscInt   *xadj    = adj->i;
    PetscInt   *adjncy  = adj->j;
    PetscInt   itmp     = 0,wgtflag=0, numflag=0, ncon=1, nparts=part->n, options[24], i, j;
    real_t     *tpwgts,*ubvec;
    int        status;

#if defined(PETSC_USE_DEBUG)
    /* check that matrix has no diagonal entries */
    {
      PetscInt rstart;
      ierr = MatGetOwnershipRange(pmat,&rstart,PETSC_NULL);CHKERRQ(ierr);
      for (i=0; i<pmat->rmap->n; i++) {
        for (j=xadj[i]; j<xadj[i+1]; j++) {
          if (adjncy[j] == i+rstart) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Row %d has diagonal entry; Parmetis forbids diagonal entry",i+rstart);
        }
      }
    }
#endif

    ierr = PetscMalloc(amat->rmap->n*sizeof(PetscInt),&locals);CHKERRQ(ierr);

    if (PetscLogPrintInfo) {itmp = parmetis->printout; parmetis->printout = 127;}
    ierr = PetscMalloc(ncon*nparts*sizeof(real_t),&tpwgts);CHKERRQ(ierr);
    for (i=0; i<ncon; i++) {
      for (j=0; j<nparts; j++) {
        if (part->part_weights) {
          tpwgts[i*nparts+j] = part->part_weights[i*nparts+j];
        } else {
          tpwgts[i*nparts+j] = 1./nparts;
        }
      }
    }
    ierr = PetscMalloc(ncon*sizeof(real_t),&ubvec);CHKERRQ(ierr);
    for (i=0; i<ncon; i++) {
      ubvec[i] = 1.05;
    }
    /* This sets the defaults */
    options[0] = 0;
    for (i=1; i<24; i++) {
      options[i] = -1;
    }
    /* Duplicate the communicator to be sure that ParMETIS attribute caching does not interfere with PETSc. */
    ierr   = MPI_Comm_dup(pcomm,&comm_pmetis);CHKERRQ(ierr);
    status = ParMETIS_V3_PartKway(vtxdist,xadj,adjncy,part->vertex_weights,adj->values,&wgtflag,&numflag,&ncon,&nparts,tpwgts,ubvec,options,&parmetis->cuts,locals,&comm_pmetis);CHKERRQPARMETIS(status);
    ierr   = MPI_Comm_free(&comm_pmetis);CHKERRQ(ierr);

    ierr = PetscFree(tpwgts);CHKERRQ(ierr);
    ierr = PetscFree(ubvec);CHKERRQ(ierr);
    if (PetscLogPrintInfo) {parmetis->printout = itmp;}
  }

  if (bs > 1) {
    /* stretch the partition of the reduced matrix back to the block size of the original matrix */
    PetscInt i,j,*newlocals;
    ierr = PetscMalloc(bs*amat->rmap->n*sizeof(PetscInt),&newlocals);CHKERRQ(ierr);
    for (i=0; i<amat->rmap->n; i++) {
      for (j=0; j<bs; j++) {
        newlocals[bs*i + j] = locals[i];
      }
    }
    ierr = PetscFree(locals);CHKERRQ(ierr);
    ierr = ISCreateGeneral(((PetscObject)part)->comm,bs*amat->rmap->n,newlocals,PETSC_OWN_POINTER,partitioning);CHKERRQ(ierr);
  } else {
    ierr = ISCreateGeneral(((PetscObject)part)->comm,amat->rmap->n,locals,PETSC_OWN_POINTER,partitioning);CHKERRQ(ierr);
  }

  ierr = MatDestroy(&pmat);CHKERRQ(ierr);
  ierr = MatDestroy(&amat);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
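/*
   A small, hypothetical sketch (not part of the source) of building the MATMPIADJ input this routine expects:
   CSR-style arrays describing the square graph 0-1, 0-2, 1-3, 2-3 on a single process, with no diagonal entries
   (the debug check above rejects them). MatCreateMPIAdj() takes ownership of the arrays, so they are allocated
   with PetscMalloc() and not freed here. The helper name BuildSquareAdjSketch is made up for illustration.
*/
#include <petscmat.h>

PetscErrorCode BuildSquareAdjSketch(MPI_Comm comm,Mat *adj)
{
  PetscErrorCode ierr;
  PetscInt       *ia,*ja;
  const PetscInt iatmp[5] = {0,2,4,6,8};           /* row pointers: each vertex has two neighbors */
  const PetscInt jatmp[8] = {1,2, 0,3, 0,3, 1,2};  /* neighbors of vertices 0,1,2,3 */

  PetscFunctionBegin;
  ierr = PetscMalloc(5*sizeof(PetscInt),&ia);CHKERRQ(ierr);
  ierr = PetscMalloc(8*sizeof(PetscInt),&ja);CHKERRQ(ierr);
  ierr = PetscMemcpy(ia,iatmp,5*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemcpy(ja,jatmp,8*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = MatCreateMPIAdj(comm,4,4,ia,ja,PETSC_NULL,adj);CHKERRQ(ierr); /* no edge weights */
  PetscFunctionReturn(0);
}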