PetscErrorCode CheckMat(Mat A, Mat B, PetscBool usemult, const char* func)
{
  Mat            Bcheck;
  PetscReal      error;
  PetscErrorCode ierr;

  PetscFunctionBeginUser;
  if (!usemult) {
    if (B) {
      MatType Btype;

      ierr = MatGetType(B,&Btype);CHKERRQ(ierr);
      ierr = MatConvert(A,Btype,MAT_INITIAL_MATRIX,&Bcheck);CHKERRQ(ierr);
    } else {
      ierr = MatConvert(A,MATAIJ,MAT_INITIAL_MATRIX,&Bcheck);CHKERRQ(ierr);
    }
    if (B) { /* if B is present, subtract it */
      ierr = MatAXPY(Bcheck,-1.,B,DIFFERENT_NONZERO_PATTERN);CHKERRQ(ierr);
    }
    ierr = MatNorm(Bcheck,NORM_INFINITY,&error);CHKERRQ(ierr);
    if (error > PETSC_SQRT_MACHINE_EPSILON) {
      ISLocalToGlobalMapping rl2g,cl2g;

      ierr = PetscObjectSetName((PetscObject)Bcheck,"Assembled Bcheck");CHKERRQ(ierr);
      ierr = MatView(Bcheck,NULL);CHKERRQ(ierr);
      if (B) {
        ierr = PetscObjectSetName((PetscObject)B,"Assembled AIJ");CHKERRQ(ierr);
        ierr = MatView(B,NULL);CHKERRQ(ierr);
        ierr = MatDestroy(&Bcheck);CHKERRQ(ierr);
        ierr = MatConvert(A,MATAIJ,MAT_INITIAL_MATRIX,&Bcheck);CHKERRQ(ierr);
        ierr = PetscObjectSetName((PetscObject)Bcheck,"Assembled IS");CHKERRQ(ierr);
        ierr = MatView(Bcheck,NULL);CHKERRQ(ierr);
      }
      ierr = MatDestroy(&Bcheck);CHKERRQ(ierr);
      ierr = PetscObjectSetName((PetscObject)A,"MatIS");CHKERRQ(ierr);
      ierr = MatView(A,NULL);CHKERRQ(ierr);
      ierr = MatGetLocalToGlobalMapping(A,&rl2g,&cl2g);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingView(rl2g,NULL);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingView(cl2g,NULL);CHKERRQ(ierr);
      SETERRQ2(PETSC_COMM_WORLD,PETSC_ERR_PLIB,"ERROR ON %s: %g",func,error);
    }
    ierr = MatDestroy(&Bcheck);CHKERRQ(ierr);
  } else {
    PetscBool ok,okt;

    ierr = MatMultEqual(A,B,3,&ok);CHKERRQ(ierr);
    ierr = MatMultTransposeEqual(A,B,3,&okt);CHKERRQ(ierr);
    if (!ok || !okt) SETERRQ3(PETSC_COMM_WORLD,PETSC_ERR_PLIB,"ERROR ON %s: mult ok ? %d, multtranspose ok ? %d",func,ok,okt);
  }
  PetscFunctionReturn(0);
}
int main(int argc,char **argv)
{
  PetscErrorCode         ierr;
  PetscInt               i,n = 4,indices[] = {0,3,9,12},m = 2,input[] = {0,2};
  PetscInt               output[2],inglobals[13],outlocals[13];
  ISLocalToGlobalMapping mapping;

  ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;

  /* Create a local to global mapping. Each processor independently creates a mapping */
  ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_WORLD,1,n,indices,PETSC_COPY_VALUES,&mapping);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingSetFromOptions(mapping);CHKERRQ(ierr);

  /* Map a set of local indices to their global values */
  ierr = ISLocalToGlobalMappingApply(mapping,m,input,output);CHKERRQ(ierr);
  ierr = PetscIntView(m,output,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);

  /* Map some global indices to local, marking the ones without a local index with -1 */
  for (i=0; i<13; i++) inglobals[i] = i;
  ierr = ISGlobalToLocalMappingApply(mapping,IS_GTOLM_MASK,13,inglobals,NULL,outlocals);CHKERRQ(ierr);
  ierr = PetscIntView(13,outlocals,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);

  /* Map some global indices to local, dropping the ones without a local index */
  ierr = ISGlobalToLocalMappingApply(mapping,IS_GTOLM_DROP,13,inglobals,&m,outlocals);CHKERRQ(ierr);
  ierr = PetscIntView(m,outlocals,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);

  ierr = ISLocalToGlobalMappingView(mapping,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);

  /* Free the space used by the local to global mapping */
  ierr = ISLocalToGlobalMappingDestroy(&mapping);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return ierr;
}
int main(int argc,char **argv)
{
  PetscMPIInt      rank;
  PetscInt         M = 13,s = 1,dof = 1;
  DMDABoundaryType bx = DMDA_BOUNDARY_PERIODIC;
  PetscErrorCode   ierr;
  DM               da;
  PetscViewer      viewer;
  Vec              local,global;
  PetscScalar      value;
  PetscDraw        draw;
  PetscBool        flg = PETSC_FALSE;

  ierr = PetscInitialize(&argc,&argv,(char*)0,help);CHKERRQ(ierr);

  /* Create viewers */
  ierr = PetscViewerDrawOpen(PETSC_COMM_WORLD,0,"",280,480,600,200,&viewer);CHKERRQ(ierr);
  ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
  ierr = PetscDrawSetDoubleBuffer(draw);CHKERRQ(ierr);

  /* Read options */
  ierr = PetscOptionsGetInt(NULL,"-M",&M,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsGetEnum(NULL,"-wrap",DMDABoundaryTypes,(PetscEnum*)&bx,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsGetInt(NULL,"-dof",&dof,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsGetInt(NULL,"-s",&s,NULL);CHKERRQ(ierr);

  /* Create distributed array and get vectors */
  ierr = DMDACreate1d(PETSC_COMM_WORLD,bx,M,dof,s,NULL,&da);CHKERRQ(ierr);
  ierr = DMView(da,viewer);CHKERRQ(ierr);
  ierr = DMCreateGlobalVector(da,&global);CHKERRQ(ierr);
  ierr = DMCreateLocalVector(da,&local);CHKERRQ(ierr);

  /* Set global vector; send ghost points to local vectors */
  value = 1;
  ierr  = VecSet(global,value);CHKERRQ(ierr);
  ierr  = DMGlobalToLocalBegin(da,global,INSERT_VALUES,local);CHKERRQ(ierr);
  ierr  = DMGlobalToLocalEnd(da,global,INSERT_VALUES,local);CHKERRQ(ierr);

  /* Scale local vectors according to processor rank; pass to global vector */
  ierr  = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
  value = rank+1;
  ierr  = VecScale(local,value);CHKERRQ(ierr);
  ierr  = DMLocalToGlobalBegin(da,local,INSERT_VALUES,global);CHKERRQ(ierr);
  ierr  = DMLocalToGlobalEnd(da,local,INSERT_VALUES,global);CHKERRQ(ierr);
  ierr  = VecView(global,viewer);CHKERRQ(ierr);
  ierr  = PetscPrintf(PETSC_COMM_WORLD,"\nGlobal Vector:\n");CHKERRQ(ierr);
  ierr  = VecView(global,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  ierr  = PetscPrintf(PETSC_COMM_WORLD,"\n");CHKERRQ(ierr);

  /* Send ghost points to local vectors */
  ierr = DMGlobalToLocalBegin(da,global,INSERT_VALUES,local);CHKERRQ(ierr);
  ierr = DMGlobalToLocalEnd(da,global,INSERT_VALUES,local);CHKERRQ(ierr);

  ierr = PetscOptionsGetBool(NULL,"-local_print",&flg,NULL);CHKERRQ(ierr);
  if (flg) {
    PetscViewer            sviewer;
    ISLocalToGlobalMapping is;

    ierr = PetscViewerASCIISynchronizedAllow(PETSC_VIEWER_STDOUT_WORLD,PETSC_TRUE);CHKERRQ(ierr);
    ierr = PetscSynchronizedPrintf(PETSC_COMM_WORLD,"\nLocal Vector: processor %d\n",rank);CHKERRQ(ierr);
    ierr = PetscViewerGetSingleton(PETSC_VIEWER_STDOUT_WORLD,&sviewer);CHKERRQ(ierr);
    ierr = VecView(local,sviewer);CHKERRQ(ierr);
    ierr = PetscViewerRestoreSingleton(PETSC_VIEWER_STDOUT_WORLD,&sviewer);CHKERRQ(ierr);
    ierr = PetscSynchronizedFlush(PETSC_COMM_WORLD,PETSC_STDOUT);CHKERRQ(ierr);
    ierr = PetscSynchronizedPrintf(PETSC_COMM_WORLD,"\nLocal to global mapping: processor %d\n",rank);CHKERRQ(ierr);
    ierr = PetscViewerGetSingleton(PETSC_VIEWER_STDOUT_WORLD,&sviewer);CHKERRQ(ierr);
    ierr = DMGetLocalToGlobalMapping(da,&is);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingView(is,sviewer);CHKERRQ(ierr);
    ierr = PetscViewerRestoreSingleton(PETSC_VIEWER_STDOUT_WORLD,&sviewer);CHKERRQ(ierr);
    ierr = PetscSynchronizedFlush(PETSC_COMM_WORLD,PETSC_STDOUT);CHKERRQ(ierr);
  }

  /* Free memory */
  ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
  ierr = VecDestroy(&global);CHKERRQ(ierr);
  ierr = VecDestroy(&local);CHKERRQ(ierr);
  ierr = DMDestroy(&da);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return 0;
}
/*@C
  DMPlexDistribute - Distributes the mesh and any associated sections.

  Collective on DM

  Input Parameters:
+ dm  - The original DMPlex object
. partitioner - The partitioning package, or NULL for the default
- overlap - The overlap of partitions, 0 is the default

  Output Parameters:
+ sf - The PetscSF used for point distribution
- dmParallel - The distributed DMPlex object, or NULL

  Note: If the mesh was not distributed, dmParallel is set to NULL.

  The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
  representation on the mesh.

  Level: intermediate

.keywords: mesh, elements
.seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
@*/
PetscErrorCode DMPlexDistribute(DM dm, const char partitioner[], PetscInt overlap, PetscSF *sf, DM *dmParallel)
{
  DM_Plex               *mesh = (DM_Plex*) dm->data, *pmesh;
  MPI_Comm               comm;
  const PetscInt         height = 0;
  PetscInt               dim, numRemoteRanks;
  IS                     origCellPart, origPart, cellPart, part;
  PetscSection           origCellPartSection, origPartSection, cellPartSection, partSection;
  PetscSFNode           *remoteRanks;
  PetscSF                partSF, pointSF, coneSF;
  ISLocalToGlobalMapping renumbering;
  PetscSection           originalConeSection, newConeSection;
  PetscInt              *remoteOffsets;
  PetscInt              *cones, *newCones, newConesSize;
  PetscBool              flg;
  PetscMPIInt            rank, numProcs, p;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  if (sf) PetscValidPointer(sf,4);
  PetscValidPointer(dmParallel,5);

  ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);

  *dmParallel = NULL;
  if (numProcs == 1) PetscFunctionReturn(0);

  ierr = DMPlexGetDimension(dm, &dim);CHKERRQ(ierr);
  /* Create cell partition - We need to rewrite to use IS, use the MatPartition stuff */
  ierr = PetscLogEventBegin(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
  if (overlap > 1) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Overlap > 1 not yet implemented");
  ierr = DMPlexCreatePartition(dm, partitioner, height, overlap > 0 ? PETSC_TRUE : PETSC_FALSE, &cellPartSection, &cellPart, &origCellPartSection, &origCellPart);CHKERRQ(ierr);
  /* Create SF assuming a serial partition for all processes: Could check for IS length here */
  if (!rank) numRemoteRanks = numProcs;
  else       numRemoteRanks = 0;
  ierr = PetscMalloc1(numRemoteRanks, &remoteRanks);CHKERRQ(ierr);
  for (p = 0; p < numRemoteRanks; ++p) {
    remoteRanks[p].rank  = p;
    remoteRanks[p].index = 0;
  }
  ierr = PetscSFCreate(comm, &partSF);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(partSF, 1, numRemoteRanks, NULL, PETSC_OWN_POINTER, remoteRanks, PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPrintf(comm, "Cell Partition:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(cellPartSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = ISView(cellPart, NULL);CHKERRQ(ierr);
    if (origCellPart) {
      ierr = PetscPrintf(comm, "Original Cell Partition:\n");CHKERRQ(ierr);
      ierr = PetscSectionView(origCellPartSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
      ierr = ISView(origCellPart, NULL);CHKERRQ(ierr);
    }
    ierr = PetscSFView(partSF, NULL);CHKERRQ(ierr);
  }
  /* Close the partition over the mesh */
  ierr = DMPlexCreatePartitionClosure(dm, cellPartSection, cellPart, &partSection, &part);CHKERRQ(ierr);
  ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
  /* Create new mesh */
  ierr  = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr);
  ierr  = DMPlexSetDimension(*dmParallel, dim);CHKERRQ(ierr);
  ierr  = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr);
  pmesh = (DM_Plex*) (*dmParallel)->data;
  /* Distribute sieve points and the global point numbering (replaces creating remote bases) */
  ierr = PetscSFConvertPartition(partSF, partSection, part, &renumbering, &pointSF);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPrintf(comm, "Point Partition:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(partSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = ISView(part, NULL);CHKERRQ(ierr);
    ierr = PetscSFView(pointSF, NULL);CHKERRQ(ierr);
    ierr = PetscPrintf(comm, "Point Renumbering after partition:\n");CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingView(renumbering, NULL);CHKERRQ(ierr);
  }
  ierr = PetscLogEventEnd(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
  /* Distribute cone section */
  ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr);
  ierr = DMPlexGetConeSection(*dmParallel, &newConeSection);CHKERRQ(ierr);
  ierr = PetscSFDistributeSection(pointSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr);
  ierr = DMSetUp(*dmParallel);CHKERRQ(ierr);
  {
    PetscInt pStart, pEnd, p;

    ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr);
    for (p = pStart; p < pEnd; ++p) {
      PetscInt coneSize;

      ierr               = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr);
      pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
    }
  }
  /* Communicate and renumber cones */
  ierr = PetscSFCreateSectionSF(pointSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr);
  ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
  ierr = DMPlexGetCones(*dmParallel, &newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
  ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr);
  ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr);
  }
  ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr);
  ierr = DMPlexGetConeOrientations(*dmParallel, &newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
  /* Create supports and stratify sieve */
  {
    PetscInt pStart, pEnd;

    ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr);
    ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr);
  }
  ierr = DMPlexSymmetrize(*dmParallel);CHKERRQ(ierr);
  ierr = DMPlexStratify(*dmParallel);CHKERRQ(ierr);
  /* Distribute Coordinates */
  {
    PetscSection originalCoordSection, newCoordSection;
    Vec          originalCoordinates, newCoordinates;
    PetscInt     bs;
    const char  *name;

    ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr);
    ierr = DMGetCoordinateSection(*dmParallel, &newCoordSection);CHKERRQ(ierr);
    ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr);
    ierr = VecCreate(comm, &newCoordinates);CHKERRQ(ierr);
    ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr);

    ierr = DMPlexDistributeField(dm, pointSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr);
    ierr = DMSetCoordinatesLocal(*dmParallel, newCoordinates);CHKERRQ(ierr);
    ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr);
    ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr);
    ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr);
  }
  /* Distribute labels */
  ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
  {
    DMLabel  next = mesh->labels, newNext = pmesh->labels;
    PetscInt numLabels = 0, l;

    /* Bcast number of labels */
    while (next) {++numLabels; next = next->next;}
    ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr);
    next = mesh->labels;
    for (l = 0; l < numLabels; ++l) {
      DMLabel   labelNew;
      PetscBool isdepth;

      /* Skip "depth" because it is recreated */
      if (!rank) {ierr = PetscStrcmp(next->name, "depth", &isdepth);CHKERRQ(ierr);}
      ierr = MPI_Bcast(&isdepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr);
      if (isdepth) {if (!rank) next = next->next; continue;}
      ierr = DMLabelDistribute(next, partSection, part, renumbering, &labelNew);CHKERRQ(ierr);
      /* Insert into list */
      if (newNext) newNext->next = labelNew;
      else         pmesh->labels = labelNew;
      newNext = labelNew;
      if (!rank) next = next->next;
    }
  }
  ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
  /* Setup hybrid structure */
  {
    const PetscInt *gpoints;
    PetscInt        depth, n, d;

    for (d = 0; d <= dim; ++d) {pmesh->hybridPointMax[d] = mesh->hybridPointMax[d];}
    ierr = MPI_Bcast(pmesh->hybridPointMax, dim+1, MPIU_INT, 0, comm);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetSize(renumbering, &n);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetIndices(renumbering, &gpoints);CHKERRQ(ierr);
    ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
    for (d = 0; d <= dim; ++d) {
      PetscInt pmax = pmesh->hybridPointMax[d], newmax = 0, pEnd, stratum[2], p;

      if (pmax < 0) continue;
      ierr = DMPlexGetDepthStratum(dm, d > depth ? depth : d, &stratum[0], &stratum[1]);CHKERRQ(ierr);
      ierr = DMPlexGetDepthStratum(*dmParallel, d, NULL, &pEnd);CHKERRQ(ierr);
      ierr = MPI_Bcast(stratum, 2, MPIU_INT, 0, comm);CHKERRQ(ierr);
      for (p = 0; p < n; ++p) {
        const PetscInt point = gpoints[p];

        if ((point >= stratum[0]) && (point < stratum[1]) && (point >= pmax)) ++newmax;
      }
      if (newmax > 0) pmesh->hybridPointMax[d] = pEnd - newmax;
      else            pmesh->hybridPointMax[d] = -1;
    }
    ierr = ISLocalToGlobalMappingRestoreIndices(renumbering, &gpoints);CHKERRQ(ierr);
  }
  /* Cleanup Partition */
  ierr = ISLocalToGlobalMappingDestroy(&renumbering);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&partSF);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&partSection);CHKERRQ(ierr);
  ierr = ISDestroy(&part);CHKERRQ(ierr);
  /* Create point SF for parallel mesh */
  ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
  {
    const PetscInt *leaves;
    PetscSFNode    *remotePoints, *rowners, *lowners;
    PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
    PetscInt        pStart, pEnd;

    ierr = DMPlexGetChart(*dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
    ierr = PetscSFGetGraph(pointSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr);
    ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr);
    for (p=0; p<numRoots; p++) {
      rowners[p].rank  = -1;
      rowners[p].index = -1;
    }
    if (origCellPart) {
      /* Make sure points in the original partition are not assigned to other procs */
      const PetscInt *origPoints;

      ierr = DMPlexCreatePartitionClosure(dm, origCellPartSection, origCellPart, &origPartSection, &origPart);CHKERRQ(ierr);
      ierr = ISGetIndices(origPart, &origPoints);CHKERRQ(ierr);
      for (p = 0; p < numProcs; ++p) {
        PetscInt dof, off, d;

        ierr = PetscSectionGetDof(origPartSection, p, &dof);CHKERRQ(ierr);
        ierr = PetscSectionGetOffset(origPartSection, p, &off);CHKERRQ(ierr);
        for (d = off; d < off+dof; ++d) {
          rowners[origPoints[d]].rank = p;
        }
      }
      ierr = ISRestoreIndices(origPart, &origPoints);CHKERRQ(ierr);
      ierr = ISDestroy(&origPart);CHKERRQ(ierr);
      ierr = PetscSectionDestroy(&origPartSection);CHKERRQ(ierr);
    }
    ierr = ISDestroy(&origCellPart);CHKERRQ(ierr);
    ierr = PetscSectionDestroy(&origCellPartSection);CHKERRQ(ierr);

    ierr = PetscSFBcastBegin(pointSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(pointSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    for (p = 0; p < numLeaves; ++p) {
      if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
        lowners[p].rank  = rank;
        lowners[p].index = leaves ? leaves[p] : p;
      } else if (lowners[p].rank >= 0) { /* Point already claimed so flag so that MAXLOC does not listen to us */
        lowners[p].rank  = -2;
        lowners[p].index = -2;
      }
    }
    for (p=0; p<numRoots; p++) { /* Root must not participate in the reduction, flag so that MAXLOC does not use */
      rowners[p].rank  = -3;
      rowners[p].index = -3;
    }
    ierr = PetscSFReduceBegin(pointSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(pointSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(pointSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(pointSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    for (p = 0; p < numLeaves; ++p) {
      if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
      if (lowners[p].rank != rank) ++numGhostPoints;
    }
    ierr = PetscMalloc1(numGhostPoints, &ghostPoints);CHKERRQ(ierr);
    ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr);
    for (p = 0, gp = 0; p < numLeaves; ++p) {
      if (lowners[p].rank != rank) {
        ghostPoints[gp]        = leaves ? leaves[p] : p;
        remotePoints[gp].rank  = lowners[p].rank;
        remotePoints[gp].index = lowners[p].index;
        ++gp;
      }
    }
    ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr);
    ierr = PetscSFSetGraph((*dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
    ierr = PetscSFSetFromOptions((*dmParallel)->sf);CHKERRQ(ierr);
  }
  pmesh->useCone    = mesh->useCone;
  pmesh->useClosure = mesh->useClosure;
  ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
  /* Copy BC */
  ierr = DMPlexCopyBoundary(dm, *dmParallel);CHKERRQ(ierr);
  /* Cleanup */
  if (sf) {*sf = pointSF;}
  else    {ierr = PetscSFDestroy(&pointSF);CHKERRQ(ierr);}
  ierr = DMSetFromOptions(*dmParallel);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
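/*
  A minimal calling sequence for DMPlexDistribute(), as a hedged sketch: it
  assumes a serial DMPlex dm has already been created elsewhere. A NULL
  partitioner selects the default package, overlap 0 is the default, and the
  point SF is not requested. On a single process dmDist stays NULL, per the
  manual page above, so the original dm is kept in that case.
*/
DM dmDist = NULL;

ierr = DMPlexDistribute(dm, NULL, 0, NULL, &dmDist);CHKERRQ(ierr);
if (dmDist) {
  ierr = DMDestroy(&dm);CHKERRQ(ierr);
  dm   = dmDist;
}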
/*
  Distribute cones
  - Partitioning:         input partition point map and naive sf, output sf with inverse of map, distribute points
  - Distribute section:   input current sf, communicate sizes and offsets, output local section and offsets (only use for new sf)
  - Create SF for values: input current sf and offsets, output new sf
  - Distribute values:    input new sf, communicate values
*/
PetscErrorCode DistributeMesh(DM dm, AppCtx *user, PetscSF *pointSF, DM *parallelDM)
{
  MPI_Comm               comm   = ((PetscObject) dm)->comm;
  const PetscInt         height = 0;
  PetscInt               dim, numRemoteRanks;
  IS                     cellPart, part;
  PetscSection           cellPartSection, partSection;
  PetscSFNode           *remoteRanks;
  PetscSF                partSF;
  ISLocalToGlobalMapping renumbering;
  PetscSF                coneSF;
  PetscSection           originalConeSection, newConeSection;
  PetscInt              *remoteOffsets, newConesSize;
  PetscInt              *cones, *newCones;
  PetscMPIInt            numProcs, rank, p;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = DMMeshGetDimension(dm, &dim);CHKERRQ(ierr);
  /* Create cell partition - We need to rewrite to use IS, use the MatPartition stuff */
  ierr = DMMeshCreatePartition(dm, &cellPartSection, &cellPart, height);CHKERRQ(ierr);
  /* Create SF assuming a serial partition for all processes: Could check for IS length here */
  if (!rank) {
    numRemoteRanks = numProcs;
  } else {
    numRemoteRanks = 0;
  }
  ierr = PetscMalloc(numRemoteRanks * sizeof(PetscSFNode), &remoteRanks);CHKERRQ(ierr);
  for (p = 0; p < numRemoteRanks; ++p) {
    remoteRanks[p].rank  = p;
    remoteRanks[p].index = 0;
  }
  ierr = PetscSFCreate(comm, &partSF);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(partSF, 1, numRemoteRanks, PETSC_NULL, PETSC_OWN_POINTER, remoteRanks, PETSC_OWN_POINTER);CHKERRQ(ierr);
  /* Debugging */
  ierr = PetscPrintf(comm, "Cell Partition:\n");CHKERRQ(ierr);
  ierr = PetscSectionView(cellPartSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  ierr = ISView(cellPart, PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscSFView(partSF, PETSC_NULL);CHKERRQ(ierr);
  /* Close the partition over the mesh */
  ierr = DMMeshCreatePartitionClosure(dm, cellPartSection, cellPart, &partSection, &part);CHKERRQ(ierr);
  ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
  /* Create new mesh */
  ierr = DMMeshCreate(comm, parallelDM);CHKERRQ(ierr);
  ierr = DMMeshSetDimension(*parallelDM, dim);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *parallelDM, "Parallel Mesh");CHKERRQ(ierr);
  /* Distribute sieve points and the global point numbering (replaces creating remote bases) */
  ierr = PetscSFConvertPartition(partSF, partSection, part, &renumbering, pointSF);CHKERRQ(ierr);
  /* Debugging */
  ierr = PetscPrintf(comm, "Point Partition:\n");CHKERRQ(ierr);
  ierr = PetscSectionView(partSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  ierr = ISView(part, PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscSFView(*pointSF, PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscPrintf(comm, "Point Renumbering after partition:\n");CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingView(renumbering, PETSC_NULL);CHKERRQ(ierr);
  /* Cleanup */
  ierr = PetscSFDestroy(&partSF);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&partSection);CHKERRQ(ierr);
  ierr = ISDestroy(&part);CHKERRQ(ierr);
  /* Distribute cone section */
  ierr = DMMeshGetConeSection(dm, &originalConeSection);CHKERRQ(ierr);
  ierr = DMMeshGetConeSection(*parallelDM, &newConeSection);CHKERRQ(ierr);
  ierr = PetscSFDistributeSection(*pointSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr);
  ierr = DMMeshSetUp(*parallelDM);CHKERRQ(ierr);
  /* Communicate and renumber cones */
  ierr = PetscSFCreateSectionSF(*pointSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr);
  ierr = DMMeshGetCones(dm, &cones);CHKERRQ(ierr);
  ierr = DMMeshGetCones(*parallelDM, &newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
  ierr = ISGlobalToLocalMappingApply(renumbering, IS_GTOLM_MASK, newConesSize, newCones, PETSC_NULL, newCones);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&renumbering);CHKERRQ(ierr);
  /* Debugging */
  ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);
  ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr);
  ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  ierr = PetscSFView(coneSF, PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr);
  /* Create supports and stratify sieve */
  ierr = DMMeshSymmetrize(*parallelDM);CHKERRQ(ierr);
  ierr = DMMeshStratify(*parallelDM);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
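/*
  The four-step pipeline in the header comment of DistributeMesh() reduces to
  a small reusable pattern. The fragment below restates it as a hedged sketch
  under stated assumptions: an existing point PetscSF sf, a serial
  PetscSection oldSec with integer values oldVals, and hypothetical output
  names (newSec, valSF, newVals) introduced only for illustration.
*/
PetscSection newSec;
PetscSF      valSF;
PetscInt    *remoteOff, *newVals, newSize;

ierr = PetscSectionCreate(comm, &newSec);CHKERRQ(ierr);
/* communicate sizes and offsets (distribute section) */
ierr = PetscSFDistributeSection(sf, oldSec, &remoteOff, newSec);CHKERRQ(ierr);
/* build an SF that moves the values themselves (create SF for values) */
ierr = PetscSFCreateSectionSF(sf, oldSec, remoteOff, newSec, &valSF);CHKERRQ(ierr);
/* communicate the values (distribute values) */
ierr = PetscSectionGetStorageSize(newSec, &newSize);CHKERRQ(ierr);
ierr = PetscMalloc(newSize * sizeof(PetscInt), &newVals);CHKERRQ(ierr);
ierr = PetscSFBcastBegin(valSF, MPIU_INT, oldVals, newVals);CHKERRQ(ierr);
ierr = PetscSFBcastEnd(valSF, MPIU_INT, oldVals, newVals);CHKERRQ(ierr);
ierr = PetscSFDestroy(&valSF);CHKERRQ(ierr);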
int main(int argc,char **argv)
{
  PetscErrorCode         ierr;
  PetscInt               nredundant1 = 5,nredundant2 = 2,i;
  ISLocalToGlobalMapping *ltog;
  PetscMPIInt            rank,size;
  DM                     packer;
  Vec                    global,local1,local2,redundant1,redundant2;
  PF                     pf;
  DM                     da1,da2,dmred1,dmred2;
  PetscScalar            *redundant1a,*redundant2a;
  PetscViewer            sviewer;
  PetscBool              gather_add = PETSC_FALSE;

  ierr = PetscInitialize(&argc,&argv,(char*)0,help);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);CHKERRQ(ierr);

  ierr = PetscOptionsGetBool(PETSC_NULL,"-gather_add",&gather_add,PETSC_NULL);CHKERRQ(ierr);

  ierr = DMCompositeCreate(PETSC_COMM_WORLD,&packer);CHKERRQ(ierr);

  ierr = DMRedundantCreate(PETSC_COMM_WORLD,0,nredundant1,&dmred1);CHKERRQ(ierr);
  ierr = DMCreateLocalVector(dmred1,&redundant1);CHKERRQ(ierr);
  ierr = DMCompositeAddDM(packer,dmred1);CHKERRQ(ierr);

  ierr = DMDACreate1d(PETSC_COMM_WORLD,DMDA_BOUNDARY_NONE,8,1,1,PETSC_NULL,&da1);CHKERRQ(ierr);
  ierr = DMCreateLocalVector(da1,&local1);CHKERRQ(ierr);
  ierr = DMCompositeAddDM(packer,da1);CHKERRQ(ierr);

  ierr = DMRedundantCreate(PETSC_COMM_WORLD,1%size,nredundant2,&dmred2);CHKERRQ(ierr);
  ierr = DMCreateLocalVector(dmred2,&redundant2);CHKERRQ(ierr);
  ierr = DMCompositeAddDM(packer,dmred2);CHKERRQ(ierr);

  ierr = DMDACreate1d(PETSC_COMM_WORLD,DMDA_BOUNDARY_NONE,6,1,1,PETSC_NULL,&da2);CHKERRQ(ierr);
  ierr = DMCreateLocalVector(da2,&local2);CHKERRQ(ierr);
  ierr = DMCompositeAddDM(packer,da2);CHKERRQ(ierr);

  ierr = DMCreateGlobalVector(packer,&global);CHKERRQ(ierr);

  ierr = PFCreate(PETSC_COMM_WORLD,1,1,&pf);CHKERRQ(ierr);
  ierr = PFSetType(pf,PFIDENTITY,PETSC_NULL);CHKERRQ(ierr);
  ierr = PFApplyVec(pf,PETSC_NULL,global);CHKERRQ(ierr);
  ierr = PFDestroy(&pf);CHKERRQ(ierr);
  ierr = VecView(global,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);

  ierr = DMCompositeScatter(packer,global,redundant1,local1,redundant2,local2);CHKERRQ(ierr);
  ierr = PetscViewerASCIISynchronizedAllow(PETSC_VIEWER_STDOUT_WORLD,PETSC_TRUE);CHKERRQ(ierr);
  ierr = PetscViewerASCIISynchronizedPrintf(PETSC_VIEWER_STDOUT_WORLD,"[%d] My part of redundant1 vector\n",rank);CHKERRQ(ierr);
  ierr = PetscViewerGetSingleton(PETSC_VIEWER_STDOUT_WORLD,&sviewer);CHKERRQ(ierr);
  ierr = VecView(redundant1,sviewer);CHKERRQ(ierr);
  ierr = PetscViewerRestoreSingleton(PETSC_VIEWER_STDOUT_WORLD,&sviewer);CHKERRQ(ierr);
  ierr = PetscViewerASCIISynchronizedAllow(PETSC_VIEWER_STDOUT_WORLD,PETSC_TRUE);CHKERRQ(ierr);
  ierr = PetscViewerASCIISynchronizedPrintf(PETSC_VIEWER_STDOUT_WORLD,"[%d] My part of da1 vector\n",rank);CHKERRQ(ierr);
  ierr = PetscViewerGetSingleton(PETSC_VIEWER_STDOUT_WORLD,&sviewer);CHKERRQ(ierr);
  ierr = VecView(local1,sviewer);CHKERRQ(ierr);
  ierr = PetscViewerRestoreSingleton(PETSC_VIEWER_STDOUT_WORLD,&sviewer);CHKERRQ(ierr);
  ierr = PetscViewerASCIISynchronizedAllow(PETSC_VIEWER_STDOUT_WORLD,PETSC_TRUE);CHKERRQ(ierr);
  ierr = PetscViewerASCIISynchronizedPrintf(PETSC_VIEWER_STDOUT_WORLD,"[%d] My part of redundant2 vector\n",rank);CHKERRQ(ierr);
  ierr = PetscViewerGetSingleton(PETSC_VIEWER_STDOUT_WORLD,&sviewer);CHKERRQ(ierr);
  ierr = VecView(redundant2,sviewer);CHKERRQ(ierr);
  ierr = PetscViewerRestoreSingleton(PETSC_VIEWER_STDOUT_WORLD,&sviewer);CHKERRQ(ierr);
  ierr = PetscViewerASCIISynchronizedAllow(PETSC_VIEWER_STDOUT_WORLD,PETSC_TRUE);CHKERRQ(ierr);
  ierr = PetscViewerASCIISynchronizedPrintf(PETSC_VIEWER_STDOUT_WORLD,"[%d] My part of da2 vector\n",rank);CHKERRQ(ierr);
  ierr = PetscViewerGetSingleton(PETSC_VIEWER_STDOUT_WORLD,&sviewer);CHKERRQ(ierr);
  ierr = VecView(local2,sviewer);CHKERRQ(ierr);
  ierr = PetscViewerRestoreSingleton(PETSC_VIEWER_STDOUT_WORLD,&sviewer);CHKERRQ(ierr);

  ierr = VecGetArray(redundant1,&redundant1a);CHKERRQ(ierr);
  ierr = VecGetArray(redundant2,&redundant2a);CHKERRQ(ierr);
  for (i=0; i<nredundant1; i++) redundant1a[i] = (rank+2)*i;
  for (i=0; i<nredundant2; i++) redundant2a[i] = (rank+10)*i;
  ierr = VecRestoreArray(redundant1,&redundant1a);CHKERRQ(ierr);
  ierr = VecRestoreArray(redundant2,&redundant2a);CHKERRQ(ierr);

  ierr = DMCompositeGather(packer,global,gather_add ? ADD_VALUES : INSERT_VALUES,redundant1,local1,redundant2,local2);CHKERRQ(ierr);
  ierr = VecView(global,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);

  /* get the global numbering for each subvector element */
  ierr = DMCompositeGetISLocalToGlobalMappings(packer,&ltog);CHKERRQ(ierr);

  ierr = PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"Local to global mapping of redundant1 vector\n");CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingView(ltog[0],PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"Local to global mapping of local1 vector\n");CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingView(ltog[1],PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"Local to global mapping of redundant2 vector\n");CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingView(ltog[2],PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"Local to global mapping of local2 vector\n");CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingView(ltog[3],PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);

  for (i=0; i<4; i++) {ierr = ISLocalToGlobalMappingDestroy(&ltog[i]);CHKERRQ(ierr);}
  ierr = PetscFree(ltog);CHKERRQ(ierr);

  ierr = DMDestroy(&da1);CHKERRQ(ierr);
  ierr = DMDestroy(&dmred1);CHKERRQ(ierr);
  ierr = DMDestroy(&dmred2);CHKERRQ(ierr);
  ierr = DMDestroy(&da2);CHKERRQ(ierr);
  ierr = VecDestroy(&redundant1);CHKERRQ(ierr);
  ierr = VecDestroy(&redundant2);CHKERRQ(ierr);
  ierr = VecDestroy(&local1);CHKERRQ(ierr);
  ierr = VecDestroy(&local2);CHKERRQ(ierr);
  ierr = VecDestroy(&global);CHKERRQ(ierr);
  ierr = DMDestroy(&packer);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return 0;
}
void PETSC_STDCALL islocaltoglobalmappingview_(ISLocalToGlobalMapping *mapping,PetscViewer *viewer,PetscErrorCode *ierr)
{
  PetscViewer v;

  PetscPatchDefaultViewers_Fortran(viewer,v);
  *ierr = ISLocalToGlobalMappingView(*mapping,v);
}
int main(int argc,char **argv)
{
  PetscErrorCode         ierr;
  PetscInt               indices[]  = {0,1,2,3,-1,-1,-1,-1,4,5,6,7};
  PetscInt               indices2[] = {0,1,2,3,4,5,-1,-1,-1,-1,-1,-1,6,7,8,9,10,11};
  ISLocalToGlobalMapping map;

  ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;

  ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_WORLD,1,12,indices,PETSC_COPY_VALUES,&map);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingView(map,NULL);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingSetBlockSize(map,2);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingView(map,NULL);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingSetBlockSize(map,4);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingView(map,NULL);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingSetBlockSize(map,2);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingView(map,NULL);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingSetBlockSize(map,1);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingView(map,NULL);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&map);CHKERRQ(ierr);

  ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_WORLD,1,18,indices2,PETSC_COPY_VALUES,&map);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingView(map,NULL);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingSetBlockSize(map,3);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingView(map,NULL);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingSetBlockSize(map,6);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingView(map,NULL);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingSetBlockSize(map,3);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingView(map,NULL);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingSetBlockSize(map,1);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingView(map,NULL);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&map);CHKERRQ(ierr);

  ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_WORLD,5,2,indices2,PETSC_COPY_VALUES,&map);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingView(map,NULL);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingSetBlockSize(map,2);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingView(map,NULL);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&map);CHKERRQ(ierr);

  ierr = PetscFinalize();
  return ierr;
}