Example 1
/*@
   ISConcatenate - Forms a new IS by locally concatenating the indices from an IS list without reordering.
 

   Collective on comm.

   Input Parameters:
+  comm    - communicator of the concatenated IS.
.  len     - size of islist array (nonnegative)
-  islist  - array of index sets



   Output Parameter:
.  isout   - The concatenated index set; empty, if len == 0.

   Notes: The semantics of calling this on comm imply that the comms of the members of islist also contain this rank.

   Level: intermediate

.seealso: ISDifference(), ISSum(), ISExpand()

   Concepts: index sets^concatenation
   Concepts: IS^concatenation

@*/
PetscErrorCode ISConcatenate(MPI_Comm comm, PetscInt len, const IS islist[], IS *isout)
{
  PetscErrorCode ierr;
  PetscInt i,n,N;
  const PetscInt *iidx;
  PetscInt *idx;

  PetscFunctionBegin;
  PetscValidPointer(islist,3);
#if defined(PETSC_USE_DEBUG)
  for(i = 0; i < len; ++i) {
    PetscValidHeaderSpecific(islist[i], IS_CLASSID, 3);
  }
#endif
  PetscValidPointer(isout, 4);
  if(!len) {
    ierr = ISCreateStride(comm, 0,0,0, isout); CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
  if(len < 0) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Negative array length: %D", len);
  N = 0; 
  for(i = 0; i < len; ++i) {
    ierr = ISGetLocalSize(islist[i], &n); CHKERRQ(ierr);
    N += n;
  }
  ierr = PetscMalloc(sizeof(PetscInt)*N, &idx); CHKERRQ(ierr);
  N = 0; 
  for(i = 0; i < len; ++i) {
    ierr = ISGetLocalSize(islist[i], &n); CHKERRQ(ierr);
    ierr = ISGetIndices(islist[i], &iidx); CHKERRQ(ierr);
    ierr = PetscMemcpy(idx+N,iidx, sizeof(PetscInt)*n); CHKERRQ(ierr);
    N += n;
  }
  ierr = ISCreateGeneral(comm, N, idx, PETSC_OWN_POINTER, isout); CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
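A minimal usage sketch for ISConcatenate(), not part of the original listing; the lengths and stride values below are made up for illustration.

#include <petscis.h>
int main(int argc,char **argv)
{
  IS             parts[2],all;
  PetscErrorCode ierr;

  ierr = PetscInitialize(&argc,&argv,NULL,NULL);if (ierr) return ierr;
  ierr = ISCreateStride(PETSC_COMM_WORLD,3,0,1,&parts[0]);CHKERRQ(ierr);   /* local indices {0,1,2} */
  ierr = ISCreateStride(PETSC_COMM_WORLD,3,10,1,&parts[1]);CHKERRQ(ierr);  /* local indices {10,11,12} */
  ierr = ISConcatenate(PETSC_COMM_WORLD,2,parts,&all);CHKERRQ(ierr);       /* each rank now holds {0,1,2,10,11,12} */
  ierr = ISView(all,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  ierr = ISDestroy(&parts[0]);CHKERRQ(ierr);
  ierr = ISDestroy(&parts[1]);CHKERRQ(ierr);
  ierr = ISDestroy(&all);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return ierr;
}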
Example 2
File: ao.c  Project: 00liujj/petsc
/*@
   AOApplicationToPetscIS - Maps an index set in the application-defined
   ordering to the PETSc ordering.

   Collective on AO and IS

   Input Parameters:
+  ao - the application ordering context
-  is - the index set; this is replaced with its mapped values

   Output Parameter:
.  is - the mapped index set

   Level: beginner

   Note:
   The index set cannot be of type stride or block

   Any integers in ia[] that are negative are left unchanged. This
   allows one to convert, for example, neighbor lists that use negative
   entries to indicate nonexistent neighbors due to boundary conditions, etc.

.keywords: application ordering, mapping

.seealso: AOCreateBasic(), AOView(), AOPetscToApplication(),
          AOPetscToApplicationIS(), AOApplicationToPetsc()
@*/
PetscErrorCode  AOApplicationToPetscIS(AO ao,IS is)
{
  PetscErrorCode ierr;
  PetscInt       n,*ia;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ao,AO_CLASSID,1);
  PetscValidHeaderSpecific(is,IS_CLASSID,2);
  ierr = ISToGeneral(is);CHKERRQ(ierr);
  /* we cheat because we know the is is general and that we can change the indices */
  ierr = ISGetIndices(is,(const PetscInt**)&ia);CHKERRQ(ierr);
  ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
  ierr = (*ao->ops->applicationtopetsc)(ao,n,ia);CHKERRQ(ierr);
  ierr = ISRestoreIndices(is,(const PetscInt**)&ia);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
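A hedged usage sketch, not from the original listing: one way AOApplicationToPetscIS() might be driven, with a made-up four-node ordering and AOCreateBasic() used to build the AO.

#include <petscao.h>
int main(int argc,char **argv)
{
  AO             ao;
  IS             is;
  const PetscInt app[]   = {3,2,1,0};   /* application numbering of 4 nodes */
  const PetscInt petsc[] = {0,1,2,3};   /* corresponding PETSc numbering */
  const PetscInt idx[]   = {3,1};       /* application indices to be mapped */
  PetscErrorCode ierr;

  ierr = PetscInitialize(&argc,&argv,NULL,NULL);if (ierr) return ierr;
  ierr = AOCreateBasic(PETSC_COMM_SELF,4,app,petsc,&ao);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PETSC_COMM_SELF,2,idx,PETSC_COPY_VALUES,&is);CHKERRQ(ierr);
  ierr = AOApplicationToPetscIS(ao,is);CHKERRQ(ierr);  /* is now holds {0,2} in the PETSc ordering */
  ierr = ISView(is,PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);
  ierr = ISDestroy(&is);CHKERRQ(ierr);
  ierr = AODestroy(&ao);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return ierr;
}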
Example 3
static PetscErrorCode ISView_Block(IS is, PetscViewer viewer)
{
  IS_Block       *sub = (IS_Block*)is->data;
  PetscErrorCode ierr;
  PetscInt       i,bs,n,*idx = sub->idx;
  PetscBool      iascii;

  PetscFunctionBegin;
  ierr = PetscLayoutGetBlockSize(is->map, &bs);CHKERRQ(ierr);
  ierr = PetscLayoutGetLocalSize(is->map, &n);CHKERRQ(ierr);
  n   /= bs;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    PetscViewerFormat fmt;

    ierr = PetscViewerGetFormat(viewer,&fmt);CHKERRQ(ierr);
    if (fmt == PETSC_VIEWER_ASCII_MATLAB) {
      IS             ist;
      const char     *name;
      const PetscInt *idx;
      PetscInt       n;

      ierr = PetscObjectGetName((PetscObject)is,&name);CHKERRQ(ierr);
      ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
      ierr = ISGetIndices(is,&idx);CHKERRQ(ierr);
      ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),n,idx,PETSC_USE_POINTER,&ist);CHKERRQ(ierr);
      ierr = PetscObjectSetName((PetscObject)ist,name);CHKERRQ(ierr);
      ierr = ISView(ist,viewer);CHKERRQ(ierr);
      ierr = ISDestroy(&ist);CHKERRQ(ierr);
      ierr = ISRestoreIndices(is,&idx);CHKERRQ(ierr);
    } else {
      ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
      if (is->isperm) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Block Index set is permutation\n");CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Block size %D\n",bs);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Number of block indices in set %D\n",n);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"The first indices of each block are\n");CHKERRQ(ierr);
      for (i=0; i<n; i++) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Block %D Index %D\n",i,idx[i]);CHKERRQ(ierr);
      }
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
Example 4
/*@
    ISPartitioningToNumbering - Takes an ISPartitioning and on each processor
    generates an IS that contains a new global node number for each index based
    on the partitioning.

    Collective on IS

    Input Parameter:
.   partitioning - a partitioning as generated by MatPartitioningApply()

    Output Parameter:
.   is - on each processor the index set that defines the global numbers
         (in the new numbering) for all the nodes currently (before the partitioning)
         on that processor

   Level: advanced

.seealso: MatPartitioningCreate(), AOCreateBasic(), ISPartitioningCount()

@*/
PetscErrorCode  ISPartitioningToNumbering(IS part,IS *is)
{
  MPI_Comm       comm;
  PetscInt       i,np,npt,n,*starts = NULL,*sums = NULL,*lsizes = NULL,*newi = NULL;
  const PetscInt *indices = NULL;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)part,&comm);CHKERRQ(ierr);

  /* count the number of partitions, i.e., virtual processors */
  ierr = ISGetLocalSize(part,&n);CHKERRQ(ierr);
  ierr = ISGetIndices(part,&indices);CHKERRQ(ierr);
  np   = 0;
  for (i=0; i<n; i++) np = PetscMax(np,indices[i]);
  ierr = MPI_Allreduce(&np,&npt,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
  np   = npt+1; /* so that it looks like a MPI_Comm_size output */

  /*
        lsizes - number of elements of each partition on this particular processor
        sums - total number of "previous" nodes for any particular partition
        starts - global number of first element in each partition on this processor
  */
  ierr = PetscMalloc3(np,&lsizes,np,&starts,np,&sums);CHKERRQ(ierr);
  ierr = PetscMemzero(lsizes,np*sizeof(PetscInt));CHKERRQ(ierr);
  for (i=0; i<n; i++) lsizes[indices[i]]++;
  ierr = MPI_Allreduce(lsizes,sums,np,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
  ierr = MPI_Scan(lsizes,starts,np,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
  for (i=0; i<np; i++) starts[i] -= lsizes[i];
  for (i=1; i<np; i++) {
    sums[i]   += sums[i-1];
    starts[i] += sums[i-1];
  }

  /*
      For each local index give it the new global number
  */
  ierr = PetscMalloc1(n,&newi);CHKERRQ(ierr);
  for (i=0; i<n; i++) newi[i] = starts[indices[i]]++;
  ierr = PetscFree3(lsizes,starts,sums);CHKERRQ(ierr);

  ierr = ISRestoreIndices(part,&indices);CHKERRQ(ierr);
  ierr = ISCreateGeneral(comm,n,newi,PETSC_OWN_POINTER,is);CHKERRQ(ierr);
  ierr = ISSetPermutation(*is);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
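A hedged usage sketch, not from the original listing: on a single process, a made-up partitioning IS whose i-th entry is the target partition of local element i is converted into a new global numbering.

#include <petscis.h>
int main(int argc,char **argv)
{
  IS             part,newnum;
  const PetscInt owner[] = {1,0,1,0};   /* element i is assigned to partition owner[i] */
  PetscErrorCode ierr;

  ierr = PetscInitialize(&argc,&argv,NULL,NULL);if (ierr) return ierr;
  ierr = ISCreateGeneral(PETSC_COMM_SELF,4,owner,PETSC_COPY_VALUES,&part);CHKERRQ(ierr);
  ierr = ISPartitioningToNumbering(part,&newnum);CHKERRQ(ierr);
  ierr = ISView(newnum,PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);  /* expected new numbers: 2 0 3 1 */
  ierr = ISDestroy(&part);CHKERRQ(ierr);
  ierr = ISDestroy(&newnum);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return ierr;
}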
Example 5
/*@
  VecISAXPY - Adds a reduced vector to the appropriate elements of a full-space vector. 
                  vfull[is[i]] += alpha*vreduced[i]

  Input Parameters:
+ vfull - the full-space vector
. is - the index set for the reduced space
. alpha - the scalar multiplier applied to the reduced-space vector
- vreduced - the reduced-space vector

  Output Parameter:
. vfull - the sum of the full-space vector and reduced-space vector

  Level: advanced

.seealso:  VecAXPY()
@*/
PetscErrorCode VecISAXPY(Vec vfull, IS is, PetscScalar alpha,Vec vreduced)
{
  PetscInt       nfull,nreduced;
  MPI_Comm       comm;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(vfull,VEC_CLASSID,1);
  PetscValidHeaderSpecific(vreduced,VEC_CLASSID,2);
  PetscValidHeaderSpecific(is,IS_CLASSID,3);
  ierr = VecGetSize(vfull,&nfull);CHKERRQ(ierr);
  ierr = VecGetSize(vreduced,&nreduced);CHKERRQ(ierr);

  if (nfull == nreduced) { /* Also takes care of masked vectors */
    ierr = VecAXPY(vfull,alpha,vreduced);CHKERRQ(ierr);
  } else {
    PetscScalar      *y;
    const PetscScalar *x;
    PetscInt          i,n,m,rstart;
    const PetscInt    *id;

    ierr = PetscObjectGetComm((PetscObject)vfull,&comm);CHKERRQ(ierr);
    ierr = VecGetArray(vfull,&y);CHKERRQ(ierr);
    ierr = VecGetArrayRead(vreduced,&x);CHKERRQ(ierr);
    ierr = ISGetIndices(is,&id);CHKERRQ(ierr);
    ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
    ierr = VecGetLocalSize(vreduced,&m);CHKERRQ(ierr);
    if (m != n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"IS local length not equal to Vec local length");
    ierr = VecGetOwnershipRange(vfull,&rstart,NULL);CHKERRQ(ierr);
    y -= rstart;
    if (alpha == 1.0) {
      for (i=0; i<n; i++) {
        y[id[i]] += x[i];
      }
    } else {
      for (i=0; i<n; i++) {
        y[id[i]] += alpha*x[i];
      }
    }
    y += rstart;
    ierr = ISRestoreIndices(is,&id);CHKERRQ(ierr);
    ierr = VecRestoreArray(vfull,&y);CHKERRQ(ierr);
    ierr = VecRestoreArrayRead(vreduced,&x);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
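A hedged usage sketch, not from the original listing: a 2-entry reduced vector is added into entries {1,3} of a 5-entry full vector, i.e. vfull[is[i]] += alpha*vreduced[i]; all sizes and values are made up.

#include <petscvec.h>
int main(int argc,char **argv)
{
  Vec            vfull,vred;
  IS             is;
  const PetscInt loc[] = {1,3};
  PetscErrorCode ierr;

  ierr = PetscInitialize(&argc,&argv,NULL,NULL);if (ierr) return ierr;
  ierr = VecCreateSeq(PETSC_COMM_SELF,5,&vfull);CHKERRQ(ierr);
  ierr = VecCreateSeq(PETSC_COMM_SELF,2,&vred);CHKERRQ(ierr);
  ierr = VecSet(vfull,1.0);CHKERRQ(ierr);
  ierr = VecSet(vred,10.0);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PETSC_COMM_SELF,2,loc,PETSC_COPY_VALUES,&is);CHKERRQ(ierr);
  ierr = VecISAXPY(vfull,is,2.0,vred);CHKERRQ(ierr);  /* entries 1 and 3 become 1 + 2*10 = 21 */
  ierr = VecView(vfull,PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);
  ierr = ISDestroy(&is);CHKERRQ(ierr);
  ierr = VecDestroy(&vfull);CHKERRQ(ierr);
  ierr = VecDestroy(&vred);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return ierr;
}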
Example 6
File: ex1.c  Project: petsc/petsc
static PetscErrorCode ISGetBlockGlobalIS(IS is, Vec vec, PetscInt bs, IS *isBlockGlobal)
{
  const PetscInt *idxin;
  PetscInt       *idxout, i, n, rstart;
  PetscLayout    map;
  PetscErrorCode ierr;

  PetscFunctionBegin;

  ierr = VecGetLayout(vec,&map);CHKERRQ(ierr);
  rstart = map->rstart / bs;
  ierr = ISGetLocalSize(is, &n);CHKERRQ(ierr);
  ierr = PetscMalloc1(n, &idxout);CHKERRQ(ierr);
  ierr = ISGetIndices(is, &idxin);CHKERRQ(ierr);
  for (i = 0; i < n; i++) idxout[i] = rstart + idxin[i];
  ierr = ISRestoreIndices(is, &idxin);CHKERRQ(ierr);
  ierr = ISCreateBlock(PetscObjectComm((PetscObject)vec),bs,n,idxout,PETSC_OWN_POINTER,isBlockGlobal);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
Example 7
static PetscErrorCode ISGatherTotal_Private(IS is)
{
    PetscErrorCode ierr;
    PetscInt       i,n,N;
    const PetscInt *lindices;
    MPI_Comm       comm;
    PetscMPIInt    rank,size,*sizes = NULL,*offsets = NULL,nn;

    PetscFunctionBegin;
    PetscValidHeaderSpecific(is,IS_CLASSID,1);

    ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
    ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
    ierr = PetscMalloc2(size,&sizes,size,&offsets);CHKERRQ(ierr);

    ierr = PetscMPIIntCast(n,&nn);CHKERRQ(ierr);
    ierr = MPI_Allgather(&nn,1,MPI_INT,sizes,1,MPI_INT,comm);CHKERRQ(ierr);
    offsets[0] = 0;
    for (i=1; i<size; ++i) offsets[i] = offsets[i-1] + sizes[i-1];
    N = offsets[size-1] + sizes[size-1];

    ierr = PetscMalloc1(N,&(is->total));CHKERRQ(ierr);
    ierr = ISGetIndices(is,&lindices);CHKERRQ(ierr);
    ierr = MPI_Allgatherv((void*)lindices,nn,MPIU_INT,is->total,sizes,offsets,MPIU_INT,comm);CHKERRQ(ierr);
    ierr = ISRestoreIndices(is,&lindices);CHKERRQ(ierr);
    is->local_offset = offsets[rank];
    ierr = PetscFree2(sizes,offsets);CHKERRQ(ierr);
    PetscFunctionReturn(0);
}
Example 8
/*@
    ISLocalToGlobalMappingCreateIS - Creates a mapping between a local (0 to n)
    ordering and a global parallel ordering.

    Not collective

    Input Parameter:
.   is - index set containing the global numbers for each local index

    Output Parameter:
.   mapping - new mapping data structure

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingDestroy(), ISLocalToGlobalMappingCreate()
@*/
PetscErrorCode PETSCVEC_DLLEXPORT ISLocalToGlobalMappingCreateIS(IS is,ISLocalToGlobalMapping *mapping)
{
  PetscErrorCode ierr;
  PetscInt       n;
  const PetscInt *indices;
  MPI_Comm       comm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(is,IS_COOKIE,1);
  PetscValidPointer(mapping,2);

  ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
  ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
  ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingCreate(comm,n,indices,mapping);CHKERRQ(ierr);
  ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
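A hedged usage sketch, not from the original listing, assuming an already-initialized PETSc program and a recent PETSc where ISCreateGeneral() takes a PetscCopyMode and the destroy routines take the address of the object:

  IS                     gis;
  ISLocalToGlobalMapping ltog;
  const PetscInt         globals[] = {10,11,15,20};  /* made-up global numbers of the local entries */
  PetscInt               lidx[2] = {0,3},gidx[2];

  ierr = ISCreateGeneral(PETSC_COMM_SELF,4,globals,PETSC_COPY_VALUES,&gis);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingCreateIS(gis,&ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApply(ltog,2,lidx,gidx);CHKERRQ(ierr);  /* gidx becomes {10,20} */
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  ierr = ISDestroy(&gis);CHKERRQ(ierr);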
Example 9
/*@
    ISPartitioningCount - Takes an ISPartitioning and determines the number of
    resulting elements on each (partition) process

    Collective on IS

    Input Parameters:
+   partitioning - a partitioning as generated by MatPartitioningApply()
-   len - length of the array count, this is the total number of partitions

    Output Parameter:
.   count - array of length len, filled with the number of elements assigned
         to each partition (see notes below)

   Level: advanced

    Notes:
        By default the number of partitions generated (and thus the length
        of count) is the size of the communicator associated with IS,
        but it can be set by MatPartitioningSetNParts(). The resulting array
        of lengths can, for instance, serve as input to PCBJacobiSetTotalBlocks().


.seealso: MatPartitioningCreate(), AOCreateBasic(), ISPartitioningToNumbering(),
        MatPartitioningSetNParts(), MatPartitioningApply()

@*/
PetscErrorCode  ISPartitioningCount(IS part,PetscInt len,PetscInt count[])
{
  MPI_Comm       comm;
  PetscInt       i,n,*lsizes;
  const PetscInt *indices;
  PetscErrorCode ierr;
  PetscMPIInt    npp;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)part,&comm);CHKERRQ(ierr);
  if (len == PETSC_DEFAULT) {
    PetscMPIInt size;
    ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
    len  = (PetscInt) size;
  }

  /* count the number of partitions */
  ierr = ISGetLocalSize(part,&n);CHKERRQ(ierr);
  ierr = ISGetIndices(part,&indices);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  {
    PetscInt np = 0,npt;
    for (i=0; i<n; i++) np = PetscMax(np,indices[i]);
    ierr = MPI_Allreduce(&np,&npt,1,MPIU_INT,MPI_MAX,comm);CHKERRQ(ierr);
    np   = npt+1; /* so that it looks like a MPI_Comm_size output */
    if (np > len) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Length of count array %D is less than number of partitions %D",len,np);
  }
#endif

  /*
        lsizes - number of elements of each partition on this particular processor
  */
  ierr = PetscMalloc(len*sizeof(PetscInt),&lsizes);CHKERRQ(ierr);
  ierr = PetscMemzero(lsizes,len*sizeof(PetscInt));CHKERRQ(ierr);
  for (i=0; i<n; i++) lsizes[indices[i]]++;
  ierr = ISRestoreIndices(part,&indices);CHKERRQ(ierr);
  ierr = PetscMPIIntCast(len,&npp);CHKERRQ(ierr);
  ierr = MPI_Allreduce(lsizes,count,npp,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
  ierr = PetscFree(lsizes);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
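A hedged usage sketch, not from the original listing: counting, on a single process, how many elements land in each of two partitions for a made-up partitioning IS of the kind produced by MatPartitioningApply().

#include <petscis.h>
int main(int argc,char **argv)
{
  IS             part;
  const PetscInt owner[] = {1,0,1,0,1};  /* element i goes to partition owner[i] */
  PetscInt       count[2];
  PetscErrorCode ierr;

  ierr = PetscInitialize(&argc,&argv,NULL,NULL);if (ierr) return ierr;
  ierr = ISCreateGeneral(PETSC_COMM_SELF,5,owner,PETSC_COPY_VALUES,&part);CHKERRQ(ierr);
  ierr = ISPartitioningCount(part,2,count);CHKERRQ(ierr);  /* count = {2,3} */
  ierr = PetscPrintf(PETSC_COMM_SELF,"partition sizes: %D %D\n",count[0],count[1]);CHKERRQ(ierr);
  ierr = ISDestroy(&part);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return ierr;
}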
Example 10
static PetscErrorCode VecSetUp_NestIS_Private(Vec V,PetscInt nb,IS is[])
{
  Vec_Nest       *ctx = (Vec_Nest*)V->data;
  PetscInt       i,offset,m,n,M,N;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (is) {                     /* Do some consistency checks and reference the is */
    offset = V->map->rstart;
    for (i=0; i<ctx->nb; i++) {
      ierr = ISGetSize(is[i],&M);CHKERRQ(ierr);
      ierr = VecGetSize(ctx->v[i],&N);CHKERRQ(ierr);
      if (M != N) SETERRQ3(PetscObjectComm((PetscObject)V),PETSC_ERR_ARG_INCOMP,"In slot %D, IS of size %D is not compatible with Vec of size %D",i,M,N);
      ierr = ISGetLocalSize(is[i],&m);CHKERRQ(ierr);
      ierr = VecGetLocalSize(ctx->v[i],&n);CHKERRQ(ierr);
      if (m != n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"In slot %D, IS of local size %D is not compatible with Vec of local size %D",i,m,n);
#if defined(PETSC_USE_DEBUG)
      {                         /* This test can be expensive */
        PetscInt  start;
        PetscBool contiguous;
        ierr = ISContiguousLocal(is[i],offset,offset+n,&start,&contiguous);CHKERRQ(ierr);
        if (!contiguous) SETERRQ1(PetscObjectComm((PetscObject)V),PETSC_ERR_SUP,"Index set %D is not contiguous with layout of matching vector",i);
        if (start != 0) SETERRQ1(PetscObjectComm((PetscObject)V),PETSC_ERR_SUP,"Index set %D introduces overlap or a hole",i);
      }
#endif
      ierr = PetscObjectReference((PetscObject)is[i]);CHKERRQ(ierr);
      ctx->is[i] = is[i];
      offset += n;
    }
  } else {                      /* Create a contiguous ISStride for each entry */
    offset = V->map->rstart;
    for (i=0; i<ctx->nb; i++) {
      PetscInt bs;
      ierr = VecGetLocalSize(ctx->v[i],&n);CHKERRQ(ierr);
      ierr = VecGetBlockSize(ctx->v[i],&bs);CHKERRQ(ierr);
      ierr = ISCreateStride(((PetscObject)ctx->v[i])->comm,n,offset,1,&ctx->is[i]);CHKERRQ(ierr);
      ierr = ISSetBlockSize(ctx->is[i],bs);CHKERRQ(ierr);
      offset += n;
    }
  }
  PetscFunctionReturn(0);
}
Example 11
/*@
    ISAllGather - Given an index set (IS) on each processor, generates a large
    index set (same on each processor) by concatenating together each
    processor's index set.

    Collective on IS

    Input Parameter:
.   is - the distributed index set

    Output Parameter:
.   isout - the concatenated index set (same on all processors)

    Notes:
    ISAllGather() is clearly not scalable for large index sets.

    The IS created on each processor must be created with a common
    communicator (e.g., PETSC_COMM_WORLD). If the index sets were created
    with PETSC_COMM_SELF, this routine will not work as expected, since
    each process will generate its own new IS that consists only of
    itself.

    The communicator for this new IS is PETSC_COMM_SELF

    Level: intermediate

    Concepts: gather^index sets
    Concepts: index sets^gathering to all processors
    Concepts: IS^gathering to all processors

.seealso: ISCreateGeneral(), ISCreateStride(), ISCreateBlock()
@*/
PetscErrorCode  ISAllGather(IS is,IS *isout)
{
  PetscErrorCode ierr;
  PetscInt       *indices,n,i,N,step,first;
  const PetscInt *lindices;
  MPI_Comm       comm;
  PetscMPIInt    size,*sizes = NULL,*offsets = NULL,nn;
  PetscBool      stride;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(is,IS_CLASSID,1);
  PetscValidPointer(isout,2);

  ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)is,ISSTRIDE,&stride);CHKERRQ(ierr);
  if (size == 1 && stride) { /* should handle parallel ISStride also */
    ierr = ISStrideGetInfo(is,&first,&step);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,n,first,step,isout);CHKERRQ(ierr);
  } else {
    ierr = PetscMalloc2(size,&sizes,size,&offsets);CHKERRQ(ierr);

    ierr       = PetscMPIIntCast(n,&nn);CHKERRQ(ierr);
    ierr       = MPI_Allgather(&nn,1,MPI_INT,sizes,1,MPI_INT,comm);CHKERRQ(ierr);
    offsets[0] = 0;
    for (i=1; i<size; i++) {
      PetscInt s = offsets[i-1] + sizes[i-1];
      ierr = PetscMPIIntCast(s,&offsets[i]);CHKERRQ(ierr);
    }
    N = offsets[size-1] + sizes[size-1];

    ierr = PetscMalloc1(N,&indices);CHKERRQ(ierr);
    ierr = ISGetIndices(is,&lindices);CHKERRQ(ierr);
    ierr = MPI_Allgatherv((void*)lindices,nn,MPIU_INT,indices,sizes,offsets,MPIU_INT,comm);CHKERRQ(ierr);
    ierr = ISRestoreIndices(is,&lindices);CHKERRQ(ierr);
    ierr = PetscFree2(sizes,offsets);CHKERRQ(ierr);

    ierr = ISCreateGeneral(PETSC_COMM_SELF,N,indices,PETSC_OWN_POINTER,isout);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
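A hedged usage sketch, not from the original listing, assuming an already-initialized parallel PETSc program; each rank contributes a made-up two-entry stride IS and receives the same gathered sequential IS:

  IS          islocal,isall;
  PetscMPIInt rank;

  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_WORLD,2,2*rank,1,&islocal);CHKERRQ(ierr);  /* rank r owns {2r,2r+1} */
  ierr = ISAllGather(islocal,&isall);CHKERRQ(ierr);  /* on every rank: {0,1,...,2*size-1} on PETSC_COMM_SELF */
  ierr = ISDestroy(&islocal);CHKERRQ(ierr);
  ierr = ISDestroy(&isall);CHKERRQ(ierr);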
Example 12
File: ex2.c  Project: 00liujj/petsc
int main(int argc,char **argv)
{
  PetscInt       i,n,start,stride;
  const PetscInt *ii;
  IS             is;
  PetscBool      flg;
  PetscErrorCode ierr;

  ierr = PetscInitialize(&argc,&argv,(char*)0,help);CHKERRQ(ierr);

  /*
     Test IS of size 0
  */
  ierr = ISCreateStride(PETSC_COMM_SELF,0,0,2,&is);CHKERRQ(ierr);
  ierr = ISGetSize(is,&n);CHKERRQ(ierr);
  if (n != 0) SETERRQ(PETSC_COMM_SELF,1,"ISCreateStride");
  ierr = ISStrideGetInfo(is,&start,&stride);CHKERRQ(ierr);
  if (start != 0) SETERRQ(PETSC_COMM_SELF,1,"ISStrideGetInfo");
  if (stride != 2) SETERRQ(PETSC_COMM_SELF,1,"ISStrideGetInfo");
  ierr = PetscObjectTypeCompare((PetscObject)is,ISSTRIDE,&flg);CHKERRQ(ierr);
  if (!flg) SETERRQ(PETSC_COMM_SELF,1,"ISStride");
  ierr = ISGetIndices(is,&ii);CHKERRQ(ierr);
  ierr = ISRestoreIndices(is,&ii);CHKERRQ(ierr);
  ierr = ISDestroy(&is);CHKERRQ(ierr);

  /*
     Test ISGetIndices()
  */
  ierr = ISCreateStride(PETSC_COMM_SELF,10000,-8,3,&is);CHKERRQ(ierr);
  ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
  ierr = ISGetIndices(is,&ii);CHKERRQ(ierr);
  for (i=0; i<10000; i++) {
    if (ii[i] != -8 + 3*i) SETERRQ(PETSC_COMM_SELF,1,"ISGetIndices");
  }
  ierr = ISRestoreIndices(is,&ii);CHKERRQ(ierr);
  ierr = ISDestroy(&is);CHKERRQ(ierr);

  ierr = PetscFinalize();
  return 0;
}
Example 13
/*@
    ISComplement - Given an index set (IS), generates the complement index set,
       that is, all indices that are NOT in the given set.

    Collective on IS

    Input Parameters:
+   is - the index set
.   nmin - the first index desired in the local part of the complement
-   nmax - the largest index desired in the local part of the complement (note that all indices in is must be greater or equal to nmin and less than nmax)

    Output Parameter:
.   isout - the complement

    Notes:  The communicator for this new IS is the same as for the input IS

      For a parallel IS, this will generate the local part of the complement on each process

      To generate the entire complement (on each process) of a parallel IS, first call ISAllGather() and then
    call this routine.

    Level: intermediate

    Concepts: gather^index sets
    Concepts: index sets^gathering to all processors
    Concepts: IS^gathering to all processors

.seealso: ISCreateGeneral(), ISCreateStride(), ISCreateBlock(), ISAllGather()
@*/
PetscErrorCode  ISComplement(IS is,PetscInt nmin,PetscInt nmax,IS *isout)
{
  PetscErrorCode ierr;
  const PetscInt *indices;
  PetscInt       n,i,j,unique,cnt,*nindices;
  PetscBool      sorted;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(is,IS_CLASSID,1);
  PetscValidPointer(isout,4);
  if (nmin < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"nmin %D cannot be negative",nmin);
  if (nmin > nmax) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"nmin %D cannot be greater than nmax %D",nmin,nmax);
  ierr = ISSorted(is,&sorted);CHKERRQ(ierr);
  if (!sorted) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Index set must be sorted");

  ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
  ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  for (i=0; i<n; i++) {
    if (indices[i] <  nmin) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index %D's value %D is smaller than minimum given %D",i,indices[i],nmin);
    if (indices[i] >= nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index %D's value %D is larger than maximum given %D",i,indices[i],nmax);
  }
#endif
  /* Count number of unique entries */
  unique = (n>0);
  for (i=0; i<n-1; i++) {
    if (indices[i+1] != indices[i]) unique++;
  }
  ierr = PetscMalloc1(nmax-nmin-unique,&nindices);CHKERRQ(ierr);
  cnt  = 0;
  for (i=nmin,j=0; i<nmax; i++) {
    if (j<n && i==indices[j]) do { j++; } while (j<n && i==indices[j]);
    else nindices[cnt++] = i;
  }
  if (cnt != nmax-nmin-unique) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Number of entries found in complement %D does not match expected %D",cnt,nmax-nmin-unique);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),cnt,nindices,PETSC_OWN_POINTER,isout);CHKERRQ(ierr);
  ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
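A hedged usage sketch, not from the original listing, assuming an already-initialized PETSc program: the complement of a sorted, made-up index set within the range [0,6).

  IS             is,comp;
  const PetscInt idx[] = {1,3,4};

  ierr = ISCreateGeneral(PETSC_COMM_SELF,3,idx,PETSC_COPY_VALUES,&is);CHKERRQ(ierr);
  ierr = ISComplement(is,0,6,&comp);CHKERRQ(ierr);  /* comp = {0,2,5} */
  ierr = ISDestroy(&is);CHKERRQ(ierr);
  ierr = ISDestroy(&comp);CHKERRQ(ierr);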
Example 14
/* Compose an IS with an ISLocalToGlobalMapping to map from IS source indices to global indices */
static PetscErrorCode ISL2GCompose(IS is,ISLocalToGlobalMapping ltog,ISLocalToGlobalMapping *cltog)
{
  PetscErrorCode ierr;
  const PetscInt *idx;
  PetscInt       m,*idxm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(is,IS_CLASSID,1);
  PetscValidHeaderSpecific(ltog,IS_LTOGM_CLASSID,2);
  PetscValidPointer(cltog,3);
  ierr = ISGetLocalSize(is,&m);CHKERRQ(ierr);
  ierr = ISGetIndices(is,&idx);CHKERRQ(ierr);
  ierr = PetscMalloc1(m,&idxm);CHKERRQ(ierr);
  if (ltog) {
    ierr = ISLocalToGlobalMappingApply(ltog,m,idx,idxm);CHKERRQ(ierr);
  } else {
    ierr = PetscMemcpy(idxm,idx,m*sizeof(PetscInt));CHKERRQ(ierr);
  }
  ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)is),m,idxm,PETSC_OWN_POINTER,cltog);CHKERRQ(ierr);
  ierr = ISRestoreIndices(is,&idx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
Example 15
static PetscErrorCode ISToGeneral_Block(IS inis)
{
  IS_Block       *sub   = (IS_Block*)inis->data;
  PetscInt       bs,n;
  const PetscInt *idx;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = ISGetBlockSize(inis,&bs);CHKERRQ(ierr);
  ierr = ISGetLocalSize(inis,&n);CHKERRQ(ierr);
  ierr = ISGetIndices(inis,&idx);CHKERRQ(ierr);
  if (bs == 1) {
    PetscCopyMode mode = sub->allocated ? PETSC_OWN_POINTER : PETSC_USE_POINTER;
    sub->allocated = PETSC_FALSE; /* prevent deallocation when changing the subtype*/
    ierr = ISSetType(inis,ISGENERAL);CHKERRQ(ierr);
    ierr = ISGeneralSetIndices(inis,n,idx,mode);CHKERRQ(ierr);
  } else {
    ierr = ISSetType(inis,ISGENERAL);CHKERRQ(ierr);
    ierr = ISGeneralSetIndices(inis,n,idx,PETSC_OWN_POINTER);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
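A hedged usage sketch, not from the original listing, assuming an already-initialized PETSc program: converting a block IS to a general IS goes through ISToGeneral(), which dispatches to ISToGeneral_Block() above.

  IS             is;
  const PetscInt blocks[] = {0,3};  /* block indices; with bs = 2 these expand to {0,1,6,7} */

  ierr = ISCreateBlock(PETSC_COMM_SELF,2,2,blocks,PETSC_COPY_VALUES,&is);CHKERRQ(ierr);
  ierr = ISToGeneral(is);CHKERRQ(ierr);  /* now a general IS holding {0,1,6,7} */
  ierr = ISDestroy(&is);CHKERRQ(ierr);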
Example 16
PetscErrorCode DMLabelPermute(DMLabel label, IS permutation, DMLabel *labelNew)
{
  const PetscInt *perm;
  PetscInt        numValues, numPoints, v, q;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = DMLabelMakeAllValid_Private(label);CHKERRQ(ierr);
  ierr = DMLabelDuplicate(label, labelNew);CHKERRQ(ierr);
  ierr = DMLabelGetNumValues(*labelNew, &numValues);CHKERRQ(ierr);
  ierr = ISGetLocalSize(permutation, &numPoints);CHKERRQ(ierr);
  ierr = ISGetIndices(permutation, &perm);CHKERRQ(ierr);
  for (v = 0; v < numValues; ++v) {
    const PetscInt size   = (*labelNew)->stratumSizes[v];
    const PetscInt *points;
    PetscInt *pointsNew;

    ierr = ISGetIndices((*labelNew)->points[v],&points);CHKERRQ(ierr);
    ierr = PetscMalloc1(size,&pointsNew);CHKERRQ(ierr);
    for (q = 0; q < size; ++q) {
      const PetscInt point = points[q];

      if ((point < 0) || (point >= numPoints)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Label point %D is not in [0, %D) for the remapping", point, numPoints);
      pointsNew[q] = perm[point];
    }
    ierr = ISRestoreIndices((*labelNew)->points[v],&points);CHKERRQ(ierr);
    ierr = PetscSortInt(size, pointsNew);CHKERRQ(ierr);
    ierr = ISDestroy(&((*labelNew)->points[v]));CHKERRQ(ierr);
    ierr = ISCreateGeneral(PETSC_COMM_SELF,size,pointsNew,PETSC_OWN_POINTER,&((*labelNew)->points[v]));CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) ((*labelNew)->points[v]), "indices");CHKERRQ(ierr);
  }
  ierr = ISRestoreIndices(permutation, &perm);CHKERRQ(ierr);
  if (label->bt) {
    ierr = PetscBTDestroy(&label->bt);CHKERRQ(ierr);
    ierr = DMLabelCreateIndex(label, label->pStart, label->pEnd);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
Example 17
/*@
   ISGetNonlocalIS - Gather all nonlocal indices for this IS and present
                     them as another sequential index set.


   Collective on IS

   Input Parameter:
.  is - the index set

   Output Parameter:
.  complement - sequential IS with indices identical to the result of
                ISGetNonlocalIndices()

   Level: intermediate

   Notes: complement represents the result of ISGetNonlocalIndices as an IS.
          Therefore scalability issues similar to ISGetNonlocalIndices apply.
          The resulting IS must be restored using ISRestoreNonlocalIS().

   Concepts: index sets^getting nonlocal indices
.seealso: ISGetNonlocalIndices(), ISRestoreNonlocalIndices(),  ISAllGather(), ISGetSize()
@*/
PetscErrorCode  ISGetNonlocalIS(IS is, IS *complement)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(is,IS_CLASSID,1);
  PetscValidPointer(complement,2);
  /* Check if the complement exists already. */
  if (is->complement) {
    *complement = is->complement;
    ierr = PetscObjectReference((PetscObject)(is->complement));CHKERRQ(ierr);
  } else {
    PetscInt       N, n;
    const PetscInt *idx;
    ierr = ISGetSize(is, &N);CHKERRQ(ierr);
    ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
    ierr = ISGetNonlocalIndices(is, &idx);CHKERRQ(ierr);
    ierr = ISCreateGeneral(PETSC_COMM_SELF, N-n,idx, PETSC_USE_POINTER, &(is->complement));CHKERRQ(ierr);
    ierr = PetscObjectReference((PetscObject)is->complement);CHKERRQ(ierr);
    *complement = is->complement;
  }
  PetscFunctionReturn(0);
}
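A hedged usage sketch, not from the original listing, assuming an already-initialized parallel PETSc program; each rank contributes two made-up indices and then views the indices it does not own:

  IS          is,nonlocal;
  PetscMPIInt rank;
  PetscInt    mine[2];

  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
  mine[0] = 2*rank; mine[1] = 2*rank+1;
  ierr = ISCreateGeneral(PETSC_COMM_WORLD,2,mine,PETSC_COPY_VALUES,&is);CHKERRQ(ierr);
  ierr = ISGetNonlocalIS(is,&nonlocal);CHKERRQ(ierr);  /* the other ranks' indices, ordered by rank */
  ierr = ISView(nonlocal,PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);
  ierr = ISRestoreNonlocalIS(is,&nonlocal);CHKERRQ(ierr);
  ierr = ISDestroy(&is);CHKERRQ(ierr);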
Example 18
/*@C
   ISGetNonlocalIndices - Retrieve an array of indices from remote processors
                       in this communicator.

   Collective on IS

   Input Parameter:
.  is - the index set

   Output Parameter:
.  indices - indices with rank 0 indices first, and so on,  omitting
             the current rank.  Total number of indices is the difference
             total and local, obtained with ISGetSize() and ISGetLocalSize(),
             respectively.

   Level: intermediate

   Notes: restore the indices using ISRestoreNonlocalIndices().
          The same scalability considerations as those for ISGetTotalIndices
          apply here.

   Concepts: index sets^getting nonlocal indices
.seealso: ISGetTotalIndices(), ISRestoreNonlocalIndices(), ISGetSize(), ISGetLocalSize().
@*/
PetscErrorCode  ISGetNonlocalIndices(IS is, const PetscInt *indices[])
{
  PetscErrorCode ierr;
  PetscMPIInt    size;
  PetscInt       n, N;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(is,IS_CLASSID,1);
  PetscValidPointer(indices,2);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)is), &size);CHKERRQ(ierr);
  if (size == 1) *indices = NULL;
  else {
    if (!is->total) {
      ierr = ISGatherTotal_Private(is);CHKERRQ(ierr);
    }
    ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
    ierr = ISGetSize(is,&N);CHKERRQ(ierr);
    ierr = PetscMalloc(sizeof(PetscInt)*(N-n), &(is->nonlocal));CHKERRQ(ierr);
    ierr = PetscMemcpy(is->nonlocal, is->total, sizeof(PetscInt)*is->local_offset);CHKERRQ(ierr);
    ierr = PetscMemcpy(is->nonlocal+is->local_offset, is->total+is->local_offset+n, sizeof(PetscInt)*(N - is->local_offset - n));CHKERRQ(ierr);
    *indices = is->nonlocal;
  }
  PetscFunctionReturn(0);
}
Example 19
/*@
   VecISSet - Sets the elements of a vector, specified by an index set, to a constant

   Input Parameters:
+  V - the vector
.  S - the locations in the vector
-  c - the constant

.seealso: VecSet()

   Level: advanced
@*/
PetscErrorCode VecISSet(Vec V,IS S, PetscScalar c)
{
  PetscErrorCode ierr;
  PetscInt       nloc,low,high,i;
  const PetscInt *s;
  PetscScalar    *v;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(V,VEC_CLASSID,1);
  PetscValidHeaderSpecific(S,IS_CLASSID,2);
  PetscValidType(V,1);
  PetscCheckSameComm(V,1,S,2);

  ierr = VecGetOwnershipRange(V, &low, &high);CHKERRQ(ierr);
  ierr = ISGetLocalSize(S,&nloc);CHKERRQ(ierr);
  ierr = ISGetIndices(S, &s);CHKERRQ(ierr);
  ierr = VecGetArray(V,&v);CHKERRQ(ierr);
  for (i=0; i<nloc; i++){
    v[s[i]-low] = c;
  }
  ierr = ISRestoreIndices(S, &s);CHKERRQ(ierr);
  ierr = VecRestoreArray(V,&v);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
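A hedged usage sketch, not from the original listing, assuming an already-initialized PETSc program: zeroing the entries of a vector selected by a made-up index set.

  Vec            v;
  IS             rows;
  const PetscInt loc[] = {0,2,4};

  ierr = VecCreateSeq(PETSC_COMM_SELF,6,&v);CHKERRQ(ierr);
  ierr = VecSet(v,1.0);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PETSC_COMM_SELF,3,loc,PETSC_COPY_VALUES,&rows);CHKERRQ(ierr);
  ierr = VecISSet(v,rows,0.0);CHKERRQ(ierr);  /* entries 0, 2 and 4 are set to 0 */
  ierr = ISDestroy(&rows);CHKERRQ(ierr);
  ierr = VecDestroy(&v);CHKERRQ(ierr);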
Example 20
PetscErrorCode MatPartitioningHierarchical_AssembleSubdomain(Mat adj,IS destination,Mat *sadj, ISLocalToGlobalMapping *mapping)
{
  IS              irows,icols;
  PetscInt        irows_ln;
  PetscMPIInt     rank;
  const PetscInt *irows_indices;
  MPI_Comm        comm;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)adj,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  /* figure out where data comes from  */
  ierr = ISBuildTwoSided(destination,NULL,&irows);CHKERRQ(ierr);
  ierr = ISDuplicate(irows,&icols);CHKERRQ(ierr);
  ierr = ISGetLocalSize(irows,&irows_ln);CHKERRQ(ierr);
  ierr = ISGetIndices(irows,&irows_indices);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingCreate(comm,1,irows_ln,irows_indices,PETSC_COPY_VALUES,mapping);CHKERRQ(ierr);
  ierr = ISRestoreIndices(irows,&irows_indices);CHKERRQ(ierr);
  ierr = MatGetSubMatrices(adj,1,&irows,&icols,MAT_INITIAL_MATRIX,&sadj);CHKERRQ(ierr);
  ierr = ISDestroy(&irows);CHKERRQ(ierr);
  ierr = ISDestroy(&icols);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
Example 21
/*@
    ISLocalToGlobalMappingApplyIS - Creates from an IS in the local numbering
    a new index set using the global numbering defined in an ISLocalToGlobalMapping
    context.

    Not collective

    Input Parameters:
+   mapping - mapping between local and global numbering
-   is - index set in local numbering

    Output Parameters:
.   newis - index set in global numbering

    Level: advanced

    Concepts: mapping^local to global

.seealso: ISLocalToGlobalMappingApply(), ISLocalToGlobalMappingCreate(),
          ISLocalToGlobalMappingDestroy(), ISGlobalToLocalMappingApply()
@*/
PetscErrorCode PETSCVEC_DLLEXPORT ISLocalToGlobalMappingApplyIS(ISLocalToGlobalMapping mapping,IS is,IS *newis)
{
  PetscErrorCode ierr;
  PetscInt       n,i,*idxmap,*idxout,Nmax = mapping->n;
  const PetscInt *idxin;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(mapping,IS_LTOGM_COOKIE,1);
  PetscValidHeaderSpecific(is,IS_COOKIE,2);
  PetscValidPointer(newis,3);

  ierr   = ISGetLocalSize(is,&n);CHKERRQ(ierr);
  ierr   = ISGetIndices(is,&idxin);CHKERRQ(ierr);
  idxmap = mapping->indices;
  
  ierr = PetscMalloc(n*sizeof(PetscInt),&idxout);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    if (idxin[i] >= Nmax) SETERRQ3(PETSC_ERR_ARG_OUTOFRANGE,"Local index %d too large %d (max) at %d",idxin[i],Nmax-1,i);
    idxout[i] = idxmap[idxin[i]];
  }
  ierr = ISRestoreIndices(is,&idxin);CHKERRQ(ierr);
  ierr = ISCreateGeneralNC(PETSC_COMM_SELF,n,idxout,newis);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
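A hedged usage sketch, not from the original listing, assuming an already-initialized PETSc program and an existing mapping ltog (for instance from ISLocalToGlobalMappingCreateIS() above): a local-numbering IS is renumbered into the global numbering.

  IS             islocal,isglobal;
  const PetscInt lidx[] = {0,2};  /* made-up local indices */

  ierr = ISCreateGeneral(PETSC_COMM_SELF,2,lidx,PETSC_COPY_VALUES,&islocal);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApplyIS(ltog,islocal,&isglobal);CHKERRQ(ierr);
  ierr = ISDestroy(&islocal);CHKERRQ(ierr);
  ierr = ISDestroy(&isglobal);CHKERRQ(ierr);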
Example 22
PetscErrorCode ISView_General_HDF5(IS is, PetscViewer viewer)
{
  hid_t           filespace;  /* file dataspace identifier */
  hid_t           chunkspace; /* chunk dataset property identifier */
  hid_t           plist_id;   /* property list identifier */
  hid_t           dset_id;    /* dataset identifier */
  hid_t           memspace;   /* memory dataspace identifier */
  hid_t           inttype;    /* int type (H5T_NATIVE_INT or H5T_NATIVE_LLONG) */
  hid_t           file_id, group;
  herr_t          status;
  hsize_t         dim, maxDims[3], dims[3], chunkDims[3], count[3],offset[3];
  PetscInt        bs, N, n, timestep, low;
  const PetscInt *ind;
  const char     *isname;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = ISGetBlockSize(is,&bs);CHKERRQ(ierr);
  ierr = PetscViewerHDF5OpenGroup(viewer, &file_id, &group);CHKERRQ(ierr);
  ierr = PetscViewerHDF5GetTimestep(viewer, &timestep);CHKERRQ(ierr);

  /* Create the dataspace for the dataset.
   *
   * dims - holds the current dimensions of the dataset
   *
   * maxDims - holds the maximum dimensions of the dataset (unlimited
   * for the number of time steps with the current dimensions for the
   * other dimensions; so only additional time steps can be added).
   *
   * chunkDims - holds the size of a single time step (required to
   * permit extending dataset).
   */
  dim = 0;
  if (timestep >= 0) {
    dims[dim]      = timestep+1;
    maxDims[dim]   = H5S_UNLIMITED;
    chunkDims[dim] = 1;
    ++dim;
  }
  ierr = ISGetSize(is, &N);CHKERRQ(ierr);
  ierr = ISGetLocalSize(is, &n);CHKERRQ(ierr);
  ierr = PetscHDF5IntCast(N/bs,dims + dim);CHKERRQ(ierr);

  maxDims[dim]   = dims[dim];
  chunkDims[dim] = dims[dim];
  ++dim;
  if (bs >= 1) {
    dims[dim]      = bs;
    maxDims[dim]   = dims[dim];
    chunkDims[dim] = dims[dim];
    ++dim;
  }
  filespace = H5Screate_simple(dim, dims, maxDims);
  if (filespace == -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Cannot H5Screate_simple()");

#if defined(PETSC_USE_64BIT_INDICES)
  inttype = H5T_NATIVE_LLONG;
#else
  inttype = H5T_NATIVE_INT;
#endif

  /* Create the dataset with default properties and close filespace */
  ierr = PetscObjectGetName((PetscObject) is, &isname);CHKERRQ(ierr);
  if (!H5Lexists(group, isname, H5P_DEFAULT)) {
    /* Create chunk */
    chunkspace = H5Pcreate(H5P_DATASET_CREATE);
    if (chunkspace == -1) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Cannot H5Pcreate()");
    status = H5Pset_chunk(chunkspace, dim, chunkDims);CHKERRQ(status);

#if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800)
    dset_id = H5Dcreate2(group, isname, inttype, filespace, H5P_DEFAULT, chunkspace, H5P_DEFAULT);
#else
    dset_id = H5Dcreate(group, isname, inttype, filespace, H5P_DEFAULT);
#endif
    if (dset_id == -1) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Cannot H5Dcreate2()");
    status = H5Pclose(chunkspace);CHKERRQ(status);
  } else {
    dset_id = H5Dopen2(group, isname, H5P_DEFAULT);
    status  = H5Dset_extent(dset_id, dims);CHKERRQ(status);
  }
  status = H5Sclose(filespace);CHKERRQ(status);

  /* Each process defines a dataset and writes it to the hyperslab in the file */
  dim = 0;
  if (timestep >= 0) {
    count[dim] = 1;
    ++dim;
  }
  ierr = PetscHDF5IntCast(n/bs,count + dim);CHKERRQ(ierr);
  ++dim;
  if (bs >= 1) {
    count[dim] = bs;
    ++dim;
  }
  if (n > 0) {
    memspace = H5Screate_simple(dim, count, NULL);
    if (memspace == -1) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Cannot H5Screate_simple()");
  } else {
    /* Can't create dataspace with zero for any dimension, so create null dataspace. */
    memspace = H5Screate(H5S_NULL);
    if (memspace == -1) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Cannot H5Screate()");
  }

  /* Select hyperslab in the file */
  ierr = PetscLayoutGetRange(is->map, &low, NULL);CHKERRQ(ierr);
  dim  = 0;
  if (timestep >= 0) {
    offset[dim] = timestep;
    ++dim;
  }
  ierr = PetscHDF5IntCast(low/bs,offset + dim);CHKERRQ(ierr);
  ++dim;
  if (bs >= 1) {
    offset[dim] = 0;
    ++dim;
  }
  if (n > 0) {
    filespace = H5Dget_space(dset_id);
    if (filespace == -1) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Cannot H5Dget_space()");
    status = H5Sselect_hyperslab(filespace, H5S_SELECT_SET, offset, NULL, count, NULL);CHKERRQ(status);
  } else {
    /* Create null filespace to match null memspace. */
    filespace = H5Screate(H5S_NULL);
    if (filespace == -1) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Cannot H5Screate(H5S_NULL)");
  }

  /* Create property list for collective dataset write */
  plist_id = H5Pcreate(H5P_DATASET_XFER);
  if (plist_id == -1) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_LIB, "Cannot H5Pcreate()");
#if defined(PETSC_HAVE_H5PSET_FAPL_MPIO)
  status = H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_COLLECTIVE);CHKERRQ(status);
#endif
  /* To write dataset independently use H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_INDEPENDENT) */

  ierr   = ISGetIndices(is, &ind);CHKERRQ(ierr);
  status = H5Dwrite(dset_id, inttype, memspace, filespace, plist_id, ind);CHKERRQ(status);
  status = H5Fflush(file_id, H5F_SCOPE_GLOBAL);CHKERRQ(status);
  ierr   = ISRestoreIndices(is, &ind);CHKERRQ(ierr);

  /* Close/release resources */
  if (group != file_id) {status = H5Gclose(group);CHKERRQ(status);}
  status = H5Pclose(plist_id);CHKERRQ(status);
  status = H5Sclose(filespace);CHKERRQ(status);
  status = H5Sclose(memspace);CHKERRQ(status);
  status = H5Dclose(dset_id);CHKERRQ(status);
  ierr = PetscInfo1(is, "Wrote IS object with name %s\n", isname);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
Example 23
/*@
  DMPlexGetOrdering - Calculate a reordering of the mesh

  Collective on DM

  Input Parameters:
+ dm - The DMPlex object
. otype - type of reordering, one of the following:
$     MATORDERINGNATURAL - Natural
$     MATORDERINGND - Nested Dissection
$     MATORDERING1WD - One-way Dissection
$     MATORDERINGRCM - Reverse Cuthill-McKee
$     MATORDERINGQMD - Quotient Minimum Degree
- label - [Optional] Label used to segregate ordering into sets, or NULL


  Output Parameter:
. perm - The point permutation as an IS, perm[old point number] = new point number

  Note: The label is used to group sets of points together by label value. This makes it easy to reorder a mesh which
  has different types of cells, and then loop over each set of reordered cells for assembly.

  Level: intermediate

.keywords: mesh
.seealso: MatGetOrdering()
@*/
PetscErrorCode DMPlexGetOrdering(DM dm, MatOrderingType otype, DMLabel label, IS *perm)
{
  PetscInt       numCells = 0;
  PetscInt      *start = NULL, *adjacency = NULL, *cperm, *clperm, *invclperm, *mask, *xls, pStart, pEnd, c, i;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(perm, 3);
  ierr = DMPlexCreateNeighborCSR(dm, 0, &numCells, &start, &adjacency);CHKERRQ(ierr);
  ierr = PetscMalloc3(numCells,&cperm,numCells,&mask,numCells*2,&xls);CHKERRQ(ierr);
  if (numCells) {
    /* Shift for Fortran numbering */
    for (i = 0; i < start[numCells]; ++i) ++adjacency[i];
    for (i = 0; i <= numCells; ++i)       ++start[i];
    ierr = SPARSEPACKgenrcm(&numCells, start, adjacency, cperm, mask, xls);CHKERRQ(ierr);
  }
  ierr = PetscFree(start);CHKERRQ(ierr);
  ierr = PetscFree(adjacency);CHKERRQ(ierr);
  /* Shift for Fortran numbering */
  for (c = 0; c < numCells; ++c) --cperm[c];
  /* Segregate */
  if (label) {
    IS              valueIS;
    const PetscInt *values;
    PetscInt        numValues, numPoints = 0;
    PetscInt       *sperm, *vsize, *voff, v;

    ierr = DMLabelGetValueIS(label, &valueIS);CHKERRQ(ierr);
    ierr = ISSort(valueIS);CHKERRQ(ierr);
    ierr = ISGetLocalSize(valueIS, &numValues);CHKERRQ(ierr);
    ierr = ISGetIndices(valueIS, &values);CHKERRQ(ierr);
    ierr = PetscCalloc3(numCells,&sperm,numValues,&vsize,numValues+1,&voff);CHKERRQ(ierr);
    for (v = 0; v < numValues; ++v) {
      ierr = DMLabelGetStratumSize(label, values[v], &vsize[v]);CHKERRQ(ierr);
      if (v < numValues-1) voff[v+2] += vsize[v] + voff[v+1];
      numPoints += vsize[v];
    }
    if (numPoints != numCells) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Label only covers %D cells < %D total", numPoints, numCells);
    for (c = 0; c < numCells; ++c) {
      const PetscInt oldc = cperm[c];
      PetscInt       val, vloc;

      ierr = DMLabelGetValue(label, oldc, &val);CHKERRQ(ierr);
      if (val == -1) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Cell %D not present in label", oldc);
      ierr = PetscFindInt(val, numValues, values, &vloc);CHKERRQ(ierr);
      if (vloc < 0) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Value %D not present in label", val);
      sperm[voff[vloc+1]++] = oldc;
    }
    for (v = 0; v < numValues; ++v) {
      if (voff[v+1] - voff[v] != vsize[v]) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_PLIB, "Number of %D values found is %D != %D", values[v], voff[v+1] - voff[v], vsize[v]);
    }
    ierr = ISRestoreIndices(valueIS, &values);CHKERRQ(ierr);
    ierr = ISDestroy(&valueIS);CHKERRQ(ierr);
    ierr = PetscMemcpy(cperm, sperm, numCells * sizeof(PetscInt));CHKERRQ(ierr);
    ierr = PetscFree3(sperm, vsize, voff);CHKERRQ(ierr);
  }
  /* Construct closure */
  ierr = DMPlexCreateOrderingClosure_Static(dm, numCells, cperm, &clperm, &invclperm);CHKERRQ(ierr);
  ierr = PetscFree3(cperm,mask,xls);CHKERRQ(ierr);
  ierr = PetscFree(clperm);CHKERRQ(ierr);
  /* Invert permutation */
  ierr = DMPlexGetChart(dm, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject) dm), pEnd-pStart, invclperm, PETSC_OWN_POINTER, perm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
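A hedged usage sketch, not from the original listing, assuming an already-initialized PETSc program and an existing DMPlex dm: an RCM point permutation is computed and then applied with DMPlexPermute(), one possible consumer of the ordering.

  IS perm;
  DM dmPerm = NULL;

  ierr = DMPlexGetOrdering(dm,MATORDERINGRCM,NULL,&perm);CHKERRQ(ierr);
  ierr = DMPlexPermute(dm,perm,&dmPerm);CHKERRQ(ierr);  /* new DM with reordered points */
  ierr = ISDestroy(&perm);CHKERRQ(ierr);
  ierr = DMDestroy(&dmPerm);CHKERRQ(ierr);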
Example 24
static PetscErrorCode DMPlexVTKWriteAll_ASCII(DM dm, PetscViewer viewer)
{
  MPI_Comm                 comm;
  PetscViewer_VTK          *vtk = (PetscViewer_VTK*) viewer->data;
  FILE                     *fp;
  PetscViewerVTKObjectLink link;
  PetscSection             coordSection, globalCoordSection;
  PetscLayout              vLayout;
  Vec                      coordinates;
  PetscReal                lengthScale;
  PetscInt                 vMax, totVertices, totCells;
  PetscBool                hasPoint = PETSC_FALSE, hasCell = PETSC_FALSE, writePartition = PETSC_FALSE;
  PetscErrorCode           ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  ierr = PetscFOpen(comm, vtk->filename, "wb", &fp);CHKERRQ(ierr);
  ierr = PetscFPrintf(comm, fp, "# vtk DataFile Version 2.0\n");CHKERRQ(ierr);
  ierr = PetscFPrintf(comm, fp, "Simplicial Mesh Example\n");CHKERRQ(ierr);
  ierr = PetscFPrintf(comm, fp, "ASCII\n");CHKERRQ(ierr);
  ierr = PetscFPrintf(comm, fp, "DATASET UNSTRUCTURED_GRID\n");CHKERRQ(ierr);
  /* Vertices */
  ierr = DMPlexGetScale(dm, PETSC_UNIT_LENGTH, &lengthScale);CHKERRQ(ierr);
  ierr = DMGetCoordinateSection(dm, &coordSection);CHKERRQ(ierr);
  ierr = PetscSectionCreateGlobalSection(coordSection, dm->sf, PETSC_FALSE, PETSC_FALSE, &globalCoordSection);CHKERRQ(ierr);
  ierr = DMGetCoordinatesLocal(dm, &coordinates);CHKERRQ(ierr);
  ierr = DMPlexGetHybridBounds(dm, NULL, NULL, NULL, &vMax);CHKERRQ(ierr);
  if (vMax >= 0) {
    PetscInt pStart, pEnd, p, localSize = 0;

    ierr = PetscSectionGetChart(globalCoordSection, &pStart, &pEnd);CHKERRQ(ierr);
    pEnd = PetscMin(pEnd, vMax);
    for (p = pStart; p < pEnd; ++p) {
      PetscInt dof;

      ierr = PetscSectionGetDof(globalCoordSection, p, &dof);CHKERRQ(ierr);
      if (dof > 0) ++localSize;
    }
    ierr = PetscLayoutCreate(PetscObjectComm((PetscObject)dm), &vLayout);CHKERRQ(ierr);
    ierr = PetscLayoutSetLocalSize(vLayout, localSize);CHKERRQ(ierr);
    ierr = PetscLayoutSetBlockSize(vLayout, 1);CHKERRQ(ierr);
    ierr = PetscLayoutSetUp(vLayout);CHKERRQ(ierr);
  } else {
    ierr = PetscSectionGetPointLayout(PetscObjectComm((PetscObject)dm), globalCoordSection, &vLayout);CHKERRQ(ierr);
  }
  ierr = PetscLayoutGetSize(vLayout, &totVertices);CHKERRQ(ierr);
  ierr = PetscFPrintf(comm, fp, "POINTS %d double\n", totVertices);CHKERRQ(ierr);
  ierr = DMPlexVTKWriteSection_ASCII(dm, coordSection, globalCoordSection, coordinates, fp, 3, PETSC_DETERMINE, lengthScale);CHKERRQ(ierr);
  /* Cells */
  ierr = DMPlexVTKWriteCells_ASCII(dm, fp, &totCells);CHKERRQ(ierr);
  /* Vertex fields */
  for (link = vtk->link; link; link = link->next) {
    if ((link->ft == PETSC_VTK_POINT_FIELD) || (link->ft == PETSC_VTK_POINT_VECTOR_FIELD)) hasPoint = PETSC_TRUE;
    if ((link->ft == PETSC_VTK_CELL_FIELD)  || (link->ft == PETSC_VTK_CELL_VECTOR_FIELD))  hasCell  = PETSC_TRUE;
  }
  if (hasPoint) {
    ierr = PetscFPrintf(comm, fp, "POINT_DATA %d\n", totVertices);CHKERRQ(ierr);
    for (link = vtk->link; link; link = link->next) {
      Vec          X = (Vec) link->vec;
      DM           dmX;
      PetscSection section, globalSection, newSection = NULL;
      const char   *name;
      PetscInt     enforceDof = PETSC_DETERMINE;

      if ((link->ft != PETSC_VTK_POINT_FIELD) && (link->ft != PETSC_VTK_POINT_VECTOR_FIELD)) continue;
      if (link->ft == PETSC_VTK_POINT_VECTOR_FIELD) enforceDof = 3;
      ierr = PetscObjectGetName(link->vec, &name);CHKERRQ(ierr);
      ierr = VecGetDM(X, &dmX);CHKERRQ(ierr);
      if (dmX) {
        DMLabel  subpointMap, subpointMapX;
        PetscInt dim, dimX, pStart, pEnd, qStart, qEnd;

        ierr = DMGetDefaultSection(dmX, &section);CHKERRQ(ierr);
        /* Here is where we check whether dmX is a submesh of dm */
        ierr = DMGetDimension(dm,  &dim);CHKERRQ(ierr);
        ierr = DMGetDimension(dmX, &dimX);CHKERRQ(ierr);
        ierr = DMPlexGetChart(dm,  &pStart, &pEnd);CHKERRQ(ierr);
        ierr = DMPlexGetChart(dmX, &qStart, &qEnd);CHKERRQ(ierr);
        ierr = DMPlexGetSubpointMap(dm,  &subpointMap);CHKERRQ(ierr);
        ierr = DMPlexGetSubpointMap(dmX, &subpointMapX);CHKERRQ(ierr);
        if (((dim != dimX) || ((pEnd-pStart) < (qEnd-qStart))) && subpointMap && !subpointMapX) {
          const PetscInt *ind = NULL;
          IS              subpointIS;
          PetscInt        n = 0, q;

          ierr = PetscSectionGetChart(section, &qStart, &qEnd);CHKERRQ(ierr);
          ierr = DMPlexCreateSubpointIS(dm, &subpointIS);CHKERRQ(ierr);
          if (subpointIS) {
            ierr = ISGetLocalSize(subpointIS, &n);CHKERRQ(ierr);
            ierr = ISGetIndices(subpointIS, &ind);CHKERRQ(ierr);
          }
          ierr = PetscSectionCreate(comm, &newSection);CHKERRQ(ierr);
          ierr = PetscSectionSetChart(newSection, pStart, pEnd);CHKERRQ(ierr);
          for (q = qStart; q < qEnd; ++q) {
            PetscInt dof, off, p;

            ierr = PetscSectionGetDof(section, q, &dof);CHKERRQ(ierr);
            if (dof) {
              ierr = PetscFindInt(q, n, ind, &p);CHKERRQ(ierr);
              if (p >= pStart) {
                ierr = PetscSectionSetDof(newSection, p, dof);CHKERRQ(ierr);
                ierr = PetscSectionGetOffset(section, q, &off);CHKERRQ(ierr);
                ierr = PetscSectionSetOffset(newSection, p, off);CHKERRQ(ierr);
              }
            }
          }
          if (subpointIS) {
            ierr = ISRestoreIndices(subpointIS, &ind);CHKERRQ(ierr);
            ierr = ISDestroy(&subpointIS);CHKERRQ(ierr);
          }
          /* No need to setup section */
          section = newSection;
        }
      } else {
        ierr = PetscObjectQuery(link->vec, "section", (PetscObject*) &section);CHKERRQ(ierr);
        if (!section) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONG, "Vector %s had no PetscSection composed with it", name);
      }
      if (!section) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONG, "Vector %s had no PetscSection composed with it", name);
      ierr = PetscSectionCreateGlobalSection(section, dm->sf, PETSC_FALSE, PETSC_FALSE, &globalSection);CHKERRQ(ierr);
      ierr = DMPlexVTKWriteField_ASCII(dm, section, globalSection, X, name, fp, enforceDof, PETSC_DETERMINE, 1.0);CHKERRQ(ierr);
      ierr = PetscSectionDestroy(&globalSection);CHKERRQ(ierr);
      if (newSection) {ierr = PetscSectionDestroy(&newSection);CHKERRQ(ierr);}
    }
  }
  /* Cell Fields */
  ierr = PetscOptionsGetBool(((PetscObject) dm)->prefix, "-dm_view_partition", &writePartition, NULL);CHKERRQ(ierr);
  if (hasCell || writePartition) {
    ierr = PetscFPrintf(comm, fp, "CELL_DATA %d\n", totCells);CHKERRQ(ierr);
    for (link = vtk->link; link; link = link->next) {
      Vec          X = (Vec) link->vec;
      DM           dmX;
      PetscSection section, globalSection;
      const char   *name;
      PetscInt     enforceDof = PETSC_DETERMINE;

      if ((link->ft != PETSC_VTK_CELL_FIELD) && (link->ft != PETSC_VTK_CELL_VECTOR_FIELD)) continue;
      if (link->ft == PETSC_VTK_CELL_VECTOR_FIELD) enforceDof = 3;
      ierr = PetscObjectGetName(link->vec, &name);CHKERRQ(ierr);
      ierr = VecGetDM(X, &dmX);CHKERRQ(ierr);
      if (dmX) {
        ierr = DMGetDefaultSection(dmX, &section);CHKERRQ(ierr);
      } else {
        PetscContainer c;

        ierr = PetscObjectQuery(link->vec, "section", (PetscObject*) &c);CHKERRQ(ierr);
        if (!c) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONG, "Vector %s had no PetscSection composed with it", name);
        ierr = PetscContainerGetPointer(c, (void**) &section);CHKERRQ(ierr);
      }
      if (!section) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONG, "Vector %s had no PetscSection composed with it", name);
      ierr = PetscSectionCreateGlobalSection(section, dm->sf, PETSC_FALSE, PETSC_FALSE, &globalSection);CHKERRQ(ierr);
      ierr = DMPlexVTKWriteField_ASCII(dm, section, globalSection, X, name, fp, enforceDof, PETSC_DETERMINE, 1.0);CHKERRQ(ierr);
      ierr = PetscSectionDestroy(&globalSection);CHKERRQ(ierr);
    }
    if (writePartition) {
      ierr = PetscFPrintf(comm, fp, "SCALARS partition int 1\n");CHKERRQ(ierr);
      ierr = PetscFPrintf(comm, fp, "LOOKUP_TABLE default\n");CHKERRQ(ierr);
      ierr = DMPlexVTKWritePartition_ASCII(dm, fp);CHKERRQ(ierr);
    }
  }
  /* Cleanup */
  ierr = PetscSectionDestroy(&globalCoordSection);CHKERRQ(ierr);
  ierr = PetscLayoutDestroy(&vLayout);CHKERRQ(ierr);
  ierr = PetscFClose(comm, fp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
Example 25
PETSC_EXTERN PetscErrorCode AOCreate_MemoryScalable(AO ao)
{
    PetscErrorCode    ierr;
    IS                isapp=ao->isapp,ispetsc=ao->ispetsc;
    const PetscInt    *mypetsc,*myapp;
    PetscInt          napp,n_local,N,i,start,*petsc,*lens,*disp;
    MPI_Comm          comm;
    AO_MemoryScalable *aomems;
    PetscLayout       map;
    PetscMPIInt       size,rank;

    PetscFunctionBegin;
    /* create special struct aomems */
    ierr     = PetscNewLog(ao, AO_MemoryScalable, &aomems);
    CHKERRQ(ierr);
    ao->data = (void*) aomems;
    ierr     = PetscMemcpy(ao->ops,&AOOps_MemoryScalable,sizeof(struct _AOOps));
    CHKERRQ(ierr);
    ierr     = PetscObjectChangeTypeName((PetscObject)ao,AOMEMORYSCALABLE);
    CHKERRQ(ierr);

    /* transmit all local lengths of isapp to all processors */
    ierr = PetscObjectGetComm((PetscObject)isapp,&comm);
    CHKERRQ(ierr);
    ierr = MPI_Comm_size(comm, &size);
    CHKERRQ(ierr);
    ierr = MPI_Comm_rank(comm, &rank);
    CHKERRQ(ierr);
    ierr = PetscMalloc2(size,PetscInt,&lens,size,PetscInt,&disp);
    CHKERRQ(ierr);
    ierr = ISGetLocalSize(isapp,&napp);
    CHKERRQ(ierr);
    ierr = MPI_Allgather(&napp, 1, MPIU_INT, lens, 1, MPIU_INT, comm);
    CHKERRQ(ierr);

    N = 0;
    for (i = 0; i < size; i++) {
        disp[i] = N;
        N      += lens[i];
    }

    /* If ispetsc is 0 then use "natural" numbering */
    if (napp) {
        if (!ispetsc) {
            start = disp[rank];
            ierr  = PetscMalloc((napp+1) * sizeof(PetscInt), &petsc);
            CHKERRQ(ierr);
            for (i=0; i<napp; i++) petsc[i] = start + i;
        } else {
            ierr  = ISGetIndices(ispetsc,&mypetsc);
            CHKERRQ(ierr);
            petsc = (PetscInt*)mypetsc;
        }
    }

    /* create a map with global size N - used to determine the local sizes of ao - shall we use local napp instead of N? */
    ierr    = PetscLayoutCreate(comm,&map);
    CHKERRQ(ierr);
    map->bs = 1;
    map->N  = N;
    ierr    = PetscLayoutSetUp(map);
    CHKERRQ(ierr);

    ao->N       = N;
    ao->n       = map->n;
    aomems->map = map;

    /* create distributed indices app_loc: petsc->app and petsc_loc: app->petsc */
    n_local = map->n;
    ierr    = PetscMalloc2(n_local,PetscInt, &aomems->app_loc,n_local,PetscInt,&aomems->petsc_loc);
    CHKERRQ(ierr);
    ierr    = PetscLogObjectMemory(ao,2*n_local*sizeof(PetscInt));
    CHKERRQ(ierr);
    ierr    = PetscMemzero(aomems->app_loc,n_local*sizeof(PetscInt));
    CHKERRQ(ierr);
    ierr    = PetscMemzero(aomems->petsc_loc,n_local*sizeof(PetscInt));
    CHKERRQ(ierr);
    ierr    = ISGetIndices(isapp,&myapp);
    CHKERRQ(ierr);

    ierr = AOCreateMemoryScalable_private(comm,napp,petsc,myapp,ao,aomems->app_loc);
    CHKERRQ(ierr);
    ierr = AOCreateMemoryScalable_private(comm,napp,myapp,petsc,ao,aomems->petsc_loc);
    CHKERRQ(ierr);

    ierr = ISRestoreIndices(isapp,&myapp);
    CHKERRQ(ierr);
    if (napp) {
        if (ispetsc) {
            ierr = ISRestoreIndices(ispetsc,&mypetsc);
            CHKERRQ(ierr);
        } else {
            ierr = PetscFree(petsc);
            CHKERRQ(ierr);
        }
    }
    ierr = PetscFree2(lens,disp);
    CHKERRQ(ierr);
    PetscFunctionReturn(0);
}
Example 26
int main(int argc,char **args)
{
  Mat            Amat;
  PetscErrorCode ierr;
  SNES           snes;
  KSP            ksp;
  MPI_Comm       comm;
  PetscMPIInt    npe,rank;
  PetscLogStage  stage[7];
  PetscBool      test_nonzero_cols=PETSC_FALSE,use_nearnullspace=PETSC_TRUE;
  Vec            xx,bb;
  PetscInt       iter,i,N,dim=3,cells[3]={1,1,1},max_conv_its,local_sizes[7],run_type=1;
  DM             dm,distdm,basedm;
  PetscBool      flg;
  char           convType[256];
  PetscReal      Lx,mdisp[10],err[10];
  const char * const options[10] = {"-ex56_dm_refine 0",
                                    "-ex56_dm_refine 1",
                                    "-ex56_dm_refine 2",
                                    "-ex56_dm_refine 3",
                                    "-ex56_dm_refine 4",
                                    "-ex56_dm_refine 5",
                                    "-ex56_dm_refine 6",
                                    "-ex56_dm_refine 7",
                                    "-ex56_dm_refine 8",
                                    "-ex56_dm_refine 9"};
  PetscFunctionBeginUser;
  ierr = PetscInitialize(&argc,&args,(char*)0,help);if (ierr) return ierr;
  comm = PETSC_COMM_WORLD;
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &npe);CHKERRQ(ierr);
  /* options */
  ierr = PetscOptionsBegin(comm,NULL,"3D bilinear Q1 elasticity options","");CHKERRQ(ierr);
  {
    i = 3;
    ierr = PetscOptionsIntArray("-cells", "Number of cells in each dimension", "ex56.c", cells, &i, NULL);CHKERRQ(ierr);

    Lx = 1.; /* or ne for rod */
    max_conv_its = 3;
    ierr = PetscOptionsInt("-max_conv_its","Number of iterations in convergence study","",max_conv_its,&max_conv_its,NULL);CHKERRQ(ierr);
    if (max_conv_its<=0 || max_conv_its>7) SETERRQ1(PETSC_COMM_WORLD, PETSC_ERR_USER, "Bad number of iterations for convergence test (%D)",max_conv_its);
    ierr = PetscOptionsReal("-lx","Length of domain","",Lx,&Lx,NULL);CHKERRQ(ierr);
    ierr = PetscOptionsReal("-alpha","material coefficient inside circle","",s_soft_alpha,&s_soft_alpha,NULL);CHKERRQ(ierr);
    ierr = PetscOptionsBool("-test_nonzero_cols","nonzero test","",test_nonzero_cols,&test_nonzero_cols,NULL);CHKERRQ(ierr);
    ierr = PetscOptionsBool("-use_mat_nearnullspace","MatNearNullSpace API test","",use_nearnullspace,&use_nearnullspace,NULL);CHKERRQ(ierr);
    ierr = PetscOptionsInt("-run_type","0: twisting load on cantalever, 1: 3rd order accurate convergence test","",run_type,&run_type,NULL);CHKERRQ(ierr);
    i = 3;
    ierr = PetscOptionsInt("-mat_block_size","","",i,&i,&flg);CHKERRQ(ierr);
    if (!flg || i!=3) SETERRQ2(PETSC_COMM_WORLD, PETSC_ERR_USER, "'-mat_block_size 3' must be set (%D) and = 3 (%D)",flg,flg? i : 3);
  }
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  ierr = PetscLogStageRegister("Mesh Setup", &stage[6]);CHKERRQ(ierr);
  ierr = PetscLogStageRegister("1st Setup", &stage[0]);CHKERRQ(ierr);
  ierr = PetscLogStageRegister("1st Solve", &stage[1]);CHKERRQ(ierr);

  /* create DM, Plex calls DMSetup */
  ierr = PetscLogStagePush(stage[6]);CHKERRQ(ierr);
  ierr = DMPlexCreateHexBoxMesh(comm, dim, cells, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, &dm);CHKERRQ(ierr);
  {
    DMLabel         label;
    IS              is;
    ierr = DMCreateLabel(dm, "boundary");CHKERRQ(ierr);
    ierr = DMGetLabel(dm, "boundary", &label);CHKERRQ(ierr);
    ierr = DMPlexMarkBoundaryFaces(dm, label);CHKERRQ(ierr);
    if (run_type==0) {
      ierr = DMGetStratumIS(dm, "boundary", 1,  &is);CHKERRQ(ierr);
      ierr = DMCreateLabel(dm,"Faces");CHKERRQ(ierr);
      if (is) {
        PetscInt        d, f, Nf;
        const PetscInt *faces;
        PetscInt        csize;
        PetscSection    cs;
        Vec             coordinates;
        DM              cdm;
        ierr = ISGetLocalSize(is, &Nf);CHKERRQ(ierr);
        ierr = ISGetIndices(is, &faces);CHKERRQ(ierr);
        ierr = DMGetCoordinatesLocal(dm, &coordinates);CHKERRQ(ierr);
        ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
        ierr = DMGetDefaultSection(cdm, &cs);CHKERRQ(ierr);
        /* Check for each boundary face if any component of its centroid is either 0.0 or 1.0 */
        for (f = 0; f < Nf; ++f) {
          PetscReal   faceCoord;
          PetscInt    b,v;
          PetscScalar *coords = NULL;
          PetscInt    Nv;
          ierr = DMPlexVecGetClosure(cdm, cs, coordinates, faces[f], &csize, &coords);CHKERRQ(ierr);
          Nv   = csize/dim; /* Calculate mean coordinate vector */
          for (d = 0; d < dim; ++d) {
            faceCoord = 0.0;
            for (v = 0; v < Nv; ++v) faceCoord += PetscRealPart(coords[v*dim+d]);
            faceCoord /= Nv;
            for (b = 0; b < 2; ++b) {
              if (PetscAbs(faceCoord - b) < PETSC_SMALL) { /* domain has not been rescaled yet, still [0,1]^3 */
                ierr = DMSetLabelValue(dm, "Faces", faces[f], d*2+b+1);CHKERRQ(ierr);
              }
            }
          }
          ierr = DMPlexVecRestoreClosure(cdm, cs, coordinates, faces[f], &csize, &coords);CHKERRQ(ierr);
        }
        ierr = ISRestoreIndices(is, &faces);CHKERRQ(ierr);
      }
      ierr = ISDestroy(&is);CHKERRQ(ierr);
      ierr = DMGetLabel(dm, "Faces", &label);CHKERRQ(ierr);
      ierr = DMPlexLabelComplete(dm, label);CHKERRQ(ierr);
    }
  }
  {
    PetscInt dimEmbed, i;
    PetscInt nCoords;
    PetscScalar *coords,bounds[] = {0,Lx,-.5,.5,-.5,.5,}; /* x_min,x_max,y_min,y_max,z_min,z_max */
    Vec coordinates;
    if (run_type==1) {
      for (i = 0; i < 2*dim; i++) bounds[i] = (i%2) ? 1 : 0;
    }
    ierr = DMGetCoordinatesLocal(dm,&coordinates);CHKERRQ(ierr);
    ierr = DMGetCoordinateDim(dm,&dimEmbed);CHKERRQ(ierr);
    if (dimEmbed != dim) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"dimEmbed != dim %D",dimEmbed);CHKERRQ(ierr);
    ierr = VecGetLocalSize(coordinates,&nCoords);CHKERRQ(ierr);
    if (nCoords % dimEmbed) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Coordinate vector the wrong size");CHKERRQ(ierr);
    ierr = VecGetArray(coordinates,&coords);CHKERRQ(ierr);
    for (i = 0; i < nCoords; i += dimEmbed) {
      PetscInt j;
      PetscScalar *coord = &coords[i];
      for (j = 0; j < dimEmbed; j++) {
        coord[j] = bounds[2 * j] + coord[j] * (bounds[2 * j + 1] - bounds[2 * j]);
      }
    }
    ierr = VecRestoreArray(coordinates,&coords);CHKERRQ(ierr);
    ierr = DMSetCoordinatesLocal(dm,coordinates);CHKERRQ(ierr);
  }

  /* convert to p4est, and distribute */

  ierr = PetscOptionsBegin(comm, "", "Mesh conversion options", "DMPLEX");CHKERRQ(ierr);
  ierr = PetscOptionsFList("-dm_type","Convert DMPlex to another format (should not be Plex!)","ex56.c",DMList,DMPLEX,convType,256,&flg);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();
  if (flg) {
    DM newdm;
    ierr = DMConvert(dm,convType,&newdm);CHKERRQ(ierr);
    if (newdm) {
      const char *prefix;
      PetscBool isForest;
      ierr = PetscObjectGetOptionsPrefix((PetscObject)dm,&prefix);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)newdm,prefix);CHKERRQ(ierr);
      ierr = DMIsForest(newdm,&isForest);CHKERRQ(ierr);
      if (isForest) {
      } else SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_USER, "Converted to non Forest?");
      ierr = DMDestroy(&dm);CHKERRQ(ierr);
      dm   = newdm;
    } else SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_USER, "Convert failed?");
  } else {
    /* Plex Distribute mesh over processes */
    ierr = DMPlexDistribute(dm, 0, NULL, &distdm);CHKERRQ(ierr);
    if (distdm) {
      const char *prefix;
      ierr = PetscObjectGetOptionsPrefix((PetscObject)dm,&prefix);CHKERRQ(ierr);
      ierr = PetscObjectSetOptionsPrefix((PetscObject)distdm,prefix);CHKERRQ(ierr);
      ierr = DMDestroy(&dm);CHKERRQ(ierr);
      dm   = distdm;
    }
  }
  ierr = PetscLogStagePop();CHKERRQ(ierr);
  basedm = dm; dm = NULL;

  for (iter=0 ; iter<max_conv_its ; iter++) {
    ierr = PetscLogStagePush(stage[6]);CHKERRQ(ierr);
    /* make new DM */
    ierr = DMClone(basedm, &dm);CHKERRQ(ierr);
    ierr = PetscObjectSetOptionsPrefix((PetscObject) dm, "ex56_");CHKERRQ(ierr);
    ierr = PetscObjectSetName( (PetscObject)dm,"Mesh");CHKERRQ(ierr);
    ierr = PetscOptionsClearValue(NULL,"-ex56_dm_refine");CHKERRQ(ierr);
    ierr = PetscOptionsInsertString(NULL,options[iter]);CHKERRQ(ierr);
    ierr = DMSetFromOptions(dm);CHKERRQ(ierr); /* refinement done here in Plex, p4est */
    /* snes */
    ierr = SNESCreate(comm, &snes);CHKERRQ(ierr);
    ierr = SNESSetDM(snes, dm);CHKERRQ(ierr);
    /* fem */
    {
      const PetscInt Ncomp = dim;
      const PetscInt components[] = {0,1,2};
      const PetscInt Nfid = 1, Npid = 1;
      const PetscInt fid[] = {1}; /* The fixed faces (x=0) */
      const PetscInt pid[] = {2}; /* The faces with loading (x=L_x) */
      PetscFE         fe;
      PetscDS         prob;
      DM              cdm = dm;

      ierr = PetscFECreateDefault(dm, dim, dim, PETSC_FALSE, NULL, PETSC_DECIDE, &fe);CHKERRQ(ierr); /* elasticity */
      ierr = PetscObjectSetName((PetscObject) fe, "deformation");CHKERRQ(ierr);
      /* FEM prob */
      ierr = DMGetDS(dm, &prob);CHKERRQ(ierr);
      ierr = PetscDSSetDiscretization(prob, 0, (PetscObject) fe);CHKERRQ(ierr);
      /* setup problem */
      if (run_type==1) {
        ierr = PetscDSSetJacobian(prob, 0, 0, NULL, NULL, NULL, g3_uu_3d);CHKERRQ(ierr);
        ierr = PetscDSSetResidual(prob, 0, f0_u_x4, f1_u_3d);CHKERRQ(ierr);
      } else {
        ierr = PetscDSSetJacobian(prob, 0, 0, NULL, NULL, NULL, g3_uu_3d_alpha);CHKERRQ(ierr);
        ierr = PetscDSSetResidual(prob, 0, f0_u, f1_u_3d_alpha);CHKERRQ(ierr);
        ierr = PetscDSSetBdResidual(prob, 0, f0_bd_u_3d, f1_bd_u);CHKERRQ(ierr);
      }
      /* bcs */
      if (run_type==1) {
        PetscInt id = 1;
        ierr = DMAddBoundary(dm, DM_BC_ESSENTIAL, "wall", "boundary", 0, 0, NULL, (void (*)()) zero, 1, &id, NULL);CHKERRQ(ierr);
      } else {
        ierr = PetscDSAddBoundary(prob, DM_BC_ESSENTIAL, "fixed", "Faces", 0, Ncomp, components, (void (*)()) zero, Nfid, fid, NULL);CHKERRQ(ierr);
        ierr = PetscDSAddBoundary(prob, DM_BC_NATURAL, "traction", "Faces", 0, Ncomp, components, NULL, Npid, pid, NULL);CHKERRQ(ierr);
      }
      while (cdm) {
        ierr = DMSetDS(cdm,prob);CHKERRQ(ierr);
        ierr = DMGetCoarseDM(cdm, &cdm);CHKERRQ(ierr);
      }
      ierr = PetscFEDestroy(&fe);CHKERRQ(ierr);
    }
    /* vecs & mat */
    ierr = DMCreateGlobalVector(dm,&xx);CHKERRQ(ierr);
    ierr = VecDuplicate(xx, &bb);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) bb, "b");CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) xx, "u");CHKERRQ(ierr);
    ierr = DMCreateMatrix(dm, &Amat);CHKERRQ(ierr);
    ierr = VecGetSize(bb,&N);CHKERRQ(ierr);
    local_sizes[iter] = N;
    ierr = PetscPrintf(PETSC_COMM_WORLD,"[%d]%s %d global equations, %d vertices\n",rank,PETSC_FUNCTION_NAME,N,N/dim);CHKERRQ(ierr);
    if (use_nearnullspace && N/dim > 1) {
      /* Set up the near null space (a.k.a. rigid body modes) that will be used by the multigrid preconditioner */
      DM           subdm;
      MatNullSpace nearNullSpace;
      PetscInt     fields = 0;
      PetscObject  deformation;
      ierr = DMCreateSubDM(dm, 1, &fields, NULL, &subdm);CHKERRQ(ierr);
      ierr = DMPlexCreateRigidBody(subdm, &nearNullSpace);CHKERRQ(ierr);
      ierr = DMGetField(dm, 0, &deformation);CHKERRQ(ierr);
      ierr = PetscObjectCompose(deformation, "nearnullspace", (PetscObject) nearNullSpace);CHKERRQ(ierr);
      ierr = DMDestroy(&subdm);CHKERRQ(ierr);
      ierr = MatNullSpaceDestroy(&nearNullSpace);CHKERRQ(ierr); /* created by DM and destroyed by Mat */
    }
    ierr = DMPlexSetSNESLocalFEM(dm,NULL,NULL,NULL);CHKERRQ(ierr);
    ierr = SNESSetJacobian(snes, Amat, Amat, NULL, NULL);CHKERRQ(ierr);
    ierr = SNESSetFromOptions(snes);CHKERRQ(ierr);
    ierr = DMSetUp(dm);CHKERRQ(ierr);
    ierr = PetscLogStagePop();CHKERRQ(ierr);
    ierr = PetscLogStagePush(stage[0]);CHKERRQ(ierr);
    /* ksp */
    ierr = SNESGetKSP(snes, &ksp);CHKERRQ(ierr);
    ierr = KSPSetComputeSingularValues(ksp,PETSC_TRUE);CHKERRQ(ierr);
    /* test BCs */
    ierr = VecZeroEntries(xx);CHKERRQ(ierr);
    if (test_nonzero_cols) {
      if (rank==0) ierr = VecSetValue(xx,0,1.0,INSERT_VALUES);CHKERRQ(ierr);
      ierr = VecAssemblyBegin(xx);CHKERRQ(ierr);
      ierr = VecAssemblyEnd(xx);CHKERRQ(ierr);
    }
    ierr = VecZeroEntries(bb);CHKERRQ(ierr);
    ierr = VecGetSize(bb,&i);CHKERRQ(ierr);
    local_sizes[iter] = i;
    ierr = PetscPrintf(PETSC_COMM_WORLD,"[%d]%s %d equations in vector, %d vertices\n",rank,PETSC_FUNCTION_NAME,i,i/dim);CHKERRQ(ierr);
    /* setup solver, dummy solve to really setup */
    if (0) {
      ierr = KSPSetTolerances(ksp,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,1);CHKERRQ(ierr);
      ierr = SNESSolve(snes, bb, xx);CHKERRQ(ierr);
      ierr = KSPSetTolerances(ksp,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,50);CHKERRQ(ierr);
      ierr = VecZeroEntries(xx);CHKERRQ(ierr);
    }
    ierr = PetscLogStagePop();CHKERRQ(ierr);
    /* solve */
    ierr = PetscLogStagePush(stage[1]);CHKERRQ(ierr);
    ierr = SNESSolve(snes, bb, xx);CHKERRQ(ierr);
    ierr = PetscLogStagePop();CHKERRQ(ierr);
    ierr = VecNorm(xx,NORM_INFINITY,&mdisp[iter]);CHKERRQ(ierr);
    ierr = DMViewFromOptions(dm, NULL, "-dm_view");CHKERRQ(ierr);
    {
      PetscViewer       viewer = NULL;
      PetscViewerFormat fmt;
      ierr = PetscOptionsGetViewer(comm,"ex56_","-vec_view",&viewer,&fmt,&flg);CHKERRQ(ierr);
      if (flg) {
        ierr = PetscViewerPushFormat(viewer,fmt);CHKERRQ(ierr);
        ierr = VecView(xx,viewer);CHKERRQ(ierr);
        ierr = VecView(bb,viewer);CHKERRQ(ierr);
        ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
      }
      ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
    }
    /* Free work space */
    ierr = DMDestroy(&dm);CHKERRQ(ierr);
    ierr = SNESDestroy(&snes);CHKERRQ(ierr);
    ierr = VecDestroy(&xx);CHKERRQ(ierr);
    ierr = VecDestroy(&bb);CHKERRQ(ierr);
    ierr = MatDestroy(&Amat);CHKERRQ(ierr);
  }
  ierr = DMDestroy(&basedm);CHKERRQ(ierr);
  if (run_type==1) {
    err[0] = 59.975208 - mdisp[0]; /* error with what I think is the exact solution */
  } else {
    err[0] = 171.038 - mdisp[0];
  }
  for (iter=1 ; iter<max_conv_its ; iter++) {
    if (run_type==1) {
      err[iter] = 59.975208 - mdisp[iter];
    } else {
      err[iter] = 171.038 - mdisp[iter];
    }
    PetscPrintf(PETSC_COMM_WORLD,"[%d]%s %D) N=%12D, max displ=%9.7e, disp diff=%9.2e, error=%4.3e, rate=%3.2g\n",
                rank,PETSC_FUNCTION_NAME,iter,local_sizes[iter],mdisp[iter],
                mdisp[iter]-mdisp[iter-1],err[iter],log(err[iter-1]/err[iter])/log(2.));
  }

  ierr = PetscFinalize();
  return ierr;
}
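For orientation, the PetscOptions calls in main() above suggest an invocation along the following lines. The option names -cells, -max_conv_its, -run_type, -use_mat_nearnullspace and -mat_block_size are taken from the code; the solver and monitoring options are generic PETSc options added here only for illustration.

/* Hypothetical run of the convergence study (run_type 1) on 4 ranks:

     mpiexec -n 4 ./ex56 -cells 2,2,2 -max_conv_its 3 -run_type 1 \
             -mat_block_size 3 -use_mat_nearnullspace true -snes_monitor

   Note that the code insists on -mat_block_size 3 being set explicitly. */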
Exemplo n.º 27
0
static PetscErrorCode PCBDDCScalingSetUp_Deluxe_Private(PC pc)
{
  PC_BDDC                *pcbddc=(PC_BDDC*)pc->data;
  PCBDDCDeluxeScaling    deluxe_ctx=pcbddc->deluxe_ctx;
  PCBDDCSubSchurs        sub_schurs = pcbddc->sub_schurs;
  PetscScalar            *matdata,*matdata2;
  PetscInt               i,max_subset_size,cum,cum2;
  const PetscInt         *idxs;
  PetscBool              newsetup = PETSC_FALSE;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  if (!sub_schurs) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_PLIB,"Missing PCBDDCSubSchurs");
  if (!sub_schurs->n_subs) PetscFunctionReturn(0);

  /* Allocate arrays for subproblems */
  if (!deluxe_ctx->seq_n) {
    deluxe_ctx->seq_n = sub_schurs->n_subs;
    ierr = PetscCalloc5(deluxe_ctx->seq_n,&deluxe_ctx->seq_scctx,deluxe_ctx->seq_n,&deluxe_ctx->seq_work1,deluxe_ctx->seq_n,&deluxe_ctx->seq_work2,deluxe_ctx->seq_n,&deluxe_ctx->seq_mat,deluxe_ctx->seq_n,&deluxe_ctx->seq_mat_inv_sum);CHKERRQ(ierr);
    newsetup = PETSC_TRUE;
  } else if (deluxe_ctx->seq_n != sub_schurs->n_subs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Number of deluxe subproblems %D is different from the sub_schurs %D",deluxe_ctx->seq_n,sub_schurs->n_subs);

  /* the change of basis is just a reference to sub_schurs->change (if any) */
  deluxe_ctx->change         = sub_schurs->change;
  deluxe_ctx->change_with_qr = sub_schurs->change_with_qr;

  /* Create objects for deluxe */
  max_subset_size = 0;
  for (i=0;i<sub_schurs->n_subs;i++) {
    PetscInt subset_size;
    ierr = ISGetLocalSize(sub_schurs->is_subs[i],&subset_size);CHKERRQ(ierr);
    max_subset_size = PetscMax(subset_size,max_subset_size);
  }
  if (newsetup) {
    ierr = PetscMalloc1(2*max_subset_size,&deluxe_ctx->workspace);CHKERRQ(ierr);
  }
  cum = cum2 = 0;
  ierr = ISGetIndices(sub_schurs->is_Ej_all,&idxs);CHKERRQ(ierr);
  ierr = MatSeqAIJGetArray(sub_schurs->S_Ej_all,&matdata);CHKERRQ(ierr);
  ierr = MatSeqAIJGetArray(sub_schurs->sum_S_Ej_all,&matdata2);CHKERRQ(ierr);
  for (i=0;i<deluxe_ctx->seq_n;i++) {
    PetscInt     subset_size;

    ierr = ISGetLocalSize(sub_schurs->is_subs[i],&subset_size);CHKERRQ(ierr);
    if (newsetup) {
      IS  sub;
      /* work vectors */
      ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,subset_size,deluxe_ctx->workspace,&deluxe_ctx->seq_work1[i]);CHKERRQ(ierr);
      ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,subset_size,deluxe_ctx->workspace+subset_size,&deluxe_ctx->seq_work2[i]);CHKERRQ(ierr);

      /* scatters */
      ierr = ISCreateGeneral(PETSC_COMM_SELF,subset_size,idxs+cum,PETSC_COPY_VALUES,&sub);CHKERRQ(ierr);
      ierr = VecScatterCreate(pcbddc->work_scaling,sub,deluxe_ctx->seq_work1[i],NULL,&deluxe_ctx->seq_scctx[i]);CHKERRQ(ierr);
      ierr = ISDestroy(&sub);CHKERRQ(ierr);
    }

    /* S_E_j */
    ierr = MatDestroy(&deluxe_ctx->seq_mat[i]);CHKERRQ(ierr);
    ierr = MatCreateSeqDense(PETSC_COMM_SELF,subset_size,subset_size,matdata+cum2,&deluxe_ctx->seq_mat[i]);CHKERRQ(ierr);

    /* \sum_k S^k_E_j */
    ierr = MatDestroy(&deluxe_ctx->seq_mat_inv_sum[i]);CHKERRQ(ierr);
    ierr = MatCreateSeqDense(PETSC_COMM_SELF,subset_size,subset_size,matdata2+cum2,&deluxe_ctx->seq_mat_inv_sum[i]);CHKERRQ(ierr);
    ierr = MatSetOption(deluxe_ctx->seq_mat_inv_sum[i],MAT_SPD,sub_schurs->is_posdef);CHKERRQ(ierr);
    ierr = MatSetOption(deluxe_ctx->seq_mat_inv_sum[i],MAT_HERMITIAN,sub_schurs->is_hermitian);CHKERRQ(ierr);
    if (sub_schurs->is_hermitian) {
      ierr = MatCholeskyFactor(deluxe_ctx->seq_mat_inv_sum[i],NULL,NULL);CHKERRQ(ierr);
    } else {
      ierr = MatLUFactor(deluxe_ctx->seq_mat_inv_sum[i],NULL,NULL,NULL);CHKERRQ(ierr);
    }
    if (pcbddc->deluxe_singlemat) {
      Mat X,Y;
      if (!sub_schurs->is_hermitian) {
        ierr = MatTranspose(deluxe_ctx->seq_mat[i],MAT_INITIAL_MATRIX,&X);CHKERRQ(ierr);
      } else {
        ierr = PetscObjectReference((PetscObject)deluxe_ctx->seq_mat[i]);CHKERRQ(ierr);
        X    = deluxe_ctx->seq_mat[i];
      }
      ierr = MatDuplicate(X,MAT_DO_NOT_COPY_VALUES,&Y);CHKERRQ(ierr);
      if (!sub_schurs->is_hermitian) {
        ierr = PCBDDCMatTransposeMatSolve_SeqDense(deluxe_ctx->seq_mat_inv_sum[i],X,Y);CHKERRQ(ierr);
      } else {
        ierr = MatMatSolve(deluxe_ctx->seq_mat_inv_sum[i],X,Y);CHKERRQ(ierr);
      }

      ierr = MatDestroy(&deluxe_ctx->seq_mat_inv_sum[i]);CHKERRQ(ierr);
      ierr = MatDestroy(&deluxe_ctx->seq_mat[i]);CHKERRQ(ierr);
      ierr = MatDestroy(&X);CHKERRQ(ierr);
      if (deluxe_ctx->change) {
        Mat C,CY;

        if (!deluxe_ctx->change_with_qr) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only QR based change of basis");
        ierr = KSPGetOperators(deluxe_ctx->change[i],&C,NULL);CHKERRQ(ierr);
        ierr = MatMatMult(C,Y,MAT_INITIAL_MATRIX,PETSC_DEFAULT,&CY);CHKERRQ(ierr);
        ierr = MatMatTransposeMult(CY,C,MAT_REUSE_MATRIX,PETSC_DEFAULT,&Y);CHKERRQ(ierr);
        ierr = MatDestroy(&CY);CHKERRQ(ierr);
      }
      ierr = MatTranspose(Y,MAT_INPLACE_MATRIX,&Y);CHKERRQ(ierr);
      deluxe_ctx->seq_mat[i] = Y;
    }
    cum += subset_size;
    cum2 += subset_size*subset_size;
  }
  ierr = ISRestoreIndices(sub_schurs->is_Ej_all,&idxs);CHKERRQ(ierr);
  ierr = MatSeqAIJRestoreArray(sub_schurs->S_Ej_all,&matdata);CHKERRQ(ierr);
  ierr = MatSeqAIJRestoreArray(sub_schurs->sum_S_Ej_all,&matdata2);CHKERRQ(ierr);
  if (pcbddc->deluxe_singlemat) {
    deluxe_ctx->change         = NULL;
    deluxe_ctx->change_with_qr = PETSC_FALSE;
  }

  if (deluxe_ctx->change && !deluxe_ctx->change_with_qr) {
    for (i=0;i<deluxe_ctx->seq_n;i++) {
      if (newsetup) {
        PC pc;

        ierr = KSPGetPC(deluxe_ctx->change[i],&pc);CHKERRQ(ierr);
        ierr = PCSetType(pc,PCLU);CHKERRQ(ierr);
        ierr = KSPSetFromOptions(deluxe_ctx->change[i]);CHKERRQ(ierr);
      }
      ierr = KSPSetUp(deluxe_ctx->change[i]);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
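This routine is internal to PCBDDC and is reached during PCSetUp() once deluxe scaling has been requested. A minimal, hedged sketch of how a user would get here, assuming the public setter PCBDDCSetUseDeluxeScaling() (equivalently the runtime option -pc_bddc_use_deluxe_scaling):

#include <petscksp.h>

/* Sketch only: request deluxe scaling on a BDDC preconditioner attached to a KSP. */
static PetscErrorCode EnableDeluxeScaling(KSP ksp)
{
  PC             pc;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
  ierr = PCSetType(pc,PCBDDC);CHKERRQ(ierr);
  ierr = PCBDDCSetUseDeluxeScaling(pc,PETSC_TRUE);CHKERRQ(ierr); /* assumed setter; see lead-in above */
  PetscFunctionReturn(0);
}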
Exemplo n.º 28
0
/*
 * Increase the overlap of a sub-matrix across a sub-communicator;
 * the sub-matrix could be a graph or a numerical matrix.
 */
PetscErrorCode  MatIncreaseOverlapSplit_Single(Mat mat,IS *is,PetscInt ov)
{
  PetscInt         i,nindx,*indices_sc,*indices_ov,localsize,*localsizes_sc,localsize_tmp;
  PetscInt         *indices_ov_rd,nroots,nleaves,*localoffsets,*indices_recv,*sources_sc,*sources_sc_rd;
  const PetscInt   *indices;
  PetscMPIInt      srank,ssize,issamecomm,k,grank;
  IS               is_sc,allis_sc,partitioning;
  MPI_Comm         gcomm,dcomm,scomm;
  PetscSF          sf;
  PetscSFNode      *remote;
  Mat              *smat;
  MatPartitioning  part;
  PetscErrorCode   ierr;

  PetscFunctionBegin;
  /* get a sub-communicator before calling the individual MatIncreaseOverlap,
   * since the sub-communicator may be changed
   */
  ierr = PetscObjectGetComm((PetscObject)(*is),&dcomm);CHKERRQ(ierr);
  /*make a copy before the original one is deleted*/
  ierr = PetscCommDuplicate(dcomm,&scomm,NULL);CHKERRQ(ierr);
  /*get a global communicator, where mat should be a global matrix  */
  ierr = PetscObjectGetComm((PetscObject)mat,&gcomm);CHKERRQ(ierr);
  /*increase overlap on each individual subdomain*/
  ierr = (*mat->ops->increaseoverlap)(mat,1,is,ov);CHKERRQ(ierr);
  /*compare communicators */
  ierr = MPI_Comm_compare(gcomm,scomm,&issamecomm);CHKERRQ(ierr);
  /* if the sub-communicator is the same as the global communicator,
   * the user does not want to use a sub-communicator
   */
  if(issamecomm == MPI_IDENT || issamecomm == MPI_CONGRUENT) PetscFunctionReturn(0);
  /* if the sub-communicator is PETSC_COMM_SELF,
   * the user also does not care about the sub-communicator
   */
  ierr = MPI_Comm_compare(scomm,PETSC_COMM_SELF,&issamecomm);CHKERRQ(ierr);
  if(issamecomm == MPI_IDENT || issamecomm == MPI_CONGRUENT){PetscFunctionReturn(0);}
  /*local rank, size in a sub-communicator  */
  ierr = MPI_Comm_rank(scomm,&srank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(scomm,&ssize);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(gcomm,&grank);CHKERRQ(ierr);
  /* create a new IS based on the sub-communicator,
   * since the old IS is often based on PETSC_COMM_SELF
   */
  ierr = ISGetLocalSize(*is,&nindx);CHKERRQ(ierr);
  ierr = PetscCalloc1(nindx,&indices_sc);CHKERRQ(ierr);
  ierr = ISGetIndices(*is,&indices);CHKERRQ(ierr);
  ierr = PetscMemcpy(indices_sc,indices,sizeof(PetscInt)*nindx);CHKERRQ(ierr);
  ierr = ISRestoreIndices(*is,&indices);CHKERRQ(ierr);
  /* we do not need it any more */
  ierr = ISDestroy(is);CHKERRQ(ierr);
  /* create an index set based on the sub-communicator */
  ierr = ISCreateGeneral(scomm,nindx,indices_sc,PETSC_OWN_POINTER,&is_sc);CHKERRQ(ierr);
  /* gather all indices within the sub-communicator */
  ierr = ISAllGather(is_sc,&allis_sc);CHKERRQ(ierr);
  ierr = ISDestroy(&is_sc);CHKERRQ(ierr);
  /* gather local sizes */
  ierr = PetscMalloc1(ssize,&localsizes_sc);CHKERRQ(ierr);
  /*get individual local sizes for all index sets*/
  ierr = MPI_Gather(&nindx,1,MPIU_INT,localsizes_sc,1,MPIU_INT,0,scomm);CHKERRQ(ierr);
  /*only root does these computations */
  if(!srank){
   /*get local size for the big index set*/
   ierr = ISGetLocalSize(allis_sc,&localsize);CHKERRQ(ierr);
   ierr = PetscCalloc2(localsize,&indices_ov,localsize,&sources_sc);CHKERRQ(ierr);
   ierr = PetscCalloc2(localsize,&indices_ov_rd,localsize,&sources_sc_rd);CHKERRQ(ierr);
   ierr = ISGetIndices(allis_sc,&indices);CHKERRQ(ierr);
   ierr = PetscMemcpy(indices_ov,indices,sizeof(PetscInt)*localsize);CHKERRQ(ierr);
   ierr = ISRestoreIndices(allis_sc,&indices);CHKERRQ(ierr);
   /*we do not need it any more */
   ierr = ISDestroy(&allis_sc);CHKERRQ(ierr);
   /*assign corresponding sources */
   localsize_tmp = 0;
   for(k=0; k<ssize; k++){
     for(i=0; i<localsizes_sc[k]; i++){
       sources_sc[localsize_tmp++] = k;
     }
   }
   /*record where indices come from */
   ierr = PetscSortIntWithArray(localsize,indices_ov,sources_sc);CHKERRQ(ierr);
   /*count local sizes for reduced indices */
   ierr = PetscMemzero(localsizes_sc,sizeof(PetscInt)*ssize);CHKERRQ(ierr);
   /* initialize the first entry */
   if(localsize){
     indices_ov_rd[0] = indices_ov[0];
     sources_sc_rd[0] = sources_sc[0];
     localsizes_sc[sources_sc[0]]++;
   }
   localsize_tmp = 1;
   /* remove duplicate integers */
   for(i=1; i<localsize; i++){
     if(indices_ov[i] != indices_ov[i-1]){
       indices_ov_rd[localsize_tmp]   = indices_ov[i];
       sources_sc_rd[localsize_tmp++] = sources_sc[i];
       localsizes_sc[sources_sc[i]]++;
     }
   }
   ierr = PetscFree2(indices_ov,sources_sc);CHKERRQ(ierr);
   ierr = PetscCalloc1(ssize+1,&localoffsets);CHKERRQ(ierr);
   for(k=0; k<ssize; k++){
     localoffsets[k+1] = localoffsets[k] + localsizes_sc[k];
   }
   /* construct a star forest to send data back */
   nleaves = localoffsets[ssize];
   ierr = PetscMemzero(localoffsets,(ssize+1)*sizeof(PetscInt));CHKERRQ(ierr);
   nroots  = localsizes_sc[srank];
   ierr = PetscCalloc1(nleaves,&remote);CHKERRQ(ierr);
   for(i=0; i<nleaves; i++){
     remote[i].rank  = sources_sc_rd[i];
     remote[i].index = localoffsets[sources_sc_rd[i]]++;
   }
   ierr = PetscFree(localoffsets);CHKERRQ(ierr);
  }else{
   ierr = ISDestroy(&allis_sc);CHKERRQ(ierr);
   /*Allocate a 'zero' pointer */
   ierr = PetscCalloc1(0,&remote);CHKERRQ(ierr);
   nleaves = 0;
   indices_ov_rd = 0;
   sources_sc_rd = 0;
  }
  /*scatter sizes to everybody */
  ierr = MPI_Scatter(localsizes_sc,1, MPIU_INT,&nroots,1, MPIU_INT,0,scomm);CHKERRQ(ierr);
  /*free memory */
  ierr = PetscFree(localsizes_sc);CHKERRQ(ierr);
  ierr = PetscCalloc1(nroots,&indices_recv);CHKERRQ(ierr);
  /*ierr = MPI_Comm_dup(scomm,&dcomm);CHKERRQ(ierr);*/
  /* send data back to everybody */
  ierr = PetscSFCreate(scomm,&sf);CHKERRQ(ierr);
  ierr = PetscSFSetType(sf,PETSCSFBASIC);CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(sf,nroots,nleaves,PETSC_NULL,PETSC_OWN_POINTER,remote,PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscSFReduceBegin(sf,MPIU_INT,indices_ov_rd,indices_recv,MPIU_REPLACE);CHKERRQ(ierr);
  ierr = PetscSFReduceEnd(sf,MPIU_INT,indices_ov_rd,indices_recv,MPIU_REPLACE);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  /* free memory */
  ierr = PetscFree2(indices_ov_rd,sources_sc_rd);CHKERRQ(ierr);
  /* create an index set */
  ierr = ISCreateGeneral(scomm,nroots,indices_recv,PETSC_OWN_POINTER,&is_sc);CHKERRQ(ierr);
  /*construct a parallel submatrix */
  ierr = MatGetSubMatricesMPI(mat,1,&is_sc,&is_sc,MAT_INITIAL_MATRIX,&smat);CHKERRQ(ierr);
  /* we do not need them any more */
  ierr = ISDestroy(&allis_sc);CHKERRQ(ierr);
  /*create a partitioner to repartition the sub-matrix*/
  ierr = MatPartitioningCreate(scomm,&part);CHKERRQ(ierr);
  ierr = MatPartitioningSetAdjacency(part,smat[0]);CHKERRQ(ierr);
#if defined(PETSC_HAVE_PARMETIS)
  /* if a ParMETIS installation exists, we try to use ParMETIS
   * because its repartitioning routine possibly works better
   */
  ierr = MatPartitioningSetType(part,MATPARTITIONINGPARMETIS);CHKERRQ(ierr);
  /* try to use the repartition function instead of the partition function */
  ierr = MatPartitioningParmetisSetRepartition(part);CHKERRQ(ierr);
#else
  /* we at least provide a default partitioner to rebalance the computation */
  ierr = MatPartitioningSetType(part,MATPARTITIONINGAVERAGE);CHKERRQ(ierr);
#endif
  /* the user can pick any partitioner via an option */
  ierr = MatPartitioningSetFromOptions(part);CHKERRQ(ierr);
  /* apply partition */
  ierr = MatPartitioningApply(part,&partitioning);CHKERRQ(ierr);
  ierr = MatPartitioningDestroy(&part);CHKERRQ(ierr);
  ierr = MatDestroy(&(smat[0]));CHKERRQ(ierr);
  ierr = PetscFree(smat);CHKERRQ(ierr);
  /* get local rows including  overlap */
  ierr = ISBuildTwoSided(partitioning,is_sc,is);CHKERRQ(ierr);
  /* destroy */
  ierr = ISDestroy(&is_sc);CHKERRQ(ierr);
  ierr = ISDestroy(&partitioning);CHKERRQ(ierr);
  ierr = PetscCommDestroy(&scomm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
Exemplo n.º 29
0
/*@C
  TaoVecGetSubVec - Gets a subvector using the IS

  Input Parameters:
+ vfull - the full vector
. is - the index set for the subvector
. reduced_type - the method TAO is using for subsetting (TAO_SUBSET_SUBVEC, TAO_SUBSET_MASK,  TAO_SUBSET_MATRIXFREE)
- maskvalue - the value to set the unused vector elements to (for TAO_SUBSET_MASK or TAO_SUBSET_MATRIXFREE)

  Output Parameter:
. vreduced - the subvector

  Notes:
  maskvalue should usually be 0.0, unless a pointwise divide will be used.

@*/
PetscErrorCode TaoVecGetSubVec(Vec vfull, IS is, TaoSubsetType reduced_type, PetscReal maskvalue, Vec *vreduced)
{
  PetscErrorCode ierr;
  PetscInt       nfull,nreduced,nreduced_local,rlow,rhigh,flow,fhigh;
  PetscInt       i,nlocal;
  PetscReal      *fv,*rv;
  const PetscInt *s;
  IS             ident;
  VecType        vtype;
  VecScatter     scatter;
  MPI_Comm       comm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(vfull,VEC_CLASSID,1);
  PetscValidHeaderSpecific(is,IS_CLASSID,2);

  ierr = VecGetSize(vfull, &nfull);CHKERRQ(ierr);
  ierr = ISGetSize(is, &nreduced);CHKERRQ(ierr);

  if (nreduced == nfull) {
    ierr = VecDestroy(vreduced);CHKERRQ(ierr);
    ierr = VecDuplicate(vfull,vreduced);CHKERRQ(ierr);
    ierr = VecCopy(vfull,*vreduced);CHKERRQ(ierr);
  } else {
    switch (reduced_type) {
    case TAO_SUBSET_SUBVEC:
      ierr = VecGetType(vfull,&vtype);CHKERRQ(ierr);
      ierr = VecGetOwnershipRange(vfull,&flow,&fhigh);CHKERRQ(ierr);
      ierr = ISGetLocalSize(is,&nreduced_local);CHKERRQ(ierr);
      ierr = PetscObjectGetComm((PetscObject)vfull,&comm);CHKERRQ(ierr);
      if (*vreduced) {
        ierr = VecDestroy(vreduced);CHKERRQ(ierr);
      }
      ierr = VecCreate(comm,vreduced);CHKERRQ(ierr);
      ierr = VecSetType(*vreduced,vtype);CHKERRQ(ierr);

      ierr = VecSetSizes(*vreduced,nreduced_local,nreduced);CHKERRQ(ierr);
      ierr = VecGetOwnershipRange(*vreduced,&rlow,&rhigh);CHKERRQ(ierr);
      ierr = ISCreateStride(comm,nreduced_local,rlow,1,&ident);CHKERRQ(ierr);
      ierr = VecScatterCreate(vfull,is,*vreduced,ident,&scatter);CHKERRQ(ierr);
      ierr = VecScatterBegin(scatter,vfull,*vreduced,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
      ierr = VecScatterEnd(scatter,vfull,*vreduced,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
      ierr = VecScatterDestroy(&scatter);CHKERRQ(ierr);
      ierr = ISDestroy(&ident);CHKERRQ(ierr);
      break;

    case TAO_SUBSET_MASK:
    case TAO_SUBSET_MATRIXFREE:
      /* vr[i] = vf[i]   if i in is
       vr[i] = 0       otherwise */
      if (!*vreduced) {
        ierr = VecDuplicate(vfull,vreduced);CHKERRQ(ierr);
      }

      ierr = VecSet(*vreduced,maskvalue);CHKERRQ(ierr);
      ierr = ISGetLocalSize(is,&nlocal);CHKERRQ(ierr);
      ierr = VecGetOwnershipRange(vfull,&flow,&fhigh);CHKERRQ(ierr);
      ierr = VecGetArray(vfull,&fv);CHKERRQ(ierr);
      ierr = VecGetArray(*vreduced,&rv);CHKERRQ(ierr);
      ierr = ISGetIndices(is,&s);CHKERRQ(ierr);
      if (nlocal > (fhigh-flow)) SETERRQ2(PETSC_COMM_WORLD,1,"IS local size %d > Vec local size %d",nlocal,fhigh-flow);
      for (i=0;i<nlocal;i++) {
        rv[s[i]-flow] = fv[s[i]-flow];
      }
      ierr = ISRestoreIndices(is,&s);CHKERRQ(ierr);
      ierr = VecRestoreArray(vfull,&fv);CHKERRQ(ierr);
      ierr = VecRestoreArray(*vreduced,&rv);CHKERRQ(ierr);
      break;
    }
  }
  PetscFunctionReturn(0);
}
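A short usage sketch for the routine documented above; the signature and the TAO_SUBSET_SUBVEC value are taken directly from the code, while the include line is an assumption about where the prototype lives.

#include <petsctao.h> /* assumed; the prototype may instead live in a private TAO header */

/* Sketch only: extract the entries of xfull selected by "is" into a new, smaller vector. */
static PetscErrorCode ExtractSubVector(Vec xfull, IS is, Vec *xsub)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  *xsub = NULL;                                                       /* the routine (re)creates it */
  ierr  = TaoVecGetSubVec(xfull, is, TAO_SUBSET_SUBVEC, 0.0, xsub);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}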
Exemplo n.º 30
0
/*
     DMCoarsen_SNESVI - Computes the regular coarsened DM then computes additional information about its inactive set

*/
PetscErrorCode  DMCoarsen_SNESVI(DM dm1,MPI_Comm comm,DM *dm2)
{
  PetscErrorCode          ierr;
  PetscContainer          isnes;
  DM_SNESVI               *dmsnesvi1;
  Vec                     finemarked,coarsemarked;
  IS                      inactive;
  VecScatter              inject;
  const PetscInt          *index;
  PetscInt                n,k,cnt = 0,rstart,*coarseindex;
  PetscScalar             *marked;

  PetscFunctionBegin;
  ierr = PetscObjectQuery((PetscObject)dm1,"VI",(PetscObject *)&isnes);CHKERRQ(ierr);
  if (!isnes) SETERRQ(((PetscObject)dm1)->comm,PETSC_ERR_PLIB,"Composed VI data structure is missing");
  ierr = PetscContainerGetPointer(isnes,(void**)&dmsnesvi1);CHKERRQ(ierr);

  /* get the original coarsen */
  ierr = (*dmsnesvi1->coarsen)(dm1,comm,dm2);CHKERRQ(ierr);

  /* not sure why this extra reference is needed, but without it the dm2 disappears too early */
  ierr = PetscObjectReference((PetscObject)*dm2);CHKERRQ(ierr);

  /* need to set back global vectors in order to use the original injection */
  ierr = DMClearGlobalVectors(dm1);CHKERRQ(ierr);
  dm1->ops->createglobalvector = dmsnesvi1->createglobalvector;
  ierr = DMCreateGlobalVector(dm1,&finemarked);CHKERRQ(ierr);
  ierr = DMCreateGlobalVector(*dm2,&coarsemarked);CHKERRQ(ierr);

  /*
     fill finemarked with locations of inactive points
  */
  ierr = ISGetIndices(dmsnesvi1->inactive,&index);CHKERRQ(ierr);
  ierr = ISGetLocalSize(dmsnesvi1->inactive,&n);CHKERRQ(ierr);
  ierr = VecSet(finemarked,0.0);CHKERRQ(ierr);
  for (k=0;k<n;k++){
      ierr = VecSetValue(finemarked,index[k],1.0,INSERT_VALUES);CHKERRQ(ierr);
  }
  ierr = VecAssemblyBegin(finemarked);CHKERRQ(ierr);
  ierr = VecAssemblyEnd(finemarked);CHKERRQ(ierr);

  ierr = DMCreateInjection(*dm2,dm1,&inject);CHKERRQ(ierr);
  ierr = VecScatterBegin(inject,finemarked,coarsemarked,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(inject,finemarked,coarsemarked,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&inject);CHKERRQ(ierr);

  /*
     create index set list of coarse inactive points from coarsemarked
  */
  ierr = VecGetLocalSize(coarsemarked,&n);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(coarsemarked,&rstart,PETSC_NULL);CHKERRQ(ierr);
  ierr = VecGetArray(coarsemarked,&marked);CHKERRQ(ierr);
  for (k=0; k<n; k++) {
    if (marked[k] != 0.0) cnt++;
  }
  ierr = PetscMalloc(cnt*sizeof(PetscInt),&coarseindex);CHKERRQ(ierr);
  cnt  = 0;
  for (k=0; k<n; k++) {
    if (marked[k] != 0.0) coarseindex[cnt++] = k + rstart;
  }
  ierr = VecRestoreArray(coarsemarked,&marked);CHKERRQ(ierr);
  ierr = ISCreateGeneral(((PetscObject)coarsemarked)->comm,cnt,coarseindex,PETSC_OWN_POINTER,&inactive);CHKERRQ(ierr);

  ierr = DMClearGlobalVectors(dm1);CHKERRQ(ierr);
  dm1->ops->createglobalvector = DMCreateGlobalVector_SNESVI;
  ierr = DMSetVI(*dm2,inactive);CHKERRQ(ierr);

  ierr = VecDestroy(&finemarked);CHKERRQ(ierr);
  ierr = VecDestroy(&coarsemarked);CHKERRQ(ierr);
  ierr = ISDestroy(&inactive);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}