Example #1
PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping)
{
  PetscErrorCode ierr;
  PetscInt       n,bs;
  Mat_IS         *is = (Mat_IS*)A->data;
  IS             from,to;
  Vec            global;

  PetscFunctionBegin;
  PetscCheckSameComm(A,1,rmapping,2);
  if (rmapping != cmapping) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"MATIS requires the row and column mappings to be identical");
  if (is->mapping) { /* Currently destroys the objects that will be created by this routine. Is there anything else that should be checked? */
    ierr = ISLocalToGlobalMappingDestroy(&is->mapping);CHKERRQ(ierr);
    ierr = VecDestroy(&is->x);CHKERRQ(ierr);
    ierr = VecDestroy(&is->y);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&is->ctx);CHKERRQ(ierr);
    ierr = MatDestroy(&is->A);CHKERRQ(ierr);
  }
  ierr = PetscObjectReference((PetscObject)rmapping);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&is->mapping);CHKERRQ(ierr);
  is->mapping = rmapping;
/*
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->rmap,rmapping);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->cmap,cmapping);CHKERRQ(ierr);
*/

  /* Create the local matrix A */
  ierr = ISLocalToGlobalMappingGetSize(rmapping,&n);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(rmapping,&bs);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_SELF,&is->A);CHKERRQ(ierr);
  if (bs > 1) {
    ierr = MatSetType(is->A,MATSEQBAIJ);CHKERRQ(ierr);
  } else {
    ierr = MatSetType(is->A,MATSEQAIJ);CHKERRQ(ierr);
  }
  ierr = MatSetSizes(is->A,n,n,n,n);CHKERRQ(ierr);
  ierr = MatSetBlockSize(is->A,bs);CHKERRQ(ierr);
  ierr = MatSetOptionsPrefix(is->A,((PetscObject)A)->prefix);CHKERRQ(ierr);
  ierr = MatAppendOptionsPrefix(is->A,"is_");CHKERRQ(ierr);
  ierr = MatSetFromOptions(is->A);CHKERRQ(ierr);

  /* Create the local work vectors */
  ierr = VecCreate(PETSC_COMM_SELF,&is->x);CHKERRQ(ierr);
  ierr = VecSetBlockSize(is->x,bs);CHKERRQ(ierr);
  ierr = VecSetSizes(is->x,n,n);CHKERRQ(ierr);
  ierr = VecSetOptionsPrefix(is->x,((PetscObject)A)->prefix);CHKERRQ(ierr);
  ierr = VecAppendOptionsPrefix(is->x,"is_");CHKERRQ(ierr);
  ierr = VecSetFromOptions(is->x);CHKERRQ(ierr);
  ierr = VecDuplicate(is->x,&is->y);CHKERRQ(ierr);

  /* setup the global to local scatter */
  ierr = ISCreateStride(PETSC_COMM_SELF,n,0,1,&to);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApplyIS(rmapping,to,&from);CHKERRQ(ierr);
  ierr = MatCreateVecs(A,&global,NULL);CHKERRQ(ierr);
  ierr = VecScatterCreate(global,from,is->x,to,&is->ctx);CHKERRQ(ierr);
  ierr = VecDestroy(&global);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
Example #2
int main(int argc,char **argv)
{
  PetscErrorCode         ierr;
  PetscMPIInt            size,rank;
  PetscInt               nlocal,local[5],nneigh,*neigh,**ineigh,*numneigh;
  ISLocalToGlobalMapping mapping;


  ierr = PetscInitialize(&argc,&argv,(char*)0,help);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);CHKERRQ(ierr);
  if (size != 3) SETERRQ(PETSC_COMM_SELF,1,"Must run with three processors");
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
  if (!rank) {
    nlocal = 4; local[0] = 0; local[1] = 3; local[2] = 2; local[3] = 1;
  } else if (rank == 1) {
    nlocal = 4; local[0] = 3; local[1] = 5; local[2] = 4; local[3] = 2;
  } else {
    nlocal = 4; local[0] = 7; local[1] = 6; local[2] = 5; local[3] = 3;
  }
  ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_WORLD,nlocal,local,PETSC_COPY_VALUES,&mapping);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetInfo(mapping,&nneigh,&neigh,&numneigh,&ineigh);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingRestoreInfo(mapping,&nneigh,&neigh,&numneigh,&ineigh);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&mapping);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return 0;
}
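The example above retrieves the neighbor information and returns it immediately without examining it. A minimal sketch of what could go between the Get/Restore pair, printing which ranks share indices with this one (a sketch only; it reuses the variables declared above):

  {
    PetscInt i,j;
    for (i=0; i<nneigh; i++) {
      ierr = PetscPrintf(PETSC_COMM_SELF,"neighbor %D shares %D local indices\n",neigh[i],numneigh[i]);CHKERRQ(ierr);
      for (j=0; j<numneigh[i]; j++) {
        ierr = PetscPrintf(PETSC_COMM_SELF,"  shared local index %D\n",ineigh[i][j]);CHKERRQ(ierr);
      }
    }
  }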
Example #3
/*@
   ISEmbed   -   embed IS a into IS b by finding the locations in b that have the same indices as in a.
                 If c is the IS of these locations, we have a = b*c, regarded as a composition of the
                 corresponding ISLocalToGlobalMaps.

  Not collective.

  Input arguments:
+ a    -  IS to embed
. b    -  IS to embed into
- drop -  flag indicating whether to drop a's indices that are not in b.

  Output arguments:
. c    -  local embedding indices

  Note:
  If some of a's global indices are not among b's indices the embedding is impossible.  The local indices of a
  corresponding to these global indices are either mapped to -1 (if !drop) or are omitted (if drop).  In the former
  case the size of c is the same as that of a, in the latter case c's size may be smaller.

  The resulting IS is sequential, since the index substitution it encodes is purely local.

  Level: advanced

.seealso: ISLocalToGlobalMapping
 @*/
PetscErrorCode ISEmbed(IS a, IS b, PetscBool drop, IS *c)
{
  PetscErrorCode             ierr;
  ISLocalToGlobalMapping     ltog;
  ISGlobalToLocalMappingType gtoltype = IS_GTOLM_DROP;
  PetscInt                   alen, clen, *cindices, *cindices2;
  const PetscInt             *aindices;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(a, IS_CLASSID, 1);
  PetscValidHeaderSpecific(b, IS_CLASSID, 2);
  PetscValidPointer(c,4);
  ierr = ISLocalToGlobalMappingCreateIS(b, &ltog);CHKERRQ(ierr);
  ierr = ISGetLocalSize(a, &alen);CHKERRQ(ierr);
  ierr = ISGetIndices(a, &aindices);CHKERRQ(ierr);
  ierr = PetscMalloc1(alen, &cindices);CHKERRQ(ierr);
  if (!drop) gtoltype = IS_GTOLM_MASK;
  ierr = ISGlobalToLocalMappingApply(ltog,gtoltype,alen,aindices,&clen,cindices);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  if (clen != alen) {
    cindices2 = cindices;
    ierr      = PetscMalloc1(clen, &cindices);CHKERRQ(ierr);
    ierr      = PetscMemcpy(cindices,cindices2,clen*sizeof(PetscInt));CHKERRQ(ierr);
    ierr      = PetscFree(cindices2);CHKERRQ(ierr);
  }
  ierr = ISCreateGeneral(PETSC_COMM_SELF,clen,cindices,PETSC_OWN_POINTER,c);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
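A minimal usage sketch for ISEmbed (the index sets are hypothetical): embedding a = {1,3,5} into b = {0,1,2,3,4} with drop = PETSC_TRUE yields c = {1,3}, the positions of 1 and 3 within b; the index 5 is dropped because it does not occur in b.

  IS       a,b,c;
  PetscInt aidx[3] = {1,3,5},bidx[5] = {0,1,2,3,4};
  ierr = ISCreateGeneral(PETSC_COMM_SELF,3,aidx,PETSC_COPY_VALUES,&a);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PETSC_COMM_SELF,5,bidx,PETSC_COPY_VALUES,&b);CHKERRQ(ierr);
  ierr = ISEmbed(a,b,PETSC_TRUE,&c);CHKERRQ(ierr); /* c = {1,3} */
  ierr = ISDestroy(&a);CHKERRQ(ierr);
  ierr = ISDestroy(&b);CHKERRQ(ierr);
  ierr = ISDestroy(&c);CHKERRQ(ierr);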
Example #4
/*
   DMDestroy_Private - handles the work vectors created by DMGetGlobalVector() and DMGetLocalVector()

*/
PetscErrorCode  DMDestroy_Private(DM dm,PetscBool  *done)
{
  PetscErrorCode ierr;
  PetscInt       i,cnt = 0;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  *done = PETSC_FALSE;

  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if (dm->localin[i])  cnt++;
    if (dm->globalin[i]) cnt++;
  }

  /* the work vectors counted above each hold a reference to the dm; only proceed when no other references remain */
  if (--((PetscObject)dm)->refct - cnt > 0) PetscFunctionReturn(0);

  /*
         Need this test because the dm references the vectors that
     reference the dm, so destroying the dm calls destroy on the
     vectors that cause another destroy on the dm
  */
  if (((PetscObject)dm)->refct < 0) PetscFunctionReturn(0);
  ((PetscObject)dm)->refct = 0;

  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if (dm->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()");
    ierr = VecDestroy(&dm->localin[i]);CHKERRQ(ierr);
    if (dm->globalout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a global vector obtained with DMGetGlobalVector()");
    ierr = VecDestroy(&dm->globalin[i]);CHKERRQ(ierr);
  }
  ierr = ISLocalToGlobalMappingDestroy(&dm->ltogmap);CHKERRQ(ierr);

  *done = PETSC_TRUE;
  PetscFunctionReturn(0);
}
Example #5
PetscErrorCode  PetscLayoutDestroy(PetscLayout *map)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*map) PetscFunctionReturn(0);
  if (!(*map)->refcnt--) { /* refcnt is tested before the decrement: zero means this caller held the last reference */
    ierr = PetscFree((*map)->range);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&(*map)->mapping);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&(*map)->bmapping);CHKERRQ(ierr);
#if defined(PETSC_THREADCOMM_ACTIVE)
    ierr = PetscFree((*map)->trstarts);CHKERRQ(ierr);
#endif

    ierr = PetscFree((*map));CHKERRQ(ierr);
  }
  *map = NULL;
  PetscFunctionReturn(0);
}
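The guard above means the layout is freed only when the last reference is dropped. A hedged sketch of the sharing pattern, assuming an existing layout map (PetscLayoutReference increments the count):

  PetscLayout map2 = NULL;
  ierr = PetscLayoutReference(map,&map2);CHKERRQ(ierr); /* map2 now shares map */
  ierr = PetscLayoutDestroy(&map);CHKERRQ(ierr);        /* only decrements; map2 remains valid */
  ierr = PetscLayoutDestroy(&map2);CHKERRQ(ierr);       /* last reference: storage actually freed */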
Example #6
/*@
   VecMPISetGhost - Sets the ghost points for an MPI ghost vector

   Collective on Vec

   Input Parameters:
+  vv - the MPI vector
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)


   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   You must call this AFTER you have set the type of the vector (with VecSetType()) and the size (with VecSetSizes()).

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecMPISetGhost(Vec vv,PetscInt nghost,const PetscInt ghosts[])
{
  PetscErrorCode ierr;
  PetscBool      flg;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)vv,VECMPI,&flg);CHKERRQ(ierr);
  /* if vv is already a fully existent VECMPI then basically destroy it and rebuild with ghosting */
  if (flg) {
    PetscInt               n,N;
    Vec_MPI                *w;
    PetscScalar            *larray;
    IS                     from,to;
    ISLocalToGlobalMapping ltog;
    PetscInt               rstart,i,*indices;
    MPI_Comm               comm = ((PetscObject)vv)->comm;

    n = vv->map->n;
    N = vv->map->N;
    ierr = (*vv->ops->destroy)(vv);CHKERRQ(ierr);
    ierr = VecSetSizes(vv,n,N);CHKERRQ(ierr);
    ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,nghost,PETSC_NULL);CHKERRQ(ierr);
    w    = (Vec_MPI *)(vv)->data;
    /* Create local representation */
    ierr = VecGetArray(vv,&larray);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(vv,w->localrep);CHKERRQ(ierr);
    ierr = VecRestoreArray(vv,&larray);CHKERRQ(ierr);

    /*
     Create scatter context for scattering (updating) ghost values
     */
    ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
    ierr = VecScatterCreate(vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(vv,w->localupdate);CHKERRQ(ierr);
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);

    /* set local to global mapping for ghosted vector */
    ierr = PetscMalloc((n+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
    for (i=0; i<n; i++) {
      indices[i] = rstart + i;
    }
    for (i=0; i<nghost; i++) {
      indices[n+i] = ghosts[i];
    }
    ierr = ISLocalToGlobalMappingCreate(comm,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
    ierr = VecSetLocalToGlobalMapping(vv,ltog);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  } else if (vv->ops->create == VecCreate_MPI) SETERRQ(((PetscObject)vv)->comm,PETSC_ERR_ARG_WRONGSTATE,"Must set local or global size before setting ghosting");
  else if (!((PetscObject)vv)->type_name) SETERRQ(((PetscObject)vv)->comm,PETSC_ERR_ARG_WRONGSTATE,"Must set type to VECMPI before ghosting");
  PetscFunctionReturn(0);
}
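A minimal sketch of the calling sequence (the local size and ghost indices are hypothetical; per the notes above, the type and sizes must be set before the ghosting):

  Vec      v;
  PetscInt ghosts[2] = {0,1}; /* hypothetical global indices of the ghost points */
  ierr = VecCreate(PETSC_COMM_WORLD,&v);CHKERRQ(ierr);
  ierr = VecSetSizes(v,4,PETSC_DECIDE);CHKERRQ(ierr);
  ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);
  ierr = VecMPISetGhost(v,2,ghosts);CHKERRQ(ierr);
  /* ... fill v, then propagate values into the ghost region ... */
  ierr = VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecDestroy(&v);CHKERRQ(ierr);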
Example #7
/*
   PCISDestroy -
*/
PetscErrorCode  PCISDestroy(PC pc)
{
  PC_IS          *pcis = (PC_IS*)(pc->data);
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = ISDestroy(&pcis->is_B_local);CHKERRQ(ierr);
  ierr = ISDestroy(&pcis->is_I_local);CHKERRQ(ierr);
  ierr = ISDestroy(&pcis->is_B_global);CHKERRQ(ierr);
  ierr = ISDestroy(&pcis->is_I_global);CHKERRQ(ierr);
  ierr = MatDestroy(&pcis->A_II);CHKERRQ(ierr);
  ierr = MatDestroy(&pcis->A_IB);CHKERRQ(ierr);
  ierr = MatDestroy(&pcis->A_BI);CHKERRQ(ierr);
  ierr = MatDestroy(&pcis->A_BB);CHKERRQ(ierr);
  ierr = VecDestroy(&pcis->D);CHKERRQ(ierr);
  ierr = KSPDestroy(&pcis->ksp_N);CHKERRQ(ierr);
  ierr = KSPDestroy(&pcis->ksp_D);CHKERRQ(ierr);
  ierr = VecDestroy(&pcis->vec1_N);CHKERRQ(ierr);
  ierr = VecDestroy(&pcis->vec2_N);CHKERRQ(ierr);
  ierr = VecDestroy(&pcis->vec1_D);CHKERRQ(ierr);
  ierr = VecDestroy(&pcis->vec2_D);CHKERRQ(ierr);
  ierr = VecDestroy(&pcis->vec3_D);CHKERRQ(ierr);
  ierr = VecDestroy(&pcis->vec4_D);CHKERRQ(ierr);
  ierr = VecDestroy(&pcis->vec1_B);CHKERRQ(ierr);
  ierr = VecDestroy(&pcis->vec2_B);CHKERRQ(ierr);
  ierr = VecDestroy(&pcis->vec3_B);CHKERRQ(ierr);
  ierr = VecDestroy(&pcis->vec1_global);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&pcis->global_to_D);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&pcis->N_to_B);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&pcis->N_to_D);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&pcis->global_to_B);CHKERRQ(ierr);
  ierr = PetscFree(pcis->work_N);CHKERRQ(ierr);
  if (pcis->n_neigh > -1) {
    ierr = ISLocalToGlobalMappingRestoreInfo(pcis->mapping,&(pcis->n_neigh),&(pcis->neigh),&(pcis->n_shared),&(pcis->shared));CHKERRQ(ierr);
  }
  ierr = ISLocalToGlobalMappingDestroy(&pcis->mapping);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&pcis->BtoNmap);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCISSetUseStiffnessScaling_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCISSetSubdomainScalingFactor_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCISSetSubdomainDiagonalScaling_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
Example #8
/*@C

    PetscLayoutSetISLocalToGlobalMappingBlock - sets an ISLocalToGlobalMapping into a PetscLayout

     Collective on PetscLayout

    Input Parameters:
+     in - input PetscLayout
-     ltog - the local to global block mapping


   Level: developer

    Notes: PetscLayoutSetUp() does not need to be called on the resulting PetscLayout

    If the layout already contains a block mapping, it is destroyed

.seealso: PetscLayoutCreate(), PetscLayoutDestroy(), PetscLayoutSetUp(), PetscLayoutDuplicate(), PetscLayoutSetISLocalToGlobalMapping()

@*/
PetscErrorCode  PetscLayoutSetISLocalToGlobalMappingBlock(PetscLayout in,ISLocalToGlobalMapping ltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectReference((PetscObject)ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&in->bmapping);CHKERRQ(ierr);

  in->bmapping = ltog;
  PetscFunctionReturn(0);
}
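The body above is the standard PETSc ownership idiom: take a reference to the incoming object before destroying whatever the slot currently holds, so the code stays correct even when the same mapping is set twice. The same pattern in user code (ctx and newmap are hypothetical names):

  ierr = PetscObjectReference((PetscObject)newmap);CHKERRQ(ierr); /* take our reference first */
  ierr = ISLocalToGlobalMappingDestroy(&ctx->map);CHKERRQ(ierr);  /* drop the old reference, if any */
  ctx->map = newmap;                                              /* the slot now owns one reference */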
Example #9
/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or PETSC_NULL if not needed), counts are by block not by index, these do not need to be in increasing order (sorted)
-  array - the space to store the vector values (must be of length at least n + nghost*bs)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^creating ghosted
   Concepts: vectors^creating with array

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostWithArray(), VecCreateGhostBlock()

@*/
PetscErrorCode  VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,nb,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  if (n % bs)                 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array);CHKERRQ(ierr);
  w    = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,bs,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateBlock(comm,bs,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  nb = n/bs;
  ierr = PetscMalloc((nb+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
  for (i=0; i<nb; i++) {
    indices[i] = rstart + i*bs;
  }
  for (i=0; i<nghost; i++) {
    indices[nb+i] = ghosts[i];
  }
  ierr = ISLocalToGlobalMappingCreate(comm,nb+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMappingBlock(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
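A minimal sketch of a call (sizes and ghost blocks are hypothetical): with bs = 2, n = 4 and nghost = 1 the caller must supply at least n + nghost*bs = 6 scalars, and the array must stay valid for the lifetime of the vector:

  Vec         v;
  PetscScalar array[6];
  PetscInt    ghosts[1] = {0}; /* hypothetical global index of the single ghost block */
  ierr = VecCreateGhostBlockWithArray(PETSC_COMM_WORLD,2,4,PETSC_DECIDE,1,ghosts,array,&v);CHKERRQ(ierr);
  /* ... use v ... */
  ierr = VecDestroy(&v);CHKERRQ(ierr);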
Example #10
/*@
  PetscLayoutSetISLocalToGlobalMapping - sets an ISLocalToGlobalMapping into a PetscLayout

  Collective on PetscLayout

  Input Parameters:
+ in - input PetscLayout
- ltog - the local to global mapping


  Level: developer

  Notes: PetscLayoutSetUp() does not need to be called on the resulting PetscLayout

  If the layout already contains a mapping, it is destroyed

.seealso: PetscLayoutCreate(), PetscLayoutDestroy(), PetscLayoutSetUp(), PetscLayoutDuplicate()
@*/
PetscErrorCode PetscLayoutSetISLocalToGlobalMapping(PetscLayout in,ISLocalToGlobalMapping ltog)
{
  PetscErrorCode ierr;
  PetscInt       bs;

  PetscFunctionBegin;
  ierr = ISLocalToGlobalMappingGetBlockSize(ltog,&bs);CHKERRQ(ierr);
  if (in->bs > 0 && in->bs != bs) SETERRQ2(in->comm,PETSC_ERR_PLIB,"Blocksize of layout %D must match that of mapping %D",in->bs,bs);
  ierr = PetscObjectReference((PetscObject)ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&in->mapping);CHKERRQ(ierr);
  in->mapping = ltog;
  PetscFunctionReturn(0);
}
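A minimal usage sketch (assuming a mapping ltog created elsewhere with block size bs; per the check above, a block size already set on the layout must agree with the mapping's):

  PetscLayout layout;
  ierr = PetscLayoutCreate(PETSC_COMM_WORLD,&layout);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(layout,bs);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(layout,ltog);CHKERRQ(ierr);
  /* ... */
  ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);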
Example #11
int main(int argc,char **argv)
{
  PetscErrorCode         ierr;
  PetscInt               indices[] = {0,1,2,3,-1,-1,-1,-1,4,5,6,7};
  PetscInt               indices2[] = {0,1,2,3,4,5,-1,-1,-1,-1,-1,-1,6,7,8,9,10,11};
  ISLocalToGlobalMapping map;

  ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
  ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_WORLD,1,12,indices,PETSC_COPY_VALUES,&map);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingView(map,NULL);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingSetBlockSize(map,2);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingView(map,NULL);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingSetBlockSize(map,4);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingView(map,NULL);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingSetBlockSize(map,2);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingView(map,NULL);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingSetBlockSize(map,1);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingView(map,NULL);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&map);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_WORLD,1,18,indices2,PETSC_COPY_VALUES,&map);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingView(map,NULL);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingSetBlockSize(map,3);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingView(map,NULL);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingSetBlockSize(map,6);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingView(map,NULL);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingSetBlockSize(map,3);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingView(map,NULL);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingSetBlockSize(map,1);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingView(map,NULL);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&map);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_WORLD,5,2,indices2,PETSC_COPY_VALUES,&map);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingView(map,NULL);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingSetBlockSize(map,2);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingView(map,NULL);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&map);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return ierr;
}
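For reference, setting a block size b reinterprets the 12 point indices above as 12/b blocks; this only succeeds when each run of b consecutive point indices is itself consecutive (or entirely -1), which is why {0,1,2,3,-1,-1,-1,-1,4,5,6,7} admits block sizes 2 and 4. A line that could be added after any of the views above to confirm the current value (a sketch reusing map and ierr):

  PetscInt bs;
  ierr = ISLocalToGlobalMappingGetBlockSize(map,&bs);CHKERRQ(ierr);
  ierr = PetscPrintf(PETSC_COMM_WORLD,"current block size %D\n",bs);CHKERRQ(ierr);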
Example #12
/*@
  PetscLayoutDestroy - Frees a map object and frees its range if that exists.

  Collective on MPI_Comm

  Input Parameters:
. map - the PetscLayout

  Level: developer

  Note:
  The PetscLayout object and methods are intended to be used in the PETSc Vec and Mat implementations; they are
  not recommended for use in user code unless there is a clear benefit.

.seealso: PetscLayoutSetLocalSize(), PetscLayoutSetSize(), PetscLayoutGetSize(), PetscLayoutGetLocalSize(), PetscLayout, PetscLayoutCreate(),
          PetscLayoutGetRange(), PetscLayoutGetRanges(), PetscLayoutSetBlockSize(), PetscLayoutGetBlockSize(), PetscLayoutSetUp()

@*/
PetscErrorCode PetscLayoutDestroy(PetscLayout *map)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*map) PetscFunctionReturn(0);
  if (!(*map)->refcnt--) {
    ierr = PetscFree((*map)->range);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&(*map)->mapping);CHKERRQ(ierr);
    ierr = PetscFree((*map));CHKERRQ(ierr);
  }
  *map = NULL;
  PetscFunctionReturn(0);
}
Example #13
static PetscErrorCode DMCreateLocalToGlobalMapping_Composite(DM dm)
{
  DM_Composite           *com = (DM_Composite*)dm->data;
  ISLocalToGlobalMapping *ltogs;
  PetscInt               i;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  /* Set the ISLocalToGlobalMapping on the new matrix */
  ierr = DMCompositeGetISLocalToGlobalMappings(dm,&ltogs);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingConcatenate(((PetscObject)dm)->comm,com->nDM,ltogs,&dm->ltogmap);CHKERRQ(ierr);
  for (i=0; i<com->nDM; i++) {ierr = ISLocalToGlobalMappingDestroy(&ltogs[i]);CHKERRQ(ierr);}
  ierr = PetscFree(ltogs);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
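ISLocalToGlobalMappingConcatenate stitches the per-DM mappings together end to end. A standalone sketch with two hypothetical mappings (using the create signature that takes an explicit block size):

  ISLocalToGlobalMapping maps[2],all;
  PetscInt               idxA[2] = {0,1},idxB[3] = {5,6,7};
  ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,1,2,idxA,PETSC_COPY_VALUES,&maps[0]);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,1,3,idxB,PETSC_COPY_VALUES,&maps[1]);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingConcatenate(PETSC_COMM_SELF,2,maps,&all);CHKERRQ(ierr); /* local 0..4 -> {0,1,5,6,7} */
  ierr = ISLocalToGlobalMappingDestroy(&maps[0]);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&maps[1]);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&all);CHKERRQ(ierr);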
Example #14
PetscErrorCode MatDestroy_IS(Mat A)
{
  PetscErrorCode ierr;
  Mat_IS         *b = (Mat_IS*)A->data;

  PetscFunctionBegin;
  ierr = MatDestroy(&b->A);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&b->ctx);CHKERRQ(ierr);
  ierr = VecDestroy(&b->x);CHKERRQ(ierr);
  ierr = VecDestroy(&b->y);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&b->mapping);CHKERRQ(ierr);
  ierr = PetscFree(A->data);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)A,0);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)A,"MatISGetLocalMat_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
Example #15
File: ex4.c Project: Kun-Qu/petsc
int main(int argc,char **argv)
{
  PetscErrorCode         ierr;
  PetscInt               i,n = 4,indices[] = {0,3,9,12},m = 2,input[] = {0,2};
  PetscInt               output[2],inglobals[13],outlocals[13];
  ISLocalToGlobalMapping mapping;

  ierr = PetscInitialize(&argc,&argv,(char*)0,help);CHKERRQ(ierr);

  /*
      Create a local to global mapping. Each processor independently
     creates a mapping  
  */
  ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_WORLD,n,indices,PETSC_COPY_VALUES,&mapping);CHKERRQ(ierr);

  /*
     Map a set of local indices to their global values 
  */
  ierr = ISLocalToGlobalMappingApply(mapping,m,input,output);CHKERRQ(ierr);
  ierr = PetscIntView(m,output,PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);
  
  /*
     Map some global indices to local, mapping the ones without a local index to -1
  */
  for (i=0; i<13; i++) {
    inglobals[i] = i;
  }
  ierr = ISGlobalToLocalMappingApply(mapping,IS_GTOLM_MASK,13,inglobals,PETSC_NULL,outlocals);CHKERRQ(ierr);
  ierr = PetscIntView(13,outlocals,PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);

  /*
     Map some global indices to local, dropping the ones without a local index.
  */
  ierr = ISGlobalToLocalMappingApply(mapping,IS_GTOLM_DROP,13,inglobals,&m,outlocals);CHKERRQ(ierr);
  ierr = PetscIntView(m,outlocals,PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);

  /*
     Free the space used by the local to global mapping
  */
  ierr = ISLocalToGlobalMappingDestroy(&mapping);CHKERRQ(ierr);


  ierr = PetscFinalize();
  return 0;
}
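For reference, the results of the three mapping calls above can be worked out by hand from the mapping {0,3,9,12}:

  /* ISLocalToGlobalMappingApply on {0,2}  -> {0,9}                                */
  /* IS_GTOLM_MASK over globals 0..12      -> {0,-1,-1,1,-1,-1,-1,-1,-1,2,-1,-1,3} */
  /* IS_GTOLM_DROP over globals 0..12      -> {0,1,2,3} with m = 4                 */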
Example #16
PetscErrorCode PCBDDCGraphReset(PCBDDCGraph graph)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!graph) PetscFunctionReturn(0);
  ierr = ISLocalToGlobalMappingDestroy(&graph->l2gmap);CHKERRQ(ierr);
  ierr = PetscFree(graph->subset_ncc);CHKERRQ(ierr);
  ierr = PetscFree(graph->subset_ref_node);CHKERRQ(ierr);
  if (graph->nvtxs) {
    ierr = PetscFree(graph->neighbours_set[0]);CHKERRQ(ierr);
  }
  ierr = PetscBTDestroy(&graph->touched);CHKERRQ(ierr);
  ierr = PetscFree5(graph->count,
                    graph->neighbours_set,
                    graph->subset,
                    graph->which_dof,
                    graph->special_dof);CHKERRQ(ierr);
  ierr = PetscFree2(graph->cptr,graph->queue);CHKERRQ(ierr);
  if (graph->mirrors) {
    ierr = PetscFree(graph->mirrors_set[0]);CHKERRQ(ierr);
  }
  ierr = PetscFree2(graph->mirrors,graph->mirrors_set);CHKERRQ(ierr);
  if (graph->subset_idxs) {
    ierr = PetscFree(graph->subset_idxs[0]);CHKERRQ(ierr);
  }
  ierr = PetscFree2(graph->subset_size,graph->subset_idxs);CHKERRQ(ierr);
  ierr = ISDestroy(&graph->dirdofs);CHKERRQ(ierr);
  ierr = ISDestroy(&graph->dirdofsB);CHKERRQ(ierr);
  if (graph->n_local_subs) {
    ierr = PetscFree(graph->local_subs);CHKERRQ(ierr);
  }
  graph->has_dirichlet       = PETSC_FALSE;
  graph->twodimset           = PETSC_FALSE;
  graph->twodim              = PETSC_FALSE;
  graph->nvtxs               = 0;
  graph->nvtxs_global        = 0;
  graph->n_subsets           = 0;
  graph->custom_minimal_size = 1;
  graph->n_local_subs        = 0;
  graph->maxcount            = PETSC_MAX_INT;
  graph->setupcalled         = PETSC_FALSE;
  PetscFunctionReturn(0);
}
Example #17
static PetscErrorCode MatPartitioningApply_Hierarchical(MatPartitioning part,IS *partitioning)
{
  MatPartitioning_Hierarchical *hpart  = (MatPartitioning_Hierarchical*)part->data;
  const PetscInt               *fineparts_indices, *coarseparts_indices;
  PetscInt                     *parts_indices,i,j,mat_localsize;
  Mat                           mat    = part->adj,adj,sadj;
  PetscBool                     flg;
  PetscInt                      bs     = 1;
  MatPartitioning               finePart, coarsePart;
  PetscInt                     *coarse_vertex_weights = 0;
  PetscMPIInt                   size,rank;
  MPI_Comm                      comm,scomm;
  IS                            destination,fineparts_temp;
  ISLocalToGlobalMapping        mapping;
  PetscErrorCode                ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)part,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)mat,MATMPIADJ,&flg);CHKERRQ(ierr);
  if (flg) {
    adj = mat;
    ierr = PetscObjectReference((PetscObject)adj);CHKERRQ(ierr);
  } else {
    /* bs indicates whether the converted matrix is "reduced" from the original; if so, the
       resulting partition needs to be stretched to match the original matrix */
    ierr = MatConvert(mat,MATMPIADJ,MAT_INITIAL_MATRIX,&adj);CHKERRQ(ierr);
    if (adj->rmap->n > 0) bs = mat->rmap->n/adj->rmap->n;
  }
  /* local size of mat */
  mat_localsize = adj->rmap->n;
  /* check parameters */
  /* how many small subdomains we want from a given 'big' subdomain */
  if (!hpart->Nfineparts) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Must set number of small subdomains for each big subdomain\n");
  if (!hpart->Ncoarseparts && !part->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Must set either the number of coarse parts or the total number of parts\n");
  if (part->n && part->n%hpart->Nfineparts != 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Total number of parts %D cannot be divided by number of fine parts %D\n",part->n,hpart->Nfineparts);
  if (part->n) {
    hpart->Ncoarseparts = part->n/hpart->Nfineparts;
  } else {
    part->n = hpart->Ncoarseparts*hpart->Nfineparts;
  }
  /* we do not support this case currently, but this restriction should be
     removed in the future */
  if (hpart->Ncoarseparts > size) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_SUP,"We do not support number of coarse parts %D > size %D\n",hpart->Ncoarseparts,size);
  /* create a coarse partitioner */
  ierr = MatPartitioningCreate(comm,&coarsePart);CHKERRQ(ierr);
  /* if the partitioning type was not set yet, use parmetis by default */
  if (!hpart->coarseparttype) {
    ierr = MatPartitioningSetType(coarsePart,MATPARTITIONINGPARMETIS);CHKERRQ(ierr);
  } else {
    ierr = MatPartitioningSetType(coarsePart,hpart->coarseparttype);CHKERRQ(ierr);
  }
  ierr = MatPartitioningSetAdjacency(coarsePart,adj);CHKERRQ(ierr);
  ierr = MatPartitioningSetNParts(coarsePart, hpart->Ncoarseparts);CHKERRQ(ierr);
  /* copy over vertex weights */
  if (part->vertex_weights) {
    ierr = PetscMalloc(sizeof(PetscInt)*mat_localsize,&coarse_vertex_weights);CHKERRQ(ierr);
    ierr = PetscMemcpy(coarse_vertex_weights,part->vertex_weights,sizeof(PetscInt)*mat_localsize);CHKERRQ(ierr);
    ierr = MatPartitioningSetVertexWeights(coarsePart,coarse_vertex_weights);CHKERRQ(ierr);
  }
  /* It looks nontrivial to support part weights */
  /*
  if (part->part_weights) {
    ierr = PetscMalloc(sizeof(part->part_weights)*1,&coarse_partition_weights);CHKERRQ(ierr);
    ierr = PetscMemcpy(coarse_partition_weights,part->part_weights,sizeof(part->part_weights)*1);CHKERRQ(ierr);
    ierr = MatPartitioningSetPartitionWeights(coarsePart,coarse_partition_weights);CHKERRQ(ierr);
  }
  */
  ierr = MatPartitioningApply(coarsePart,&hpart->coarseparts);CHKERRQ(ierr);
  ierr = MatPartitioningDestroy(&coarsePart);CHKERRQ(ierr);
  /* In the current implementation, destination should be the same as hpart->coarseparts,
   * and this interface is preserved to deal with the case hpart->coarseparts > size in
   * the future. */
  ierr = MatPartitioningHierarchical_DetermineDestination(part,hpart->coarseparts,0,hpart->Ncoarseparts,&destination);CHKERRQ(ierr);
  /* create a sub-matrix */
  ierr = MatPartitioningHierarchical_AssembleSubdomain(adj,destination,&sadj,&mapping);CHKERRQ(ierr);
  ierr = ISDestroy(&destination);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)sadj,&scomm);CHKERRQ(ierr);
  /* create a fine partitioner */
  ierr = MatPartitioningCreate(scomm,&finePart);CHKERRQ(ierr);
  /* if the partitioning type was not set, use parmetis by default */
  if (!hpart->fineparttype) {
    ierr = MatPartitioningSetType(finePart,MATPARTITIONINGPARMETIS);CHKERRQ(ierr);
  } else {
    ierr = MatPartitioningSetType(finePart,hpart->fineparttype);CHKERRQ(ierr);
  }
  ierr = MatPartitioningSetAdjacency(finePart,sadj);CHKERRQ(ierr);
  ierr = MatPartitioningSetNParts(finePart, hpart->Nfineparts);CHKERRQ(ierr);
  ierr = MatPartitioningApply(finePart,&fineparts_temp);CHKERRQ(ierr);
  ierr = MatDestroy(&sadj);CHKERRQ(ierr);
  ierr = MatPartitioningDestroy(&finePart);CHKERRQ(ierr);
  ierr = MatPartitioningHierarchical_ReassembleFineparts(adj,fineparts_temp,mapping,&hpart->fineparts);CHKERRQ(ierr);
  ierr = ISDestroy(&fineparts_temp);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&mapping);CHKERRQ(ierr);

  ierr = ISGetIndices(hpart->fineparts,&fineparts_indices);CHKERRQ(ierr);
  ierr = ISGetIndices(hpart->coarseparts,&coarseparts_indices);CHKERRQ(ierr);
  ierr = PetscMalloc1(bs*adj->rmap->n,&parts_indices);CHKERRQ(ierr);
  for (i=0; i<adj->rmap->n; i++) {
    for (j=0; j<bs; j++) {
      parts_indices[bs*i+j] = fineparts_indices[i] + coarseparts_indices[i]*hpart->Nfineparts;
    }
  }
  ierr = ISCreateGeneral(comm,bs*adj->rmap->n,parts_indices,PETSC_OWN_POINTER,partitioning);CHKERRQ(ierr);
  ierr = MatDestroy(&adj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
Example #18
PetscErrorCode PCBDDCSubSchursSetUp(PCBDDCSubSchurs sub_schurs, Mat S, IS is_A_I, IS is_A_B, PetscInt ncc, IS is_cc[], PetscInt xadj[], PetscInt adjncy[], PetscInt nlayers)
{
  Mat                    A_II,A_IB,A_BI,A_BB;
  ISLocalToGlobalMapping BtoNmap,ItoNmap;
  PetscBT                touched;
  PetscInt               i,n_I,n_B,n_local,*local_numbering;
  PetscBool              is_sorted;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  ierr = ISSorted(is_A_I,&is_sorted);CHKERRQ(ierr);
  if (!is_sorted) {
    SETERRQ(PetscObjectComm((PetscObject)is_A_I),PETSC_ERR_PLIB,"IS for I dofs should be sorted");
  }
  ierr = ISSorted(is_A_B,&is_sorted);CHKERRQ(ierr);
  if (!is_sorted) {
    SETERRQ(PetscObjectComm((PetscObject)is_A_B),PETSC_ERR_PLIB,"IS for B dofs should be sorted");
  }

  /* get sizes */
  ierr = ISGetLocalSize(is_A_I,&n_I);CHKERRQ(ierr);
  ierr = ISGetLocalSize(is_A_B,&n_B);CHKERRQ(ierr);
  n_local = n_I+n_B;

  /* maps */
  ierr = ISLocalToGlobalMappingCreateIS(is_A_B,&BtoNmap);CHKERRQ(ierr);
  if (nlayers >= 0 && xadj != NULL && adjncy != NULL) { /* the I problems have a different size than the original ones */
    ierr = ISLocalToGlobalMappingCreateIS(is_A_I,&ItoNmap);CHKERRQ(ierr);
    /* allocate some auxiliary space */
    ierr = PetscMalloc1(n_local,&local_numbering);CHKERRQ(ierr);
    ierr = PetscBTCreate(n_local,&touched);CHKERRQ(ierr);
  } else {
    ItoNmap = 0;
    local_numbering = 0;
    touched = 0;
  }

  /* get Schur complement matrices */
  ierr = MatSchurComplementGetSubMatrices(S,&A_II,NULL,&A_IB,&A_BI,&A_BB);CHKERRQ(ierr);

  /* allocate space for schur complements */
  ierr = PetscMalloc5(ncc,&sub_schurs->is_AEj_I,ncc,&sub_schurs->is_AEj_B,ncc,&sub_schurs->S_Ej,ncc,&sub_schurs->work1,ncc,&sub_schurs->work2);CHKERRQ(ierr);
  sub_schurs->n_subs = ncc;

  /* cycle on subsets and extract schur complements */
  for (i=0;i<sub_schurs->n_subs;i++) {
    Mat      AE_II,AE_IE,AE_EI,AE_EE;
    IS       is_I,is_subset_B;

    /* get IS for subsets in B numbering */
    ierr = ISDuplicate(is_cc[i],&sub_schurs->is_AEj_B[i]);CHKERRQ(ierr);
    ierr = ISSort(sub_schurs->is_AEj_B[i]);CHKERRQ(ierr);
    ierr = ISGlobalToLocalMappingApplyIS(BtoNmap,IS_GTOLM_DROP,sub_schurs->is_AEj_B[i],&is_subset_B);CHKERRQ(ierr);

    /* BB block on subset */
    ierr = MatGetSubMatrix(A_BB,is_subset_B,is_subset_B,MAT_INITIAL_MATRIX,&AE_EE);CHKERRQ(ierr);

    if (ItoNmap) { /* if ItoNmap has been computed, extract only a part of the I dofs */
      const PetscInt* idx_B;
      PetscInt        n_local_dofs,n_prev_added,j,layer,subset_size;

      /* all boundary dofs must be skipped when adding layers */
      ierr = PetscBTMemzero(n_local,touched);CHKERRQ(ierr);
      ierr = ISGetIndices(is_A_B,&idx_B);CHKERRQ(ierr);
      for (j=0;j<n_B;j++) {
        ierr = PetscBTSet(touched,idx_B[j]);CHKERRQ(ierr);
      }
      ierr = ISRestoreIndices(is_A_B,&idx_B);CHKERRQ(ierr);

      /* add next layers of dofs */
      ierr = ISGetLocalSize(is_cc[i],&subset_size);CHKERRQ(ierr);
      ierr = ISGetIndices(is_cc[i],&idx_B);CHKERRQ(ierr);
      ierr = PetscMemcpy(local_numbering,idx_B,subset_size*sizeof(PetscInt));CHKERRQ(ierr);
      ierr = ISRestoreIndices(is_cc[i],&idx_B);CHKERRQ(ierr);
      n_local_dofs = subset_size;
      n_prev_added = subset_size;
      for (layer=0;layer<nlayers;layer++) {
        PetscInt n_added;
        if (n_local_dofs == n_I+subset_size) break;
        if (n_local_dofs > n_I+subset_size) {
          SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Error querying layer %d. Out of bound access (%d > %d)",layer,n_local_dofs,n_I+subset_size);
        }
        ierr = PCBDDCAdjGetNextLayer_Private(local_numbering+n_local_dofs,n_prev_added,touched,xadj,adjncy,&n_added);CHKERRQ(ierr);
        n_prev_added = n_added;
        n_local_dofs += n_added;
        if (!n_added) break;
      }

      /* IS for I dofs in original numbering and in I numbering */
      ierr = ISCreateGeneral(PetscObjectComm((PetscObject)ItoNmap),n_local_dofs-subset_size,local_numbering+subset_size,PETSC_COPY_VALUES,&sub_schurs->is_AEj_I[i]);CHKERRQ(ierr);
      ierr = ISSort(sub_schurs->is_AEj_I[i]);CHKERRQ(ierr);
      ierr = ISGlobalToLocalMappingApplyIS(ItoNmap,IS_GTOLM_DROP,sub_schurs->is_AEj_I[i],&is_I);CHKERRQ(ierr);

      /* II block */
      ierr = MatGetSubMatrix(A_II,is_I,is_I,MAT_INITIAL_MATRIX,&AE_II);CHKERRQ(ierr);
    } else { /* in this case we can take references of already existing IS and matrices for I dofs */
      /* IS for I dofs in original numbering */
      ierr = PetscObjectReference((PetscObject)is_A_I);CHKERRQ(ierr);
      sub_schurs->is_AEj_I[i] = is_A_I;

      /* IS for I dofs in I numbering TODO: "first" argument of ISCreateStride is not general */
      ierr = ISCreateStride(PetscObjectComm((PetscObject)is_A_I),n_I,0,1,&is_I);CHKERRQ(ierr);

      /* II block is the same */
      ierr = PetscObjectReference((PetscObject)A_II);CHKERRQ(ierr);
      AE_II = A_II;
    }

    /* IE block */
    ierr = MatGetSubMatrix(A_IB,is_I,is_subset_B,MAT_INITIAL_MATRIX,&AE_IE);CHKERRQ(ierr);

    /* EI block */
    ierr = MatGetSubMatrix(A_BI,is_subset_B,is_I,MAT_INITIAL_MATRIX,&AE_EI);CHKERRQ(ierr);

    /* setup Schur complements on subset */
    ierr = MatCreateSchurComplement(AE_II,AE_II,AE_IE,AE_EI,AE_EE,&sub_schurs->S_Ej[i]);CHKERRQ(ierr);
    ierr = MatGetVecs(sub_schurs->S_Ej[i],&sub_schurs->work1[i],&sub_schurs->work2[i]);CHKERRQ(ierr);
    if (AE_II == A_II) { /* we can reuse the same ksp */
      KSP ksp;
      ierr = MatSchurComplementGetKSP(S,&ksp);CHKERRQ(ierr);
      ierr = MatSchurComplementSetKSP(sub_schurs->S_Ej[i],ksp);CHKERRQ(ierr);
    } else { /* build new ksp object which inherits ksp and pc types from the original one */
      KSP      origksp,schurksp;
      PC       origpc,schurpc;
      KSPType  ksp_type;
      PCType   pc_type;
      PetscInt n_internal;

      ierr = MatSchurComplementGetKSP(S,&origksp);CHKERRQ(ierr);
      ierr = MatSchurComplementGetKSP(sub_schurs->S_Ej[i],&schurksp);CHKERRQ(ierr);
      ierr = KSPGetType(origksp,&ksp_type);CHKERRQ(ierr);
      ierr = KSPSetType(schurksp,ksp_type);CHKERRQ(ierr);
      ierr = KSPGetPC(schurksp,&schurpc);CHKERRQ(ierr);
      ierr = KSPGetPC(origksp,&origpc);CHKERRQ(ierr);
      ierr = PCGetType(origpc,&pc_type);CHKERRQ(ierr);
      ierr = PCSetType(schurpc,pc_type);CHKERRQ(ierr);
      ierr = ISGetSize(is_I,&n_internal);CHKERRQ(ierr);
      if (n_internal) { /* UMFPACK gives error with 0 sized problems */
        MatSolverPackage solver=NULL;
        ierr = PCFactorGetMatSolverPackage(origpc,(const MatSolverPackage*)&solver);CHKERRQ(ierr);
        if (solver) {
          ierr = PCFactorSetMatSolverPackage(schurpc,solver);CHKERRQ(ierr);
        }
      }
      ierr = KSPSetUp(schurksp);CHKERRQ(ierr);
    }
    /* free */
    ierr = MatDestroy(&AE_II);CHKERRQ(ierr);
    ierr = MatDestroy(&AE_EE);CHKERRQ(ierr);
    ierr = MatDestroy(&AE_IE);CHKERRQ(ierr);
    ierr = MatDestroy(&AE_EI);CHKERRQ(ierr);
    ierr = ISDestroy(&is_I);CHKERRQ(ierr);
    ierr = ISDestroy(&is_subset_B);CHKERRQ(ierr);
  }
  /* free */
  ierr = ISLocalToGlobalMappingDestroy(&ItoNmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&BtoNmap);CHKERRQ(ierr);
  ierr = PetscFree(local_numbering);CHKERRQ(ierr);
  ierr = PetscBTDestroy(&touched);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
Example #19
static PetscErrorCode ComputeMatrix(DomainData dd, Mat *A)
{
  PetscErrorCode         ierr;
  GLLData                gll;
  Mat                    local_mat  =0,temp_A=0;
  ISLocalToGlobalMapping matis_map  =0;
  IS                     dirichletIS=0;

  PetscFunctionBeginUser;
  /* Compute data for the Gauss-Legendre-Lobatto quadrature rule */
  ierr = GLLStuffs(dd,&gll);CHKERRQ(ierr);
  /* Compute matrix of subdomain Neumann problem */
  ierr = ComputeSubdomainMatrix(dd,gll,&local_mat);CHKERRQ(ierr);
  /* Compute global mapping of local dofs */
  ierr = ComputeMapping(dd,&matis_map);CHKERRQ(ierr);
  /* Create MATIS object needed by BDDC */
  ierr = MatCreateIS(dd.gcomm,1,PETSC_DECIDE,PETSC_DECIDE,dd.xm*dd.ym*dd.zm,dd.xm*dd.ym*dd.zm,matis_map,NULL,&temp_A);CHKERRQ(ierr);
  /* Set local subdomain matrices into MATIS object */
  ierr = MatScale(local_mat,dd.scalingfactor);CHKERRQ(ierr);
  ierr = MatISSetLocalMat(temp_A,local_mat);CHKERRQ(ierr);
  /* Call assembly functions */
  ierr = MatAssemblyBegin(temp_A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(temp_A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  if (dd.DBC_zerorows) {
    PetscInt dirsize;

    ierr = ComputeSpecialBoundaryIndices(dd,&dirichletIS,NULL);CHKERRQ(ierr);
    ierr = MatSetOption(local_mat,MAT_KEEP_NONZERO_PATTERN,PETSC_TRUE);CHKERRQ(ierr);
    ierr = MatZeroRowsLocalIS(temp_A,dirichletIS,1.0,NULL,NULL);CHKERRQ(ierr);
    ierr = ISGetLocalSize(dirichletIS,&dirsize);CHKERRQ(ierr);
    /* giving hints to local and global matrices could be useful for the BDDC */
    if (!dirsize) {
      ierr = MatSetOption(local_mat,MAT_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
      ierr = MatSetOption(local_mat,MAT_SPD,PETSC_TRUE);CHKERRQ(ierr);
    } else {
      ierr = MatSetOption(local_mat,MAT_SYMMETRIC,PETSC_FALSE);CHKERRQ(ierr);
      ierr = MatSetOption(local_mat,MAT_SPD,PETSC_FALSE);CHKERRQ(ierr);
    }
    ierr = ISDestroy(&dirichletIS);CHKERRQ(ierr);
  } else { /* safe to set the options for the global matrices (they will be communicated to the matis local matrices) */
    ierr = MatSetOption(temp_A,MAT_SYMMETRIC,PETSC_TRUE);CHKERRQ(ierr);
    ierr = MatSetOption(temp_A,MAT_SPD,PETSC_TRUE);CHKERRQ(ierr);
  }
#if DEBUG
  {
    Vec       lvec,rvec;
    PetscReal norm;
    ierr = MatCreateVecs(temp_A,&lvec,&rvec);CHKERRQ(ierr);
    ierr = VecSet(lvec,1.0);CHKERRQ(ierr);
    ierr = MatMult(temp_A,lvec,rvec);CHKERRQ(ierr);
    ierr = VecNorm(rvec,NORM_INFINITY,&norm);CHKERRQ(ierr);
    printf("Test null space of global mat % 1.14e\n",norm);
    ierr = VecDestroy(&lvec);CHKERRQ(ierr);
    ierr = VecDestroy(&rvec);CHKERRQ(ierr);
  }
#endif
  /* free allocated workspace */
  ierr = PetscFree(gll.zGL);CHKERRQ(ierr);
  ierr = PetscFree(gll.rhoGL);CHKERRQ(ierr);
  ierr = PetscFree(gll.A[0]);CHKERRQ(ierr);
  ierr = PetscFree(gll.A);CHKERRQ(ierr);
  ierr = MatDestroy(&gll.elem_mat);CHKERRQ(ierr);
  ierr = MatDestroy(&local_mat);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&matis_map);CHKERRQ(ierr);
  /* give back the pointer to the MATIS object */
  *A = temp_A;
  PetscFunctionReturn(0);
}
Example #20
PetscErrorCode  PCISSetUp(PC pc)
{
  PC_IS          *pcis  = (PC_IS*)(pc->data);
  Mat_IS         *matis;
  PetscErrorCode ierr;
  PetscBool      flg,issbaij;
  Vec            counter;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)pc->pmat,MATIS,&flg);CHKERRQ(ierr);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Preconditioner type of Neumann-Neumann requires matrix of type MATIS");
  matis = (Mat_IS*)pc->pmat->data;

  pcis->pure_neumann = matis->pure_neumann;

  /* get info on mapping */
  ierr = PetscObjectReference((PetscObject)matis->mapping);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&pcis->mapping);CHKERRQ(ierr);
  pcis->mapping = matis->mapping;
  ierr = ISLocalToGlobalMappingGetSize(pcis->mapping,&pcis->n);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetInfo(pcis->mapping,&(pcis->n_neigh),&(pcis->neigh),&(pcis->n_shared),&(pcis->shared));CHKERRQ(ierr);

  /* Creating local and global index sets for interior and interface nodes. */
  {
    PetscInt    n_I;
    PetscInt    *idx_I_local,*idx_B_local,*idx_I_global,*idx_B_global;
    PetscInt    *array;
    PetscInt    i,j;

    /* Identifying interior and interface nodes, in local numbering */
    ierr = PetscMalloc1(pcis->n,&array);CHKERRQ(ierr);
    ierr = PetscMemzero(array,pcis->n*sizeof(PetscInt));CHKERRQ(ierr);
    for (i=0;i<pcis->n_neigh;i++)
      for (j=0;j<pcis->n_shared[i];j++)
          array[pcis->shared[i][j]] += 1;

    ierr = PetscMalloc1(pcis->n,&idx_I_local);CHKERRQ(ierr);
    ierr = PetscMalloc1(pcis->n,&idx_B_local);CHKERRQ(ierr);
    for (i=0, pcis->n_B=0, n_I=0; i<pcis->n; i++) {
      if (!array[i]) {
        idx_I_local[n_I] = i;
        n_I++;
      } else {
        idx_B_local[pcis->n_B] = i;
        pcis->n_B++;
      }
    }
    /* Getting the global numbering */
    idx_B_global = idx_I_local + n_I; /* Just avoiding allocating extra memory, since we have vacant space */
    idx_I_global = idx_B_local + pcis->n_B;
    ierr         = ISLocalToGlobalMappingApply(pcis->mapping,pcis->n_B,idx_B_local,idx_B_global);CHKERRQ(ierr);
    ierr         = ISLocalToGlobalMappingApply(pcis->mapping,n_I,      idx_I_local,idx_I_global);CHKERRQ(ierr);

    /* Creating the index sets. */
    ierr = ISCreateGeneral(PETSC_COMM_SELF,pcis->n_B,idx_B_local,PETSC_COPY_VALUES, &pcis->is_B_local);CHKERRQ(ierr);
    ierr = ISCreateGeneral(PETSC_COMM_SELF,pcis->n_B,idx_B_global,PETSC_COPY_VALUES,&pcis->is_B_global);CHKERRQ(ierr);
    ierr = ISCreateGeneral(PETSC_COMM_SELF,n_I,idx_I_local,PETSC_COPY_VALUES, &pcis->is_I_local);CHKERRQ(ierr);
    ierr = ISCreateGeneral(PETSC_COMM_SELF,n_I,idx_I_global,PETSC_COPY_VALUES,&pcis->is_I_global);CHKERRQ(ierr);

    /* Freeing memory and restoring arrays */
    ierr = PetscFree(idx_B_local);CHKERRQ(ierr);
    ierr = PetscFree(idx_I_local);CHKERRQ(ierr);
    ierr = PetscFree(array);CHKERRQ(ierr);
  }

  /*
    Extracting the blocks A_II, A_BI, A_IB and A_BB from A. If the numbering
    is such that interior nodes come before the interface ones, we have

        [ A_II | A_IB ]
    A = [------+------]
        [ A_BI | A_BB ]
  */

  ierr = MatGetSubMatrix(matis->A,pcis->is_I_local,pcis->is_I_local,MAT_INITIAL_MATRIX,&pcis->A_II);CHKERRQ(ierr);
  ierr = MatGetSubMatrix(matis->A,pcis->is_B_local,pcis->is_B_local,MAT_INITIAL_MATRIX,&pcis->A_BB);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)matis->A,MATSEQSBAIJ,&issbaij);CHKERRQ(ierr);
  if (!issbaij) {
    ierr = MatGetSubMatrix(matis->A,pcis->is_I_local,pcis->is_B_local,MAT_INITIAL_MATRIX,&pcis->A_IB);CHKERRQ(ierr);
    ierr = MatGetSubMatrix(matis->A,pcis->is_B_local,pcis->is_I_local,MAT_INITIAL_MATRIX,&pcis->A_BI);CHKERRQ(ierr);
  } else {
    Mat newmat;
    ierr = MatConvert(matis->A,MATSEQBAIJ,MAT_INITIAL_MATRIX,&newmat);CHKERRQ(ierr);
    ierr = MatGetSubMatrix(newmat,pcis->is_I_local,pcis->is_B_local,MAT_INITIAL_MATRIX,&pcis->A_IB);CHKERRQ(ierr);
    ierr = MatGetSubMatrix(newmat,pcis->is_B_local,pcis->is_I_local,MAT_INITIAL_MATRIX,&pcis->A_BI);CHKERRQ(ierr);
    ierr = MatDestroy(&newmat);CHKERRQ(ierr);
  }
  /*
    Creating work vectors and arrays
  */
  ierr = VecDuplicate(matis->x,&pcis->vec1_N);CHKERRQ(ierr);
  ierr = VecDuplicate(pcis->vec1_N,&pcis->vec2_N);CHKERRQ(ierr);
  ierr = VecCreateSeq(PETSC_COMM_SELF,pcis->n-pcis->n_B,&pcis->vec1_D);CHKERRQ(ierr);
  ierr = VecDuplicate(pcis->vec1_D,&pcis->vec2_D);CHKERRQ(ierr);
  ierr = VecDuplicate(pcis->vec1_D,&pcis->vec3_D);CHKERRQ(ierr);
  ierr = VecDuplicate(pcis->vec1_D,&pcis->vec4_D);CHKERRQ(ierr);
  ierr = VecCreateSeq(PETSC_COMM_SELF,pcis->n_B,&pcis->vec1_B);CHKERRQ(ierr);
  ierr = VecDuplicate(pcis->vec1_B,&pcis->vec2_B);CHKERRQ(ierr);
  ierr = VecDuplicate(pcis->vec1_B,&pcis->vec3_B);CHKERRQ(ierr);
  ierr = MatCreateVecs(pc->pmat,&pcis->vec1_global,0);CHKERRQ(ierr);
  ierr = PetscMalloc1(pcis->n,&pcis->work_N);CHKERRQ(ierr);

  /* Creating the scatter contexts */
  ierr = VecScatterCreate(pcis->vec1_global,pcis->is_I_global,pcis->vec1_D,(IS)0,&pcis->global_to_D);CHKERRQ(ierr);
  ierr = VecScatterCreate(pcis->vec1_N,pcis->is_B_local,pcis->vec1_B,(IS)0,&pcis->N_to_B);CHKERRQ(ierr);
  ierr = VecScatterCreate(pcis->vec1_global,pcis->is_B_global,pcis->vec1_B,(IS)0,&pcis->global_to_B);CHKERRQ(ierr);

  /* Creating scaling "matrix" D */
  ierr = PetscOptionsGetBool(((PetscObject)pc)->prefix,"-pc_is_use_stiffness_scaling",&pcis->use_stiffness_scaling,NULL);CHKERRQ(ierr);
  if (!pcis->D) {
    ierr = VecDuplicate(pcis->vec1_B,&pcis->D);CHKERRQ(ierr);
    if (!pcis->use_stiffness_scaling) {
      ierr = VecSet(pcis->D,pcis->scaling_factor);CHKERRQ(ierr);
    } else {
      ierr = MatGetDiagonal(matis->A,pcis->vec1_N);CHKERRQ(ierr);
      ierr = VecScatterBegin(pcis->N_to_B,pcis->vec1_N,pcis->D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
      ierr = VecScatterEnd  (pcis->N_to_B,pcis->vec1_N,pcis->D,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    }
  }
  ierr = VecCopy(pcis->D,pcis->vec1_B);CHKERRQ(ierr);
  ierr = MatCreateVecs(pc->pmat,&counter,0);CHKERRQ(ierr); /* temporary auxiliary vector */
  ierr = VecSet(counter,0.0);CHKERRQ(ierr);
  ierr = VecScatterBegin(pcis->global_to_B,pcis->vec1_B,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd  (pcis->global_to_B,pcis->vec1_B,counter,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterBegin(pcis->global_to_B,counter,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd  (pcis->global_to_B,counter,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecPointwiseDivide(pcis->D,pcis->D,pcis->vec1_B);CHKERRQ(ierr);
  ierr = VecDestroy(&counter);CHKERRQ(ierr);

  /* See historical note 01, at the bottom of this file. */

  /*
    Creating the KSP contexts for the local Dirichlet and Neumann problems.
  */
  if (pcis->computesolvers) {
    PC pc_ctx;
    /* Dirichlet */
    ierr = KSPCreate(PETSC_COMM_SELF,&pcis->ksp_D);CHKERRQ(ierr);
    ierr = PetscObjectIncrementTabLevel((PetscObject)pcis->ksp_D,(PetscObject)pc,1);CHKERRQ(ierr);
    ierr = KSPSetOperators(pcis->ksp_D,pcis->A_II,pcis->A_II);CHKERRQ(ierr);
    ierr = KSPSetOptionsPrefix(pcis->ksp_D,"is_localD_");CHKERRQ(ierr);
    ierr = KSPGetPC(pcis->ksp_D,&pc_ctx);CHKERRQ(ierr);
    ierr = PCSetType(pc_ctx,PCLU);CHKERRQ(ierr);
    ierr = KSPSetType(pcis->ksp_D,KSPPREONLY);CHKERRQ(ierr);
    ierr = KSPSetFromOptions(pcis->ksp_D);CHKERRQ(ierr);
    /* the vectors in the following line are dummy arguments, just telling the KSP the vector size. Values are not used */
    ierr = KSPSetUp(pcis->ksp_D);CHKERRQ(ierr);
    /* Neumann */
    ierr = KSPCreate(PETSC_COMM_SELF,&pcis->ksp_N);CHKERRQ(ierr);
    ierr = PetscObjectIncrementTabLevel((PetscObject)pcis->ksp_N,(PetscObject)pc,1);CHKERRQ(ierr);
    ierr = KSPSetOperators(pcis->ksp_N,matis->A,matis->A);CHKERRQ(ierr);
    ierr = KSPSetOptionsPrefix(pcis->ksp_N,"is_localN_");CHKERRQ(ierr);
    ierr = KSPGetPC(pcis->ksp_N,&pc_ctx);CHKERRQ(ierr);
    ierr = PCSetType(pc_ctx,PCLU);CHKERRQ(ierr);
    ierr = KSPSetType(pcis->ksp_N,KSPPREONLY);CHKERRQ(ierr);
    ierr = KSPSetFromOptions(pcis->ksp_N);CHKERRQ(ierr);
    {
      PetscBool damp_fixed                    = PETSC_FALSE,
                remove_nullspace_fixed        = PETSC_FALSE,
                set_damping_factor_floating   = PETSC_FALSE,
                not_damp_floating             = PETSC_FALSE,
                not_remove_nullspace_floating = PETSC_FALSE;
      PetscReal fixed_factor,
                floating_factor;

      ierr = PetscOptionsGetReal(((PetscObject)pc_ctx)->prefix,"-pc_is_damp_fixed",&fixed_factor,&damp_fixed);CHKERRQ(ierr);
      if (!damp_fixed) fixed_factor = 0.0;
      ierr = PetscOptionsGetBool(((PetscObject)pc_ctx)->prefix,"-pc_is_damp_fixed",&damp_fixed,NULL);CHKERRQ(ierr);

      ierr = PetscOptionsGetBool(((PetscObject)pc_ctx)->prefix,"-pc_is_remove_nullspace_fixed",&remove_nullspace_fixed,NULL);CHKERRQ(ierr);

      ierr = PetscOptionsGetReal(((PetscObject)pc_ctx)->prefix,"-pc_is_set_damping_factor_floating",
                              &floating_factor,&set_damping_factor_floating);CHKERRQ(ierr);
      if (!set_damping_factor_floating) floating_factor = 0.0;
      ierr = PetscOptionsGetBool(((PetscObject)pc_ctx)->prefix,"-pc_is_set_damping_factor_floating",&set_damping_factor_floating,NULL);CHKERRQ(ierr);
      if (!set_damping_factor_floating) floating_factor = 1.e-12;

      ierr = PetscOptionsGetBool(((PetscObject)pc_ctx)->prefix,"-pc_is_not_damp_floating",&not_damp_floating,NULL);CHKERRQ(ierr);

      ierr = PetscOptionsGetBool(((PetscObject)pc_ctx)->prefix,"-pc_is_not_remove_nullspace_floating",&not_remove_nullspace_floating,NULL);CHKERRQ(ierr);

      if (pcis->pure_neumann) {  /* floating subdomain */
        if (!(not_damp_floating)) {
          ierr = PCFactorSetShiftType(pc_ctx,MAT_SHIFT_NONZERO);CHKERRQ(ierr);
          ierr = PCFactorSetShiftAmount(pc_ctx,floating_factor);CHKERRQ(ierr);
        }
        if (!(not_remove_nullspace_floating)) {
          MatNullSpace nullsp;
          ierr = MatNullSpaceCreate(PETSC_COMM_SELF,PETSC_TRUE,0,NULL,&nullsp);CHKERRQ(ierr);
          ierr = KSPSetNullSpace(pcis->ksp_N,nullsp);CHKERRQ(ierr);
          ierr = MatNullSpaceDestroy(&nullsp);CHKERRQ(ierr);
        }
      } else {  /* fixed subdomain */
        if (damp_fixed) {
          ierr = PCFactorSetShiftType(pc_ctx,MAT_SHIFT_NONZERO);CHKERRQ(ierr);
          ierr = PCFactorSetShiftAmount(pc_ctx,floating_factor);CHKERRQ(ierr);
        }
        if (remove_nullspace_fixed) {
          MatNullSpace nullsp;
          ierr = MatNullSpaceCreate(PETSC_COMM_SELF,PETSC_TRUE,0,NULL,&nullsp);CHKERRQ(ierr);
          ierr = KSPSetNullSpace(pcis->ksp_N,nullsp);CHKERRQ(ierr);
          ierr = MatNullSpaceDestroy(&nullsp);CHKERRQ(ierr);
        }
      }
    }
    /* the vectors in the following line are dummy arguments, just telling the KSP the vector size. Values are not used */
    ierr = KSPSetUp(pcis->ksp_N);CHKERRQ(ierr);
  }

  PetscFunctionReturn(0);
}
Example #21
int main(int argc,char **argv)
{
  PetscErrorCode ierr;
  PetscInt       nredundant1 = 5,nredundant2 = 2,i;
  ISLocalToGlobalMapping *ltog;
  PetscMPIInt    rank,size;
  DM             packer;
  Vec            global,local1,local2,redundant1,redundant2;
  PF             pf;
  DM             da1,da2,dmred1,dmred2;
  PetscScalar    *redundant1a,*redundant2a;
  PetscViewer    sviewer;
  PetscBool      gather_add = PETSC_FALSE;

  ierr = PetscInitialize(&argc,&argv,(char*)0,help);CHKERRQ(ierr); 
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);CHKERRQ(ierr);

  ierr = PetscOptionsGetBool(PETSC_NULL,"-gather_add",&gather_add,PETSC_NULL);CHKERRQ(ierr);

  ierr = DMCompositeCreate(PETSC_COMM_WORLD,&packer);CHKERRQ(ierr);

  ierr = DMRedundantCreate(PETSC_COMM_WORLD,0,nredundant1,&dmred1);CHKERRQ(ierr);
  ierr = DMCreateLocalVector(dmred1,&redundant1);CHKERRQ(ierr);
  ierr = DMCompositeAddDM(packer,dmred1);CHKERRQ(ierr);

  ierr = DMDACreate1d(PETSC_COMM_WORLD,DMDA_BOUNDARY_NONE,8,1,1,PETSC_NULL,&da1);CHKERRQ(ierr);
  ierr = DMCreateLocalVector(da1,&local1);CHKERRQ(ierr);
  ierr = DMCompositeAddDM(packer,da1);CHKERRQ(ierr);

  ierr = DMRedundantCreate(PETSC_COMM_WORLD,1%size,nredundant2,&dmred2);CHKERRQ(ierr);
  ierr = DMCreateLocalVector(dmred2,&redundant2);CHKERRQ(ierr);
  ierr = DMCompositeAddDM(packer,dmred2);CHKERRQ(ierr);

  ierr = DMDACreate1d(PETSC_COMM_WORLD,DMDA_BOUNDARY_NONE,6,1,1,PETSC_NULL,&da2);CHKERRQ(ierr);
  ierr = DMCreateLocalVector(da2,&local2);CHKERRQ(ierr);
  ierr = DMCompositeAddDM(packer,da2);CHKERRQ(ierr);

  ierr = DMCreateGlobalVector(packer,&global);CHKERRQ(ierr);
  ierr = PFCreate(PETSC_COMM_WORLD,1,1,&pf);CHKERRQ(ierr);
  ierr = PFSetType(pf,PFIDENTITY,PETSC_NULL);CHKERRQ(ierr);
  ierr = PFApplyVec(pf,PETSC_NULL,global);CHKERRQ(ierr);
  ierr = PFDestroy(&pf);CHKERRQ(ierr);
  ierr = VecView(global,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);

  ierr = DMCompositeScatter(packer,global,redundant1,local1,redundant2,local2);CHKERRQ(ierr);
  ierr = PetscViewerASCIISynchronizedAllow(PETSC_VIEWER_STDOUT_WORLD,PETSC_TRUE);CHKERRQ(ierr);
  ierr = PetscViewerASCIISynchronizedPrintf(PETSC_VIEWER_STDOUT_WORLD,"[%d] My part of redundant1 vector\n",rank);CHKERRQ(ierr);
  ierr = PetscViewerGetSingleton(PETSC_VIEWER_STDOUT_WORLD,&sviewer);CHKERRQ(ierr);
  ierr = VecView(redundant1,sviewer);CHKERRQ(ierr);
  ierr = PetscViewerRestoreSingleton(PETSC_VIEWER_STDOUT_WORLD,&sviewer);CHKERRQ(ierr);
  ierr = PetscViewerASCIISynchronizedAllow(PETSC_VIEWER_STDOUT_WORLD,PETSC_TRUE);CHKERRQ(ierr);
  ierr = PetscViewerASCIISynchronizedPrintf(PETSC_VIEWER_STDOUT_WORLD,"[%d] My part of da1 vector\n",rank);CHKERRQ(ierr);
  ierr = PetscViewerGetSingleton(PETSC_VIEWER_STDOUT_WORLD,&sviewer);CHKERRQ(ierr);
  ierr = VecView(local1,sviewer);CHKERRQ(ierr);
  ierr = PetscViewerRestoreSingleton(PETSC_VIEWER_STDOUT_WORLD,&sviewer);CHKERRQ(ierr);
  ierr = PetscViewerASCIISynchronizedAllow(PETSC_VIEWER_STDOUT_WORLD,PETSC_TRUE);CHKERRQ(ierr);
  ierr = PetscViewerASCIISynchronizedPrintf(PETSC_VIEWER_STDOUT_WORLD,"[%d] My part of redundant2 vector\n",rank);CHKERRQ(ierr);
  ierr = PetscViewerGetSingleton(PETSC_VIEWER_STDOUT_WORLD,&sviewer);CHKERRQ(ierr);
  ierr = VecView(redundant2,sviewer);CHKERRQ(ierr);
  ierr = PetscViewerRestoreSingleton(PETSC_VIEWER_STDOUT_WORLD,&sviewer);CHKERRQ(ierr);
  ierr = PetscViewerASCIISynchronizedAllow(PETSC_VIEWER_STDOUT_WORLD,PETSC_TRUE);CHKERRQ(ierr);
  ierr = PetscViewerASCIISynchronizedPrintf(PETSC_VIEWER_STDOUT_WORLD,"[%d] My part of da2 vector\n",rank);CHKERRQ(ierr);
  ierr = PetscViewerGetSingleton(PETSC_VIEWER_STDOUT_WORLD,&sviewer);CHKERRQ(ierr);
  ierr = VecView(local2,sviewer);CHKERRQ(ierr);
  ierr = PetscViewerRestoreSingleton(PETSC_VIEWER_STDOUT_WORLD,&sviewer);CHKERRQ(ierr);

  ierr = VecGetArray(redundant1,&redundant1a);CHKERRQ(ierr);
  ierr = VecGetArray(redundant2,&redundant2a);CHKERRQ(ierr);
  for (i=0; i<nredundant1; i++) redundant1a[i] = (rank+2)*i;
  for (i=0; i<nredundant2; i++) redundant2a[i] = (rank+10)*i;
  ierr = VecRestoreArray(redundant1,&redundant1a);CHKERRQ(ierr);
  ierr = VecRestoreArray(redundant2,&redundant2a);CHKERRQ(ierr);

  ierr = DMCompositeGather(packer,global,gather_add?ADD_VALUES:INSERT_VALUES,redundant1,local1,redundant2,local2);CHKERRQ(ierr);
  ierr = VecView(global,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);

  /* get the global numbering for each subvector element */
  ierr = DMCompositeGetISLocalToGlobalMappings(packer,&ltog);CHKERRQ(ierr);

  ierr = PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"Local to global mapping of redundant1 vector\n");CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingView(ltog[0],PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"Local to global mapping of local1 vector\n");CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingView(ltog[1],PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"Local to global mapping of redundant2 vector\n");CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingView(ltog[2],PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(PETSC_VIEWER_STDOUT_WORLD,"Local to global mapping of local2 vector\n");CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingView(ltog[3],PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);

  for (i=0; i<4; i++) {ierr = ISLocalToGlobalMappingDestroy(&ltog[i]);CHKERRQ(ierr);}
  ierr = PetscFree(ltog);CHKERRQ(ierr);

  ierr = DMDestroy(&da1);CHKERRQ(ierr);
  ierr = DMDestroy(&dmred1);CHKERRQ(ierr);
  ierr = DMDestroy(&dmred2);CHKERRQ(ierr);
  ierr = DMDestroy(&da2);CHKERRQ(ierr);
  ierr = VecDestroy(&redundant1);CHKERRQ(ierr);
  ierr = VecDestroy(&redundant2);CHKERRQ(ierr);
  ierr = VecDestroy(&local1);CHKERRQ(ierr);
  ierr = VecDestroy(&local2);CHKERRQ(ierr);
  ierr = VecDestroy(&global);CHKERRQ(ierr);
  ierr = DMDestroy(&packer);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return 0;
}
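A natural extension of this example: instead of scattering into separately created local vectors, DMCompositeGetAccess() hands back subvectors that alias the packed global vector directly, so no copy is made. A minimal sketch (hedged: this block is illustrative and not part of the original example; it would sit before the destroy calls in main above):

  Vec g_red1,g_da1,g_red2,g_da2;

  /* the four subvectors alias the corresponding pieces of 'global' */
  ierr = DMCompositeGetAccess(packer,global,&g_red1,&g_da1,&g_red2,&g_da2);CHKERRQ(ierr);
  ierr = VecView(g_red1,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  ierr = DMCompositeRestoreAccess(packer,global,&g_red1,&g_da1,&g_red2,&g_da2);CHKERRQ(ierr);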
Example #22
/*@C
    SlicedGetMatrix - Creates a matrix with the correct parallel layout required for
      computing the Jacobian of a function defined using the information in the Sliced object.

    Collective on Sliced

    Input Parameters:
+   slice - the slice object
-   mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, MATSEQSBAIJ, MATMPISBAIJ,
            or any type which inherits from one of these (such as MATAIJ, MATLUSOL, etc.).

    Output Parameter:
.   J  - matrix with the correct nonzero preallocation
        (obviously without the correct Jacobian values)

    Level: advanced

    Notes: This properly preallocates the number of nonzeros in the sparse matrix so you 
       do not need to do it yourself.

.seealso: ISColoringView(), ISColoringGetIS(), MatFDColoringCreate(), DASetBlockFills()

@*/
PetscErrorCode PETSCDM_DLLEXPORT SlicedGetMatrix(Sliced slice, const MatType mtype,Mat *J)
{
  PetscErrorCode         ierr;
  PetscInt               *globals,*sd_nnz,*so_nnz,rstart,bs,i;
  ISLocalToGlobalMapping lmap,blmap;
  void                   (*aij)(void) = PETSC_NULL;

  PetscFunctionBegin;
  bs = slice->bs;
  ierr = MatCreate(((PetscObject)slice)->comm,J);CHKERRQ(ierr);
  ierr = MatSetSizes(*J,slice->n*bs,slice->n*bs,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = MatSetType(*J,mtype);CHKERRQ(ierr);
  ierr = MatSeqBAIJSetPreallocation(*J,bs,slice->d_nz,slice->d_nnz);CHKERRQ(ierr);
  ierr = MatMPIBAIJSetPreallocation(*J,bs,slice->d_nz,slice->d_nnz,slice->o_nz,slice->o_nnz);CHKERRQ(ierr);
  /* In general, we have to do extra work to preallocate for scalar (AIJ) matrices so we check whether it will do any
  * good before going on with it. */
  ierr = PetscObjectQueryFunction((PetscObject)*J,"MatMPIAIJSetPreallocation_C",&aij);CHKERRQ(ierr);
  if (!aij) {
    ierr = PetscObjectQueryFunction((PetscObject)*J,"MatSeqAIJSetPreallocation_C",&aij);CHKERRQ(ierr);
  }
  if (aij) {
    if (bs == 1) {
      ierr = MatSeqAIJSetPreallocation(*J,slice->d_nz,slice->d_nnz);CHKERRQ(ierr);
      ierr = MatMPIAIJSetPreallocation(*J,slice->d_nz,slice->d_nnz,slice->o_nz,slice->o_nnz);CHKERRQ(ierr);
    } else if (!slice->d_nnz) {
      ierr = MatSeqAIJSetPreallocation(*J,slice->d_nz*bs,PETSC_NULL);CHKERRQ(ierr);
      ierr = MatMPIAIJSetPreallocation(*J,slice->d_nz*bs,PETSC_NULL,slice->o_nz*bs,PETSC_NULL);CHKERRQ(ierr);
    } else {
      /* The user has provided preallocation per block-row, convert it to per scalar-row respecting SlicedSetBlockFills() if applicable */
      ierr = PetscMalloc2(slice->n*bs,PetscInt,&sd_nnz,(!!slice->o_nnz)*slice->n*bs,PetscInt,&so_nnz);CHKERRQ(ierr);
      for (i=0; i<slice->n*bs; i++) {
        sd_nnz[i] = (slice->d_nnz[i/bs]-1) * (slice->ofill ? slice->ofill->i[i%bs+1]-slice->ofill->i[i%bs] : bs)
                                           + (slice->dfill ? slice->dfill->i[i%bs+1]-slice->dfill->i[i%bs] : bs);
        if (so_nnz) {
          so_nnz[i] = slice->o_nnz[i/bs] * (slice->ofill ? slice->ofill->i[i%bs+1]-slice->ofill->i[i%bs] : bs);
        }
      }
      ierr = MatSeqAIJSetPreallocation(*J,slice->d_nz*bs,sd_nnz);CHKERRQ(ierr);
      ierr = MatMPIAIJSetPreallocation(*J,slice->d_nz*bs,sd_nnz,slice->o_nz*bs,so_nnz);CHKERRQ(ierr);
      ierr = PetscFree2(sd_nnz,so_nnz);CHKERRQ(ierr);
    }
  }

  ierr = MatSetBlockSize(*J,bs);CHKERRQ(ierr);

  /* Set up the local to global map.  For the scalar map, we have to translate to entry-wise indexing instead of block-wise. */
  ierr = PetscMalloc((slice->n+slice->Nghosts)*bs*sizeof(PetscInt),&globals);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(*J,&rstart,PETSC_NULL);CHKERRQ(ierr);
  for (i=0; i<slice->n*bs; i++) {
    globals[i] = rstart + i;
  }
  for (i=0; i<slice->Nghosts*bs; i++) {
    globals[slice->n*bs+i] = slice->ghosts[i/bs]*bs + i%bs;
  }
  ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,(slice->n+slice->Nghosts)*bs,globals,&lmap);CHKERRQ(ierr);
  ierr = PetscFree(globals);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingBlock(lmap,bs,&blmap);CHKERRQ(ierr);
  ierr = MatSetLocalToGlobalMapping(*J,lmap);CHKERRQ(ierr);
  ierr = MatSetLocalToGlobalMappingBlock(*J,blmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(lmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(blmap);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
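A minimal calling sketch for the routine above (hedged: SlicedCreate(), SlicedSetGhosts(), SlicedSetPreallocation() and SlicedDestroy() are assumed from the same API vintage as SlicedGetMatrix(), and the sizes and ghost indices are invented for illustration):

  Sliced         slice;
  Mat            J;
  const PetscInt ghosts[2] = {0,1}; /* hypothetical global block indices owned by other processes */

  ierr = SlicedCreate(comm,&slice);CHKERRQ(ierr);
  ierr = SlicedSetGhosts(slice,2,4,2,ghosts);CHKERRQ(ierr);                     /* bs=2, 4 owned blocks, 2 ghost blocks */
  ierr = SlicedSetPreallocation(slice,5,PETSC_NULL,1,PETSC_NULL);CHKERRQ(ierr); /* 5 diagonal / 1 off-diagonal block-nonzeros per block-row */
  ierr = SlicedGetMatrix(slice,MATAIJ,&J);CHKERRQ(ierr);                        /* J comes back fully preallocated */
  ierr = SlicedDestroy(slice);CHKERRQ(ierr);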
Example #23
/* Distribute cones
   - Partitioning:         input partition point map and naive sf, output sf with inverse of map, distribute points
   - Distribute section:   input current sf, communicate sizes and offsets, output local section and offsets (only use for new sf)
   - Create SF for values: input current sf and offsets, output new sf
   - Distribute values:    input new sf, communicate values
 */
PetscErrorCode DistributeMesh(DM dm, AppCtx *user, PetscSF *pointSF, DM *parallelDM)
{
  MPI_Comm       comm   = ((PetscObject) dm)->comm;
  const PetscInt height = 0;
  PetscInt       dim, numRemoteRanks;
  IS             cellPart,        part;
  PetscSection   cellPartSection, partSection;
  PetscSFNode   *remoteRanks;
  PetscSF        partSF;
  ISLocalToGlobalMapping renumbering;
  PetscSF        coneSF;
  PetscSection   originalConeSection, newConeSection;
  PetscInt      *remoteOffsets, newConesSize;
  PetscInt      *cones, *newCones;
  PetscMPIInt    numProcs, rank, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = DMMeshGetDimension(dm, &dim);CHKERRQ(ierr);
  /* Create cell partition - We need to rewrite to use IS, use the MatPartition stuff */
  ierr = DMMeshCreatePartition(dm, &cellPartSection, &cellPart, height);CHKERRQ(ierr);
  /* Create SF assuming a serial partition for all processes: Could check for IS length here */
  if (!rank) {
    numRemoteRanks = numProcs;
  } else {
    numRemoteRanks = 0;
  }
  ierr = PetscMalloc(numRemoteRanks * sizeof(PetscSFNode), &remoteRanks);CHKERRQ(ierr);
  for(p = 0; p < numRemoteRanks; ++p) {
    remoteRanks[p].rank  = p;
    remoteRanks[p].index = 0;
  }
  ierr = PetscSFCreate(comm, &partSF);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(partSF, 1, numRemoteRanks, PETSC_NULL, PETSC_OWN_POINTER, remoteRanks, PETSC_OWN_POINTER);CHKERRQ(ierr);
  /* Debugging */
  ierr = PetscPrintf(comm, "Cell Partition:\n");CHKERRQ(ierr);
  ierr = PetscSectionView(cellPartSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  ierr = ISView(cellPart, PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscSFView(partSF, PETSC_NULL);CHKERRQ(ierr);
  /* Close the partition over the mesh */
  ierr = DMMeshCreatePartitionClosure(dm, cellPartSection, cellPart, &partSection, &part);CHKERRQ(ierr);
  ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
  /* Create new mesh */
  ierr = DMMeshCreate(comm, parallelDM);CHKERRQ(ierr);
  ierr = DMMeshSetDimension(*parallelDM, dim);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) *parallelDM, "Parallel Mesh");CHKERRQ(ierr);
  /* Distribute sieve points and the global point numbering (replaces creating remote bases) */
  ierr = PetscSFConvertPartition(partSF, partSection, part, &renumbering, pointSF);CHKERRQ(ierr);
  /* Debugging */
  ierr = PetscPrintf(comm, "Point Partition:\n");CHKERRQ(ierr);
  ierr = PetscSectionView(partSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  ierr = ISView(part, PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscSFView(*pointSF, PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscPrintf(comm, "Point Renumbering after partition:\n");CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingView(renumbering, PETSC_NULL);CHKERRQ(ierr);
  /* Cleanup */
  ierr = PetscSFDestroy(&partSF);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&partSection);CHKERRQ(ierr);
  ierr = ISDestroy(&part);CHKERRQ(ierr);
  /* Distribute cone section */
  ierr = DMMeshGetConeSection(dm, &originalConeSection);CHKERRQ(ierr);
  ierr = DMMeshGetConeSection(*parallelDM, &newConeSection);CHKERRQ(ierr);
  ierr = PetscSFDistributeSection(*pointSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr);
  ierr = DMMeshSetUp(*parallelDM);CHKERRQ(ierr);
  /* Communicate and renumber cones */
  ierr = PetscSFCreateSectionSF(*pointSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr);
  ierr = DMMeshGetCones(dm, &cones);CHKERRQ(ierr);
  ierr = DMMeshGetCones(*parallelDM, &newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
  ierr = ISGlobalToLocalMappingApply(renumbering, IS_GTOLM_MASK, newConesSize, newCones, PETSC_NULL, newCones);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&renumbering);CHKERRQ(ierr);
  /* Debugging */
  ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);
  ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr);
  ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  ierr = PetscSFView(coneSF, PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr);
  /* Create supports and stratify sieve */
  ierr = DMMeshSymmetrize(*parallelDM);CHKERRQ(ierr);
  ierr = DMMeshStratify(*parallelDM);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
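The section/SF pattern in the middle of this routine is generic: it moves any per-point array, not just cones. A hedged sketch of the steps with placeholder names (comm, pointSF, serialSection and serialValues stand for whatever data is being distributed; none of them are defined here):

  PetscSection newSection;
  PetscSF      valueSF;
  PetscInt    *remoteOffsets,*newValues,n;

  ierr = PetscSectionCreate(comm,&newSection);CHKERRQ(ierr);
  /* 1: communicate dof counts and offsets, producing the distributed layout */
  ierr = PetscSFDistributeSection(pointSF,serialSection,&remoteOffsets,newSection);CHKERRQ(ierr);
  /* 2: build an SF with one leaf per value instead of one leaf per point */
  ierr = PetscSFCreateSectionSF(pointSF,serialSection,remoteOffsets,newSection,&valueSF);CHKERRQ(ierr);
  /* 3: move the values themselves */
  ierr = PetscSectionGetStorageSize(newSection,&n);CHKERRQ(ierr);
  ierr = PetscMalloc(n*sizeof(PetscInt),&newValues);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(valueSF,MPIU_INT,serialValues,newValues);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(valueSF,MPIU_INT,serialValues,newValues);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&valueSF);CHKERRQ(ierr);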
Example #24
File: ex8.c Project: Kun-Qu/petsc
int main(int argc,char **argv)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscInt       i,N,ng,*gindices,rstart,rend,M;
  PetscScalar    one = 1.0;
  Vec            x;

  PetscInitialize(&argc,&argv,(char *)0,help);
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);

  /*
     Create a parallel vector.
      - In this case, we specify the size of each processor's local
        portion, and PETSc computes the global size.  Alternatively,
        PETSc could determine the vector's distribution if we specify
        just the global size.
  */
  ierr = VecCreate(PETSC_COMM_WORLD,&x);CHKERRQ(ierr);
  ierr = VecSetSizes(x,rank+1,PETSC_DECIDE);CHKERRQ(ierr);
  ierr = VecSetFromOptions(x);CHKERRQ(ierr);
  ierr = VecGetSize(x,&N);CHKERRQ(ierr);
  ierr = VecSet(x,one);CHKERRQ(ierr);

  /*
     Set the local to global ordering for the vector. Each processor 
     generates a list of the global indices for each local index. Note that
     the local indices are just whatever is convenient for a particular application.
     In this case we treat the vector as lying on a one dimensional grid and 
     have one ghost point on each end of the blocks owned by each processor. 
  */

  ierr = VecGetSize(x,&M);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(x,&rstart,&rend);CHKERRQ(ierr);
  ng   = rend - rstart + 2;
  ierr = PetscMalloc(ng*sizeof(PetscInt),&gindices);CHKERRQ(ierr);
  gindices[0] = rstart - 1; 
  for (i=0; i<ng-1; i++) {
    gindices[i+1] = gindices[i] + 1;
  }
  /* map the first and last point as periodic */
  if (gindices[0]    == -1) gindices[0]    = M - 1;
  if (gindices[ng-1] == M)  gindices[ng-1] = 0;
  {
    ISLocalToGlobalMapping ltog;
    ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,ng,gindices,PETSC_COPY_VALUES,&ltog);CHKERRQ(ierr);
    ierr = VecSetLocalToGlobalMapping(x,ltog);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  }
  ierr = PetscFree(gindices);CHKERRQ(ierr);

  /*
     Set the vector elements.
      - In this case set the values using the local ordering
      - Each processor can contribute any vector entries,
        regardless of which processor "owns" them; any nonlocal
        contributions will be transferred to the appropriate processor
        during the assembly process.
      - In this example, the flag ADD_VALUES indicates that all
        contributions will be added together.
  */
  for (i=0; i<ng; i++) {
    ierr = VecSetValuesLocal(x,1,&i,&one,ADD_VALUES);CHKERRQ(ierr);  
  }

  /* 
     Assemble vector, using the 2-step process:
       VecAssemblyBegin(), VecAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  ierr = VecAssemblyBegin(x);CHKERRQ(ierr);
  ierr = VecAssemblyEnd(x);CHKERRQ(ierr);

  /*
      View the vector; then destroy it.
  */
  ierr = VecView(x,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  ierr = VecDestroy(&x);CHKERRQ(ierr);

  ierr = PetscFinalize();
  return 0;
}
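To see the periodic wrap explicitly, the mapping can be applied to the two ghost slots before it is destroyed (a hedged sketch; it belongs inside the block above where ltog is still alive, and the one-argument PetscSynchronizedFlush() matches this PETSc vintage):

  PetscInt ghostloc[2],ghostglob[2];

  ghostloc[0] = 0;      /* left ghost slot  */
  ghostloc[1] = ng - 1; /* right ghost slot */
  ierr = ISLocalToGlobalMappingApply(ltog,2,ghostloc,ghostglob);CHKERRQ(ierr);
  /* on rank 0 the left ghost maps to M-1; on the last rank the right ghost maps to 0 */
  ierr = PetscSynchronizedPrintf(PETSC_COMM_WORLD,"[%d] ghost slots map to %D %D\n",rank,ghostglob[0],ghostglob[1]);CHKERRQ(ierr);
  ierr = PetscSynchronizedFlush(PETSC_COMM_WORLD);CHKERRQ(ierr);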
Example #25
/*@C
  DMPlexDistribute - Distributes the mesh and any associated sections.

  Collective on DM

  Input Parameters:
+ dm  - The original DMPlex object
. partitioner - The partitioning package, or NULL for the default
- overlap - The overlap of partitions, 0 is the default

  Output Parameters:
+ sf - The PetscSF used for point distribution
- dmParallel - The distributed DMPlex object, or NULL

  Note: If the mesh was not distributed, the return value is NULL.

  The user can control the definition of adjacency for the mesh using DMPlexSetAdjacencyUseCone() and
  DMPlexSetAdjacencyUseClosure(). They should choose the combination appropriate for the function
  representation on the mesh.

  Level: intermediate

.keywords: mesh, elements
.seealso: DMPlexCreate(), DMPlexDistributeByFace(), DMPlexSetAdjacencyUseCone(), DMPlexSetAdjacencyUseClosure()
@*/
PetscErrorCode DMPlexDistribute(DM dm, const char partitioner[], PetscInt overlap, PetscSF *sf, DM *dmParallel)
{
  DM_Plex               *mesh   = (DM_Plex*) dm->data, *pmesh;
  MPI_Comm               comm;
  const PetscInt         height = 0;
  PetscInt               dim, numRemoteRanks;
  IS                     origCellPart,        origPart,        cellPart,        part;
  PetscSection           origCellPartSection, origPartSection, cellPartSection, partSection;
  PetscSFNode           *remoteRanks;
  PetscSF                partSF, pointSF, coneSF;
  ISLocalToGlobalMapping renumbering;
  PetscSection           originalConeSection, newConeSection;
  PetscInt              *remoteOffsets;
  PetscInt              *cones, *newCones, newConesSize;
  PetscBool              flg;
  PetscMPIInt            rank, numProcs, p;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  if (sf) PetscValidPointer(sf,4);
  PetscValidPointer(dmParallel,5);

  ierr = PetscLogEventBegin(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &numProcs);CHKERRQ(ierr);

  *dmParallel = NULL;
  if (numProcs == 1) PetscFunctionReturn(0);

  ierr = DMPlexGetDimension(dm, &dim);CHKERRQ(ierr);
  /* Create cell partition - We need to rewrite to use IS, use the MatPartition stuff */
  ierr = PetscLogEventBegin(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
  if (overlap > 1) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Overlap > 1 not yet implemented");
  ierr = DMPlexCreatePartition(dm, partitioner, height, overlap > 0 ? PETSC_TRUE : PETSC_FALSE, &cellPartSection, &cellPart, &origCellPartSection, &origCellPart);CHKERRQ(ierr);
  /* Create SF assuming a serial partition for all processes: Could check for IS length here */
  if (!rank) numRemoteRanks = numProcs;
  else       numRemoteRanks = 0;
  ierr = PetscMalloc1(numRemoteRanks, &remoteRanks);CHKERRQ(ierr);
  for (p = 0; p < numRemoteRanks; ++p) {
    remoteRanks[p].rank  = p;
    remoteRanks[p].index = 0;
  }
  ierr = PetscSFCreate(comm, &partSF);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(partSF, 1, numRemoteRanks, NULL, PETSC_OWN_POINTER, remoteRanks, PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-partition_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPrintf(comm, "Cell Partition:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(cellPartSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = ISView(cellPart, NULL);CHKERRQ(ierr);
    if (origCellPart) {
      ierr = PetscPrintf(comm, "Original Cell Partition:\n");CHKERRQ(ierr);
      ierr = PetscSectionView(origCellPartSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
      ierr = ISView(origCellPart, NULL);CHKERRQ(ierr);
    }
    ierr = PetscSFView(partSF, NULL);CHKERRQ(ierr);
  }
  /* Close the partition over the mesh */
  ierr = DMPlexCreatePartitionClosure(dm, cellPartSection, cellPart, &partSection, &part);CHKERRQ(ierr);
  ierr = ISDestroy(&cellPart);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&cellPartSection);CHKERRQ(ierr);
  /* Create new mesh */
  ierr  = DMPlexCreate(comm, dmParallel);CHKERRQ(ierr);
  ierr  = DMPlexSetDimension(*dmParallel, dim);CHKERRQ(ierr);
  ierr  = PetscObjectSetName((PetscObject) *dmParallel, "Parallel Mesh");CHKERRQ(ierr);
  pmesh = (DM_Plex*) (*dmParallel)->data;
  /* Distribute sieve points and the global point numbering (replaces creating remote bases) */
  ierr = PetscSFConvertPartition(partSF, partSection, part, &renumbering, &pointSF);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPrintf(comm, "Point Partition:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(partSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = ISView(part, NULL);CHKERRQ(ierr);
    ierr = PetscSFView(pointSF, NULL);CHKERRQ(ierr);
    ierr = PetscPrintf(comm, "Point Renumbering after partition:\n");CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingView(renumbering, NULL);CHKERRQ(ierr);
  }
  ierr = PetscLogEventEnd(DMPLEX_Partition,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscLogEventBegin(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
  /* Distribute cone section */
  ierr = DMPlexGetConeSection(dm, &originalConeSection);CHKERRQ(ierr);
  ierr = DMPlexGetConeSection(*dmParallel, &newConeSection);CHKERRQ(ierr);
  ierr = PetscSFDistributeSection(pointSF, originalConeSection, &remoteOffsets, newConeSection);CHKERRQ(ierr);
  ierr = DMSetUp(*dmParallel);CHKERRQ(ierr);
  {
    PetscInt pStart, pEnd, p;

    ierr = PetscSectionGetChart(newConeSection, &pStart, &pEnd);CHKERRQ(ierr);
    for (p = pStart; p < pEnd; ++p) {
      PetscInt coneSize;
      ierr               = PetscSectionGetDof(newConeSection, p, &coneSize);CHKERRQ(ierr);
      pmesh->maxConeSize = PetscMax(pmesh->maxConeSize, coneSize);
    }
  }
  /* Communicate and renumber cones */
  ierr = PetscSFCreateSectionSF(pointSF, originalConeSection, remoteOffsets, newConeSection, &coneSF);CHKERRQ(ierr);
  ierr = DMPlexGetCones(dm, &cones);CHKERRQ(ierr);
  ierr = DMPlexGetCones(*dmParallel, &newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(newConeSection, &newConesSize);CHKERRQ(ierr);
  ierr = ISGlobalToLocalMappingApplyBlock(renumbering, IS_GTOLM_MASK, newConesSize, newCones, NULL, newCones);CHKERRQ(ierr);
  ierr = PetscOptionsHasName(((PetscObject) dm)->prefix, "-cones_view", &flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscPrintf(comm, "Serial Cone Section:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(originalConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = PetscPrintf(comm, "Parallel Cone Section:\n");CHKERRQ(ierr);
    ierr = PetscSectionView(newConeSection, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = PetscSFView(coneSF, NULL);CHKERRQ(ierr);
  }
  ierr = DMPlexGetConeOrientations(dm, &cones);CHKERRQ(ierr);
  ierr = DMPlexGetConeOrientations(*dmParallel, &newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(coneSF, MPIU_INT, cones, newCones);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&coneSF);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeCones,dm,0,0,0);CHKERRQ(ierr);
  /* Create supports and stratify sieve */
  {
    PetscInt pStart, pEnd;

    ierr = PetscSectionGetChart(pmesh->coneSection, &pStart, &pEnd);CHKERRQ(ierr);
    ierr = PetscSectionSetChart(pmesh->supportSection, pStart, pEnd);CHKERRQ(ierr);
  }
  ierr = DMPlexSymmetrize(*dmParallel);CHKERRQ(ierr);
  ierr = DMPlexStratify(*dmParallel);CHKERRQ(ierr);
  /* Distribute Coordinates */
  {
    PetscSection originalCoordSection, newCoordSection;
    Vec          originalCoordinates, newCoordinates;
    PetscInt     bs;
    const char  *name;

    ierr = DMGetCoordinateSection(dm, &originalCoordSection);CHKERRQ(ierr);
    ierr = DMGetCoordinateSection(*dmParallel, &newCoordSection);CHKERRQ(ierr);
    ierr = DMGetCoordinatesLocal(dm, &originalCoordinates);CHKERRQ(ierr);
    ierr = VecCreate(comm, &newCoordinates);CHKERRQ(ierr);
    ierr = PetscObjectGetName((PetscObject) originalCoordinates, &name);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) newCoordinates, name);CHKERRQ(ierr);

    ierr = DMPlexDistributeField(dm, pointSF, originalCoordSection, originalCoordinates, newCoordSection, newCoordinates);CHKERRQ(ierr);
    ierr = DMSetCoordinatesLocal(*dmParallel, newCoordinates);CHKERRQ(ierr);
    ierr = VecGetBlockSize(originalCoordinates, &bs);CHKERRQ(ierr);
    ierr = VecSetBlockSize(newCoordinates, bs);CHKERRQ(ierr);
    ierr = VecDestroy(&newCoordinates);CHKERRQ(ierr);
  }
  /* Distribute labels */
  ierr = PetscLogEventBegin(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
  {
    DMLabel  next      = mesh->labels, newNext = pmesh->labels;
    PetscInt numLabels = 0, l;

    /* Bcast number of labels */
    while (next) {++numLabels; next = next->next;}
    ierr = MPI_Bcast(&numLabels, 1, MPIU_INT, 0, comm);CHKERRQ(ierr);
    next = mesh->labels;
    for (l = 0; l < numLabels; ++l) {
      DMLabel   labelNew;
      PetscBool isdepth;

      /* Skip "depth" because it is recreated */
      if (!rank) {ierr = PetscStrcmp(next->name, "depth", &isdepth);CHKERRQ(ierr);}
      ierr = MPI_Bcast(&isdepth, 1, MPIU_BOOL, 0, comm);CHKERRQ(ierr);
      if (isdepth) {if (!rank) next = next->next; continue;}
      ierr = DMLabelDistribute(next, partSection, part, renumbering, &labelNew);CHKERRQ(ierr);
      /* Insert into list */
      if (newNext) newNext->next = labelNew;
      else         pmesh->labels = labelNew;
      newNext = labelNew;
      if (!rank) next = next->next;
    }
  }
  ierr = PetscLogEventEnd(DMPLEX_DistributeLabels,dm,0,0,0);CHKERRQ(ierr);
  /* Setup hybrid structure */
  {
    const PetscInt *gpoints;
    PetscInt        depth, n, d;

    for (d = 0; d <= dim; ++d) {pmesh->hybridPointMax[d] = mesh->hybridPointMax[d];}
    ierr = MPI_Bcast(pmesh->hybridPointMax, dim+1, MPIU_INT, 0, comm);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetSize(renumbering, &n);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingGetIndices(renumbering, &gpoints);CHKERRQ(ierr);
    ierr = DMPlexGetDepth(dm, &depth);CHKERRQ(ierr);
    for (d = 0; d <= dim; ++d) {
      PetscInt pmax = pmesh->hybridPointMax[d], newmax = 0, pEnd, stratum[2], p;

      if (pmax < 0) continue;
      ierr = DMPlexGetDepthStratum(dm, d > depth ? depth : d, &stratum[0], &stratum[1]);CHKERRQ(ierr);
      ierr = DMPlexGetDepthStratum(*dmParallel, d, NULL, &pEnd);CHKERRQ(ierr);
      ierr = MPI_Bcast(stratum, 2, MPIU_INT, 0, comm);CHKERRQ(ierr);
      for (p = 0; p < n; ++p) {
        const PetscInt point = gpoints[p];

        if ((point >= stratum[0]) && (point < stratum[1]) && (point >= pmax)) ++newmax;
      }
      if (newmax > 0) pmesh->hybridPointMax[d] = pEnd - newmax;
      else            pmesh->hybridPointMax[d] = -1;
    }
    ierr = ISLocalToGlobalMappingRestoreIndices(renumbering, &gpoints);CHKERRQ(ierr);
  }
  /* Cleanup Partition */
  ierr = ISLocalToGlobalMappingDestroy(&renumbering);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&partSF);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&partSection);CHKERRQ(ierr);
  ierr = ISDestroy(&part);CHKERRQ(ierr);
  /* Create point SF for parallel mesh */
  ierr = PetscLogEventBegin(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
  {
    const PetscInt *leaves;
    PetscSFNode    *remotePoints, *rowners, *lowners;
    PetscInt        numRoots, numLeaves, numGhostPoints = 0, p, gp, *ghostPoints;
    PetscInt        pStart, pEnd;

    ierr = DMPlexGetChart(*dmParallel, &pStart, &pEnd);CHKERRQ(ierr);
    ierr = PetscSFGetGraph(pointSF, &numRoots, &numLeaves, &leaves, NULL);CHKERRQ(ierr);
    ierr = PetscMalloc2(numRoots,&rowners,numLeaves,&lowners);CHKERRQ(ierr);
    for (p=0; p<numRoots; p++) {
      rowners[p].rank  = -1;
      rowners[p].index = -1;
    }
    if (origCellPart) {
      /* Make sure points in the original partition are not assigned to other procs */
      const PetscInt *origPoints;

      ierr = DMPlexCreatePartitionClosure(dm, origCellPartSection, origCellPart, &origPartSection, &origPart);CHKERRQ(ierr);
      ierr = ISGetIndices(origPart, &origPoints);CHKERRQ(ierr);
      for (p = 0; p < numProcs; ++p) {
        PetscInt dof, off, d;

        ierr = PetscSectionGetDof(origPartSection, p, &dof);CHKERRQ(ierr);
        ierr = PetscSectionGetOffset(origPartSection, p, &off);CHKERRQ(ierr);
        for (d = off; d < off+dof; ++d) {
          rowners[origPoints[d]].rank = p;
        }
      }
      ierr = ISRestoreIndices(origPart, &origPoints);CHKERRQ(ierr);
      ierr = ISDestroy(&origPart);CHKERRQ(ierr);
      ierr = PetscSectionDestroy(&origPartSection);CHKERRQ(ierr);
    }
    ierr = ISDestroy(&origCellPart);CHKERRQ(ierr);
    ierr = PetscSectionDestroy(&origCellPartSection);CHKERRQ(ierr);

    ierr = PetscSFBcastBegin(pointSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(pointSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    for (p = 0; p < numLeaves; ++p) {
      if (lowners[p].rank < 0 || lowners[p].rank == rank) { /* Either put in a bid or we know we own it */
        lowners[p].rank  = rank;
        lowners[p].index = leaves ? leaves[p] : p;
      } else if (lowners[p].rank >= 0) { /* Point already claimed by another rank, so flag it so that MAXLOC ignores us */
        lowners[p].rank  = -2;
        lowners[p].index = -2;
      }
    }
    for (p=0; p<numRoots; p++) { /* Roots must not participate in the reduction; flag them so that MAXLOC does not use them */
      rowners[p].rank  = -3;
      rowners[p].index = -3;
    }
    ierr = PetscSFReduceBegin(pointSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(pointSF, MPIU_2INT, lowners, rowners, MPI_MAXLOC);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(pointSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(pointSF, MPIU_2INT, rowners, lowners);CHKERRQ(ierr);
    for (p = 0; p < numLeaves; ++p) {
      if (lowners[p].rank < 0 || lowners[p].index < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Cell partition corrupt: point not claimed");
      if (lowners[p].rank != rank) ++numGhostPoints;
    }
    ierr = PetscMalloc1(numGhostPoints,    &ghostPoints);CHKERRQ(ierr);
    ierr = PetscMalloc1(numGhostPoints, &remotePoints);CHKERRQ(ierr);
    for (p = 0, gp = 0; p < numLeaves; ++p) {
      if (lowners[p].rank != rank) {
        ghostPoints[gp]        = leaves ? leaves[p] : p;
        remotePoints[gp].rank  = lowners[p].rank;
        remotePoints[gp].index = lowners[p].index;
        ++gp;
      }
    }
    ierr = PetscFree2(rowners,lowners);CHKERRQ(ierr);
    ierr = PetscSFSetGraph((*dmParallel)->sf, pEnd - pStart, numGhostPoints, ghostPoints, PETSC_OWN_POINTER, remotePoints, PETSC_OWN_POINTER);CHKERRQ(ierr);
    ierr = PetscSFSetFromOptions((*dmParallel)->sf);CHKERRQ(ierr);
  }
  pmesh->useCone    = mesh->useCone;
  pmesh->useClosure = mesh->useClosure;
  ierr = PetscLogEventEnd(DMPLEX_DistributeSF,dm,0,0,0);CHKERRQ(ierr);
  /* Copy BC */
  ierr = DMPlexCopyBoundary(dm, *dmParallel);CHKERRQ(ierr);
  /* Cleanup */
  if (sf) {*sf = pointSF;}
  else    {ierr = PetscSFDestroy(&pointSF);CHKERRQ(ierr);}
  ierr = DMSetFromOptions(*dmParallel);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_Distribute,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
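The usual calling pattern checks whether a parallel mesh was actually produced before swapping it in, since the routine returns NULL on one process (a hedged sketch; NULL selects the default partitioner as documented above):

  DM      dmDist = NULL;
  PetscSF sf     = NULL;

  ierr = DMPlexDistribute(dm,NULL,0,&sf,&dmDist);CHKERRQ(ierr);
  if (dmDist) {
    ierr = PetscSFDestroy(&sf);CHKERRQ(ierr); /* or keep it to migrate field data with DMPlexDistributeField() */
    ierr = DMDestroy(&dm);CHKERRQ(ierr);
    dm   = dmDist;
  }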
Example #26
/*@
   DADestroy - Destroys a distributed array.

   Collective on DA

   Input Parameter:
.  da - the distributed array to destroy 

   Level: beginner

.keywords: distributed array, destroy

.seealso: DACreate1d(), DACreate2d(), DACreate3d()
@*/
PetscErrorCode PETSCDM_DLLEXPORT DADestroy(DA da)
{
  PetscErrorCode ierr;
  PetscInt       i;
  PetscTruth     done;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_COOKIE,1);

  ierr = DMDestroy_Private((DM)da,&done);CHKERRQ(ierr);
  if (!done) PetscFunctionReturn(0);
  /* destroy the internal part */
  if (da->ops->destroy) {
    ierr = (*da->ops->destroy)(da);CHKERRQ(ierr);
  }
  /* destroy the external/common part */
  for (i=0; i<DA_MAX_AD_ARRAYS; i++) {
    ierr = PetscFree(da->adstartghostedout[i]);CHKERRQ(ierr);
    ierr = PetscFree(da->adstartghostedin[i]);CHKERRQ(ierr);
    ierr = PetscFree(da->adstartout[i]);CHKERRQ(ierr);
    ierr = PetscFree(da->adstartin[i]);CHKERRQ(ierr);
  }
  for (i=0; i<DA_MAX_AD_ARRAYS; i++) {
    ierr = PetscFree(da->admfstartghostedout[i]);CHKERRQ(ierr);
    ierr = PetscFree(da->admfstartghostedin[i]);CHKERRQ(ierr);
    ierr = PetscFree(da->admfstartout[i]);CHKERRQ(ierr);
    ierr = PetscFree(da->admfstartin[i]);CHKERRQ(ierr);
  }
  for (i=0; i<DA_MAX_WORK_ARRAYS; i++) {
    ierr = PetscFree(da->startghostedout[i]);CHKERRQ(ierr);
    ierr = PetscFree(da->startghostedin[i]);CHKERRQ(ierr);
    ierr = PetscFree(da->startout[i]);CHKERRQ(ierr);
    ierr = PetscFree(da->startin[i]);CHKERRQ(ierr);
  }

  /* if memory was published with AMS then destroy it */
  ierr = PetscObjectDepublish(da);CHKERRQ(ierr);

  if (da->ltog)   {ierr = VecScatterDestroy(da->ltog);CHKERRQ(ierr);}
  if (da->gtol)   {ierr = VecScatterDestroy(da->gtol);CHKERRQ(ierr);}
  if (da->ltol)   {ierr = VecScatterDestroy(da->ltol);CHKERRQ(ierr);}
  if (da->natural){
    ierr = VecDestroy(da->natural);CHKERRQ(ierr);
  }
  if (da->gton) {
    ierr = VecScatterDestroy(da->gton);CHKERRQ(ierr);
  }

  if (da->ao) {
    ierr = AODestroy(da->ao);CHKERRQ(ierr);
  }
  if (da->ltogmap) {
    ierr = ISLocalToGlobalMappingDestroy(da->ltogmap);CHKERRQ(ierr);
  }
  if (da->ltogmapb) {
    ierr = ISLocalToGlobalMappingDestroy(da->ltogmapb);CHKERRQ(ierr);
  }

  ierr = PetscFree(da->lx);CHKERRQ(ierr);
  ierr = PetscFree(da->ly);CHKERRQ(ierr);
  ierr = PetscFree(da->lz);CHKERRQ(ierr);

  if (da->fieldname) {
    for (i=0; i<da->w; i++) {
      ierr = PetscStrfree(da->fieldname[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(da->fieldname);CHKERRQ(ierr);
  }

  if (da->localcoloring) {
    ierr = ISColoringDestroy(da->localcoloring);CHKERRQ(ierr);
  }
  if (da->ghostedcoloring) {
    ierr = ISColoringDestroy(da->ghostedcoloring);CHKERRQ(ierr);
  }

  if (da->coordinates) {ierr = VecDestroy(da->coordinates);CHKERRQ(ierr);}
  if (da->ghosted_coordinates) {ierr = VecDestroy(da->ghosted_coordinates);CHKERRQ(ierr);}
  if (da->da_coordinates && da != da->da_coordinates) {ierr = DADestroy(da->da_coordinates);CHKERRQ(ierr);}

  ierr = PetscFree(da->neighbors);CHKERRQ(ierr);
  ierr = PetscFree(da->dfill);CHKERRQ(ierr);
  ierr = PetscFree(da->ofill);CHKERRQ(ierr);
  ierr = PetscFree(da->e);CHKERRQ(ierr);

  ierr = PetscHeaderDestroy(da);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
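Because destruction goes through DMDestroy_Private() above, DADestroy() is reference counted: it only frees storage when the last reference is dropped. A minimal lifecycle sketch in the old-style non-pointer API used throughout this routine (hedged: the DACreate1d() signature is assumed from the same vintage):

  DA da;

  ierr = DACreate1d(PETSC_COMM_WORLD,DA_NONPERIODIC,64,1,1,PETSC_NULL,&da);CHKERRQ(ierr);
  /* ... create vectors from the DA, assemble, solve ... */
  ierr = DADestroy(da);CHKERRQ(ierr); /* drops one reference; frees once the count reaches zero */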
Example #27
/*@
   MatCreateLocalRef - Gets a logical reference to a local submatrix, for use in assembly

   Not Collective

   Input Arguments:
+ A - Full matrix, generally parallel
. isrow - Local index set for the rows
- iscol - Local index set for the columns

   Output Arguments:
. newmat - New serial Mat

   Level: developer

   Notes:
   Most users will call MatGetLocalSubMatrix(), which returns a real matrix corresponding to the local
   block if one is available, such as with matrix formats that store these blocks separately.

   The new matrix forwards MatSetValuesLocal() and MatSetValuesBlockedLocal() to the global system.
   In general, it does not define MatMult() or any other functions.  Local submatrices can be nested.

.seealso: MatSetValuesLocal(), MatSetValuesBlockedLocal(), MatGetLocalSubMatrix(), MatCreateSubMatrix()
@*/
PetscErrorCode  MatCreateLocalRef(Mat A,IS isrow,IS iscol,Mat *newmat)
{
  PetscErrorCode ierr;
  Mat_LocalRef   *lr;
  Mat            B;
  PetscInt       m,n;
  PetscBool      islr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidHeaderSpecific(isrow,IS_CLASSID,2);
  PetscValidHeaderSpecific(iscol,IS_CLASSID,3);
  PetscValidPointer(newmat,4);
  *newmat = 0;

  ierr = MatCreate(PETSC_COMM_SELF,&B);CHKERRQ(ierr);
  ierr = ISGetLocalSize(isrow,&m);CHKERRQ(ierr);
  ierr = ISGetLocalSize(iscol,&n);CHKERRQ(ierr);
  ierr = MatSetSizes(B,m,n,m,n);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)B,MATLOCALREF);CHKERRQ(ierr);
  ierr = MatSetUp(B);CHKERRQ(ierr);

  B->ops->destroy = MatDestroy_LocalRef;

  ierr = PetscNewLog(B,Mat_LocalRef,&lr);CHKERRQ(ierr);
  B->data = (void*)lr;

  ierr = PetscObjectTypeCompare((PetscObject)A,MATLOCALREF,&islr);CHKERRQ(ierr);
  if (islr) {
    Mat_LocalRef *alr = (Mat_LocalRef*)A->data;
    lr->Top = alr->Top;
  } else {
    /* This does not increase the reference count because MatLocalRef is not allowed to live longer than its parent */
    lr->Top = A;
  }
  {
    ISLocalToGlobalMapping rltog,cltog;
    PetscInt abs,rbs,cbs;

    /* We will translate directly to global indices for the top level */
    lr->SetValues        = MatSetValues;
    lr->SetValuesBlocked = MatSetValuesBlocked;

    B->ops->setvalueslocal = MatSetValuesLocal_LocalRef_Scalar;
    ierr = ISL2GCompose(isrow,A->rmap->mapping,&rltog);CHKERRQ(ierr);
    if (isrow == iscol && A->rmap->mapping == A->cmap->mapping) {
      ierr = PetscObjectReference((PetscObject)rltog);CHKERRQ(ierr);
      cltog = rltog;
    } else {
      ierr = ISL2GCompose(iscol,A->cmap->mapping,&cltog);CHKERRQ(ierr);
    }
    ierr = MatSetLocalToGlobalMapping(B,rltog,cltog);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&rltog);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&cltog);CHKERRQ(ierr);

    ierr = MatGetBlockSize(A,&abs);CHKERRQ(ierr);
    ierr = ISGetBlockSize(isrow,&rbs);CHKERRQ(ierr);
    ierr = ISGetBlockSize(iscol,&cbs);CHKERRQ(ierr);
    if (rbs == cbs) {           /* submatrix has block structure, so user can insert values with blocked interface */
      ierr = PetscLayoutSetBlockSize(B->rmap,rbs);CHKERRQ(ierr);
      ierr = PetscLayoutSetBlockSize(B->cmap,cbs);CHKERRQ(ierr);
      if (abs != rbs || abs == 1) {
        /* Top-level matrix has different block size, so we have to call its scalar insertion interface */
        B->ops->setvaluesblockedlocal = MatSetValuesBlockedLocal_LocalRef_Scalar;
      } else {
        /* Block sizes match so we can forward values to the top level using the block interface */
        B->ops->setvaluesblockedlocal = MatSetValuesBlockedLocal_LocalRef_Block;
        ierr = ISL2GComposeBlock(isrow,A->rmap->bmapping,&rltog);CHKERRQ(ierr);
        if (isrow == iscol && A->rmap->bmapping == A->cmap->bmapping) {
          ierr =  PetscObjectReference((PetscObject)rltog);CHKERRQ(ierr);
          cltog = rltog;
        } else {
          ierr = ISL2GComposeBlock(iscol,A->cmap->bmapping,&cltog);CHKERRQ(ierr);
        }
        ierr = MatSetLocalToGlobalMappingBlock(B,rltog,cltog);CHKERRQ(ierr);
        ierr = ISLocalToGlobalMappingDestroy(&rltog);CHKERRQ(ierr);
        ierr = ISLocalToGlobalMappingDestroy(&cltog);CHKERRQ(ierr);
      }
    }
  }
  *newmat = B;
  PetscFunctionReturn(0);
}
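In practice one rarely calls MatCreateLocalRef() directly: MatGetLocalSubMatrix() returns either a real stored block or such a logical reference, and the assembly pattern is the same either way. A hedged sketch (isrow/iscol are placeholder local index sets; i, j, v are placeholder entries):

  Mat         Asub;
  PetscInt    i = 0,j = 0;
  PetscScalar v = 1.0;

  ierr = MatGetLocalSubMatrix(A,isrow,iscol,&Asub);CHKERRQ(ierr);
  ierr = MatSetValuesLocal(Asub,1,&i,1,&j,&v,ADD_VALUES);CHKERRQ(ierr); /* forwarded to the parent A */
  ierr = MatRestoreLocalSubMatrix(A,isrow,iscol,&Asub);CHKERRQ(ierr);
  /* assemble the parent matrix, not the reference */
  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

Example #28 below is a complete program built around exactly this pattern.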
Example #28
int main(int argc, char *argv[])
{
  PetscErrorCode ierr;
  IS is0a,is0b,is0,is1,isl0a,isl0b,isl0,isl1;
  Mat A,Aexplicit;
  PetscBool usenest;
  PetscMPIInt rank,size;
  PetscInt i,j;

  PetscInitialize(&argc,&argv,PETSC_NULL,help);
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);CHKERRQ(ierr);

  {
    const PetscInt ix0a[] = {rank*2+0},ix0b[] = {rank*2+1},ix0[] = {rank*3+0,rank*3+1},ix1[] = {rank*3+2};
    ierr = ISCreateGeneral(PETSC_COMM_WORLD,1,ix0a,PETSC_COPY_VALUES,&is0a);CHKERRQ(ierr);
    ierr = ISCreateGeneral(PETSC_COMM_WORLD,1,ix0b,PETSC_COPY_VALUES,&is0b);CHKERRQ(ierr);
    ierr = ISCreateGeneral(PETSC_COMM_WORLD,2,ix0,PETSC_COPY_VALUES,&is0);CHKERRQ(ierr);
    ierr = ISCreateGeneral(PETSC_COMM_WORLD,1,ix1,PETSC_COPY_VALUES,&is1);CHKERRQ(ierr);
  }
  {
    ierr = ISCreateStride(PETSC_COMM_SELF,6,0,1,&isl0);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,3,0,1,&isl0a);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,3,3,1,&isl0b);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,3,6,1,&isl1);CHKERRQ(ierr);
  }

  usenest = PETSC_FALSE;
  ierr = PetscOptionsGetBool(PETSC_NULL,"-nest",&usenest,PETSC_NULL);CHKERRQ(ierr);
  if (usenest) {
    ISLocalToGlobalMapping l2g;
    const PetscInt l2gind[3] = {(rank-1+size)%size,rank,(rank+1)%size};
    Mat B[9];
    ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_WORLD,3,l2gind,PETSC_COPY_VALUES,&l2g);CHKERRQ(ierr);
    for (i=0; i<9; i++) {
      ierr = MatCreateAIJ(PETSC_COMM_WORLD,1,1,PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE,PETSC_NULL,PETSC_DECIDE,PETSC_NULL,&B[i]);CHKERRQ(ierr);
      ierr = MatSetUp(B[i]);CHKERRQ(ierr);
      ierr = MatSetLocalToGlobalMapping(B[i],l2g,l2g);CHKERRQ(ierr);
    }
    {
      const IS isx[] = {is0a,is0b};
      const Mat Bx00[] = {B[0],B[1],B[3],B[4]},Bx01[] = {B[2],B[5]},Bx10[] = {B[6],B[7]};
      Mat B00,B01,B10;
      ierr = MatCreateNest(PETSC_COMM_WORLD,2,isx,2,isx,Bx00,&B00);CHKERRQ(ierr);
      ierr = MatSetUp(B00);CHKERRQ(ierr);
      ierr = MatCreateNest(PETSC_COMM_WORLD,2,isx,1,PETSC_NULL,Bx01,&B01);CHKERRQ(ierr);
      ierr = MatSetUp(B01);CHKERRQ(ierr);
      ierr = MatCreateNest(PETSC_COMM_WORLD,1,PETSC_NULL,2,isx,Bx10,&B10);CHKERRQ(ierr);
      ierr = MatSetUp(B10);CHKERRQ(ierr);
      {
        Mat By[] = {B00,B01,B10,B[8]};
        IS isy[] = {is0,is1};
        ierr = MatCreateNest(PETSC_COMM_WORLD,2,isy,2,isy,By,&A);CHKERRQ(ierr);
        ierr = MatSetUp(A);CHKERRQ(ierr);
      }
      ierr = MatDestroy(&B00);CHKERRQ(ierr);
      ierr = MatDestroy(&B01);CHKERRQ(ierr);
      ierr = MatDestroy(&B10);CHKERRQ(ierr);
    }
    for (i=0; i<9; i++) {ierr = MatDestroy(&B[i]);CHKERRQ(ierr);}
    ierr = ISLocalToGlobalMappingDestroy(&l2g);CHKERRQ(ierr);
  } else {
    ISLocalToGlobalMapping l2g;
    PetscInt l2gind[9];
    for (i=0; i<3; i++) for (j=0; j<3; j++) l2gind[3*i+j] = ((rank-1+j+size) % size)*3 + i;
    ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_WORLD,9,l2gind,PETSC_COPY_VALUES,&l2g);CHKERRQ(ierr);
    ierr = MatCreateAIJ(PETSC_COMM_WORLD,3,3,PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE,PETSC_NULL,PETSC_DECIDE,PETSC_NULL,&A);CHKERRQ(ierr);
    ierr = MatSetLocalToGlobalMapping(A,l2g,l2g);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&l2g);CHKERRQ(ierr);
  }

  {
    Mat A00,A11,A0a0a,A0a0b;
    ierr = MatGetLocalSubMatrix(A,isl0,isl0,&A00);CHKERRQ(ierr);
    ierr = MatGetLocalSubMatrix(A,isl1,isl1,&A11);CHKERRQ(ierr);
    ierr = MatGetLocalSubMatrix(A00,isl0a,isl0a,&A0a0a);CHKERRQ(ierr);
    ierr = MatGetLocalSubMatrix(A00,isl0a,isl0b,&A0a0b);CHKERRQ(ierr);

    ierr = MatSetValueLocal(A0a0a,0,0,100*rank+1,ADD_VALUES);CHKERRQ(ierr);
    ierr = MatSetValueLocal(A0a0a,0,1,100*rank+2,ADD_VALUES);CHKERRQ(ierr);
    ierr = MatSetValueLocal(A0a0a,2,2,100*rank+9,ADD_VALUES);CHKERRQ(ierr);

    ierr = MatSetValueLocal(A0a0b,1,1,100*rank+50+5,ADD_VALUES);CHKERRQ(ierr);

    ierr = MatSetValueLocal(A11,0,0,1000*(rank+1)+1,ADD_VALUES);CHKERRQ(ierr);
    ierr = MatSetValueLocal(A11,1,2,1000*(rank+1)+6,ADD_VALUES);CHKERRQ(ierr);

    ierr = MatRestoreLocalSubMatrix(A00,isl0a,isl0a,&A0a0a);CHKERRQ(ierr);
    ierr = MatRestoreLocalSubMatrix(A00,isl0a,isl0b,&A0a0b);CHKERRQ(ierr);
    ierr = MatRestoreLocalSubMatrix(A,isl0,isl0,&A00);CHKERRQ(ierr);
    ierr = MatRestoreLocalSubMatrix(A,isl1,isl1,&A11);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  ierr = MatComputeExplicitOperator(A,&Aexplicit);CHKERRQ(ierr);
  ierr = MatView(Aexplicit,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);

  ierr = MatDestroy(&A);CHKERRQ(ierr);
  ierr = MatDestroy(&Aexplicit);CHKERRQ(ierr);
  ierr = ISDestroy(&is0a);CHKERRQ(ierr);
  ierr = ISDestroy(&is0b);CHKERRQ(ierr);
  ierr = ISDestroy(&is0);CHKERRQ(ierr);
  ierr = ISDestroy(&is1);CHKERRQ(ierr);
  ierr = ISDestroy(&isl0a);CHKERRQ(ierr);
  ierr = ISDestroy(&isl0b);CHKERRQ(ierr);
  ierr = ISDestroy(&isl0);CHKERRQ(ierr);
  ierr = ISDestroy(&isl1);CHKERRQ(ierr);
  PetscFinalize();
  return 0;
}