Example #1
void PetscVecTools::SetupInterleavedVectorScatterGather(Vec interleavedVec, VecScatter& rFirstVariableScatterContext, VecScatter& rSecondVariableScatterContext)
{
    PetscInt num_rows, num_local_rows;

    VecGetSize(interleavedVec, &num_rows);
    VecGetLocalSize(interleavedVec, &num_local_rows);

    IS A11_rows, A22_rows;
    ISCreateStride(PETSC_COMM_WORLD, num_rows/2, 0, 2, &A11_rows);
    ISCreateStride(PETSC_COMM_WORLD, num_rows/2, 1, 2, &A22_rows);

    IS all_vector;
    ISCreateStride(PETSC_COMM_WORLD, num_rows/2, 0, 1, &all_vector);

    unsigned subvector_num_rows = num_rows/2;
    unsigned subvector_local_rows = num_local_rows/2;
    Vec x1_subvector = PetscTools::CreateVec(subvector_num_rows, subvector_local_rows);
    Vec x2_subvector = PetscTools::CreateVec(subvector_num_rows, subvector_local_rows);

    VecScatterCreate(interleavedVec, A11_rows, x1_subvector, all_vector, &rFirstVariableScatterContext);
    VecScatterCreate(interleavedVec, A22_rows, x2_subvector, all_vector, &rSecondVariableScatterContext);

    PetscTools::Destroy(x1_subvector);
    PetscTools::Destroy(x2_subvector);

    ISDestroy(PETSC_DESTROY_PARAM(A11_rows));
    ISDestroy(PETSC_DESTROY_PARAM(A22_rows));
    ISDestroy(PETSC_DESTROY_PARAM(all_vector));
}
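For context, a minimal usage sketch (not part of the original example) of how the two scatter contexts created above might be applied; first_scatter, interleaved_vec, num_rows and num_local_rows are illustrative names for objects assumed to exist as in the routine:
/* Hypothetical usage sketch: extract the first interleaved variable, operate on it,
   then push it back into the full vector with a reverse scatter. */
Vec first_variable;
VecCreateMPI(PETSC_COMM_WORLD, num_local_rows/2, num_rows/2, &first_variable);

VecScatterBegin(first_scatter, interleaved_vec, first_variable, INSERT_VALUES, SCATTER_FORWARD);
VecScatterEnd(first_scatter, interleaved_vec, first_variable, INSERT_VALUES, SCATTER_FORWARD);

/* ... operate on first_variable ... */

VecScatterBegin(first_scatter, first_variable, interleaved_vec, INSERT_VALUES, SCATTER_REVERSE);
VecScatterEnd(first_scatter, first_variable, interleaved_vec, INSERT_VALUES, SCATTER_REVERSE);

VecDestroy(&first_variable);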
Example #2
File: ex2.c Project: Kun-Qu/petsc
PetscErrorCode RHSFunction(TS ts,PetscReal t,Vec globalin,Vec globalout,void *ctx)
{
  PetscScalar    *inptr,*outptr;
  PetscInt       i,n,*idx;
  PetscErrorCode ierr;
  IS             from,to;
  VecScatter     scatter;
  Vec            tmp_in,tmp_out;

  /* Get the length of parallel vector */
  ierr = VecGetSize(globalin,&n);CHKERRQ(ierr);

  /* Set the index sets */
  ierr = PetscMalloc(n*sizeof(PetscInt),&idx);CHKERRQ(ierr);
  for(i=0; i<n; i++) idx[i]=i;
  
  /* Create local sequential vectors */
  ierr = VecCreateSeq(PETSC_COMM_SELF,n,&tmp_in);CHKERRQ(ierr);
  ierr = VecDuplicate(tmp_in,&tmp_out);CHKERRQ(ierr);

  /* Create scatter context */
  ierr = ISCreateGeneral(PETSC_COMM_SELF,n,idx,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PETSC_COMM_SELF,n,idx,PETSC_COPY_VALUES,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(globalin,from,tmp_in,to,&scatter);CHKERRQ(ierr);
  ierr = VecScatterBegin(scatter,globalin,tmp_in,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(scatter,globalin,tmp_in,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&scatter);CHKERRQ(ierr);

  /* Extract input array */
  ierr = VecGetArray(tmp_in,&inptr);CHKERRQ(ierr);

  /* Extract output array */
  ierr = VecGetArray(tmp_out,&outptr);CHKERRQ(ierr);

  outptr[0] = 2.0*inptr[0]+inptr[1];
  outptr[1] = inptr[0]+2.0*inptr[1]+inptr[2];
  outptr[2] = inptr[1]+2.0*inptr[2];

  ierr = VecRestoreArray(tmp_in,&inptr);CHKERRQ(ierr);
  ierr = VecRestoreArray(tmp_out,&outptr);CHKERRQ(ierr);

  ierr = VecScatterCreate(tmp_out,from,globalout,to,&scatter);CHKERRQ(ierr);
  ierr = VecScatterBegin(scatter,tmp_out,globalout,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(scatter,tmp_out,globalout,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /* Destroy idx and scatter */
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&scatter);CHKERRQ(ierr);
  ierr = VecDestroy(&tmp_in);CHKERRQ(ierr);
  ierr = VecDestroy(&tmp_out);CHKERRQ(ierr);
  ierr = PetscFree(idx);CHKERRQ(ierr);
  return 0;
}
Example #3
File: ex9.c Project: 00liujj/petsc
int main(int argc,char **argv)
{
  PetscErrorCode ierr;
  AppCtx         ctx;
  TS             ts;
  Vec            tsrhs,UV;
  IS             is;
  PetscInt       I;
  PetscMPIInt    rank;


  PetscInitialize(&argc,&argv,(char*)0,help);
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
  ierr = TSCreate(PETSC_COMM_WORLD,&ts);CHKERRQ(ierr);
  ierr = TSSetProblemType(ts,TS_NONLINEAR);CHKERRQ(ierr);
  ierr = TSSetType(ts,TSROSW);CHKERRQ(ierr);
  ierr = TSSetFromOptions(ts);CHKERRQ(ierr);
  ierr = VecCreateMPI(PETSC_COMM_WORLD,2,PETSC_DETERMINE,&tsrhs);CHKERRQ(ierr);
  ierr = VecCreateMPI(PETSC_COMM_WORLD,2,PETSC_DETERMINE,&UV);CHKERRQ(ierr);
  ierr = TSSetRHSFunction(ts,tsrhs,TSFunctionRHS,&ctx);CHKERRQ(ierr);
  ierr = TSSetIFunction(ts,NULL,TSFunctionI,&ctx);CHKERRQ(ierr);
  ctx.f = f;
  ctx.F = F;

  ierr = VecCreateMPI(PETSC_COMM_WORLD,1,PETSC_DETERMINE,&ctx.U);CHKERRQ(ierr);
  ierr = VecCreateMPI(PETSC_COMM_WORLD,1,PETSC_DETERMINE,&ctx.V);CHKERRQ(ierr);
  ierr = VecCreateMPI(PETSC_COMM_WORLD,1,PETSC_DETERMINE,&ctx.UF);CHKERRQ(ierr);
  ierr = VecCreateMPI(PETSC_COMM_WORLD,1,PETSC_DETERMINE,&ctx.VF);CHKERRQ(ierr);
  I    = 2*rank;
  ierr = ISCreateGeneral(PETSC_COMM_WORLD,1,&I,PETSC_COPY_VALUES,&is);CHKERRQ(ierr);
  ierr = VecScatterCreate(ctx.U,NULL,UV,is,&ctx.scatterU);CHKERRQ(ierr);
  ierr = ISDestroy(&is);CHKERRQ(ierr);
  I    = 2*rank + 1;
  ierr = ISCreateGeneral(PETSC_COMM_WORLD,1,&I,PETSC_COPY_VALUES,&is);CHKERRQ(ierr);
  ierr = VecScatterCreate(ctx.V,NULL,UV,is,&ctx.scatterV);CHKERRQ(ierr);
  ierr = ISDestroy(&is);CHKERRQ(ierr);

  ierr = VecSet(UV,1.0);CHKERRQ(ierr);
  ierr = TSSolve(ts,UV);CHKERRQ(ierr);
  ierr = VecDestroy(&tsrhs);CHKERRQ(ierr);
  ierr = VecDestroy(&UV);CHKERRQ(ierr);
  ierr = VecDestroy(&ctx.U);CHKERRQ(ierr);
  ierr = VecDestroy(&ctx.V);CHKERRQ(ierr);
  ierr = VecDestroy(&ctx.UF);CHKERRQ(ierr);
  ierr = VecDestroy(&ctx.VF);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&ctx.scatterU);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&ctx.scatterV);CHKERRQ(ierr);
  ierr = TSDestroy(&ts);CHKERRQ(ierr);
  PetscFinalize();
  return 0;
}
Example #4
/*@
   DMDAGlobalToNaturalAllCreate - Creates a scatter context that maps from the
     global vector to a copy of the entire vector on each processor, in natural numbering

   Collective on DMDA

   Input Parameter:
.  da - the distributed array context

   Output Parameter:
.  scatter - the scatter context

   Level: advanced

.keywords: distributed array, global to local, begin, coarse problem

.seealso: DMDAGlobalToNaturalEnd(), DMLocalToGlobalBegin(), DMDACreate2d(),
          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()
@*/
PetscErrorCode  DMDAGlobalToNaturalAllCreate(DM da,VecScatter *scatter)
{
  PetscErrorCode ierr;
  PetscInt       N;
  IS             from,to;
  Vec            tmplocal,global;
  AO             ao;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidPointer(scatter,2);
  ierr = DMDAGetAO(da,&ao);CHKERRQ(ierr);

  /* create the scatter context */
  ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)da),dd->w,dd->Nlocal,PETSC_DETERMINE,0,&global);CHKERRQ(ierr);
  ierr = VecGetSize(global,&N);CHKERRQ(ierr);
  ierr = ISCreateStride(PetscObjectComm((PetscObject)da),N,0,1,&to);CHKERRQ(ierr);
  ierr = AOPetscToApplicationIS(ao,to);CHKERRQ(ierr);
  ierr = ISCreateStride(PetscObjectComm((PetscObject)da),N,0,1,&from);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,dd->w,N,0,&tmplocal);CHKERRQ(ierr);
  ierr = VecScatterCreate(global,from,tmplocal,to,scatter);CHKERRQ(ierr);
  ierr = VecDestroy(&tmplocal);CHKERRQ(ierr);
  ierr = VecDestroy(&global);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
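A hedged usage sketch for the routine above; da is assumed to be an existing DMDA and gvec one of its global vectors (names are illustrative, not from the original source):
/* Sketch only: gather the entire DMDA global vector, in natural ordering,
   onto every process as a sequential vector. */
VecScatter tonaturalall;
Vec        natall;
PetscInt   N;

ierr = VecGetSize(gvec,&N);CHKERRQ(ierr);
ierr = VecCreateSeq(PETSC_COMM_SELF,N,&natall);CHKERRQ(ierr);
ierr = DMDAGlobalToNaturalAllCreate(da,&tonaturalall);CHKERRQ(ierr);
ierr = VecScatterBegin(tonaturalall,gvec,natall,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
ierr = VecScatterEnd(tonaturalall,gvec,natall,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
ierr = VecScatterDestroy(&tonaturalall);CHKERRQ(ierr);
ierr = VecDestroy(&natall);CHKERRQ(ierr);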
Example #5
/*
   DAGlobalToNatural_Create - Create the global to natural scatter object

   Collective on DA

   Input Parameter:
.  da - the distributed array context

   Level: developer

   Notes: This is an internal routine called by DAGlobalToNatural() to 
     create the scatter context.

.keywords: distributed array, global to local, begin

.seealso: DAGlobalToNaturalBegin(), DAGlobalToNaturalEnd(), DALocalToGlobal(), DACreate2d(), 
          DAGlobalToLocalBegin(), DAGlobalToLocalEnd(), DACreateNaturalVector()
*/
PetscErrorCode DAGlobalToNatural_Create(DA da)
{
  PetscErrorCode ierr;
  PetscInt  m,start,Nlocal;
  IS  from,to;
  Vec global;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_COOKIE,1);
  if (!da->natural) {
    SETERRQ(PETSC_ERR_ORDER,"Natural layout vector not yet created; cannot scatter into it");
  }

  /* create the scatter context */
  ierr = VecGetLocalSize(da->natural,&m);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(da->natural,&start,PETSC_NULL);CHKERRQ(ierr);

  ierr = DAGetNatural_Private(da,&Nlocal,&to);CHKERRQ(ierr);
  if (Nlocal != m) SETERRQ2(PETSC_ERR_PLIB,"Internal error: Nlocal %D local vector size %D",Nlocal,m);
  ierr = ISCreateStride(((PetscObject)da)->comm,m,start,1,&from);CHKERRQ(ierr);
  ierr = VecCreateMPIWithArray(((PetscObject)da)->comm,da->Nlocal,PETSC_DETERMINE,0,&global);CHKERRQ(ierr);
  ierr = VecSetBlockSize(global,da->w);CHKERRQ(ierr);
  ierr = VecScatterCreate(global,from,da->natural,to,&da->gton);CHKERRQ(ierr);
  ierr = VecDestroy(global);CHKERRQ(ierr);
  ierr = ISDestroy(from);CHKERRQ(ierr);
  ierr = ISDestroy(to);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
Example #6
File: ex25.c Project: Kun-Qu/petsc
int main(int argc,char **argv)
{
  PetscErrorCode ierr;
  PetscInt       n = 5,N,low,high,iglobal,i;
  PetscMPIInt    size,rank;
  PetscScalar    value,zero = 0.0;
  Vec            x,y;
  IS             is1,is2;
  VecScatter     ctx;

  ierr = PetscInitialize(&argc,&argv,(char*)0,help);CHKERRQ(ierr); 
  ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);

  /* create two vectors */
  N = size*n;
  ierr = VecCreate(PETSC_COMM_WORLD,&y);CHKERRQ(ierr);
  ierr = VecSetSizes(y,PETSC_DECIDE,N);CHKERRQ(ierr);
  ierr = VecSetFromOptions(y);CHKERRQ(ierr);
  if (!rank) {
    ierr = VecCreateSeq(PETSC_COMM_SELF,N,&x);CHKERRQ(ierr);
  } else {
    ierr = VecCreateSeq(PETSC_COMM_SELF,0,&x);CHKERRQ(ierr);
  }

  /* create two index sets */
  if (!rank) {
    ierr = ISCreateStride(PETSC_COMM_SELF,N,0,1,&is1);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,N,0,1,&is2);CHKERRQ(ierr);
  } else {
    ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is1);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is2);CHKERRQ(ierr);
  }

  ierr = VecSet(x,zero);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(y,&low,&high);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    iglobal = i + low; value = (PetscScalar) (i + 10*rank);
    ierr = VecSetValues(y,1,&iglobal,&value,INSERT_VALUES);CHKERRQ(ierr);
  }
  ierr = VecAssemblyBegin(y);CHKERRQ(ierr);
  ierr = VecAssemblyEnd(y);CHKERRQ(ierr);
  ierr = VecView(y,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);

  ierr = VecScatterCreate(y,is2,x,is1,&ctx);CHKERRQ(ierr);
  ierr = VecScatterBegin(ctx,y,x,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(ctx,y,x,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&ctx);CHKERRQ(ierr);
  
  if (!rank) 
    {printf("----\n"); ierr = VecView(x,PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);}

  ierr = VecDestroy(&x);CHKERRQ(ierr);
  ierr = VecDestroy(&y);CHKERRQ(ierr);
  ierr = ISDestroy(&is1);CHKERRQ(ierr);
  ierr = ISDestroy(&is2);CHKERRQ(ierr);

  ierr = PetscFinalize();
  return 0;
}
Example #7
void PetscVector<T>::localize (NumericVector<T>& v_local_in) const
{
  this->_restore_array();

  // Make sure the NumericVector passed in is really a PetscVector
  PetscVector<T>* v_local = libmesh_cast_ptr<PetscVector<T>*>(&v_local_in);

  libmesh_assert(v_local);
  libmesh_assert_equal_to (v_local->size(), this->size());

  PetscErrorCode ierr = 0;
  const PetscInt n = this->size();

  IS is;
  VecScatter scatter;

  // Create idx, idx[i] = i;
  std::vector<PetscInt> idx(n); Utility::iota (idx.begin(), idx.end(), 0);

  // Create the index set & scatter object
  ierr = ISCreateLibMesh(libMesh::COMM_WORLD, n, &idx[0], PETSC_USE_POINTER, &is);
         CHKERRABORT(libMesh::COMM_WORLD,ierr);

  ierr = VecScatterCreate(_vec,          is,
			  v_local->_vec, is,
			  &scatter);
         CHKERRABORT(libMesh::COMM_WORLD,ierr);

  // Perform the scatter
#if PETSC_VERSION_LESS_THAN(2,3,3)

  ierr = VecScatterBegin(_vec, v_local->_vec, INSERT_VALUES,
			 SCATTER_FORWARD, scatter);
         CHKERRABORT(libMesh::COMM_WORLD,ierr);

  ierr = VecScatterEnd  (_vec, v_local->_vec, INSERT_VALUES,
			 SCATTER_FORWARD, scatter);
         CHKERRABORT(libMesh::COMM_WORLD,ierr);
#else
  // API argument order change in PETSc 2.3.3
  ierr = VecScatterBegin(scatter, _vec, v_local->_vec,
			 INSERT_VALUES, SCATTER_FORWARD);
         CHKERRABORT(libMesh::COMM_WORLD,ierr);

  ierr = VecScatterEnd  (scatter, _vec, v_local->_vec,
                         INSERT_VALUES, SCATTER_FORWARD);
         CHKERRABORT(libMesh::COMM_WORLD,ierr);
#endif

  // Clean up
  ierr = LibMeshISDestroy (&is);
         CHKERRABORT(libMesh::COMM_WORLD,ierr);

  ierr = LibMeshVecScatterDestroy(&scatter);
         CHKERRABORT(libMesh::COMM_WORLD,ierr);

  // Make sure ghost dofs are up to date
  if (v_local->type() == GHOSTED)
    v_local->close();
}
Example #8
/*@
   DMDANaturalAllToGlobalCreate - Creates a scatter context that maps from a copy
     of the entire vector on each processor to its local part in the global vector.

   Collective on DMDA

   Input Parameter:
.  da - the distributed array context

   Output Parameter:
.  scatter - the scatter context

   Level: advanced

.keywords: distributed array, global to local, begin, coarse problem

.seealso: DMDAGlobalToNaturalEnd(), DMLocalToGlobalBegin(), DMDACreate2d(),
          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()
@*/
PetscErrorCode  DMDANaturalAllToGlobalCreate(DM da,VecScatter *scatter)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;
  PetscInt       M,m = dd->Nlocal,start;
  IS             from,to;
  Vec            tmplocal,global;
  AO             ao;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidPointer(scatter,2);
  ierr = DMDAGetAO(da,&ao);CHKERRQ(ierr);

  /* create the scatter context */
  ierr = MPI_Allreduce(&m,&M,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)da));CHKERRQ(ierr);
  ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)da),dd->w,m,PETSC_DETERMINE,0,&global);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(global,&start,NULL);CHKERRQ(ierr);
  ierr = ISCreateStride(PetscObjectComm((PetscObject)da),m,start,1,&from);CHKERRQ(ierr);
  ierr = AOPetscToApplicationIS(ao,from);CHKERRQ(ierr);
  ierr = ISCreateStride(PetscObjectComm((PetscObject)da),m,start,1,&to);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,dd->w,M,0,&tmplocal);CHKERRQ(ierr);
  ierr = VecScatterCreate(tmplocal,from,global,to,scatter);CHKERRQ(ierr);
  ierr = VecDestroy(&tmplocal);CHKERRQ(ierr);
  ierr = VecDestroy(&global);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
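The reverse direction can be sketched similarly, under the same assumptions (da is an existing DMDA, gvec one of its global vectors, and natall an illustrative sequential vector of full global size filled redundantly on every process):
/* Sketch only: push the redundant, natural-ordered full copy natall back into
   the distributed global vector gvec of the DMDA da. */
VecScatter toglobal;

ierr = DMDANaturalAllToGlobalCreate(da,&toglobal);CHKERRQ(ierr);
ierr = VecScatterBegin(toglobal,natall,gvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
ierr = VecScatterEnd(toglobal,natall,gvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
ierr = VecScatterDestroy(&toglobal);CHKERRQ(ierr);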
Example #9
PetscErrorCode MatSetLocalToGlobalMapping_IS(Mat A,ISLocalToGlobalMapping rmapping,ISLocalToGlobalMapping cmapping)
{
  PetscErrorCode ierr;
  PetscInt       n,bs;
  Mat_IS         *is = (Mat_IS*)A->data;
  IS             from,to;
  Vec            global;

  PetscFunctionBegin;
  PetscCheckSameComm(A,1,rmapping,2);
  if (rmapping != cmapping) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_INCOMP,"MATIS requires the row and column mappings to be identical");
  if (is->mapping) { /* Currently destroys the objects that will be created by this routine. Is there anything else that should be checked? */
    ierr = ISLocalToGlobalMappingDestroy(&is->mapping);CHKERRQ(ierr);
    ierr = VecDestroy(&is->x);CHKERRQ(ierr);
    ierr = VecDestroy(&is->y);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&is->ctx);CHKERRQ(ierr);
    ierr = MatDestroy(&is->A);CHKERRQ(ierr);
  }
  ierr = PetscObjectReference((PetscObject)rmapping);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&is->mapping);CHKERRQ(ierr);
  is->mapping = rmapping;
/*
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->rmap,rmapping);CHKERRQ(ierr);
  ierr = PetscLayoutSetISLocalToGlobalMapping(A->cmap,cmapping);CHKERRQ(ierr);
*/

  /* Create the local matrix A */
  ierr = ISLocalToGlobalMappingGetSize(rmapping,&n);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingGetBlockSize(rmapping,&bs);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_SELF,&is->A);CHKERRQ(ierr);
  if (bs > 1) {
    ierr = MatSetType(is->A,MATSEQBAIJ);CHKERRQ(ierr);
  } else {
    ierr = MatSetType(is->A,MATSEQAIJ);CHKERRQ(ierr);
  }
  ierr = MatSetSizes(is->A,n,n,n,n);CHKERRQ(ierr);
  ierr = MatSetBlockSize(is->A,bs);CHKERRQ(ierr);
  ierr = MatSetOptionsPrefix(is->A,((PetscObject)A)->prefix);CHKERRQ(ierr);
  ierr = MatAppendOptionsPrefix(is->A,"is_");CHKERRQ(ierr);
  ierr = MatSetFromOptions(is->A);CHKERRQ(ierr);

  /* Create the local work vectors */
  ierr = VecCreate(PETSC_COMM_SELF,&is->x);CHKERRQ(ierr);
  ierr = VecSetBlockSize(is->x,bs);CHKERRQ(ierr);
  ierr = VecSetSizes(is->x,n,n);CHKERRQ(ierr);
  ierr = VecSetOptionsPrefix(is->x,((PetscObject)A)->prefix);CHKERRQ(ierr);
  ierr = VecAppendOptionsPrefix(is->x,"is_");CHKERRQ(ierr);
  ierr = VecSetFromOptions(is->x);CHKERRQ(ierr);
  ierr = VecDuplicate(is->x,&is->y);CHKERRQ(ierr);

  /* setup the global to local scatter */
  ierr = ISCreateStride(PETSC_COMM_SELF,n,0,1,&to);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingApplyIS(rmapping,to,&from);CHKERRQ(ierr);
  ierr = MatCreateVecs(A,&global,NULL);CHKERRQ(ierr);
  ierr = VecScatterCreate(global,from,is->x,to,&is->ctx);CHKERRQ(ierr);
  ierr = VecDestroy(&global);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
Example #10
void PetscVector::copyFromArray( double v[] )
{
    int ierr;

    Vec sv;
    IS is;
    VecScatter ctx;

    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF, n, v, &sv);
    assert(ierr == 0);
    ierr = ISCreateStride(PETSC_COMM_WORLD, n, 0, 1, &is);
    assert( ierr  == 0);
    ierr = VecScatterCreate( sv, is, pv, is, &ctx);
    assert( ierr  == 0);

    ierr = VecScatterBegin( sv, pv,INSERT_VALUES,SCATTER_FORWARD,
                            ctx);
    assert( ierr  == 0);
    ierr = VecScatterEnd( sv, pv,INSERT_VALUES,SCATTER_FORWARD,
                          ctx);
    assert( ierr  == 0);

    ierr = VecScatterDestroy(ctx);
    assert( ierr  == 0);
    ierr = ISDestroy( is );
    assert(ierr == 0);
    ierr = VecDestroy( sv );
    assert(ierr == 0);
}
Example #11
Real SAMpatchPETSc::normL2(const Vector& x, char dofType) const
{
#ifdef HAVE_MPI
  if (adm.isParallel()) {
    if (dofIS.find(dofType) == dofIS.end())
      setupIS(dofType);

    Vec lx;
    VecCreateSeqWithArray(PETSC_COMM_SELF, 1, x.size(), x.data(), &lx);
    Vec gx;
    VecCreate(*adm.getCommunicator(), &gx);
    VecSetSizes(gx, dofIS[dofType].nDofs, PETSC_DETERMINE);
    VecSetFromOptions(gx);
    PetscInt n;
    VecGetSize(gx, &n);

    if (!dofIS[dofType].scatterCreated) {
      VecScatterCreate(lx, dofIS[dofType].local, gx, dofIS[dofType].global, &dofIS[dofType].ctx);
      dofIS[dofType].scatterCreated = true;
    }

    VecScatterBegin(dofIS[dofType].ctx, lx, gx, INSERT_VALUES, SCATTER_FORWARD);
    VecScatterEnd(dofIS[dofType].ctx, lx, gx, INSERT_VALUES, SCATTER_FORWARD);
    PetscReal d;
    VecNorm(gx, NORM_2, &d);
    VecDestroy(&lx);
    VecDestroy(&gx);

    return d / sqrt(double(n));
  }
#endif

  return this->SAM::normL2(x, dofType);
}
Example #12
File: vecd.c Project: xyuan/dohp
/** Create a cache for Dirichlet part of closure vector, and scatter from global closure to Dirichlet cache.

@arg[in] gvec Global vector
@arg[out] dcache New vector to hold the Dirichlet values
@arg[out] dscat Scatter from global closure to \a dcache

@note This could be local but it doesn't cost anything to make it global.
**/
dErr VecDohpCreateDirichletCache(Vec gvec,Vec *dcache,VecScatter *dscat)
{
  MPI_Comm comm;
  dErr     err;
  dBool    isdohp;
  IS       from;
  Vec      gc;
  dInt     n,nc,crstart;

  dFunctionBegin;
  dValidHeader(gvec,VEC_CLASSID,1);
  dValidPointer(dcache,2);
  dValidPointer(dscat,3);
  err = PetscTypeCompare((PetscObject)gvec,VECDOHP,&isdohp);dCHK(err);
  if (!isdohp) dERROR(PETSC_COMM_SELF,PETSC_ERR_SUP,"Vec type %s",((PetscObject)gvec)->type_name);
  err = PetscObjectGetComm((PetscObject)gvec,&comm);dCHK(err);
  err = VecGetLocalSize(gvec,&n);dCHK(err);
  err = VecDohpGetClosure(gvec,&gc);dCHK(err);
  err = VecGetLocalSize(gc,&nc);dCHK(err);
  err = VecGetOwnershipRange(gc,&crstart,NULL);dCHK(err);
  err = VecCreateMPI(comm,nc-n,PETSC_DECIDE,dcache);dCHK(err);
  err = ISCreateStride(comm,nc-n,crstart+n,1,&from);dCHK(err);
  err = VecScatterCreate(gc,from,*dcache,NULL,dscat);dCHK(err);
  err = VecDohpRestoreClosure(gvec,&gc);dCHK(err);
  err = ISDestroy(&from);dCHK(err);
  /* \todo deal with rotations */
  dFunctionReturn(0);
}
Example #13
/*@
      VecScatterCreateToAll - Creates a vector and a scatter context that copies all 
          vector values to each processor

  Collective

  Input Parameter: 
.  vin  - input MPIVEC

  Output Parameter:
+  ctx - scatter context
-  vout - output SEQVEC that is large enough to scatter into

  Level: intermediate

   Note: vout may be PETSC_NULL [PETSC_NULL_OBJECT from fortran] if you do not 
   need to have it created

   Usage:
$        VecScatterCreateToAll(vin,&ctx,&vout);
$
$        // scatter as many times as you need 
$        VecScatterBegin(ctx,vin,vout,INSERT_VALUES,SCATTER_FORWARD);
$        VecScatterEnd(ctx,vin,vout,INSERT_VALUES,SCATTER_FORWARD);
$
$        // destroy scatter context and local vector when no longer needed
$        VecScatterDestroy(ctx);
$        VecDestroy(vout);

    Do NOT create a vector and then pass it in as the final argument vout! vout is created by this routine
  automatically (unless you pass PETSC_NULL in for that argument if you do not need it).

.seealso VecScatterCreate(), VecScatterCreateToZero(), VecScatterBegin(), VecScatterEnd()

@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecScatterCreateToAll(Vec vin,VecScatter *ctx,Vec *vout)
{

  PetscErrorCode ierr;
  PetscInt       N;
  IS             is;
  Vec            tmp;
  Vec           *tmpv;
  PetscTruth     tmpvout = PETSC_FALSE;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(vin,VEC_COOKIE,1);
  PetscValidType(vin,1);
  PetscValidPointer(ctx,2);
  if (vout) {
    PetscValidPointer(vout,3);
    tmpv = vout;
  } else {
    tmpvout = PETSC_TRUE;
    tmpv = &tmp;
  }

  /* Create a seq vec on each proc, with the same size as the original mpi vec */
  ierr = VecGetSize(vin,&N);CHKERRQ(ierr);
  ierr = VecCreateSeq(PETSC_COMM_SELF,N,tmpv);CHKERRQ(ierr);
  /* Create the VecScatter ctx with the communication info */
  ierr = ISCreateStride(PETSC_COMM_SELF,N,0,1,&is);CHKERRQ(ierr);
  ierr = VecScatterCreate(vin,is,*tmpv,is,ctx);CHKERRQ(ierr);
  ierr = ISDestroy(is);CHKERRQ(ierr);
  if (tmpvout) {ierr = VecDestroy(*tmpv);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
Example #14
PetscErrorCode MatSetUpMultiply_MPIDense(Mat mat)
{
  Mat_MPIDense *mdn = (Mat_MPIDense*)mat->data;
  PetscErrorCode ierr;
  IS           from,to;
  Vec          gvec;

  PetscFunctionBegin;
  /* Create local vector that is used to scatter into */
  ierr = VecCreateSeq(PETSC_COMM_SELF,mat->cmap->N,&mdn->lvec);CHKERRQ(ierr);

  /* Create temporary index set for building scatter gather */
  ierr = ISCreateStride(((PetscObject)mat)->comm,mat->cmap->N,0,1,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,mat->cmap->N,0,1,&to);CHKERRQ(ierr);

  /* Create temporary global vector to generate scatter context */
  /* n    = mdn->cowners[mdn->rank+1] - mdn->cowners[mdn->rank]; */

  ierr = VecCreateMPIWithArray(((PetscObject)mat)->comm,1,mdn->nvec,mat->cmap->N,PETSC_NULL,&gvec);CHKERRQ(ierr);

  /* Generate the scatter context */
  ierr = VecScatterCreate(gvec,from,mdn->lvec,to,&mdn->Mvctx);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(mat,mdn->Mvctx);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(mat,mdn->lvec);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(mat,from);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(mat,to);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(mat,gvec);CHKERRQ(ierr);

  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  ierr = VecDestroy(&gvec);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
Example #15
PetscErrorCode RetrieveVecPoints(Vec x, int Npt, int *Pos, double *ptValues)
{
  PetscErrorCode ierr;
  Vec T;
  VecScatter scatter;
  IS from, to;
  ierr = VecCreateSeq(PETSC_COMM_SELF, Npt, &T);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PETSC_COMM_SELF,Npt, Pos,PETSC_COPY_VALUES, &from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,Npt,0,1, &to);CHKERRQ(ierr);
  ierr = VecScatterCreate(x,from,T,to,&scatter);CHKERRQ(ierr);
  ierr = VecScatterBegin(scatter,x,T,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(scatter,x,T,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  
  int ix[Npt];
  int i;
  for(i=0; i<Npt; i++)
    ix[i]=i;
  
  ierr = VecGetValues(T,Npt,ix,ptValues);CHKERRQ(ierr);

  ierr = ISDestroy(&from);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&scatter);CHKERRQ(ierr);
  ierr = VecDestroy(&T);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
Example #16
PetscErrorCode VecReorder(Vec r, Vec order, Vec or){
    PetscErrorCode err;
    PetscInt n_local, low, high;
    PetscInt *to_idx, *from_idx;
    PetscScalar *o_array;
    IS to, from;
    VecScatter scatter;
    int i;

    err = VecGetLocalSize(r, &n_local); CHKERRQ(err);
    err = PetscMalloc1(n_local, &to_idx); CHKERRQ(err);
    err = PetscMalloc1(n_local, &from_idx); CHKERRQ(err);
    err = VecGetOwnershipRange(r, &low, &high); CHKERRQ(err);
    err = VecGetArray(order, &o_array); CHKERRQ(err);
    for(i = 0; i < n_local; i++){
        /* o_array is the local array of 'order': entry i holds the target global
           index for the locally owned entry low + i of r */
        to_idx[i] = (PetscInt) o_array[i];
        from_idx[i] = low + i;
    }
    err = VecRestoreArray(order, &o_array); CHKERRQ(err);

    err = ISCreateGeneral(PETSC_COMM_SELF, n_local, to_idx, PETSC_OWN_POINTER, &to); CHKERRQ(err);
    err = ISCreateGeneral(PETSC_COMM_SELF, n_local, from_idx, PETSC_OWN_POINTER, &from); CHKERRQ(err);

    err = VecScatterCreate(r, from, or, to, &scatter); CHKERRQ(err);
    err = VecScatterBegin(scatter, r, or, INSERT_VALUES, SCATTER_FORWARD); CHKERRQ(err);
    err = VecScatterEnd(scatter, r, or, INSERT_VALUES, SCATTER_FORWARD); CHKERRQ(err);

    /* The index sets took ownership of to_idx/from_idx (PETSC_OWN_POINTER),
       so they are released by ISDestroy() rather than PetscFree() */
    err = VecScatterDestroy(&scatter); CHKERRQ(err);
    err = ISDestroy(&to); CHKERRQ(err);
    err = ISDestroy(&from); CHKERRQ(err);

    return err;
}
Example #17
    NonlocalCollection(Mat depends_on, IS interest_set) {
	find_influences(depends_on, interest_set, &nodes);

	PetscInt local_size;
	ISGetLocalSize(nodes, &local_size);
	VecCreateMPI(PETSC_COMM_WORLD, local_size, PETSC_DECIDE, &vec);
	IS onto_index_set;
	describe_partition(vec, &onto_index_set);
	PetscInt begin;
	PetscInt end;
	VecGetOwnershipRange(vec, &begin, &end);
	const PetscInt *indices;
	ISGetIndices(nodes, &indices);
	assert(local_size == end-begin);
	for (int ii=0; ii<local_size; ii++) {
	    map[indices[ii]] = ii+begin;
	}
	ISRestoreIndices(nodes, &indices);
	Vec w;
	MatGetVecs(depends_on, PETSC_NULL, &w);
	VecScatterCreate(w, nodes, vec, onto_index_set, &scatter);
	VecDestroy(w);

	ISDestroy(onto_index_set);
    }
Example #18
File: ex8.c Project: ZJLi2013/petsc
/*
    Given a DMDA generates a VecScatter context that will deliver a slice
  of the global vector to each processor. In this example, each processor
  receives the values i=*, j=*, k=rank, i.e. one z plane.

  Note: This code is written assuming only one degree of freedom per node.
  For multiple degrees of freedom per node use ISCreateBlock()
  instead of ISCreateGeneral().
*/
PetscErrorCode GenerateSliceScatter(DM da,VecScatter *scatter,Vec *vslice)
{
  AO             ao;
  PetscInt       M,N,P,nslice,*sliceindices,count,i,j;
  PetscMPIInt    rank;
  PetscErrorCode ierr;
  MPI_Comm       comm;
  Vec            vglobal;
  IS             isfrom,isto;

  ierr = PetscObjectGetComm((PetscObject)da,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);

  ierr = DMDAGetAO(da,&ao);CHKERRQ(ierr);
  ierr = DMDAGetInfo(da,0,&M,&N,&P,0,0,0,0,0,0,0,0,0);CHKERRQ(ierr);

  /*
     nslice is the number of degrees of freedom in this processor's slice;
     if there are more processors than z planes, the extra processors get 0
     elements in their slice.
  */
  if (rank < P) nslice = M*N;
  else nslice = 0;

  /*
     Generate the local vector to hold this processors slice
  */
  ierr = VecCreateSeq(PETSC_COMM_SELF,nslice,vslice);CHKERRQ(ierr);
  ierr = DMCreateGlobalVector(da,&vglobal);CHKERRQ(ierr);

  /*
       Generate the indices for the slice in the "natural" global ordering
     Note: this is just an example, one could select any subset of nodes
    on each processor. Just list them in the global natural ordering.

  */
  ierr = PetscMalloc1((nslice+1),&sliceindices);CHKERRQ(ierr);
  count = 0;
  if (rank < P) {
    for (j=0; j<N; j++) {
      for (i=0; i<M; i++) {
         sliceindices[count++] = rank*M*N + j*M + i;
      }
    }
  }
  /*
      Convert the indices to the "PETSc" global ordering
  */
  ierr = AOApplicationToPetsc(ao,nslice,sliceindices);CHKERRQ(ierr);

  /* Create the "from" and "to" index set */
  /* This is to scatter from the global vector */
  ierr = ISCreateGeneral(PETSC_COMM_SELF,nslice,sliceindices,PETSC_OWN_POINTER,&isfrom);CHKERRQ(ierr);
  /* This is to gather into the local vector */
  ierr = ISCreateStride(PETSC_COMM_SELF,nslice,0,1,&isto);CHKERRQ(ierr);
  ierr = VecScatterCreate(vglobal,isfrom,*vslice,isto,scatter);CHKERRQ(ierr);
  ierr = ISDestroy(&isfrom);CHKERRQ(ierr);
  ierr = ISDestroy(&isto);CHKERRQ(ierr);
  return 0;
}
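A minimal sketch of how the scatter produced by GenerateSliceScatter might be applied (da and a compatible global vector vglobal are assumed to exist; names are illustrative):
/* Sketch only: deliver one z plane of the global vector to each process. */
Vec        vslice;
VecScatter scatter;

ierr = GenerateSliceScatter(da,&scatter,&vslice);CHKERRQ(ierr);
ierr = VecScatterBegin(scatter,vglobal,vslice,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
ierr = VecScatterEnd(scatter,vglobal,vslice,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
/* ... each rank now holds its slice in vslice ... */
ierr = VecScatterDestroy(&scatter);CHKERRQ(ierr);
ierr = VecDestroy(&vslice);CHKERRQ(ierr);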
Example #19
static PetscErrorCode PCBDDCScalingSetUp_Deluxe_Private(PC pc)
{
  PC_BDDC                *pcbddc=(PC_BDDC*)pc->data;
  PCBDDCDeluxeScaling    deluxe_ctx=pcbddc->deluxe_ctx;
  PCBDDCSubSchurs        sub_schurs = pcbddc->sub_schurs;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  if (!sub_schurs->n_subs) {
    PetscFunctionReturn(0);
  }

  /* Create work vectors for sequential part of deluxe */
  ierr = MatCreateVecs(sub_schurs->S_Ej_all,&deluxe_ctx->seq_work1,&deluxe_ctx->seq_work2);CHKERRQ(ierr);

  /* Compute deluxe sequential scatter */
  if (sub_schurs->reuse_mumps && !sub_schurs->is_dir) {
    PCBDDCReuseMumps reuse_mumps = sub_schurs->reuse_mumps;
    ierr = PetscObjectReference((PetscObject)reuse_mumps->correction_scatter_B);CHKERRQ(ierr);
    deluxe_ctx->seq_scctx = reuse_mumps->correction_scatter_B;
  } else {
    ierr = VecScatterCreate(pcbddc->work_scaling,sub_schurs->is_Ej_all,deluxe_ctx->seq_work1,NULL,&deluxe_ctx->seq_scctx);CHKERRQ(ierr);
  }

  /* Create Mat object for deluxe scaling */
  ierr = PetscObjectReference((PetscObject)sub_schurs->S_Ej_all);CHKERRQ(ierr);
  deluxe_ctx->seq_mat = sub_schurs->S_Ej_all;
  if (sub_schurs->sum_S_Ej_all) { /* if this matrix is present, then we need to create the KSP object to invert it */
    PC               pc_temp;
    MatSolverPackage solver=NULL;
    char             ksp_prefix[256];
    size_t           len;

    ierr = KSPCreate(PETSC_COMM_SELF,&deluxe_ctx->seq_ksp);CHKERRQ(ierr);
    ierr = KSPSetOperators(deluxe_ctx->seq_ksp,sub_schurs->sum_S_Ej_all,sub_schurs->sum_S_Ej_all);CHKERRQ(ierr);
    ierr = KSPSetType(deluxe_ctx->seq_ksp,KSPPREONLY);CHKERRQ(ierr);
    ierr = KSPGetPC(deluxe_ctx->seq_ksp,&pc_temp);CHKERRQ(ierr);
    ierr = PCSetType(pc_temp,PCLU);CHKERRQ(ierr);
    ierr = KSPGetPC(pcbddc->ksp_D,&pc_temp);CHKERRQ(ierr);
    ierr = PCFactorGetMatSolverPackage(pc_temp,(const MatSolverPackage*)&solver);CHKERRQ(ierr);
    if (solver) {
      PC     new_pc;
      PCType type;

      ierr = PCGetType(pc_temp,&type);CHKERRQ(ierr);
      ierr = KSPGetPC(deluxe_ctx->seq_ksp,&new_pc);CHKERRQ(ierr);
      ierr = PCSetType(new_pc,type);CHKERRQ(ierr);
      ierr = PCFactorSetMatSolverPackage(new_pc,solver);CHKERRQ(ierr);
    }
    ierr = PetscStrlen(((PetscObject)(pcbddc->ksp_D))->prefix,&len);CHKERRQ(ierr);
    len -= 10; /* remove "dirichlet_" */
    ierr = PetscStrncpy(ksp_prefix,((PetscObject)(pcbddc->ksp_D))->prefix,len+1);CHKERRQ(ierr);
    ierr = PetscStrcat(ksp_prefix,"deluxe_");CHKERRQ(ierr);
    ierr = KSPSetOptionsPrefix(deluxe_ctx->seq_ksp,ksp_prefix);CHKERRQ(ierr);
    ierr = KSPSetFromOptions(deluxe_ctx->seq_ksp);CHKERRQ(ierr);
    ierr = KSPSetUp(deluxe_ctx->seq_ksp);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
Example #20
EXTERN_C_BEGIN

void PETSC_STDCALL vecscattercreate_(Vec *xin,IS *ix,Vec *yin,IS *iy,VecScatter *newctx,PetscErrorCode *ierr)
{
  CHKFORTRANNULLOBJECTDEREFERENCE(ix);
  CHKFORTRANNULLOBJECTDEREFERENCE(iy);
  *ierr = VecScatterCreate(*xin,*ix,*yin,*iy,newctx);
}
Example #21
int main(int argc,char **argv)
{
  PetscErrorCode ierr;
  PetscInt       i,blocks[2],nlocal;
  PetscMPIInt    size,rank;
  PetscScalar    value;
  Vec            x,y;
  IS             is1,is2;
  VecScatter     ctx = 0;

  ierr = PetscInitialize(&argc,&argv,(char*)0,help);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);

  if (size != 2) SETERRQ(PETSC_COMM_SELF,1,"Must run with 2 processors");

  /* create two vectors */
  if (!rank) nlocal = 8;
  else nlocal = 4;
  ierr = VecCreate(PETSC_COMM_WORLD,&x);CHKERRQ(ierr);
  ierr = VecSetSizes(x,nlocal,12);CHKERRQ(ierr);
  ierr = VecSetFromOptions(x);CHKERRQ(ierr);
  ierr = VecCreateSeq(PETSC_COMM_SELF,8,&y);CHKERRQ(ierr);

  /* create two index sets */
  if (!rank) {
    blocks[0] = 0; blocks[1] = 2;
  } else {
    blocks[0] = 1; blocks[1] = 2;
  }
  ierr = ISCreateBlock(PETSC_COMM_SELF,4,2,blocks,PETSC_COPY_VALUES,&is1);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,8,0,1,&is2);CHKERRQ(ierr);

  for (i=0; i<12; i++) {
    value = i;
    ierr = VecSetValues(x,1,&i,&value,INSERT_VALUES);CHKERRQ(ierr);
  }
  ierr = VecAssemblyBegin(x);CHKERRQ(ierr);
  ierr = VecAssemblyEnd(x);CHKERRQ(ierr);

  ierr = VecScatterCreate(x,is1,y,is2,&ctx);CHKERRQ(ierr);
  ierr = VecScatterBegin(ctx,x,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(ctx,x,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&ctx);CHKERRQ(ierr);

  ierr = PetscSleep(2.0*rank);CHKERRQ(ierr);
  ierr = VecView(y,PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);

  ierr = VecDestroy(&x);CHKERRQ(ierr);
  ierr = VecDestroy(&y);CHKERRQ(ierr);
  ierr = ISDestroy(&is1);CHKERRQ(ierr);
  ierr = ISDestroy(&is2);CHKERRQ(ierr);

  ierr = PetscFinalize();
  return 0;
}
Example #22
/*@
   VecMPISetGhost - Sets the ghost points for an MPI ghost vector

   Collective on Vec

   Input Parameters:
+  vv - the MPI vector
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)


   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   You must call this AFTER you have set the type of the vector (with VecSetType()) and the size (with VecSetSizes()).

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecMPISetGhost(Vec vv,PetscInt nghost,const PetscInt ghosts[])
{
  PetscErrorCode ierr;
  PetscBool      flg;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)vv,VECMPI,&flg);CHKERRQ(ierr);
  /* if already a fully existent VECMPI then basically destroy it and rebuild with ghosting */
  if (flg) {
    PetscInt               n,N;
    Vec_MPI                *w;
    PetscScalar            *larray;
    IS                     from,to;
    ISLocalToGlobalMapping ltog;
    PetscInt               rstart,i,*indices;
    MPI_Comm               comm = ((PetscObject)vv)->comm;

    n = vv->map->n;
    N = vv->map->N;
    ierr = (*vv->ops->destroy)(vv);CHKERRQ(ierr);
    ierr = VecSetSizes(vv,n,N);CHKERRQ(ierr);
    ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,nghost,PETSC_NULL);CHKERRQ(ierr);
    w    = (Vec_MPI *)(vv)->data;
    /* Create local representation */
    ierr = VecGetArray(vv,&larray);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(vv,w->localrep);CHKERRQ(ierr);
    ierr = VecRestoreArray(vv,&larray);CHKERRQ(ierr);

    /*
     Create scatter context for scattering (updating) ghost values
     */
    ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
    ierr = VecScatterCreate(vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(vv,w->localupdate);CHKERRQ(ierr);
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);

    /* set local to global mapping for ghosted vector */
    ierr = PetscMalloc((n+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
    for (i=0; i<n; i++) {
      indices[i] = rstart + i;
    }
    for (i=0; i<nghost; i++) {
      indices[n+i] = ghosts[i];
    }
    ierr = ISLocalToGlobalMappingCreate(comm,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
    ierr = VecSetLocalToGlobalMapping(vv,ltog);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  } else if (vv->ops->create == VecCreate_MPI) SETERRQ(((PetscObject)vv)->comm,PETSC_ERR_ARG_WRONGSTATE,"Must set local or global size before setting ghosting");
  else if (!((PetscObject)vv)->type_name) SETERRQ(((PetscObject)vv)->comm,PETSC_ERR_ARG_WRONGSTATE,"Must set type to VECMPI before ghosting");
  PetscFunctionReturn(0);
}
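A short, hedged sketch of the calling sequence the Notes above describe (local size and ghost indices are illustrative only):
/* Sketch only: set sizes and type first, then add ghosting, then update ghosts. */
Vec      v;
PetscInt nlocal    = 4;
PetscInt ghosts[2] = {0,1};   /* illustrative global indices owned by other ranks */

ierr = VecCreate(PETSC_COMM_WORLD,&v);CHKERRQ(ierr);
ierr = VecSetSizes(v,nlocal,PETSC_DECIDE);CHKERRQ(ierr);
ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);
ierr = VecMPISetGhost(v,2,ghosts);CHKERRQ(ierr);
/* ... assemble v ... then pull the ghost values from their owners */
ierr = VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
ierr = VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
ierr = VecDestroy(&v);CHKERRQ(ierr);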
Example #23
/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  bs - block size
.  n - local vector length
.  N - global vector length (or PETSC_DECIDE to have calculated if n is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or PETSC_NULL if not needed), counts are by block not by index, these do not need to be in increasing order (sorted)
-  array - the space to store the vector values (as long as n + nghost*bs)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^creating ghosted
   Concepts: vectors^creating with array

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostWithArray(), VecCreateGhostBlock()

@*/
PetscErrorCode  VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,nb,*indices;

  PetscFunctionBegin;
  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  if (n % bs)                 SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size");
  ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
  /* Create global representation */
  ierr = VecCreate(comm,vv);CHKERRQ(ierr);
  ierr = VecSetSizes(*vv,n,N);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vv,bs);CHKERRQ(ierr);
  ierr = VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array);CHKERRQ(ierr);
  w    = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  ierr = VecGetArray(*vv,&larray);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,bs,n+bs*nghost,larray,&w->localrep);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localrep);CHKERRQ(ierr);
  ierr = VecRestoreArray(*vv,&larray);CHKERRQ(ierr);

  /*
       Create scatter context for scattering (updating) ghost values
  */
  ierr = ISCreateBlock(comm,bs,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(*vv,w->localupdate);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);

  /* set local to global mapping for ghosted vector */
  nb = n/bs;
  ierr = PetscMalloc((nb+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
  for (i=0; i<nb; i++) {
    indices[i] = rstart + i*bs;
  }
  for (i=0; i<nghost; i++) {
    indices[nb+i] = ghosts[i];
  }
  ierr = ISLocalToGlobalMappingCreate(comm,nb+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
  ierr = VecSetLocalToGlobalMappingBlock(*vv,ltog);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
Example #24
int LargeVecScatterCreate(Vec xvec[],IS isx[],Vec y,IS isy[],VecScatter ctx[],PetscInt nvec)
{
  PetscErrorCode ierr;
  PetscInt       i;

  /* Create one scatter context per input vector, all targeting the same vector y */
  for (i=0; i<nvec; i++) {
    ierr = VecScatterCreate(xvec[i],isx[i],y,isy[i],ctx+i);CHKERRQ(ierr);
  }
  return 0;
}
Example #25
int main(int argc,char **argv)
{
  PetscErrorCode ierr;
  PetscInt       n = 5,N,i;
  PetscMPIInt    size,rank;
  PetscScalar    value,zero = 0.0;
  Vec            x,y;
  IS             is1,is2;
  VecScatter     ctx = 0;

  ierr = PetscInitialize(&argc,&argv,(char*)0,help);CHKERRQ(ierr); 
  ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);

  /* create two vectors */
  N = size*n; 
  ierr = VecCreate(PETSC_COMM_WORLD,&y);CHKERRQ(ierr);
  ierr = VecSetSizes(y,n,PETSC_DECIDE);CHKERRQ(ierr);
  ierr = VecSetFromOptions(y);CHKERRQ(ierr);

  ierr = VecCreate(PETSC_COMM_WORLD,&x);CHKERRQ(ierr);
  ierr = VecSetSizes(x,n,PETSC_DECIDE);CHKERRQ(ierr);
  ierr = VecSetFromOptions(x);CHKERRQ(ierr);

  /* create two index sets */
  ierr = ISCreateStride(PETSC_COMM_WORLD,n,n*rank,1,&is1);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_WORLD,n,(n*(rank+1))%N,1,&is2);CHKERRQ(ierr);

  /* fill local part of parallel vector x */
  value = (PetscScalar)(rank+1); 
  for (i=n*rank; i<n*(rank+1); i++) {
    ierr = VecSetValues(x,1,&i,&value,INSERT_VALUES);CHKERRQ(ierr);
  }
  ierr = VecAssemblyBegin(x);CHKERRQ(ierr);
  ierr = VecAssemblyEnd(x);CHKERRQ(ierr);

  ierr = VecSet(y,zero);CHKERRQ(ierr);

  ierr = VecScatterCreate(x,is1,y,is2,&ctx);CHKERRQ(ierr);
  ierr = VecScatterBegin(ctx,x,y,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(ctx,x,y,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterDestroy(ctx);CHKERRQ(ierr);
  
  ierr = VecView(y,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);

  ierr = VecDestroy(x);CHKERRQ(ierr);
  ierr = VecDestroy(y);CHKERRQ(ierr);
  ierr = ISDestroy(is1);CHKERRQ(ierr);
  ierr = ISDestroy(is2);CHKERRQ(ierr);

  ierr = PetscFinalize();CHKERRQ(ierr);
  return 0;
}
Example #26
int main(int argc,char **argv)
{
  PetscErrorCode ierr;
  PetscInt       n = 5,i,idx2[3] = {0,2,3},idx1[3] = {0,1,2};
  PetscMPIInt    size,rank;
  PetscScalar    value;
  Vec            x,y;
  IS             is1,is2;
  VecScatter     ctx = 0;

  PetscInitialize(&argc,&argv,(char*)0,help);
  ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);

  /* create two vectors */
  ierr = VecCreate(PETSC_COMM_WORLD,&x);CHKERRQ(ierr);
  ierr = VecSetSizes(x,PETSC_DECIDE,size*n);CHKERRQ(ierr);
  ierr = VecSetFromOptions(x);CHKERRQ(ierr);
  ierr = VecCreateSeq(PETSC_COMM_SELF,n,&y);CHKERRQ(ierr);

  /* create two index sets */
  ierr = ISCreateGeneral(PETSC_COMM_SELF,3,idx1,PETSC_COPY_VALUES,&is1);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PETSC_COMM_SELF,3,idx2,PETSC_COPY_VALUES,&is2);CHKERRQ(ierr);

  /* fill local part of parallel vector */
  for (i=n*rank; i<n*(rank+1); i++) {
    value = (PetscScalar) i;
    ierr = VecSetValues(x,1,&i,&value,INSERT_VALUES);CHKERRQ(ierr);
  }
  ierr = VecAssemblyBegin(x);CHKERRQ(ierr);
  ierr = VecAssemblyEnd(x);CHKERRQ(ierr);

  ierr = VecView(x,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);

  ierr = VecSet(y,-1.0);CHKERRQ(ierr);

  ierr = VecScatterCreate(x,is1,y,is2,&ctx);CHKERRQ(ierr);
  ierr = VecScatterBegin(ctx,x,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(ctx,x,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&ctx);CHKERRQ(ierr);

  if (!rank) {
    ierr = PetscPrintf(PETSC_COMM_SELF,"scattered vector\n");CHKERRQ(ierr);
    ierr = VecView(y,PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);
  }
  ierr = ISDestroy(&is1);CHKERRQ(ierr);
  ierr = ISDestroy(&is2);CHKERRQ(ierr);
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  ierr = VecDestroy(&y);CHKERRQ(ierr);

  ierr = PetscFinalize();
  return 0;
}
Example #27
/*@C
   DMDAGetRay - Returns a vector on process zero that contains a row or column of the values in a DMDA vector

   Collective on DMDA

   Input Parameters:
+  da - the distributed array
.  vec - the vector
.  dir - Cartesian direction, either DMDA_X, DMDA_Y, or DMDA_Z
-  gp - global grid point number in this direction

   Output Parameters:
+  newvec - the new vector that can hold the values (size zero on all processes except process 0)
-  scatter - the VecScatter that will map from the original vector to the slice

   Level: advanced

   Notes:
   All processors that share the DMDA must call this with the same gp value

.keywords: distributed array, get, processor subset
@*/
PetscErrorCode  DMDAGetRay(DM da,DMDADirection dir,PetscInt gp,Vec *newvec,VecScatter *scatter)
{
  PetscMPIInt    rank;
  DM_DA          *dd = (DM_DA*)da->data;
  PetscErrorCode ierr;
  IS             is;
  AO             ao;
  Vec            vec;
  PetscInt       *indices,i,j;

  PetscFunctionBegin;
  if (dd->dim == 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Cannot get slice from 1d DMDA");
  if (dd->dim == 3) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Cannot get slice from 3d DMDA");
  ierr = DMDAGetAO(da,&ao);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)da),&rank);CHKERRQ(ierr);
  if (!rank) {
    if (dir == DMDA_Y) {
      ierr       = PetscMalloc(dd->w*dd->M*sizeof(PetscInt),&indices);CHKERRQ(ierr);
      indices[0] = gp*dd->M*dd->w;
      for (i=1; i<dd->M*dd->w; i++) indices[i] = indices[i-1] + 1;

      ierr = AOApplicationToPetsc(ao,dd->M*dd->w,indices);CHKERRQ(ierr);
      ierr = VecCreate(PETSC_COMM_SELF,newvec);CHKERRQ(ierr);
      ierr = VecSetBlockSize(*newvec,dd->w);CHKERRQ(ierr);
      ierr = VecSetSizes(*newvec,dd->M*dd->w,PETSC_DETERMINE);CHKERRQ(ierr);
      ierr = VecSetType(*newvec,VECSEQ);CHKERRQ(ierr);
      ierr = ISCreateGeneral(PETSC_COMM_SELF,dd->w*dd->M,indices,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
    } else if (dir == DMDA_X) {
      ierr       = PetscMalloc(dd->w*dd->N*sizeof(PetscInt),&indices);CHKERRQ(ierr);
      indices[0] = dd->w*gp;
      for (j=1; j<dd->w; j++) indices[j] = indices[j-1] + 1;
      for (i=1; i<dd->N; i++) {
        indices[i*dd->w] = indices[i*dd->w-1] + dd->w*dd->M - dd->w + 1;
        for (j=1; j<dd->w; j++) indices[i*dd->w + j] = indices[i*dd->w + j - 1] + 1;
      }
      ierr = AOApplicationToPetsc(ao,dd->w*dd->N,indices);CHKERRQ(ierr);
      ierr = VecCreate(PETSC_COMM_SELF,newvec);CHKERRQ(ierr);
      ierr = VecSetBlockSize(*newvec,dd->w);CHKERRQ(ierr);
      ierr = VecSetSizes(*newvec,dd->N*dd->w,PETSC_DETERMINE);CHKERRQ(ierr);
      ierr = VecSetType(*newvec,VECSEQ);CHKERRQ(ierr);
      ierr = ISCreateGeneral(PETSC_COMM_SELF,dd->w*dd->N,indices,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
    } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Unknown DMDADirection");
  } else {
    ierr = VecCreateSeq(PETSC_COMM_SELF,0,newvec);CHKERRQ(ierr);
    ierr = ISCreateGeneral(PETSC_COMM_SELF,0,0,PETSC_COPY_VALUES,&is);CHKERRQ(ierr);
  }
  ierr = DMGetGlobalVector(da,&vec);CHKERRQ(ierr);
  ierr = VecScatterCreate(vec,is,*newvec,NULL,scatter);CHKERRQ(ierr);
  ierr = DMRestoreGlobalVector(da,&vec);CHKERRQ(ierr);
  ierr = ISDestroy(&is);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
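A hedged usage sketch for DMDAGetRay (da, a global vector vec, a grid point gp, and the process rank are assumed to exist; only process 0 ends up holding the values):
/* Sketch only: gather row j = gp of a 2d DMDA vector onto process 0. */
Vec        ray;
VecScatter scatter;

ierr = DMDAGetRay(da,DMDA_Y,gp,&ray,&scatter);CHKERRQ(ierr);
ierr = VecScatterBegin(scatter,vec,ray,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
ierr = VecScatterEnd(scatter,vec,ray,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
if (!rank) {ierr = VecView(ray,PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);}
ierr = VecScatterDestroy(&scatter);CHKERRQ(ierr);
ierr = VecDestroy(&ray);CHKERRQ(ierr);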
Example #28
int main(int argc,char **argv)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank,size;
  Vec            x,y;
  IS             is1,is2;
  PetscInt       n,N,ix[2],iy[2];
  VecScatter     ctx;

  ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);CHKERRQ(ierr);
  if (size < 3) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_ARG_OUTOFRANGE,"This example needs at least 3 processes");

  /* create two vectors */
  n = 2;
  N = 2*size;

  ierr = VecCreateMPI(PETSC_COMM_WORLD,n,N,&x);CHKERRQ(ierr);
  ierr = VecDuplicate(x,&y);CHKERRQ(ierr);

  /* Specify indices to send from the next process in the ring */
  ix[0] = ((rank+1)*n+0) % N;
  ix[1] = ((rank+1)*n+1) % N;
  /* And put them on the process after that in the ring */
  iy[0] = ((rank+2)*n+0) % N;
  iy[1] = ((rank+2)*n+1) % N;

  /* create two index sets */
  ierr = ISCreateGeneral(PETSC_COMM_WORLD,n,ix,PETSC_USE_POINTER,&is1);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PETSC_COMM_WORLD,n,iy,PETSC_USE_POINTER,&is2);CHKERRQ(ierr);

  ierr = VecSetValue(x,rank*n,rank*n,INSERT_VALUES);CHKERRQ(ierr);
  ierr = VecSetValue(x,rank*n+1,rank*n+1,INSERT_VALUES);CHKERRQ(ierr);

  ierr = VecView(x,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  ierr = PetscPrintf(PETSC_COMM_WORLD,"----\n");CHKERRQ(ierr);

  ierr = VecScatterCreate(x,is1,y,is2,&ctx);CHKERRQ(ierr);
  ierr = VecScatterBegin(ctx,x,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(ctx,x,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&ctx);CHKERRQ(ierr);

  ierr = VecView(y,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);

  ierr = ISDestroy(&is1);CHKERRQ(ierr);
  ierr = ISDestroy(&is2);CHKERRQ(ierr);
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  ierr = VecDestroy(&y);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return ierr;
}
Example #29
int main(int argc,char **argv)
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank;
  PetscInt       n = 5,i;
  PetscScalar    value;
  Vec            x,y;
  IS             is1,is2;
  VecScatter     ctx = 0;

  ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
  ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);

  /* create two vectors */
  ierr = VecCreate(PETSC_COMM_WORLD,&x);CHKERRQ(ierr);
  ierr = VecSetSizes(x,PETSC_DECIDE,size*n);CHKERRQ(ierr);
  ierr = VecSetFromOptions(x);CHKERRQ(ierr);
  ierr = VecCreateSeq(PETSC_COMM_SELF,n,&y);CHKERRQ(ierr);

  /* create two index sets */
  ierr = ISCreateStride(PETSC_COMM_SELF,n,n*rank,1,&is1);CHKERRQ(ierr);
  ierr = ISCreateStride(PETSC_COMM_SELF,n,0,1,&is2);CHKERRQ(ierr);

  /* each processor inserts the entire vector */
  /* this is redundant but tests assembly */
  for (i=0; i<n*size; i++) {
    value = (PetscScalar) i;
    ierr  = VecSetValues(x,1,&i,&value,INSERT_VALUES);CHKERRQ(ierr);
  }
  ierr = VecAssemblyBegin(x);CHKERRQ(ierr);
  ierr = VecAssemblyEnd(x);CHKERRQ(ierr);
  ierr = VecView(x,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);

  ierr = VecScatterCreate(x,is1,y,is2,&ctx);CHKERRQ(ierr);
  ierr = VecScatterBegin(ctx,x,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(ctx,x,y,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&ctx);CHKERRQ(ierr);

  if (!rank) {
    printf("----\n");
    ierr = VecView(y,PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);
  }

  ierr = VecDestroy(&x);CHKERRQ(ierr);
  ierr = VecDestroy(&y);CHKERRQ(ierr);
  ierr = ISDestroy(&is1);CHKERRQ(ierr);
  ierr = ISDestroy(&is2);CHKERRQ(ierr);

  ierr = PetscFinalize();
  return ierr;
}
Example #30
/*
   Sets up a monitor that will display He as a function of space and cluster size for each time step
*/
PetscErrorCode MyMonitorSetUp(TS ts)
{
  DM             da;
  PetscErrorCode ierr;
  PetscInt       xi,xs,xm,*idx,M,xj,cnt = 0,dof = 3*N + N*N;
  const PetscInt *lx;
  Vec            C;
  MyMonitorCtx   *ctx;
  PetscBool      flg;
  IS             is;
  char           ycoor[32];
  PetscReal      valuebounds[4]  = {0, 1.2, 0, 1.2};

  PetscFunctionBeginUser;
  ierr = PetscOptionsHasName(PETSC_NULL,"-mymonitor",&flg);CHKERRQ(ierr);
  if (!flg) PetscFunctionReturn(0);

  ierr = TSGetDM(ts,&da);CHKERRQ(ierr);
  ierr = PetscNew(MyMonitorCtx,&ctx);CHKERRQ(ierr);
  ierr = PetscViewerDrawOpen(((PetscObject)da)->comm,PETSC_NULL,"",PETSC_DECIDE,PETSC_DECIDE,600,400,&ctx->viewer);CHKERRQ(ierr);

  ierr = DMDAGetCorners(da,&xs,PETSC_NULL,PETSC_NULL,&xm,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr);
  ierr = DMDAGetInfo(da,PETSC_IGNORE,&M,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE,PETSC_IGNORE);CHKERRQ(ierr);
  ierr = DMDAGetOwnershipRanges(da,&lx,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr);
  ierr = DMDACreate2d(((PetscObject)da)->comm,DMDA_BOUNDARY_NONE,DMDA_BOUNDARY_NONE,DMDA_STENCIL_STAR,M,N,PETSC_DETERMINE,1,2,1,lx,PETSC_NULL,&ctx->da);CHKERRQ(ierr);
  ierr = DMDASetFieldName(ctx->da,0,"He");CHKERRQ(ierr);
  ierr = DMDASetFieldName(ctx->da,1,"V");CHKERRQ(ierr);
  ierr = DMDASetCoordinateName(ctx->da,0,"X coordinate direction");CHKERRQ(ierr);
  ierr = PetscSNPrintf(ycoor,32,"%D ... Cluster size ... 1",N);CHKERRQ(ierr);
  ierr = DMDASetCoordinateName(ctx->da,1,ycoor);CHKERRQ(ierr);

  ierr = DMCreateGlobalVector(ctx->da,&ctx->He);CHKERRQ(ierr);
  ierr = PetscMalloc(2*N*xm*sizeof(PetscInt),&idx);CHKERRQ(ierr);
  cnt  = 0;
  for (xj=0; xj<N; xj++) {
    for (xi=xs; xi<xs+xm; xi++) {
      idx[cnt++] = dof*xi + xj;
      idx[cnt++] = dof*xi + xj + N;
    }
  }
  ierr = ISCreateGeneral(((PetscObject)ts)->comm,2*N*xm,idx,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
  ierr = TSGetSolution(ts,&C);CHKERRQ(ierr);
  ierr = VecScatterCreate(C,is,ctx->He,PETSC_NULL,&ctx->scatter);CHKERRQ(ierr);
  ierr = ISDestroy(&is);CHKERRQ(ierr);

  /* sets the bounds on the contour plot values so the colors mean the same thing for different timesteps */
  ierr = PetscViewerDrawSetBounds(ctx->viewer,2,valuebounds);CHKERRQ(ierr);

  ierr = TSMonitorSet(ts,MyMonitorMonitor,ctx,MyMonitorDestroy);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}