Example #1
PETScVector::PETScVector(const PetscInt vec_size,
                         const std::vector<PetscInt>& ghost_ids,
                         const bool is_global_size)
    : _size_ghosts{static_cast<PetscInt>(ghost_ids.size())}
    , _has_ghost_id{true}
{
    PetscInt nghosts = static_cast<PetscInt>(ghost_ids.size());
    if (is_global_size)
    {
        // vec_size is the global size; let PETSc decide the local split.
        VecCreateGhost(PETSC_COMM_WORLD, PETSC_DECIDE, vec_size, nghosts,
                       ghost_ids.data(), &_v);
    }
    else
    {
        // vec_size is the local size; assemble the ghosted vector stepwise.
        VecCreate(PETSC_COMM_WORLD, &_v);
        VecSetType(_v, VECMPI);
        VecSetSizes(_v, vec_size, PETSC_DECIDE);
        VecMPISetGhost(_v, nghosts, ghost_ids.data());
    }
    }

    config();
}
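A minimal usage sketch (not from the source; the two-rank layout and the sizes are assumed) of how this constructor might be called:

// Each rank ghosts two entries that are owned by another rank.
std::vector<PetscInt> ghost_ids{0, 5};
PETScVector x(6, ghost_ids, false);    // vec_size = 6 is the local size
PETScVector y(12, ghost_ids, true);    // vec_size = 12 is the global size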
Example #2
PETScVector::PETScVector(const PetscInt vec_size,
                         const std::vector<PetscInt>& ghost_ids,
                         const bool is_global_size) :
                         _size_ghosts(ghost_ids.size()), _has_ghost_id(true)
{
    // Unlike Example #1, the Vec handle itself is heap-allocated and owned
    // by a smart pointer.
    _v.reset(new PETSc_Vec);

    PetscInt nghosts = static_cast<PetscInt>(ghost_ids.size());
    if ( is_global_size )
    {
        VecCreateGhost(PETSC_COMM_WORLD, PETSC_DECIDE, vec_size, nghosts,
                       ghost_ids.data(), _v.get());
    }
    else
    {
        VecCreate(PETSC_COMM_WORLD, _v.get());
        VecSetType(*_v, VECMPI);
        VecSetSizes(*_v, vec_size, PETSC_DECIDE);
        VecMPISetGhost(*_v, nghosts, ghost_ids.data());
    }

    config();
}
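Neither snippet shows cleanup; a minimal sketch of a matching destructor for Example #1's raw Vec member (assumed, not from the source):

PETScVector::~PETScVector()
{
    VecDestroy(&_v);  // also frees the internal ghost scatter context
}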
Example #3
File: ex9.c  Project: Kun-Qu/petsc
static char help[] = "Demonstrates use of VecCreateGhost().\n\n";

#include <petscvec.h>

int main(int argc,char **argv)
{
  PetscMPIInt    rank,size;
  PetscInt       nlocal = 6,nghost = 2,ifrom[2],i,rstart,rend;
  PetscErrorCode ierr;
  PetscBool      flg,flg2;
  PetscScalar    value,*array,*tarray=0;
  Vec            lx,gx,gxs;

  PetscInitialize(&argc,&argv,(char *)0,help);
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);CHKERRQ(ierr);
  if (size != 2) SETERRQ(PETSC_COMM_SELF,1,"Must run example with two processors\n");

  /*
     Construct a two-dimensional graph connecting nlocal degrees of
     freedom per processor. From this we will generate the global
     indices of the needed ghost values.

     For simplicity we generate the entire graph on each processor;
     in a real application the graph would be stored in parallel, but
     this example only demonstrates the management of ghost padding
     with VecCreateGhost().

     In this example we consider the vector as representing 
     degrees of freedom in a one dimensional grid with periodic 
     boundary conditions.

        ----Processor  1---------  ----Processor 2 --------
         0    1   2   3   4    5    6    7   8   9   10   11
                               |----| 
         |-------------------------------------------------|

  */

  if (!rank) {
    ifrom[0] = 11; ifrom[1] = 6; 
  } else {
    ifrom[0] = 0;  ifrom[1] = 5; 
  }

  /*
     Create the vector with two slots for ghost points. Note that both 
     the local vector (lx) and the global vector (gx) share the same 
     array for storing vector values.
  */
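  /*
     Three creation paths are exercised below, chosen by run-time options:
     -allocate ghosts into a user-supplied array, -vecmpisetghost builds
     the vector incrementally, and the default calls VecCreateGhost().
  */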
  ierr = PetscOptionsHasName(PETSC_NULL,"-allocate",&flg);CHKERRQ(ierr);
  ierr = PetscOptionsHasName(PETSC_NULL,"-vecmpisetghost",&flg2);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscMalloc((nlocal+nghost)*sizeof(PetscScalar),&tarray);CHKERRQ(ierr);
    ierr = VecCreateGhostWithArray(PETSC_COMM_WORLD,nlocal,PETSC_DECIDE,nghost,ifrom,tarray,&gxs);CHKERRQ(ierr);
  } else if (flg2) {
    ierr = VecCreate(PETSC_COMM_WORLD,&gxs);CHKERRQ(ierr);
    ierr = VecSetType(gxs,VECMPI);CHKERRQ(ierr);
    ierr = VecSetSizes(gxs,nlocal,PETSC_DECIDE);CHKERRQ(ierr);
    ierr = VecMPISetGhost(gxs,nghost,ifrom);CHKERRQ(ierr);
  } else {
    ierr = VecCreateGhost(PETSC_COMM_WORLD,nlocal,PETSC_DECIDE,nghost,ifrom,&gxs);CHKERRQ(ierr);
  }

  /*
      Test VecDuplicate(): duplicating a ghosted vector preserves the
      ghost layout, so the original gxs can be destroyed right away.
  */
  ierr = VecDuplicate(gxs,&gx);CHKERRQ(ierr);
  ierr = VecDestroy(&gxs);CHKERRQ(ierr);

  /*
     Access the local representation
  */
  ierr = VecGhostGetLocalForm(gx,&lx);CHKERRQ(ierr);

  /*
     Set the values 0,1,2,...,11 into the "global" vector
  */
  ierr = VecGetOwnershipRange(gx,&rstart,&rend);CHKERRQ(ierr);
  for (i=rstart; i<rend; i++) {
    value = (PetscScalar) i;
    ierr  = VecSetValues(gx,1,&i,&value,INSERT_VALUES);CHKERRQ(ierr);
  }
  ierr = VecAssemblyBegin(gx);CHKERRQ(ierr);
  ierr = VecAssemblyEnd(gx);CHKERRQ(ierr);

  /* Scatter the owned values forward into each rank's ghost slots. */
  ierr = VecGhostUpdateBegin(gx,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecGhostUpdateEnd(gx,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  /*
     Print out each vector, including the ghost padding region. 
  */
  ierr = VecGetArray(lx,&array);CHKERRQ(ierr);
  for (i=0; i<nlocal+nghost; i++) {
    ierr = PetscSynchronizedPrintf(PETSC_COMM_WORLD,"%D %G\n",i,PetscRealPart(array[i]));CHKERRQ(ierr);
  }
  ierr = VecRestoreArray(lx,&array);CHKERRQ(ierr);
  ierr = PetscSynchronizedFlush(PETSC_COMM_WORLD);CHKERRQ(ierr);

  ierr = VecGhostRestoreLocalForm(gx,&lx);CHKERRQ(ierr); 
  ierr = VecDestroy(&gx);CHKERRQ(ierr);
  if (flg) {ierr = PetscFree(tarray);CHKERRQ(ierr);}
  ierr = PetscFinalize();
  return 0;
}
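The example only scatters owned values forward into the ghost slots; the complementary pattern (assumed here, not part of ex9.c) accumulates values written into the ghost slots of the local form back into the owning processes' entries:

ierr = VecGhostUpdateBegin(gx,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
ierr = VecGhostUpdateEnd(gx,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);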
Example #4
PETSC_EXTERN void PETSC_STDCALL vecmpisetghost_(Vec vv, PetscInt *nghost,
                                                PetscInt ghosts[], int *__ierr)
{
    /* Auto-generated Fortran binding stub: forwards the Fortran call
       vecmpisetghost() to the C routine VecMPISetGhost(). */
    *__ierr = VecMPISetGhost((Vec)PetscToPointer((vv)), *nghost, ghosts);
}