Example #1
PetscErrorCode MatTransposeMatMultSymbolic_SeqAIJ_SeqDense(Mat A,Mat B,PetscReal fill,Mat *C)
{
  PetscErrorCode      ierr;
  PetscInt            m=A->rmap->n,n=A->cmap->n,BN=B->cmap->N;
  Mat_MatTransMatMult *atb;
  Mat                 Cdense;
  Vec                 bt,ct;
  Mat_SeqDense        *c;

  PetscFunctionBegin;
  ierr = PetscNew(&atb);CHKERRQ(ierr);

  /* create output dense matrix C = A^T*B */
  ierr = MatCreate(PETSC_COMM_SELF,&Cdense);CHKERRQ(ierr);
  ierr = MatSetSizes(Cdense,n,BN,n,BN);CHKERRQ(ierr);
  ierr = MatSetType(Cdense,MATSEQDENSE);CHKERRQ(ierr);
  ierr = MatSeqDenseSetPreallocation(Cdense,NULL);CHKERRQ(ierr);

  /* create vectors bt and ct to hold locally transposed arrays of B and C */
  ierr = VecCreate(PETSC_COMM_SELF,&bt);CHKERRQ(ierr);
  ierr = VecSetSizes(bt,m*BN,m*BN);CHKERRQ(ierr);
  ierr = VecSetType(bt,VECSTANDARD);CHKERRQ(ierr);
  ierr = VecCreate(PETSC_COMM_SELF,&ct);CHKERRQ(ierr);
  ierr = VecSetSizes(ct,n*BN,n*BN);CHKERRQ(ierr);
  ierr = VecSetType(ct,VECSTANDARD);CHKERRQ(ierr);
  atb->bt = bt;
  atb->ct = ct;

  *C                   = Cdense;
  c                    = (Mat_SeqDense*)Cdense->data;
  c->atb               = atb;
  atb->destroy         = Cdense->ops->destroy;
  Cdense->ops->destroy = MatDestroy_SeqDense_MatTransMatMult;
  PetscFunctionReturn(0);
}
Example #2
PetscErrorCode test_vec_ops( void )
{
  Vec            X, a,b;
  Vec            c,d,e,f;
  PetscScalar    val;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscPrintf( PETSC_COMM_WORLD, "\n\n============== %s ==============\n",PETSC_FUNCTION_NAME);

  ierr = VecCreate( PETSC_COMM_WORLD, &X );CHKERRQ(ierr);
  ierr = VecSetSizes( X, 2, 2 );CHKERRQ(ierr);
  ierr = VecSetType( X, VECNEST );CHKERRQ(ierr);

  ierr = VecCreate( PETSC_COMM_WORLD, &a );CHKERRQ(ierr);
  ierr = VecSetSizes( a, 2, 2 );CHKERRQ(ierr);
  ierr = VecSetType( a, VECNEST );CHKERRQ(ierr);

  ierr = VecCreate( PETSC_COMM_WORLD, &b );CHKERRQ(ierr);
  ierr = VecSetSizes( b, 2, 2 );CHKERRQ(ierr);
  ierr = VecSetType( b, VECNEST );CHKERRQ(ierr);

  /* assemble X */
  ierr = VecNestSetSubVec( X, 0, a );CHKERRQ(ierr);
  ierr = VecNestSetSubVec( X, 1, b );CHKERRQ(ierr);
  ierr = VecAssemblyBegin(X);CHKERRQ(ierr);
  ierr = VecAssemblyEnd(X);CHKERRQ(ierr);

  ierr = VecCreate( PETSC_COMM_WORLD, &c );CHKERRQ(ierr);
  ierr = VecSetSizes( c, 3, 3 );CHKERRQ(ierr);
  ierr = VecSetType( c, VECSEQ );CHKERRQ(ierr);
  ierr = VecDuplicate( c, &d );CHKERRQ(ierr);
  ierr = VecDuplicate( c, &e );CHKERRQ(ierr);
  ierr = VecDuplicate( c, &f );CHKERRQ(ierr);

  ierr = VecSet( c, 1.0 );CHKERRQ(ierr);
  ierr = VecSet( d, 2.0 );CHKERRQ(ierr);
  ierr = VecSet( e, 3.0 );CHKERRQ(ierr);
  ierr = VecSet( f, 4.0 );CHKERRQ(ierr);

  /* assemble a */
  ierr = VecNestSetSubVec( a, 0, c );CHKERRQ(ierr);
  ierr = VecNestSetSubVec( a, 1, d );CHKERRQ(ierr);
  ierr = VecAssemblyBegin(a);CHKERRQ(ierr);
  ierr = VecAssemblyEnd(a);CHKERRQ(ierr);

  /* assemble b */
  ierr = VecNestSetSubVec( b, 0, e );CHKERRQ(ierr);
  ierr = VecNestSetSubVec( b, 1, f );CHKERRQ(ierr);
  ierr = VecAssemblyBegin(b);CHKERRQ(ierr);
  ierr = VecAssemblyEnd(b);CHKERRQ(ierr);

  //PetscPrintf( PETSC_COMM_WORLD, "X \n");
  //VecView( X, PETSC_VIEWER_STDOUT_WORLD );

  ierr = VecDot( X,X, &val );CHKERRQ(ierr);
  PetscPrintf( PETSC_COMM_WORLD, "X.X = %f \n", (double)PetscRealPart(val) );

  /* destroy the sub-vectors and the nest vectors to avoid leaking them */
  ierr = VecDestroy(&c);CHKERRQ(ierr);
  ierr = VecDestroy(&d);CHKERRQ(ierr);
  ierr = VecDestroy(&e);CHKERRQ(ierr);
  ierr = VecDestroy(&f);CHKERRQ(ierr);
  ierr = VecDestroy(&a);CHKERRQ(ierr);
  ierr = VecDestroy(&b);CHKERRQ(ierr);
  ierr = VecDestroy(&X);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
Example #3
int main(int argc,char **argv)
{
  PetscErrorCode ierr;
  PetscInt       n = 5;
  PetscScalar    one = 1.0,two = 2.0;
  Vec            x,y;

  ierr = PetscInitialize(&argc,&argv,(char*)0,help);CHKERRQ(ierr); 

  /* create vector */
  ierr = VecCreate(PETSC_COMM_SELF,&x);CHKERRQ(ierr);
  ierr = VecSetSizes(x,n,PETSC_DECIDE);CHKERRQ(ierr);
  ierr = VecSetType(x,"mpi");CHKERRQ(ierr);
  ierr = VecSetType(x,"seq");CHKERRQ(ierr);
  ierr = VecDuplicate(x,&y);CHKERRQ(ierr);
  ierr = VecSetType(x,"mpi");CHKERRQ(ierr);

  ierr = VecSet(x,one);CHKERRQ(ierr);
  ierr = VecSet(y,two);CHKERRQ(ierr);

  ierr = VecDestroy(&x);CHKERRQ(ierr);
  ierr = VecDestroy(&y);CHKERRQ(ierr);

  ierr = PetscFinalize();CHKERRQ(ierr);
  return 0;
}
Example #4
  int DA::createActiveVector(Vec &arr, bool isElemental, bool isGhosted, unsigned int dof) {
    // first determine the length of the vector ...
    unsigned int sz = 0;
    if(m_bIamActive) {
      if (isElemental) {
        sz = m_uiElementSize;
        if (isGhosted) {
          sz += (m_uiPreGhostElementSize);
        }
      } else {
        sz = m_uiNodeSize + m_uiBoundaryNodeSize;
        if (isGhosted) {
          sz += (m_uiPreGhostNodeSize + m_uiPreGhostBoundaryNodeSize + m_uiPostGhostNodeSize);
        }
      }
      // now for dof ...
      sz *= dof;

      // now create the PETSc Vector
      VecCreate(m_mpiCommActive, &arr);
      VecSetSizes(arr, sz, PETSC_DECIDE);
      if (m_iNpesActive > 1) {
        VecSetType(arr, VECMPI);
      } else {
        VecSetType(arr, VECSEQ);
      }    
    }//end if active

    return 0;
  }
Example #5
PetscErrorCode gen_test_vector( MPI_Comm comm, PetscInt length, PetscInt start_value, PetscInt stride, Vec *_v )
{
  int            nproc;
  Vec            v;
  PetscInt       i;
  PetscScalar    vx;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  MPI_Comm_size( comm, &nproc );

  ierr = VecCreate( comm, &v );CHKERRQ(ierr);
  ierr = VecSetSizes( v, PETSC_DECIDE, length );CHKERRQ(ierr);
  if ( nproc == 1 ) { ierr = VecSetType( v, VECSEQ );CHKERRQ(ierr); }
  else { ierr = VecSetType( v, VECMPI );CHKERRQ(ierr); }

  for ( i=0; i<length; i++ ) {
    vx = (PetscScalar)( start_value + i * stride );
    ierr = VecSetValue( v, i, vx, INSERT_VALUES );CHKERRQ(ierr);
  }
  ierr = VecAssemblyBegin( v );CHKERRQ(ierr);
  ierr = VecAssemblyEnd( v );CHKERRQ(ierr);

  *_v = v;

  PetscFunctionReturn(0);
}
Example #6
EXTERN_C_END

/*MC
   VECSTANDARD = "standard" - A VECSEQ on one process and VECMPI on more than one process

   Options Database Keys:
. -vec_type standard - sets a vector type to standard on calls to VecSetFromOptions()

  Level: beginner

.seealso: VecCreateSeq(), VecCreateMPI()
M*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "VecCreate_Standard"
PetscErrorCode  VecCreate_Standard(Vec v)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(((PetscObject)v)->comm,&size);CHKERRQ(ierr);
  if (size == 1) {
    ierr = VecSetType(v,VECSEQ);CHKERRQ(ierr);
  } else {
    ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
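
As the manual page above describes, the standard type resolves to VECSEQ or VECMPI from the communicator size. A minimal usage sketch (the communicator and the global length 100 are illustrative assumptions, not from the source):

Vec            v;
PetscErrorCode ierr;

ierr = VecCreate(PETSC_COMM_WORLD,&v);CHKERRQ(ierr);
ierr = VecSetSizes(v,PETSC_DECIDE,100);CHKERRQ(ierr);
ierr = VecSetType(v,VECSTANDARD);CHKERRQ(ierr); /* VECSEQ on 1 rank, VECMPI otherwise */
/* alternatively: VecSetFromOptions(v) honors -vec_type standard */
ierr = VecDestroy(&v);CHKERRQ(ierr);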
Example #7
/*@C
   DMDAGetRay - Returns a vector on process zero that contains a row or column of the values in a DMDA vector

   Collective on DMDA

   Input Parameters:
+  da - the distributed array
.  vec - the vector
.  dir - Cartesian direction, either DMDA_X, DMDA_Y, or DMDA_Z
-  gp - global grid point number in this direction

   Output Parameters:
+  newvec - the new vector that can hold the values (size zero on all processes except process 0)
-  scatter - the VecScatter that will map from the original vector to the slice

   Level: advanced

   Notes:
   All processors that share the DMDA must call this with the same gp value

.keywords: distributed array, get, processor subset
@*/
PetscErrorCode  DMDAGetRay(DM da,DMDADirection dir,PetscInt gp,Vec *newvec,VecScatter *scatter)
{
  PetscMPIInt    rank;
  DM_DA          *dd = (DM_DA*)da->data;
  PetscErrorCode ierr;
  IS             is;
  AO             ao;
  Vec            vec;
  PetscInt       *indices,i,j;

  PetscFunctionBegin;
  if (dd->dim == 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Cannot get slice from 1d DMDA");
  if (dd->dim == 3) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Cannot get slice from 3d DMDA");
  ierr = DMDAGetAO(da,&ao);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)da),&rank);CHKERRQ(ierr);
  if (!rank) {
    if (dir == DMDA_Y) {
      ierr       = PetscMalloc(dd->w*dd->M*sizeof(PetscInt),&indices);CHKERRQ(ierr);
      indices[0] = gp*dd->M*dd->w;
      for (i=1; i<dd->M*dd->w; i++) indices[i] = indices[i-1] + 1;

      ierr = AOApplicationToPetsc(ao,dd->M*dd->w,indices);CHKERRQ(ierr);
      ierr = VecCreate(PETSC_COMM_SELF,newvec);CHKERRQ(ierr);
      ierr = VecSetBlockSize(*newvec,dd->w);CHKERRQ(ierr);
      ierr = VecSetSizes(*newvec,dd->M*dd->w,PETSC_DETERMINE);CHKERRQ(ierr);
      ierr = VecSetType(*newvec,VECSEQ);CHKERRQ(ierr);
      ierr = ISCreateGeneral(PETSC_COMM_SELF,dd->w*dd->M,indices,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
    } else if (dir == DMDA_X) {
      ierr       = PetscMalloc(dd->w*dd->N*sizeof(PetscInt),&indices);CHKERRQ(ierr);
      indices[0] = dd->w*gp;
      for (j=1; j<dd->w; j++) indices[j] = indices[j-1] + 1;
      for (i=1; i<dd->N; i++) {
        indices[i*dd->w] = indices[i*dd->w-1] + dd->w*dd->M - dd->w + 1;
        for (j=1; j<dd->w; j++) indices[i*dd->w + j] = indices[i*dd->w + j - 1] + 1;
      }
      ierr = AOApplicationToPetsc(ao,dd->w*dd->N,indices);CHKERRQ(ierr);
      ierr = VecCreate(PETSC_COMM_SELF,newvec);CHKERRQ(ierr);
      ierr = VecSetBlockSize(*newvec,dd->w);CHKERRQ(ierr);
      ierr = VecSetSizes(*newvec,dd->N*dd->w,PETSC_DETERMINE);CHKERRQ(ierr);
      ierr = VecSetType(*newvec,VECSEQ);CHKERRQ(ierr);
      ierr = ISCreateGeneral(PETSC_COMM_SELF,dd->w*dd->N,indices,PETSC_OWN_POINTER,&is);CHKERRQ(ierr);
    } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Unknown DMDADirection");
  } else {
    ierr = VecCreateSeq(PETSC_COMM_SELF,0,newvec);CHKERRQ(ierr);
    ierr = ISCreateGeneral(PETSC_COMM_SELF,0,NULL,PETSC_COPY_VALUES,&is);CHKERRQ(ierr);
  }
  ierr = DMGetGlobalVector(da,&vec);CHKERRQ(ierr);
  ierr = VecScatterCreate(vec,is,*newvec,NULL,scatter);CHKERRQ(ierr);
  ierr = DMRestoreGlobalVector(da,&vec);CHKERRQ(ierr);
  ierr = ISDestroy(&is);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
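
A minimal sketch of driving the routine above; the DMDA da, the global vector vec, and the grid point gp are assumed to exist, and the returned scatter still has to be applied to move the values:

Vec        ray;
VecScatter scatter;

ierr = DMDAGetRay(da,DMDA_Y,gp,&ray,&scatter);CHKERRQ(ierr);
ierr = VecScatterBegin(scatter,vec,ray,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
ierr = VecScatterEnd(scatter,vec,ray,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
/* ray now holds the requested row/column on rank 0 (length zero elsewhere) */
ierr = VecScatterDestroy(&scatter);CHKERRQ(ierr);
ierr = VecDestroy(&ray);CHKERRQ(ierr);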
Example #8
PETSC_EXTERN PetscErrorCode VecCreate_Standard(Vec v)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)v),&size);CHKERRQ(ierr);
  if (size == 1) {
    ierr = VecSetType(v,VECSEQ);CHKERRQ(ierr);
  } else {
    ierr = VecSetType(v,VECMPI);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
Example #9
/*@
   DMDACreateNaturalVector - Creates a parallel PETSc vector that
   will hold vector values in the natural numbering, rather than in
   the PETSc parallel numbering associated with the DMDA.

   Collective

   Input Parameter:
.  da - the distributed array

   Output Parameter:
.  g - the distributed global vector

   Level: developer

   Note:
   The output parameter, g, is a regular PETSc vector that should be destroyed
   with a call to VecDestroy() when usage is finished.

   The number of local entries in the vector on each process is the same
   as in a vector created with DMCreateGlobalVector().

.keywords: distributed array, create, global, distributed, vector

.seealso: DMCreateLocalVector(), VecDuplicate(), VecDuplicateVecs(),
          DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMGlobalToLocalBegin(),
          DMGlobalToLocalEnd(), DMDALocalToGlobalBegin()
@*/
PetscErrorCode  DMDACreateNaturalVector(DM da,Vec *g)
{
  PetscErrorCode ierr;
  PetscInt       cnt;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecificType(da,DM_CLASSID,1,DMDA);
  PetscValidPointer(g,2);
  if (dd->natural) {
    ierr = PetscObjectGetReference((PetscObject)dd->natural,&cnt);CHKERRQ(ierr);
    if (cnt == 1) { /* object is not currently used by anyone */
      ierr = PetscObjectReference((PetscObject)dd->natural);CHKERRQ(ierr);
      *g   = dd->natural;
    } else {
      ierr = VecDuplicate(dd->natural,g);CHKERRQ(ierr);
    }
  } else { /* create the first version of this guy */
    ierr = VecCreate(PetscObjectComm((PetscObject)da),g);CHKERRQ(ierr);
    ierr = VecSetSizes(*g,dd->Nlocal,PETSC_DETERMINE);CHKERRQ(ierr);
    ierr = VecSetBlockSize(*g, dd->w);CHKERRQ(ierr);
    ierr = VecSetType(*g,da->vectype);CHKERRQ(ierr);
    ierr = PetscObjectReference((PetscObject)*g);CHKERRQ(ierr);

    dd->natural = *g;
  }
  PetscFunctionReturn(0);
}
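
A minimal usage sketch, assuming a DMDA da and a global vector g already exist; the values are moved into natural ordering with DMDAGlobalToNaturalBegin/End:

Vec natural;

ierr = DMDACreateNaturalVector(da,&natural);CHKERRQ(ierr);
ierr = DMDAGlobalToNaturalBegin(da,g,INSERT_VALUES,natural);CHKERRQ(ierr);
ierr = DMDAGlobalToNaturalEnd(da,g,INSERT_VALUES,natural);CHKERRQ(ierr);
/* ... use natural ... */
ierr = VecDestroy(&natural);CHKERRQ(ierr);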
Example #10
PetscErrorCode CreateStructures(DM da, UserContext *user)
{
  const PetscInt *necon;
  PetscInt       ne,nc;
  PetscErrorCode ierr;

  PetscFunctionBeginUser;
  ierr = DMDAGetElements(da,&ne,&nc,&necon);CHKERRQ(ierr);
  ierr = DMDARestoreElements(da,&ne,&nc,&necon);CHKERRQ(ierr);
  ierr = DMCreateGlobalVector(da, &user->sol_n.rho);CHKERRQ(ierr);
  ierr = DMCreateGlobalVector(da, &user->sol_n.rho_u);CHKERRQ(ierr);
  ierr = DMCreateGlobalVector(da, &user->sol_n.rho_v);CHKERRQ(ierr);
  ierr = DMCreateGlobalVector(da, &user->sol_n.rho_e);CHKERRQ(ierr);
  ierr = DMCreateGlobalVector(da, &user->sol_n.p);CHKERRQ(ierr);
  ierr = DMCreateGlobalVector(da, &user->sol_n.u);CHKERRQ(ierr);
  ierr = DMCreateGlobalVector(da, &user->sol_n.v);CHKERRQ(ierr);
  ierr = DMCreateGlobalVector(da, &user->sol_n.t);CHKERRQ(ierr);
  ierr = VecCreate(PETSC_COMM_WORLD, &user->sol_phi.rho);CHKERRQ(ierr);
  ierr = VecSetSizes(user->sol_phi.rho, ne, PETSC_DECIDE);CHKERRQ(ierr);
  ierr = VecSetType(user->sol_phi.rho,VECMPI);CHKERRQ(ierr);
  ierr = VecDuplicate(user->sol_phi.rho, &user->sol_phi.rho_u);CHKERRQ(ierr);
  ierr = VecDuplicate(user->sol_phi.rho, &user->sol_phi.rho_v);CHKERRQ(ierr);
  ierr = VecDuplicate(user->sol_phi.rho, &user->sol_phi.u);CHKERRQ(ierr);
  ierr = VecDuplicate(user->sol_phi.rho, &user->sol_phi.v);CHKERRQ(ierr);
  ierr = DMCreateGlobalVector(da, &user->sol_np1.rho);CHKERRQ(ierr);
  ierr = DMCreateGlobalVector(da, &user->sol_np1.rho_u);CHKERRQ(ierr);
  ierr = DMCreateGlobalVector(da, &user->sol_np1.rho_v);CHKERRQ(ierr);
  ierr = DMCreateGlobalVector(da, &user->sol_np1.rho_e);CHKERRQ(ierr);
  ierr = DMCreateGlobalVector(da, &user->sol_np1.p);CHKERRQ(ierr);
  ierr = DMCreateGlobalVector(da, &user->sol_np1.u);CHKERRQ(ierr);
  ierr = DMCreateGlobalVector(da, &user->sol_np1.v);CHKERRQ(ierr);
  ierr = DMCreateGlobalVector(da, &user->mu);CHKERRQ(ierr);
  ierr = DMCreateGlobalVector(da, &user->kappa);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
Example #11
void
PetscSparseMtrx :: times(const FloatArray &x, FloatArray &answer) const
{
    if ( this->giveNumberOfColumns() != x.giveSize() ) {
        OOFEM_ERROR("Dimension mismatch");
    }

#ifdef __PARALLEL_MODE
    if ( emodel->isParallel() ) {
        OOFEM_ERROR("PetscSparseMtrx :: times - Not implemented");
    }
#endif
    Vec globX, globY;
    VecCreateSeqWithArray(PETSC_COMM_SELF, 1, x.giveSize(), x.givePointer(), & globX);
    VecCreate(PETSC_COMM_SELF, & globY);
    VecSetType(globY, VECSEQ);
    VecSetSizes(globY, PETSC_DECIDE, this->nRows);

    MatMult(this->mtrx, globX, globY);
    double *ptr;
    VecGetArray(globY, & ptr);
    answer.resize(this->nRows);
    for ( int i = 0; i < this->nRows; i++ ) {
        answer(i) = ptr [ i ];
    }

    VecRestoreArray(globY, & ptr);
    VecDestroy(&globX);
    VecDestroy(&globY);
}
Example #12
PetscErrorCode StokesSetupVectors(Stokes* s) {
  PetscErrorCode ierr;

  PetscFunctionBeginUser;
  /* solution vector x */
  ierr = VecCreate(PETSC_COMM_WORLD, &s->x);CHKERRQ(ierr);
  ierr = VecSetSizes(s->x, PETSC_DECIDE, 3 * s->nx * s->ny);CHKERRQ(ierr);
  ierr = VecSetType(s->x, VECMPI);CHKERRQ(ierr);
  /*  ierr = VecSetRandom(s->x, NULL);CHKERRQ(ierr); */
  /*  ierr = VecView(s->x, (PetscViewer) PETSC_VIEWER_DEFAULT);CHKERRQ(ierr); */

  /* exact solution y */
  ierr = VecDuplicate(s->x, &s->y);CHKERRQ(ierr);
  ierr = StokesExactSolution(s);CHKERRQ(ierr);
  /*  ierr = VecView(s->y, (PetscViewer) PETSC_VIEWER_DEFAULT);CHKERRQ(ierr); */

  /* rhs vector b */
  ierr = VecDuplicate(s->x, &s->b);CHKERRQ(ierr);
  ierr = StokesRhs(s);CHKERRQ(ierr);
  /*ierr = VecView(s->b, (PetscViewer) PETSC_VIEWER_DEFAULT);CHKERRQ(ierr);*/
  PetscFunctionReturn(0);
}
Example #13
PetscErrorCode  DMCreateGlobalVector_DA(DM da,Vec *g)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidPointer(g,2);
  if (da->defaultSection) {
    ierr = DMCreateGlobalVector_Section_Private(da,g);CHKERRQ(ierr);
    /* The view and load functions break for general layouts */
    PetscFunctionReturn(0);
  } else {
    ierr = VecCreate(PetscObjectComm((PetscObject)da),g);CHKERRQ(ierr);
    ierr = VecSetSizes(*g,dd->Nlocal,PETSC_DETERMINE);CHKERRQ(ierr);
    ierr = VecSetBlockSize(*g,dd->w);CHKERRQ(ierr);
    ierr = VecSetType(*g,da->vectype);CHKERRQ(ierr);
    ierr = VecSetDM(*g, da);CHKERRQ(ierr);
    ierr = VecSetLocalToGlobalMapping(*g,da->ltogmap);CHKERRQ(ierr);
  }
  ierr = VecSetOperation(*g,VECOP_VIEW,(void (*)(void))VecView_MPI_DA);CHKERRQ(ierr);
  ierr = VecSetOperation(*g,VECOP_LOAD,(void (*)(void))VecLoad_Default_DA);CHKERRQ(ierr);
  ierr = VecSetOperation(*g,VECOP_DUPLICATE,(void (*)(void))VecDuplicate_MPI_DA);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
Example #14
/*@
  DMPlexDistributeField - Distribute field data to match a given PetscSF, usually the SF from mesh distribution

  Collective on DM

  Input Parameters:
+ dm - The DMPlex object
. pointSF - The PetscSF describing the communication pattern
. originalSection - The PetscSection for existing data layout
- originalVec - The existing data

  Output Parameters:
+ newSection - The PetscSection describing the new data layout
- newVec - The new data

  Level: developer

.seealso: DMPlexDistribute(), DMPlexDistributeData()
@*/
PetscErrorCode DMPlexDistributeField(DM dm, PetscSF pointSF, PetscSection originalSection, Vec originalVec, PetscSection newSection, Vec newVec)
{
  PetscSF        fieldSF;
  PetscInt      *remoteOffsets, fieldSize;
  PetscScalar   *originalValues, *newValues;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);

  ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
  ierr = VecSetSizes(newVec, fieldSize, PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = VecSetType(newVec,dm->vectype);CHKERRQ(ierr);

  ierr = VecGetArray(originalVec, &originalValues);CHKERRQ(ierr);
  ierr = VecGetArray(newVec, &newValues);CHKERRQ(ierr);
  ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(fieldSF, MPIU_SCALAR, originalValues, newValues);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
  ierr = VecRestoreArray(newVec, &newValues);CHKERRQ(ierr);
  ierr = VecRestoreArray(originalVec, &originalValues);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeField,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
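
Note that the routine above sets the size and type of newVec itself, so the caller only creates empty objects first. A hedged usage sketch, assuming dm, pointSF, originalSection, and originalVec come from a prior DMPlexDistribute() call:

PetscSection newSection;
Vec          newVec;

ierr = PetscSectionCreate(PetscObjectComm((PetscObject)dm),&newSection);CHKERRQ(ierr);
ierr = VecCreate(PetscObjectComm((PetscObject)dm),&newVec);CHKERRQ(ierr);
ierr = DMPlexDistributeField(dm,pointSF,originalSection,originalVec,newSection,newVec);CHKERRQ(ierr);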
Example #15
PetscErrorCode StokesSetupApproxSchur(Stokes *s)
{
  Vec            diag;
  PetscErrorCode ierr;

  PetscFunctionBeginUser;
  /* Schur complement approximation: myS = A11 - A10 diag(A00)^(-1) A01 */
  /* note: A11 is zero */
  /* note: in real life this matrix would be built directly, */
  /* i.e. without MatMatMult */

  /* inverse of diagonal of A00 */
  ierr = VecCreate(PETSC_COMM_WORLD,&diag);CHKERRQ(ierr);
  ierr = VecSetSizes(diag,PETSC_DECIDE,2*s->nx*s->ny);CHKERRQ(ierr);
  ierr = VecSetType(diag,VECMPI);CHKERRQ(ierr);
  ierr = MatGetDiagonal(s->subA[0],diag);CHKERRQ(ierr);
  ierr = VecReciprocal(diag);CHKERRQ(ierr);

  /* compute: - A10 diag(A00)^(-1) A01 */
  ierr = MatDiagonalScale(s->subA[1],diag,NULL);CHKERRQ(ierr); /* (*warning* overwrites subA[1]) */
  ierr = MatMatMult(s->subA[2],s->subA[1],MAT_INITIAL_MATRIX,PETSC_DEFAULT,&s->myS);CHKERRQ(ierr);
  ierr = MatScale(s->myS,-1.0);CHKERRQ(ierr);

  /* restore A10 */
  ierr = MatGetDiagonal(s->subA[0],diag);CHKERRQ(ierr);
  ierr = MatDiagonalScale(s->subA[1],diag,NULL);CHKERRQ(ierr);
  ierr = VecDestroy(&diag);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
Example #16
PetscErrorCode DMCreateGlobalVector_Section_Private(DM dm,Vec *vec)
{
  PetscSection   gSection;
  PetscInt       localSize, bs, blockSize = -1, pStart, pEnd, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(gSection, &pStart, &pEnd);CHKERRQ(ierr);
  for (p = pStart; p < pEnd; ++p) {
    PetscInt dof, cdof;

    ierr = PetscSectionGetDof(gSection, p, &dof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(gSection, p, &cdof);CHKERRQ(ierr);
    if ((blockSize < 0) && (dof > 0) && (dof-cdof > 0)) blockSize = dof-cdof;
    if ((dof > 0) && (dof-cdof != blockSize)) {
      blockSize = 1;
      break;
    }
  }
  if (blockSize < 0) blockSize = PETSC_MAX_INT;
  ierr = MPIU_Allreduce(&blockSize, &bs, 1, MPIU_INT, MPI_MIN, PetscObjectComm((PetscObject)dm));CHKERRQ(ierr);
  if (blockSize == PETSC_MAX_INT) blockSize = 1; /* Everyone was empty */
  ierr = PetscSectionGetConstrainedStorageSize(gSection, &localSize);CHKERRQ(ierr);
  if (localSize%blockSize) SETERRQ2(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONG, "Mismatch between blocksize %d and local storage size %d", blockSize, localSize);
  ierr = VecCreate(PetscObjectComm((PetscObject)dm), vec);CHKERRQ(ierr);
  ierr = VecSetSizes(*vec, localSize, PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vec, bs);CHKERRQ(ierr);
  ierr = VecSetType(*vec,dm->vectype);CHKERRQ(ierr);
  ierr = VecSetDM(*vec, dm);CHKERRQ(ierr);
  /* ierr = VecSetLocalToGlobalMapping(*vec, dm->ltogmap);CHKERRQ(ierr); */
  PetscFunctionReturn(0);
}
Example #17
PetscErrorCode DMCreateLocalVector_Section_Private(DM dm,Vec *vec)
{
  PetscSection   section;
  PetscInt       localSize, blockSize = -1, pStart, pEnd, p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr);
  for (p = pStart; p < pEnd; ++p) {
    PetscInt dof;

    ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr);
    if ((blockSize < 0) && (dof > 0)) blockSize = dof;
    if ((dof > 0) && (dof != blockSize)) {
      blockSize = 1;
      break;
    }
  }
  ierr = PetscSectionGetStorageSize(section, &localSize);CHKERRQ(ierr);
  ierr = VecCreate(PETSC_COMM_SELF, vec);CHKERRQ(ierr);
  ierr = VecSetSizes(*vec, localSize, localSize);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*vec, blockSize);CHKERRQ(ierr);
  ierr = VecSetType(*vec,dm->vectype);CHKERRQ(ierr);
  ierr = VecSetDM(*vec, dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
Example #18
UniqueVec::UniqueVec(PetscInt local_n, PetscInt global_n) :
    data_(new Vec, PetscVecDeleter()), local_n_(local_n) {
  VecCreate(PETSC_COMM_WORLD, data_.get());
  Vec &vec = *data_.get();
  VecSetType(vec, VECMPI);
  VecSetSizes(vec, local_n, global_n);
  VecSet(vec, 0.0);
}
Example #19
void SparseVector::allocate(int m)
{
    VecCreate(PETSC_COMM_WORLD, &vec_);
    VecSetSizes(vec_, PETSC_DECIDE, m);
    VecSetType(vec_, VECSTANDARD);

    // Get MPI ranges
    VecGetOwnershipRange(vec_, &iLower_, &iUpper_);
}
Example #20
void
PetscSparseMtrx :: times(const FloatMatrix &B, FloatMatrix &answer) const
{
    if ( this->giveNumberOfColumns() != B.giveNumberOfRows() ) {
        OOFEM_ERROR("Dimension mismatch");
    }

#ifdef __PARALLEL_MODE
    if ( emodel->isParallel() ) {
        OOFEM_ERROR("PetscSparseMtrx :: times - Not implemented");
    }
#endif
    // I'm opting to work with a set of vectors, as I think it might be faster and more robust. / Mikael

    int nr = this->giveNumberOfRows();
    int nc = B.giveNumberOfColumns();
    answer.resize(nr, nc);
    double *aptr = answer.givePointer();

#if 0
     // Approach using several vectors. Not sure if it is optimal, but it includes PETSc calls which I suspect are inefficient. / Mikael
     // UNTESTED!
     Vec globX, globY;
     VecCreate(PETSC_COMM_SELF, &globY);
     VecSetType(globY, VECSEQ);
     VecSetSizes(globY, PETSC_DECIDE, nr);
     int nrB = B.giveNumberOfRows();
     for (int k = 0; k < nc; k++) {
         double colVals[nrB];
         for (int i = 0; i < nrB; i++) colVals[i] = B(i,k); // B.copyColumn(Bk,k);
         VecCreateSeqWithArray(PETSC_COMM_SELF, 1, nrB, colVals, &globX);
         MatMult(this->mtrx, globX, globY);
         double *ptr;
         VecGetArray(globY, &ptr);
         for (int i = 0; i < nr; i++) *aptr++ = ptr[i]; // answer.setColumn(Ak,k);
         VecRestoreArray(globY, &ptr);
         VecDestroy(&globX);
     }
     VecDestroy(&globY);
#endif

    Mat globB, globC;
    MatCreateSeqDense(PETSC_COMM_SELF, B.giveNumberOfRows(), B.giveNumberOfColumns(), B.givePointer(), & globB);
    MatMatMult(this->mtrx, globB, MAT_INITIAL_MATRIX, PETSC_DEFAULT, & globC);
    const double *vals;
    for ( int r = 0; r < nr; r++ ) {
        MatGetRow(globC, r, NULL, NULL, & vals);
        for ( int i = 0, i2 = r; i < nc; i++, i2 += nr ) {
            aptr [ i2 ] = vals [ i ];
        }
        MatRestoreRow(globC, r, NULL, NULL, & vals);
    }

    MatDestroy(&globB);
    MatDestroy(&globC);
}
Example #21
static PetscErrorCode MatHermitianTransposeMatMult_MPIAIJ_MPIDense0(Mat A,PetscScalar *Barray, PetscInt BN, PRIMME_INT ldb,PetscScalar *Carray, PetscInt ldc)
{
  PetscErrorCode      ierr;
  PetscInt            i,j,m=A->rmap->n,n=A->cmap->n;
  PetscScalar         *btarray,*ctarray;
  Vec                 bt,ct;
  Mat                 mA;

  PetscFunctionBegin;
  /* create MAIJ matrix mA from A -- should be done in symbolic phase */
  ierr = MatCreateMAIJ(A,BN,&mA);CHKERRQ(ierr);

  /* create vectors bt and ct to hold locally transposed arrays of B and C */
  ierr = VecCreate(PetscObjectComm((PetscObject)A),&bt);CHKERRQ(ierr);
  ierr = VecSetSizes(bt,m*BN,PETSC_DECIDE);CHKERRQ(ierr);
  ierr = VecSetType(bt,VECSTANDARD);CHKERRQ(ierr);
  ierr = VecCreate(PetscObjectComm((PetscObject)A),&ct);CHKERRQ(ierr);
  ierr = VecSetSizes(ct,n*BN,PETSC_DECIDE);CHKERRQ(ierr);
  ierr = VecSetType(ct,VECSTANDARD);CHKERRQ(ierr);

  /* transpose local array of B, then copy it to vector bt */
  ierr = VecGetArray(bt,&btarray);CHKERRQ(ierr);

  for (j=0; j<BN; j++) {
    for (i=0; i<m; i++) btarray[i*BN + j] = PetscConj(Barray[ldb*j+i]);
  }
  ierr = VecRestoreArray(bt,&btarray);CHKERRQ(ierr);

  /* compute ct = mA^T * bt */
  ierr = MatMultTranspose(mA,bt,ct);CHKERRQ(ierr);

  /* transpose local array of ct to matrix C */
  ierr = VecGetArray(ct,&ctarray);CHKERRQ(ierr);
  for (j=0; j<BN; j++) {
    for (i=0; i<n; i++) Carray[j*ldc+i] = PetscConj(ctarray[i*BN + j]);
  }
  ierr = VecRestoreArray(ct,&ctarray);CHKERRQ(ierr);

  ierr = VecDestroy(&bt);CHKERRQ(ierr);
  ierr = VecDestroy(&ct);CHKERRQ(ierr);
  ierr = MatDestroy(&mA);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
Example #22
/*@
   VecCreateMPI - Creates a parallel vector.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length (or PETSC_DECIDE to have calculated if N is given)
-  N - global vector length (or PETSC_DETERMINE to have calculated if n is given)

   Output Parameter:
.  vv - the vector

   Notes:
   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   Level: intermediate

   Concepts: vectors^creating parallel

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
          VecCreateMPIWithArray(), VecCreateGhostWithArray(), VecMPISetGhost()

@*/
PetscErrorCode  VecCreateMPI(MPI_Comm comm,PetscInt n,PetscInt N,Vec *v)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreate(comm,v);CHKERRQ(ierr);
  ierr = VecSetSizes(*v,n,N);CHKERRQ(ierr);
  ierr = VecSetType(*v,VECMPI);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
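
For completeness, a minimal sketch of calling this convenience wrapper (the communicator and the global length 100 are arbitrary illustrations):

Vec v;

ierr = VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,100,&v);CHKERRQ(ierr);
ierr = VecSet(v,1.0);CHKERRQ(ierr);
ierr = VecDestroy(&v);CHKERRQ(ierr);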
Example #23
int main(int argc,char **argv)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;
  PetscInt       n = 9,bs = 3,indices[2],i;
  PetscScalar    values[6];
  Vec            x;

  ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
  ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);CHKERRQ(ierr);

  if (size != 1) SETERRQ(PETSC_COMM_SELF,1,"Must be run with one processor");

  /* create vector */
  ierr = VecCreate(PETSC_COMM_SELF,&x);CHKERRQ(ierr);
  ierr = VecSetSizes(x,n,n);CHKERRQ(ierr);
  ierr = VecSetBlockSize(x,bs);CHKERRQ(ierr);
  ierr = VecSetType(x,VECSEQ);CHKERRQ(ierr);

  for (i=0; i<6; i++) values[i] = 4.0*i;
  indices[0] = 0;
  indices[1] = 2;

  ierr = VecSetValuesBlocked(x,2,indices,values,INSERT_VALUES);CHKERRQ(ierr);
  ierr = VecAssemblyBegin(x);CHKERRQ(ierr);
  ierr = VecAssemblyEnd(x);CHKERRQ(ierr);

  /*
      Resulting vector should be 0 4 8  0 0 0 12 16 20
  */
  ierr = VecView(x,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);

  /* test insertion with negative indices */
  ierr = VecSetOption(x,VEC_IGNORE_NEGATIVE_INDICES,PETSC_TRUE);CHKERRQ(ierr);
  for (i=0; i<6; i++) values[i] = -4.0*i;
  indices[0] = -1;
  indices[1] = 2;

  ierr = VecSetValuesBlocked(x,2,indices,values,ADD_VALUES);CHKERRQ(ierr);
  ierr = VecAssemblyBegin(x);CHKERRQ(ierr);
  ierr = VecAssemblyEnd(x);CHKERRQ(ierr);

  /*
      Resulting vector should be 0 4 8  0 0 0 0 0 0
  */
  ierr = VecView(x,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);

  ierr = VecDestroy(&x);CHKERRQ(ierr);

  ierr = PetscFinalize();
  return ierr;
}
Example #24
PetscErrorCode DMInterpolationGetVector(DMInterpolationInfo ctx, Vec *v)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(v, 2);
  if (!ctx->coords) SETERRQ(ctx->comm, PETSC_ERR_ARG_WRONGSTATE, "The interpolation context has not been set up.");
  ierr = VecCreate(ctx->comm, v);CHKERRQ(ierr);
  ierr = VecSetSizes(*v, ctx->n*ctx->dof, PETSC_DECIDE);CHKERRQ(ierr);
  ierr = VecSetBlockSize(*v, ctx->dof);CHKERRQ(ierr);
  ierr = VecSetType(*v,VECSTANDARD);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
Example #25
/*@
  DMPlexCopyCoordinates - Copy coordinates from one mesh to another with the same vertices

  Collective on DM

  Input Parameter:
. dmA - The DMPlex object with initial coordinates

  Output Parameter:
. dmB - The DMPlex object with copied coordinates

  Level: intermediate

  Note: This is typically used when adding pieces other than vertices to a mesh

.keywords: mesh
.seealso: DMCopyLabels(), DMGetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM(), DMGetCoordinateSection()
@*/
PetscErrorCode DMPlexCopyCoordinates(DM dmA, DM dmB)
{
  Vec            coordinatesA, coordinatesB;
  PetscSection   coordSectionA, coordSectionB;
  PetscScalar   *coordsA, *coordsB;
  PetscInt       spaceDim, vStartA, vStartB, vEndA, vEndB, coordSizeB, v, d;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (dmA == dmB) PetscFunctionReturn(0);
  ierr = DMPlexGetDepthStratum(dmA, 0, &vStartA, &vEndA);CHKERRQ(ierr);
  ierr = DMPlexGetDepthStratum(dmB, 0, &vStartB, &vEndB);CHKERRQ(ierr);
  if ((vEndA-vStartA) != (vEndB-vStartB)) SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "The number of vertices in first DM %d != %d in the second DM", vEndA-vStartA, vEndB-vStartB);
  ierr = DMGetCoordinateSection(dmA, &coordSectionA);CHKERRQ(ierr);
  ierr = DMGetCoordinateSection(dmB, &coordSectionB);CHKERRQ(ierr);
  if (!coordSectionB) {
    PetscInt dim;

    ierr = PetscSectionCreate(PetscObjectComm((PetscObject) coordSectionA), &coordSectionB);CHKERRQ(ierr);
    ierr = DMGetCoordinateDim(dmA, &dim);CHKERRQ(ierr);
    ierr = DMSetCoordinateSection(dmB, dim, coordSectionB);CHKERRQ(ierr);
    ierr = PetscObjectDereference((PetscObject) coordSectionB);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetNumFields(coordSectionB, 1);CHKERRQ(ierr);
  ierr = PetscSectionGetFieldComponents(coordSectionA, 0, &spaceDim);CHKERRQ(ierr);
  ierr = PetscSectionSetFieldComponents(coordSectionB, 0, spaceDim);CHKERRQ(ierr);
  ierr = PetscSectionSetChart(coordSectionB, vStartB, vEndB);CHKERRQ(ierr);
  for (v = vStartB; v < vEndB; ++v) {
    ierr = PetscSectionSetDof(coordSectionB, v, spaceDim);CHKERRQ(ierr);
    ierr = PetscSectionSetFieldDof(coordSectionB, v, 0, spaceDim);CHKERRQ(ierr);
  }
  ierr = PetscSectionSetUp(coordSectionB);CHKERRQ(ierr);
  ierr = PetscSectionGetStorageSize(coordSectionB, &coordSizeB);CHKERRQ(ierr);
  ierr = DMGetCoordinatesLocal(dmA, &coordinatesA);CHKERRQ(ierr);
  ierr = VecCreate(PetscObjectComm((PetscObject) dmB), &coordinatesB);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) coordinatesB, "coordinates");CHKERRQ(ierr);
  ierr = VecSetSizes(coordinatesB, coordSizeB, PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = VecSetType(coordinatesB,VECSTANDARD);CHKERRQ(ierr);
  ierr = VecGetArray(coordinatesA, &coordsA);CHKERRQ(ierr);
  ierr = VecGetArray(coordinatesB, &coordsB);CHKERRQ(ierr);
  for (v = 0; v < vEndB-vStartB; ++v) {
    for (d = 0; d < spaceDim; ++d) {
      coordsB[v*spaceDim+d] = coordsA[v*spaceDim+d];
    }
  }
  ierr = VecRestoreArray(coordinatesA, &coordsA);CHKERRQ(ierr);
  ierr = VecRestoreArray(coordinatesB, &coordsB);CHKERRQ(ierr);
  ierr = DMSetCoordinatesLocal(dmB, coordinatesB);CHKERRQ(ierr);
  ierr = VecDestroy(&coordinatesB);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
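
A minimal usage sketch, assuming dmA already carries coordinates and dmB is a mesh with the same vertices (e.g. produced by adding non-vertex points):

ierr = DMPlexCopyCoordinates(dmA,dmB);CHKERRQ(ierr); /* dmB now has a copy of dmA's vertex coordinates */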
Example #26
END_TEST

START_TEST( WriteVector_test )
{
  PetscErrorCode ierr;
  
  Vec v;
  VecCreate( PETSC_COMM_SELF, &v);
  VecSetSizes(v, 10, 10);
  VecSetType(v, VECSEQ );
  VecSet(v, one);
  
  ierr = WriteVector("testwrite.bin", v); CHKERRQ(ierr);
  VecDestroy(&v);
}
Example #27
static PetscErrorCode DMCreateLocalVector_Redundant(DM dm,Vec *lvec)
{
  PetscErrorCode ierr;
  DM_Redundant   *red = (DM_Redundant*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(lvec,2);
  *lvec = 0;
  ierr  = VecCreate(PETSC_COMM_SELF,lvec);CHKERRQ(ierr);
  ierr  = VecSetSizes(*lvec,red->N,red->N);CHKERRQ(ierr);
  ierr  = VecSetType(*lvec,dm->vectype);CHKERRQ(ierr);
  ierr  = VecSetDM(*lvec,dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
Example #28
int Epetra_PETScAIJMatrix::ExtractDiagonalCopy(Epetra_Vector & Diagonal) const
{

  //TODO optimization: only get this diagonal once
  Vec petscDiag;
  double *vals=0;
  int length;

  int ierr=VecCreate(Comm_->Comm(),&petscDiag);CHKERRQ(ierr);
  VecSetSizes(petscDiag,NumMyRows_,NumGlobalRows_);
# ifdef HAVE_MPI
  ierr = VecSetType(petscDiag,VECMPI);CHKERRQ(ierr);
# else //TODO untested!!
  VecSetType(petscDiag,VECSEQ);
# endif

  MatGetDiagonal(Amat_, petscDiag);
  VecGetArray(petscDiag,&vals);
  VecGetLocalSize(petscDiag,&length);
  for (int i=0; i<length; i++) Diagonal[i] = vals[i];
  VecRestoreArray(petscDiag,&vals);
  VecDestroy(&petscDiag);
  return(0);
}
Example #29
PetscErrorCode VecDuplicate_Seq(Vec win,Vec *V)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreate(PetscObjectComm((PetscObject)win),V);CHKERRQ(ierr);
  ierr = VecSetSizes(*V,win->map->n,win->map->n);CHKERRQ(ierr);
  ierr = VecSetType(*V,((PetscObject)win)->type_name);CHKERRQ(ierr);
  ierr = PetscLayoutReference(win->map,&(*V)->map);CHKERRQ(ierr);
  ierr = PetscObjectListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*V))->olist);CHKERRQ(ierr);
  ierr = PetscFunctionListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*V))->qlist);CHKERRQ(ierr);

  (*V)->ops->view          = win->ops->view;
  (*V)->stash.ignorenegidx = win->stash.ignorenegidx;
  PetscFunctionReturn(0);
}
Example #30
EXTERN_C_END

#endif

#undef __FUNCT__  
#define __FUNCT__ "VecCreateShared"
/*@
   VecCreateShared - Creates a parallel vector that uses shared memory.

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length (or PETSC_DECIDE to have calculated if N is given)
-  N - global vector length (or PETSC_DETERMINE to have calculated if n is given)

   Output Parameter:
.  vv - the vector

   Collective on MPI_Comm
 
   Notes:
   Currently VecCreateShared() is available only on the SGI; otherwise,
   this routine is the same as VecCreateMPI().

   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   Level: advanced

   Concepts: vectors^creating with shared memory

.seealso: VecCreateSeq(), VecCreate(), VecCreateMPI(), VecDuplicate(), VecDuplicateVecs(), 
          VecCreateGhost(), VecCreateMPIWithArray(), VecCreateGhostWithArray()

@*/ 
PetscErrorCode  VecCreateShared(MPI_Comm comm,PetscInt n,PetscInt N,Vec *v)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecCreate(comm,v);CHKERRQ(ierr);
  ierr = VecSetSizes(*v,n,N);CHKERRQ(ierr);
  ierr = VecSetType(*v,VECSHARED);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
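
As the notes above suggest, additional vectors of the same type are best formed with VecDuplicate(); a minimal sketch, assuming a vector v created with VecCreateShared() as above:

Vec w;

ierr = VecDuplicate(v,&w);CHKERRQ(ierr); /* same layout and type as v; values are not copied */
ierr = VecDestroy(&w);CHKERRQ(ierr);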