void Hdf5DataReader::GetVariableOverNodes(Vec data,
                                          const std::string& rVariableName,
                                          unsigned timestep)
{
    if (!mIsDataComplete)
    {
        EXCEPTION("You can only get a vector for complete data");
    }
    if (!mIsUnlimitedDimensionSet && timestep!=0)
    {
        EXCEPTION("The dataset '" << mDatasetName << "' does not contain time dependent data");
    }

    std::map<std::string, unsigned>::iterator col_iter = mVariableToColumnIndex.find(rVariableName);
    if (col_iter == mVariableToColumnIndex.end())
    {
        EXCEPTION("The dataset '" << mDatasetName << "' does not contain data for variable " << rVariableName);
    }
    unsigned column_index = (*col_iter).second;

    // Check for valid timestep
    if (timestep >= mNumberTimesteps)
    {
        EXCEPTION("The dataset '" << mDatasetName << "' does not contain data for timestep number " << timestep);
    }

    int lo, hi, size;
    VecGetSize(data, &size);
    if ((unsigned)size != mDatasetDims[1])
    {
        EXCEPTION("Could not read data because Vec is the wrong size");
    }
    // Get range owned by each processor
    VecGetOwnershipRange(data, &lo, &hi);

    if (hi > lo) // i.e. we own some...
    {
        // Define a dataset in memory for this process
        hsize_t v_size[1] = {(unsigned)(hi-lo)};
        hid_t memspace = H5Screate_simple(1, v_size, NULL);

        // Select hyperslab in the file.
        hsize_t offset[3] = {timestep, (unsigned)(lo), column_index};
        hsize_t count[3]  = {1, (unsigned)(hi-lo), 1};
        hid_t hyperslab_space = H5Dget_space(mVariablesDatasetId);
        H5Sselect_hyperslab(hyperslab_space, H5S_SELECT_SET, offset, NULL, count, NULL);

        double* p_petsc_vector;
        VecGetArray(data, &p_petsc_vector);

        herr_t err = H5Dread(mVariablesDatasetId, H5T_NATIVE_DOUBLE, memspace, hyperslab_space, H5P_DEFAULT, p_petsc_vector);
        UNUSED_OPT(err);
        assert(err==0);

        VecRestoreArray(data, &p_petsc_vector);

        H5Sclose(hyperslab_space);
        H5Sclose(memspace);
    }
}
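A minimal caller sketch for the reader above (hedged: `reader`, `num_nodes`, and the variable/timestep values are hypothetical; the Vec's global size must match the dataset's node dimension):

Vec data;
VecCreateMPI(PETSC_COMM_WORLD, PETSC_DECIDE, num_nodes, &data);
reader.GetVariableOverNodes(data, "V", 10); // variable "V" at timestep 10
/* ... use the distributed data ... */
VecDestroy(&data); // PETSc 3.2+ signature; older versions take Vec, not Vec*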
Example #2
File: vecd.c Project: xyuan/dohp
/** Create a cache for Dirichlet part of closure vector, and scatter from global closure to Dirichlet cache.

@arg[in] gvec Global vector
@arg[out] dcache New vector to hold the Dirichlet values
@arg[out] dscat Scatter from global closure to \a dcache

@note This could be local but it doesn't cost anything to make it global.
**/
dErr VecDohpCreateDirichletCache(Vec gvec,Vec *dcache,VecScatter *dscat)
{
  MPI_Comm comm;
  dErr     err;
  dBool    isdohp;
  IS       from;
  Vec      gc;
  dInt     n,nc,crstart;

  dFunctionBegin;
  dValidHeader(gvec,VEC_CLASSID,1);
  dValidPointer(dcache,2);
  dValidPointer(dscat,3);
  err = PetscTypeCompare((PetscObject)gvec,VECDOHP,&isdohp);dCHK(err);
  if (!isdohp) dERROR(PETSC_COMM_SELF,PETSC_ERR_SUP,"Vec type %s",((PetscObject)gvec)->type_name);
  err = PetscObjectGetComm((PetscObject)gvec,&comm);dCHK(err);
  err = VecGetLocalSize(gvec,&n);dCHK(err);
  err = VecDohpGetClosure(gvec,&gc);dCHK(err);
  err = VecGetLocalSize(gc,&nc);dCHK(err);
  err = VecGetOwnershipRange(gc,&crstart,NULL);dCHK(err);
  err = VecCreateMPI(comm,nc-n,PETSC_DECIDE,dcache);dCHK(err);
  err = ISCreateStride(comm,nc-n,crstart+n,1,&from);dCHK(err);
  err = VecScatterCreate(gc,from,*dcache,NULL,dscat);dCHK(err);
  err = VecDohpRestoreClosure(gvec,&gc);dCHK(err);
  err = ISDestroy(&from);dCHK(err);
  /* \todo deal with rotations */
  dFunctionReturn(0);
}
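A hedged usage sketch for the routine above: the scatter's source is the closure vector, so a caller would scatter from the closure into the cache (error handling in the file's own style):

Vec        gc,dcache;
VecScatter dscat;
err = VecDohpCreateDirichletCache(gvec,&dcache,&dscat);dCHK(err);
err = VecDohpGetClosure(gvec,&gc);dCHK(err);
err = VecScatterBegin(dscat,gc,dcache,INSERT_VALUES,SCATTER_FORWARD);dCHK(err);
err = VecScatterEnd(dscat,gc,dcache,INSERT_VALUES,SCATTER_FORWARD);dCHK(err);
err = VecDohpRestoreClosure(gvec,&gc);dCHK(err);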
Example #3
/*
   ExactSolution - Computes the exact solution at a given time.

   Input Parameters:
   t - current time
   solution - vector in which exact solution will be computed
   appctx - user-defined application context

   Output Parameter:
   solution - vector with the newly computed exact solution
*/
PetscErrorCode ExactSolution(PetscReal t,Vec solution,AppCtx *appctx)
{
  PetscScalar    *s_localptr,h = appctx->h,x;
  PetscInt       i,mybase,myend;
  PetscErrorCode ierr;

  /*
     Determine starting and ending points of each processor's
     range of grid values
  */
  ierr = VecGetOwnershipRange(solution,&mybase,&myend);CHKERRQ(ierr);

  /*
     Get a pointer to vector data.
  */
  ierr = VecGetArray(solution,&s_localptr);CHKERRQ(ierr);

  /*
     Simply write the solution directly into the array locations.
     Alternatively, we could use VecSetValues() or VecSetValuesLocal().
  */
  for (i=mybase; i<myend; i++) {
    x = h*(PetscReal)i;
    s_localptr[i-mybase] = (t + 1.0)*(1.0 + x*x);
  }

  /*
     Restore vector
  */
  ierr = VecRestoreArray(solution,&s_localptr);CHKERRQ(ierr);
  return 0;
}
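Such a routine is typically paired with an error check on a computed solution u; a sketch using standard PETSc calls (u, t, and appctx assumed in scope):

Vec       u_exact;
PetscReal norm;
ierr = VecDuplicate(u,&u_exact);CHKERRQ(ierr);
ierr = ExactSolution(t,u_exact,&appctx);CHKERRQ(ierr);
ierr = VecAXPY(u_exact,-1.0,u);CHKERRQ(ierr);  /* u_exact <- u_exact - u */
ierr = VecNorm(u_exact,NORM_2,&norm);CHKERRQ(ierr);
ierr = VecDestroy(&u_exact);CHKERRQ(ierr);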
Example #4
inline int PetscVector::last_local_index () const {
  assert (this->initialized());
  int ierr=0, petsc_first=0, petsc_last=0;
  ierr = VecGetOwnershipRange (_vec, &petsc_first, &petsc_last);
  CHKERRABORT(MPI_COMM_WORLD,ierr);
  return static_cast<int>(petsc_last);
}
Example #5
/*
   SNESVIComputeInactiveSetIS - Gets the global indices for the inactive set variables

   Input parameters
+  upper - the upper bound vector
.  lower - the lower bound vector
.  X     - the snes solution vector
-  F     - the nonlinear function vector

   Output parameter
.  inact - inactive set index set

 */
PetscErrorCode SNESVIComputeInactiveSetIS(Vec upper,Vec lower,Vec X,Vec F,IS *inact)
{
  PetscErrorCode    ierr;
  const PetscScalar *x,*xl,*xu,*f;
  PetscInt          *idx_act,i,nlocal,nloc_isact=0,ilow,ihigh,i1=0;

  PetscFunctionBegin;
  ierr = VecGetLocalSize(X,&nlocal);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(X,&ilow,&ihigh);CHKERRQ(ierr);
  ierr = VecGetArrayRead(X,&x);CHKERRQ(ierr);
  ierr = VecGetArrayRead(lower,&xl);CHKERRQ(ierr);
  ierr = VecGetArrayRead(upper,&xu);CHKERRQ(ierr);
  ierr = VecGetArrayRead(F,&f);CHKERRQ(ierr);
  /* Compute inactive set size */
  for (i=0; i < nlocal; i++) {
    if (((PetscRealPart(x[i]) > PetscRealPart(xl[i]) + 1.e-8 || (PetscRealPart(f[i]) < 0.0)) && ((PetscRealPart(x[i]) < PetscRealPart(xu[i]) - 1.e-8) || PetscRealPart(f[i]) > 0.0))) nloc_isact++;
  }

  ierr = PetscMalloc(nloc_isact*sizeof(PetscInt),&idx_act);CHKERRQ(ierr);

  /* Set inactive set indices */
  for (i=0; i < nlocal; i++) {
    if (((PetscRealPart(x[i]) > PetscRealPart(xl[i]) + 1.e-8 || (PetscRealPart(f[i]) < 0.0)) && ((PetscRealPart(x[i]) < PetscRealPart(xu[i]) - 1.e-8) || PetscRealPart(f[i]) > 0.0))) idx_act[i1++] = ilow+i;
  }

  /* Create inactive set IS */
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)upper),nloc_isact,idx_act,PETSC_OWN_POINTER,inact);CHKERRQ(ierr);

  ierr = VecRestoreArrayRead(X,&x);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(lower,&xl);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(upper,&xu);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(F,&f);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
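One hedged way to use the resulting index set is to extract the inactive sub-vector with the standard VecGetSubVector/VecRestoreSubVector pair (xu, xl, X, F assumed in scope):

IS  inact;
Vec X_inact;
ierr = SNESVIComputeInactiveSetIS(xu,xl,X,F,&inact);CHKERRQ(ierr);
ierr = VecGetSubVector(X,inact,&X_inact);CHKERRQ(ierr);
/* ... operate on the inactive variables ... */
ierr = VecRestoreSubVector(X,inact,&X_inact);CHKERRQ(ierr);
ierr = ISDestroy(&inact);CHKERRQ(ierr);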
Example #6
SparseGp *
SparseGp_create (AcfrKern *kern, int rank, gsl_matrix *trainObs, 
                 gsl_vector *trainLabels, gsl_matrix *testObs, 
                 gsl_vector *testLabels)
{
    PetscErrorCode ierr;        /* not used here - not returning int */
    (void) ierr;

    SparseGp *gp = calloc (1, sizeof (*gp));

    gp->kern = kern;

    gp->trainObs = trainObs;
    gp->trainLabels = trainLabels;
    gp->testObs = testObs;
    gp->testLabels = testLabels;

    PetscInt N = gp->trainLabels->size;
    ierr = VecCreate (PETSC_COMM_WORLD, &(gp->_trainLabels)); /* CHKERRQ (ierr); */
    ierr = VecSetSizes (gp->_trainLabels, PETSC_DECIDE, N); /* CHKERRQ (ierr); */
    ierr = VecSetFromOptions (gp->_trainLabels); /* CHKERRQ (ierr); */

    ierr = VecGetOwnershipRange (gp->_trainLabels, &(gp->rstart), &(gp->rend)); /* CHKERRQ (ierr); */
    ierr = VecGetLocalSize (gp->_trainLabels, &(gp->nlocal)); /* CHKERRQ (ierr); */
    petsc_util_fillVec (gp->trainLabels, &gp->_trainLabels, gp->rstart, gp->rend);

    gp->rank = rank;

    return gp;
}
Example #7
void AbstractContinuumMechanicsSolver<DIM>::AddIdentityBlockForDummyPressureVariables(ApplyDirichletBcsType type)
{
    assert(mCompressibilityType==INCOMPRESSIBLE);

    int lo, hi;
    VecGetOwnershipRange(mResidualVector, &lo, &hi);

    for (unsigned i=0; i<mrQuadMesh.GetNumNodes(); i++)
    {
        if (mrQuadMesh.GetNode(i)->IsInternal())
        {
            unsigned row = (DIM+1)*i + DIM; // DIM+1 is the problem dimension
            if (lo <= (int)row && (int)row < hi)
            {
                if (type!=LINEAR_PROBLEM)
                {
                    PetscVecTools::SetElement(mResidualVector, row, mCurrentSolution[row]-0.0);
                }
                if (type!=NONLINEAR_PROBLEM_APPLY_TO_RESIDUAL_ONLY) // i.e. doing a whole linear system
                {
                    double rhs_vector_val = type==LINEAR_PROBLEM ? 0.0 : mCurrentSolution[row]-0.0;
                    PetscVecTools::SetElement(mLinearSystemRhsVector, row, rhs_vector_val);
                    // This assumes the row is already zero, which it should be
                    PetscMatTools::SetElement(mSystemLhsMatrix, row, row, 1.0);
                    PetscMatTools::SetElement(mPreconditionMatrix, row, row, 1.0);
                }
            }
        }
    }
}
Example #8
/*
   ExactSolution - Computes the exact solution at a given time.

   Input Parameters:
   t - current time
   solution - vector in which exact solution will be computed
   appctx - user-defined application context

   Output Parameter:
   solution - vector with the newly computed exact solution
*/
PetscErrorCode ExactSolution(PetscReal t,Vec solution,AppCtx *appctx)
{
  PetscScalar    *s_localptr,h = appctx->h,ex1,ex2,sc1,sc2;
  PetscInt       i,mybase,myend;
  PetscErrorCode ierr;

  /* 
     Determine starting and ending points of each processor's 
     range of grid values
  */
  ierr = VecGetOwnershipRange(solution,&mybase,&myend);CHKERRQ(ierr);

  /*
     Get a pointer to vector data.
  */
  ierr = VecGetArray(solution,&s_localptr);CHKERRQ(ierr);

  /* 
     Simply write the solution directly into the array locations.
     Alternatively, we could use VecSetValues() or VecSetValuesLocal().
  */
  ex1 = exp(-36.*PETSC_PI*PETSC_PI*t); ex2 = exp(-4.*PETSC_PI*PETSC_PI*t);
  sc1 = PETSC_PI*6.*h;                 sc2 = PETSC_PI*2.*h;
  for (i=mybase; i<myend; i++) {
    s_localptr[i-mybase] = PetscSinScalar(sc1*(PetscReal)i)*ex1 + 3.*PetscSinScalar(sc2*(PetscReal)i)*ex2;
  }

  /* 
     Restore vector
  */
  ierr = VecRestoreArray(solution,&s_localptr);CHKERRQ(ierr);
  return 0;
}
Example #9
inline PetscErrorCode ScatIEMatMult(Mat mat,Vec xx,Vec yy)
{
  PetscInt       n,s,t;
  PetscScalar    tmp,v;
  PetscInt       xstart,xend;

  PetscFunctionBegin;
  VecGetOwnershipRange(xx,&xstart,&xend);
  VecGetSize(yy,&n);
  VecZeroEntries(yy);

  for(s = 0; s < n; s++)
  {
      tmp = 0;
      for(t = xstart; t < xend; t++)
      {
          VecGetValues(xx,1,&t,&v);
          tmp += ScatIE.CoefMatFast(s,t)*v;
      }
      VecSetValues(yy,1,&s,&tmp,ADD_VALUES);
  }
  VecAssemblyBegin(yy);
  VecAssemblyEnd(yy);

  PetscFunctionReturn(0);
}
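This routine has the shape of a shell-matrix multiply; a hedged registration sketch with the standard PETSc shell-matrix API (local/global sizes nlocal and N hypothetical):

Mat A;
ierr = MatCreateShell(PETSC_COMM_WORLD,nlocal,nlocal,N,N,NULL,&A);CHKERRQ(ierr);
ierr = MatShellSetOperation(A,MATOP_MULT,(void (*)(void))ScatIEMatMult);CHKERRQ(ierr);
/* MatMult(A,x,y) now dispatches to ScatIEMatMult(A,x,y) */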
Example #10
/* Histogram a PETSc vector
 *
 * x is the vector
 * nbins -- number of bins
 * xmin, xmax -- histogram xmin, xmax -- assume uniform bins
 * hh -- output vector -- assumed to be defined.
 */
void VecHist(const Vec& x, int nbins, double xmin, double xmax, vector<double>& hh) {
  gsl_histogram *h1; 
  double *_x, x1;
  PetscInt lo, hi;
  vector<double> tmp(nbins);

  // Set up the histogram struct
  h1 = gsl_histogram_alloc(nbins);
  gsl_histogram_set_ranges_uniform(h1, xmin, xmax);

  // Get the array
  VecGetOwnershipRange(x, &lo, &hi);
  hi -= lo;
  VecGetArray(x, &_x);
  for (PetscInt ii=0; ii < hi; ++ii) {
    x1 = _x[ii];
    if (x1 < xmin) x1 = xmin;
    if (x1 >= xmax) x1 = xmax - 1.e-10;
    gsl_histogram_increment(h1, x1);
  }
  VecRestoreArray(x, &_x);
  
  // Fill the temporary output vector
  for (int ii =0; ii<nbins; ++ii) 
    tmp[ii] = gsl_histogram_get(h1, ii);

  // MPI Allreduce
  MPI_Allreduce(&tmp[0], &hh[0], nbins, MPI_DOUBLE, MPI_SUM, PETSC_COMM_WORLD); 

  // Clean up
  gsl_histogram_free(h1);
}
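Caller sketch: hh must already be sized to nbins, since the routine only reduces into it (range values hypothetical):

vector<double> hh(nbins);
VecHist(x, nbins, 0.0, 1.0, hh);
// every rank now sees the same global counts in hh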
Example #11
int main(int argc,char **argv)
{
  PetscErrorCode ierr;
  PetscInt       n = 5,N,low,high,iglobal,i;
  PetscMPIInt    size,rank;
  PetscScalar    value,zero = 0.0;
  Vec            x,y;
  IS             is1,is2;
  VecScatter     ctx;

  ierr = PetscInitialize(&argc,&argv,(char*)0,help);CHKERRQ(ierr); 
  ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);

  /* create two vectors */
  N = size*n;
  ierr = VecCreate(PETSC_COMM_WORLD,&y);CHKERRQ(ierr);
  ierr = VecSetSizes(y,PETSC_DECIDE,N);CHKERRQ(ierr);
  ierr = VecSetFromOptions(y);CHKERRQ(ierr);
  if (!rank) {
    ierr = VecCreateSeq(PETSC_COMM_SELF,N,&x);CHKERRQ(ierr);
  } else {
    ierr = VecCreateSeq(PETSC_COMM_SELF,0,&x);CHKERRQ(ierr);
  }

  /* create two index sets */
  if (!rank) {
    ierr = ISCreateStride(PETSC_COMM_SELF,N,0,1,&is1);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,N,0,1,&is2);CHKERRQ(ierr);
  } else {
    ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is1);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&is2);CHKERRQ(ierr);
  }

  ierr = VecSet(x,zero);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(y,&low,&high);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    iglobal = i + low; value = (PetscScalar) (i + 10*rank);
    ierr = VecSetValues(y,1,&iglobal,&value,INSERT_VALUES);CHKERRQ(ierr);
  }
  ierr = VecAssemblyBegin(y);CHKERRQ(ierr);
  ierr = VecAssemblyEnd(y);CHKERRQ(ierr);
  ierr = VecView(y,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);

  ierr = VecScatterCreate(y,is2,x,is1,&ctx);CHKERRQ(ierr);
  ierr = VecScatterBegin(ctx,y,x,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(ctx,y,x,ADD_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&ctx);CHKERRQ(ierr);
  
  if (!rank) 
    {printf("----\n"); ierr = VecView(x,PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);}

  ierr = VecDestroy(&x);CHKERRQ(ierr);
  ierr = VecDestroy(&y);CHKERRQ(ierr);
  ierr = ISDestroy(&is1);CHKERRQ(ierr);
  ierr = ISDestroy(&is2);CHKERRQ(ierr);

  ierr = PetscFinalize();
  return 0;
}
Example #12
PetscErrorCode VecMin_MPI(Vec xin,PetscInt *idx,PetscReal *z)
{
  PetscErrorCode ierr;
  PetscReal      work;

  PetscFunctionBegin;
  /* Find the local Min */
  ierr = VecMin_Seq(xin,idx,&work);CHKERRQ(ierr);

  /* Find the global Min */
  if (!idx) {
    ierr = MPIU_Allreduce(&work,z,1,MPIU_REAL,MPIU_MIN,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
  } else {
    PetscReal work2[2],z2[2];
    PetscInt  rstart;

    ierr = VecGetOwnershipRange(xin,&rstart,NULL);CHKERRQ(ierr);
    work2[0] = work;
    work2[1] = *idx + rstart;
    ierr = MPIU_Allreduce(work2,z2,2,MPIU_REAL,MPIU_MININDEX_OP,PetscObjectComm((PetscObject)xin));CHKERRQ(ierr);
    *z   = z2[0];
    *idx = (PetscInt)z2[1];
  }
  PetscFunctionReturn(0);
}
Example #13
PetscErrorCode GetRadiusVec(Vec vecRad, int Nr, int Nz, double hr, int m)
{
   PetscErrorCode ierr;
   int i, j, ir, ic, ns, ne;
   double value=1.0;
   ierr = VecGetOwnershipRange(vecRad,&ns,&ne); CHKERRQ(ierr);
   for(i=ns;i<ne; i++)
     {
       j = i;
       ir = (j /= Nz) % Nr;
       ic = (j /= Nr) % 3;
 
       if (ic==0) value = (ir + 0.5)*hr;
       if (ic==1) value = (ir + 0.0)*hr;
       if (ic==2) value = (ir + 0.0)*hr + (ir==0)*(m==0)*hr/8; 
        
       VecSetValue(vecRad, i, value, INSERT_VALUES);
     }

   ierr = VecAssemblyBegin(vecRad); CHKERRQ(ierr);
   ierr = VecAssemblyEnd(vecRad); CHKERRQ(ierr);

   PetscFunctionReturn(0);
}
Example #14
PetscErrorCode GetUnitVec(Vec ej, int pol, int N)
{
   PetscErrorCode ierr;
   int i, j, ns, ne, ic;
   int Nc=3;
   int Nxyz=N/6;

   ierr = VecGetOwnershipRange(ej,&ns,&ne); CHKERRQ(ierr);

   for(i=ns; i<ne; i++)
     {
       j=i;
       ic = (j /= Nxyz) % Nc;
       
       if (ic==pol)
	 VecSetValue(ej,i,1.0,INSERT_VALUES);
       else
	 VecSetValue(ej,i,0.0,INSERT_VALUES);
     }

   ierr = VecAssemblyBegin(ej); CHKERRQ(ierr);
   ierr = VecAssemblyEnd(ej); CHKERRQ(ierr);
   
   PetscFunctionReturn(0);
  
}
Example #15
/*@
   DMDANaturalAllToGlobalCreate - Creates a scatter context that maps from a copy
     of the entire vector on each processor to its local part in the global vector.

   Collective on DMDA

   Input Parameter:
.  da - the distributed array context

   Output Parameter:
.  scatter - the scatter context

   Level: advanced

.keywords: distributed array, global to local, begin, coarse problem

.seealso: DMDAGlobalToNaturalEnd(), DMLocalToGlobalBegin(), DMDACreate2d(),
          DMGlobalToLocalBegin(), DMGlobalToLocalEnd(), DMDACreateNaturalVector()
@*/
PetscErrorCode  DMDANaturalAllToGlobalCreate(DM da,VecScatter *scatter)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;
  PetscInt       M,m = dd->Nlocal,start;
  IS             from,to;
  Vec            tmplocal,global;
  AO             ao;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  PetscValidPointer(scatter,2);
  ierr = DMDAGetAO(da,&ao);CHKERRQ(ierr);

  /* create the scatter context */
  ierr = MPI_Allreduce(&m,&M,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)da));CHKERRQ(ierr);
  ierr = VecCreateMPIWithArray(PetscObjectComm((PetscObject)da),dd->w,m,PETSC_DETERMINE,0,&global);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(global,&start,NULL);CHKERRQ(ierr);
  ierr = ISCreateStride(PetscObjectComm((PetscObject)da),m,start,1,&from);CHKERRQ(ierr);
  ierr = AOPetscToApplicationIS(ao,from);CHKERRQ(ierr);
  ierr = ISCreateStride(PetscObjectComm((PetscObject)da),m,start,1,&to);CHKERRQ(ierr);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,dd->w,M,0,&tmplocal);CHKERRQ(ierr);
  ierr = VecScatterCreate(tmplocal,from,global,to,scatter);CHKERRQ(ierr);
  ierr = VecDestroy(&tmplocal);CHKERRQ(ierr);
  ierr = VecDestroy(&global);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
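A hedged usage sketch for the scatter created above: each rank holds a full natural-ordering copy and scatters its own slice into the DMDA global vector (natall and global assumed created elsewhere):

VecScatter scatter;
ierr = DMDANaturalAllToGlobalCreate(da,&scatter);CHKERRQ(ierr);
ierr = VecScatterBegin(scatter,natall,global,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
ierr = VecScatterEnd(scatter,natall,global,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
ierr = VecScatterDestroy(&scatter);CHKERRQ(ierr);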
Example #16
void SetJacobian(Geometry geo, Mat J, Vec v, int jc, int jr, int jh){
// use jc = -1 for last 2 columns, and jc = -2 for blocks (all jc)

	AssembleVec(v);

	int ns, ne, i;
	VecGetOwnershipRange(v, &ns, &ne);
	const double *vals;
	VecGetArrayRead(v, &vals);

	for(i=ns; i<ne; i++){
		if( Last2(geo, i) || vals[i-ns] == 0.0) continue;

		int col, offset = jh*(Nxyzcr(geo)+2),
		ij = i%(Nxyzcr(geo)+2);
	
		Point p;
		CreatePoint_i(&p, ij, &geo->gN);
	
		if(jc == -1) //columns
			col = Nxyzcr(geo)+jr;
		else if(jc == -2) //blocks
			col = jr*Nxyzc(geo) + xyzc(&p);
		else // tensors
			col = jr*Nxyzc(geo) + jc*Nxyz(geo) + xyz(&p);
	
		MatSetValue(J, i, col+offset, vals[i-ns], ADD_VALUES);
	
	}
	VecRestoreArrayRead(v, &vals);
}
Example #17
void CollectVec(Geometry geo, Vec vN, Vec vM){
	VecSet(vM, 0.0);

	Vecfun f;
	CreateVecfun(&f, geo->vf);

	const double *vals;
	VecGetArrayRead(vN, &vals);
	int ns, ne;
	VecGetOwnershipRange(vN, &ns, &ne);
	if( ne > Nxyzcr(geo)-2) ne = Nxyzcr(geo)-2;

	int i;
	for(i=ns; i<ne; i++){
		if( valr(&f, i) == 0.0) continue;
		// skip if gain profile zero here

		Point p;
		CreatePoint_i(&p, i, &geo->gN);
		if( projectmedium(&p, &geo->gM, geo->LowerPML) )
			VecSetValue(vM, xyz(&p), vals[i-ns], ADD_VALUES);
	}

	VecRestoreArrayRead(vN, &vals);
	DestroyVecfun(&f);
	AssembleVec(vM);
}
Example #18
// -------------------------------------------------------------
// Vec2GA
// -------------------------------------------------------------
static
PetscErrorCode
Vec2GA(Vec x, int pgroup, int *ga, bool trans = false)
{
  int lrows, rows;
  PetscErrorCode ierr = 0;
  
  ierr = VecGetLocalSize(x, &lrows); CHKERRQ(ierr);
  ierr = VecGetSize(x, &rows); CHKERRQ(ierr);
  
  PetscInt vlo, vhi;
  ierr = VecGetOwnershipRange(x, &vlo, &vhi); CHKERRQ(ierr);
  
  PetscScalar *v;
  ierr = VecGetArray(x, &v); CHKERRQ(ierr);

  int lo[2] = {0,0}, hi[2] = {0,0}, ld[2] = {1,1};
  if (!trans) {
    ierr = CreateMatGA(pgroup, lrows, 1, rows, 1, ga); CHKERRQ(ierr);
    lo[0] = vlo; 
    hi[0] = vhi-1;
  } else {
    ierr = CreateMatGA(pgroup, 1, lrows, 1, rows, ga); CHKERRQ(ierr);
    lo[1] = vlo; 
    hi[1] = vhi-1;
  }
  NGA_Put(*ga, lo, hi, v, ld);
  // GA_Print(*ga);
  ierr = VecRestoreArray(x, &v); CHKERRQ(ierr);

  GA_Pgroup_sync(pgroup);
  return ierr;
}
Example #19
PetscErrorCode NavierStokesSolver :: createLocalToGlobalMappingsLambda()
{
	PetscErrorCode ierr;
	PetscInt	m,n,i,j,mstart,nstart;
	PetscReal	**lp;
	PetscInt	localIdx;

	ierr = VecGetOwnershipRange(lambda, &localIdx, NULL); CHKERRQ(ierr);
	// populate local vector with global indices
	// values outside the domain are never accessed and not set
	// P (pressure)
	ierr = DMCreateLocalVector(pda, &pMapping); CHKERRQ(ierr);
	ierr = DMDAVecGetArray(pda, pMapping, &lp); CHKERRQ(ierr);
	ierr = DMDAGetCorners(pda, &mstart, &nstart, NULL, &m, &n, NULL); CHKERRQ(ierr);
	for(j=nstart; j<nstart+n; j++)
	{
		for(i=mstart; i < mstart+m; i++)
		{
			lp[j][i] = localIdx;
			localIdx++;
		}
	}

	ierr = DMDAVecRestoreArray(pda, pMapping, &lp); CHKERRQ(ierr);

	ierr = DMDALocalToLocalBegin(pda, pMapping, INSERT_VALUES, pMapping); CHKERRQ(ierr);
	ierr = DMDALocalToLocalEnd(pda, pMapping, INSERT_VALUES, pMapping); CHKERRQ(ierr);
	return 0;
}
Example #20
/*
   DAGlobalToNatural_Create - Create the global to natural scatter object

   Collective on DA

   Input Parameter:
.  da - the distributed array context

   Level: developer

   Notes: This is an internal routine called by DAGlobalToNatural() to 
     create the scatter context.

.keywords: distributed array, global to local, begin

.seealso: DAGlobalToNaturalBegin(), DAGlobalToNaturalEnd(), DALocalToGlobal(), DACreate2d(), 
          DAGlobalToLocalBegin(), DAGlobalToLocalEnd(), DACreateNaturalVector()
*/
PetscErrorCode DAGlobalToNatural_Create(DA da)
{
  PetscErrorCode ierr;
  PetscInt  m,start,Nlocal;
  IS  from,to;
  Vec global;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_COOKIE,1);
  if (!da->natural) {
    SETERRQ(PETSC_ERR_ORDER,"Natural layout vector not yet created; cannot scatter into it");
  }

  /* create the scatter context */
  ierr = VecGetLocalSize(da->natural,&m);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(da->natural,&start,PETSC_NULL);CHKERRQ(ierr);

  ierr = DAGetNatural_Private(da,&Nlocal,&to);CHKERRQ(ierr);
  if (Nlocal != m) SETERRQ2(PETSC_ERR_PLIB,"Internal error: Nlocal %D local vector size %D",Nlocal,m);
  ierr = ISCreateStride(((PetscObject)da)->comm,m,start,1,&from);CHKERRQ(ierr);
  ierr = VecCreateMPIWithArray(((PetscObject)da)->comm,da->Nlocal,PETSC_DETERMINE,0,&global);CHKERRQ(ierr);
  ierr = VecSetBlockSize(global,da->w);CHKERRQ(ierr);
  ierr = VecScatterCreate(global,from,da->natural,to,&da->gton);CHKERRQ(ierr);
  ierr = VecDestroy(global);CHKERRQ(ierr);
  ierr = ISDestroy(from);CHKERRQ(ierr);
  ierr = ISDestroy(to);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
Example #21
int main(int argc,char **argv)
{
  Vec            v,s,r,vecs[2];               /* vectors */
  PetscInt       i,start,end,n = 20;
  PetscErrorCode ierr;
  PetscScalar    value;

  PetscInitialize(&argc,&argv,(char*)0,help);
  ierr = PetscOptionsGetInt(PETSC_NULL,"-n",&n,PETSC_NULL);CHKERRQ(ierr);

  /*
      Create a multi-component vector with 4 components (block size 4)
  */
  ierr = VecCreate(PETSC_COMM_WORLD,&v);CHKERRQ(ierr);
  ierr = VecSetSizes(v,PETSC_DECIDE,n);CHKERRQ(ierr);
  ierr = VecSetBlockSize(v,4);CHKERRQ(ierr);
  ierr = VecSetFromOptions(v);CHKERRQ(ierr);

  /*
      Create double-component vectors
  */
  ierr = VecCreate(PETSC_COMM_WORLD,&s);CHKERRQ(ierr);
  ierr = VecSetSizes(s,PETSC_DECIDE,n/2);CHKERRQ(ierr);
  ierr = VecSetBlockSize(s,2);CHKERRQ(ierr);
  ierr = VecSetFromOptions(s);CHKERRQ(ierr);
  ierr = VecDuplicate(s,&r);CHKERRQ(ierr);

  vecs[0] = s;
  vecs[1] = r;
  /*
     Set the vector values
  */
  ierr = VecGetOwnershipRange(v,&start,&end);CHKERRQ(ierr);
  for (i=start; i<end; i++) {
    value = i;
    ierr  = VecSetValues(v,1,&i,&value,INSERT_VALUES);CHKERRQ(ierr);
  }

  /*
     Get the components from the multi-component vector to the other vectors
  */
  ierr = VecStrideGatherAll(v,vecs,INSERT_VALUES);CHKERRQ(ierr);

  ierr = VecView(s,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  ierr = VecView(r,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);

  ierr = VecStrideScatterAll(vecs,v,ADD_VALUES);CHKERRQ(ierr);

  ierr = VecView(v,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  ierr = VecDestroy(&v);CHKERRQ(ierr);
  ierr = VecDestroy(&s);CHKERRQ(ierr);
  ierr = VecDestroy(&r);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return 0;
}
Example #22
/*@
    KSPComputeExplicitOperator - Computes the explicit preconditioned operator.

    Collective on KSP

    Input Parameter:
.   ksp - the Krylov subspace context

    Output Parameter:
.   mat - the explicit preconditioned operator

    Notes:
    This computation is done by applying the operators to columns of the
    identity matrix.

    Currently, this routine uses a dense matrix format when 1 processor
    is used and a sparse format otherwise.  This routine is costly in general,
    and is recommended for use only with relatively small systems.

    Level: advanced

.keywords: KSP, compute, explicit, operator

.seealso: KSPComputeEigenvaluesExplicitly(), PCComputeExplicitOperator()
@*/
PetscErrorCode  KSPComputeExplicitOperator(KSP ksp,Mat *mat)
{
  Vec            in,out;
  PetscErrorCode ierr;
  PetscMPIInt    size;
  PetscInt       i,M,m,*rows,start,end;
  Mat            A;
  MPI_Comm       comm;
  PetscScalar    *array,one = 1.0;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(ksp,KSP_CLASSID,1);
  PetscValidPointer(mat,2);
  ierr = PetscObjectGetComm((PetscObject)ksp,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  ierr = VecDuplicate(ksp->vec_sol,&in);CHKERRQ(ierr);
  ierr = VecDuplicate(ksp->vec_sol,&out);CHKERRQ(ierr);
  ierr = VecGetSize(in,&M);CHKERRQ(ierr);
  ierr = VecGetLocalSize(in,&m);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(in,&start,&end);CHKERRQ(ierr);
  ierr = PetscMalloc1(m,&rows);CHKERRQ(ierr);
  for (i=0; i<m; i++) rows[i] = start + i;

  ierr = MatCreate(comm,mat);CHKERRQ(ierr);
  ierr = MatSetSizes(*mat,m,m,M,M);CHKERRQ(ierr);
  if (size == 1) {
    ierr = MatSetType(*mat,MATSEQDENSE);CHKERRQ(ierr);
    ierr = MatSeqDenseSetPreallocation(*mat,NULL);CHKERRQ(ierr);
  } else {
    ierr = MatSetType(*mat,MATMPIAIJ);CHKERRQ(ierr);
    ierr = MatMPIAIJSetPreallocation(*mat,0,NULL,0,NULL);CHKERRQ(ierr);
  }
  ierr = MatSetOption(*mat,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_FALSE);CHKERRQ(ierr);
  if (!ksp->pc) {ierr = KSPGetPC(ksp,&ksp->pc);CHKERRQ(ierr);}
  ierr = PCGetOperators(ksp->pc,&A,NULL);CHKERRQ(ierr);

  for (i=0; i<M; i++) {

    ierr = VecSet(in,0.0);CHKERRQ(ierr);
    ierr = VecSetValues(in,1,&i,&one,INSERT_VALUES);CHKERRQ(ierr);
    ierr = VecAssemblyBegin(in);CHKERRQ(ierr);
    ierr = VecAssemblyEnd(in);CHKERRQ(ierr);

    ierr = KSP_MatMult(ksp,A,in,out);CHKERRQ(ierr);
    ierr = KSP_PCApply(ksp,out,in);CHKERRQ(ierr);

    ierr = VecGetArray(in,&array);CHKERRQ(ierr);
    ierr = MatSetValues(*mat,m,rows,1,&i,array,INSERT_VALUES);CHKERRQ(ierr);
    ierr = VecRestoreArray(in,&array);CHKERRQ(ierr);

  }
  ierr = PetscFree(rows);CHKERRQ(ierr);
  ierr = VecDestroy(&in);CHKERRQ(ierr);
  ierr = VecDestroy(&out);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(*mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
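Typical hedged use, for inspecting small preconditioned systems (ksp assumed set up with KSPSetOperators()):

Mat expmat;
ierr = KSPComputeExplicitOperator(ksp,&expmat);CHKERRQ(ierr);
ierr = MatView(expmat,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
ierr = MatDestroy(&expmat);CHKERRQ(ierr);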
Example #23
void DescribeVec(Vec v) {
    int low,high,size,total;
    VecGetLocalSize(v, &size);
    VecGetSize(v, &total);
    VecGetOwnershipRange(v, &low, &high);
    log("Local size %d/%d\n",size,total);
    log("Ownership range %d - %d\n",low,high);
}
Example #24
void Field_solver::get_vector_ownership_range_and_local_size_for_each_process(
    Vec *x, PetscInt *rstart, PetscInt *rend, PetscInt *nlocal )
{
    PetscErrorCode ierr;
    ierr = VecGetOwnershipRange( *x, rstart, rend ); CHKERRXX(ierr);
    ierr = VecGetLocalSize( *x, nlocal ); CHKERRXX(ierr);
    return;
}
Example #25
/*@
  VecWhichGreaterThan - Creates an index set containing the indices
  where the vectors Vec1 > Vec2

  Collective on S

  Input Parameters:
. Vec1, Vec2 - the two vectors to compare

  Output Parameter:
. S - The index set containing the indices i where vec1[i] > vec2[i]

  Level: advanced
@*/
PetscErrorCode VecWhichGreaterThan(Vec Vec1, Vec Vec2, IS * S)
{
  PetscErrorCode ierr;
  PetscInt       n,low,high,low2,high2,n_gt=0,i;
  PetscInt       *gt=NULL;
  PetscScalar    *v1,*v2;
  MPI_Comm       comm;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(Vec1,VEC_CLASSID,1);
  PetscValidHeaderSpecific(Vec2,VEC_CLASSID,2);
  PetscCheckSameComm(Vec1,1,Vec2,2);

  ierr = VecGetOwnershipRange(Vec1, &low, &high);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(Vec2, &low2, &high2);CHKERRQ(ierr);
  if ( low != low2 || high != high2 ) SETERRQ(PETSC_COMM_SELF,1,"Vectors must have identical layout");

  ierr = VecGetLocalSize(Vec1,&n);CHKERRQ(ierr);

  if (n>0){

    if (Vec1 == Vec2){
      ierr = VecGetArray(Vec1,&v1);CHKERRQ(ierr);
      v2=v1;
    } else {
      ierr = VecGetArray(Vec1,&v1);CHKERRQ(ierr);
      ierr = VecGetArray(Vec2,&v2);CHKERRQ(ierr);
    }

    ierr = PetscMalloc1(n, &gt );CHKERRQ(ierr);

    for (i=0; i<n; i++){
      if (PetscRealPart(v1[i]) > PetscRealPart(v2[i])) {gt[n_gt]=low+i; n_gt++;}
    }

    if (Vec1 == Vec2){
      ierr = VecRestoreArray(Vec1,&v1);CHKERRQ(ierr);
    } else {
      ierr = VecRestoreArray(Vec1,&v1);CHKERRQ(ierr);
      ierr = VecRestoreArray(Vec2,&v2);CHKERRQ(ierr);
    }
  }
  ierr = PetscObjectGetComm((PetscObject)Vec1,&comm);CHKERRQ(ierr);
  ierr = ISCreateGeneral(comm,n_gt,gt,PETSC_OWN_POINTER,S);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
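Usage sketch with standard IS calls (x and y assumed to share a layout):

IS S;
ierr = VecWhichGreaterThan(x,y,&S);CHKERRQ(ierr);
ierr = ISView(S,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
ierr = ISDestroy(&S);CHKERRQ(ierr);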
Example #26
File: ex1.c Project: Kun-Qu/petsc
/* 
   FormFunction1 - Evaluates nonlinear function, F(x).

   Input Parameters:
.  snes - the SNES context
.  x    - input vector
.  ctx  - optional user-defined context

   Output Parameter:
.  f - function vector
 */
PetscErrorCode FormFunction1(SNES snes,Vec x,Vec f,void *ctx)
{
  PetscErrorCode    ierr;
  const PetscScalar *xx;
  PetscScalar       *ff;
  AppCtx            *user = (AppCtx*)ctx;
  Vec               xloc=user->xloc,floc=user->rloc;
  VecScatter        scatter=user->scatter;
  MPI_Comm          comm;
  PetscMPIInt       size,rank;
  PetscInt          rstart,rend;

  ierr = PetscObjectGetComm((PetscObject)snes,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  if (size > 1){
    /*
       This is a ridiculous case for testing intermediate steps from sequential
           code development to parallel implementation:
       (1) scatter x into a sequential vector;
       (2) each process evaluates all values of floc;
       (3) scatter floc back to the parallel f.
     */
    ierr = VecScatterBegin(scatter,x,xloc,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scatter,x,xloc,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

    ierr = VecGetOwnershipRange(f,&rstart,&rend);CHKERRQ(ierr);
    ierr = VecGetArrayRead(xloc,&xx);CHKERRQ(ierr);
    ierr = VecGetArray(floc,&ff);CHKERRQ(ierr);
    ff[0] = xx[0]*xx[0] + xx[0]*xx[1] - 3.0;
    ff[1] = xx[0]*xx[1] + xx[1]*xx[1] - 6.0;
    ierr = VecRestoreArray(floc,&ff);CHKERRQ(ierr);
    ierr = VecRestoreArrayRead(xloc,&xx);CHKERRQ(ierr);

    ierr = VecScatterBegin(scatter,floc,f,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd(scatter,floc,f,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  } else {
    /*
     Get pointers to vector data.
       - For default PETSc vectors, VecGetArray() returns a pointer to
         the data array.  Otherwise, the routine is implementation dependent.
       - You MUST call VecRestoreArray() when you no longer need access to
         the array.
    */
    ierr = VecGetArrayRead(x,&xx);CHKERRQ(ierr);
    ierr = VecGetArray(f,&ff);CHKERRQ(ierr);

    /* Compute function */
    ff[0] = xx[0]*xx[0] + xx[0]*xx[1] - 3.0;
    ff[1] = xx[0]*xx[1] + xx[1]*xx[1] - 6.0;

    /* Restore vectors */
    ierr = VecRestoreArrayRead(x,&xx);CHKERRQ(ierr);
    ierr = VecRestoreArray(f,&ff);CHKERRQ(ierr); 
  }
  return 0;
}
Example #27
void SparseVector::allocate(int m)
{
    VecCreate(PETSC_COMM_WORLD, &vec_);
    VecSetSizes(vec_, PETSC_DECIDE, m);
    VecSetType(vec_, VECSTANDARD);

    // Get MPI ranges
    VecGetOwnershipRange(vec_, &iLower_, &iUpper_);
}
Example #28
/*
  MatGetDiagonal_MFFD - Gets the diagonal for a matrix free matrix

        y ~= (F(u + ha) - F(u))/h,
  where F = nonlinear function, as set by SNESSetFunction()
        u = current iterate
        h = difference interval
*/
PetscErrorCode MatGetDiagonal_MFFD(Mat mat,Vec a)
{
    MatMFFD        ctx = (MatMFFD)mat->data;
    PetscScalar    h,*aa,*ww,v;
    PetscReal      epsilon = PETSC_SQRT_MACHINE_EPSILON,umin = 100.0*PETSC_SQRT_MACHINE_EPSILON;
    Vec            w,U;
    PetscErrorCode ierr;
    PetscInt       i,rstart,rend;

    PetscFunctionBegin;
    if (!ctx->funci) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ORDER,"Requires calling MatMFFDSetFunctioni() first");

    w    = ctx->w;
    U    = ctx->current_u;
    ierr = (*ctx->func)(ctx->funcctx,U,a);
    CHKERRQ(ierr);
    ierr = (*ctx->funcisetbase)(ctx->funcctx,U);
    CHKERRQ(ierr);
    ierr = VecCopy(U,w);
    CHKERRQ(ierr);

    ierr = VecGetOwnershipRange(a,&rstart,&rend);
    CHKERRQ(ierr);
    ierr = VecGetArray(a,&aa);
    CHKERRQ(ierr);
    for (i=rstart; i<rend; i++) {
        ierr = VecGetArray(w,&ww);
        CHKERRQ(ierr);
        h    = ww[i-rstart];
        if (h == 0.0) h = 1.0;
        if (PetscAbsScalar(h) < umin && PetscRealPart(h) >= 0.0)     h = umin;
        else if (PetscRealPart(h) < 0.0 && PetscAbsScalar(h) < umin) h = -umin;
        h *= epsilon;

        ww[i-rstart] += h;
        ierr          = VecRestoreArray(w,&ww);
        CHKERRQ(ierr);
        ierr          = (*ctx->funci)(ctx->funcctx,i,w,&v);
        CHKERRQ(ierr);
        aa[i-rstart]  = (v - aa[i-rstart])/h;

        /* possibly shift and scale result */
        if ((ctx->vshift != 0.0) || (ctx->vscale != 1.0)) {
            aa[i - rstart] = ctx->vshift + ctx->vscale*aa[i-rstart];
        }

        ierr          = VecGetArray(w,&ww);
        CHKERRQ(ierr);
        ww[i-rstart] -= h;
        ierr          = VecRestoreArray(w,&ww);
        CHKERRQ(ierr);
    }
    ierr = VecRestoreArray(a,&aa);
    CHKERRQ(ierr);
    PetscFunctionReturn(0);
}
Example #29
/*@
   ISComplementVec - Creates the complement of the index set relative to a layout defined by a Vec

   Collective on IS

   Input Parameter:
+  S -  a PETSc IS
-  V - the reference vector space

   Output Parameter:
.  T -  the complement of S

.seealso ISCreateGeneral()

   Level: advanced
@*/
PetscErrorCode ISComplementVec(IS S, Vec V, IS *T)
{
  PetscErrorCode ierr;
  PetscInt       start, end;

  PetscFunctionBegin;
  ierr = VecGetOwnershipRange(V,&start,&end);CHKERRQ(ierr);
  ierr = ISComplement(S,start,end,T);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
Example #30
/*@
   VecMPISetGhost - Sets the ghost points for an MPI ghost vector

   Collective on Vec

   Input Parameters:
+  vv - the MPI vector
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points, these do not need to be in increasing order (sorted)


   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   You must call this AFTER you have set the type of the vector (with VecSetType()) and the size (with VecSetSizes()).

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode  VecMPISetGhost(Vec vv,PetscInt nghost,const PetscInt ghosts[])
{
  PetscErrorCode ierr;
  PetscBool      flg;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)vv,VECMPI,&flg);CHKERRQ(ierr);
  /* if already a fully existent VECMPI then basically destroy it and rebuild with ghosting */
  if (flg) {
    PetscInt               n,N;
    Vec_MPI                *w;
    PetscScalar            *larray;
    IS                     from,to;
    ISLocalToGlobalMapping ltog;
    PetscInt               rstart,i,*indices;
    MPI_Comm               comm = ((PetscObject)vv)->comm;

    n = vv->map->n;
    N = vv->map->N;
    ierr = (*vv->ops->destroy)(vv);CHKERRQ(ierr);
    ierr = VecSetSizes(vv,n,N);CHKERRQ(ierr);
    ierr = VecCreate_MPI_Private(vv,PETSC_TRUE,nghost,PETSC_NULL);CHKERRQ(ierr);
    w    = (Vec_MPI *)(vv)->data;
    /* Create local representation */
    ierr = VecGetArray(vv,&larray);CHKERRQ(ierr);
    ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,n+nghost,larray,&w->localrep);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(vv,w->localrep);CHKERRQ(ierr);
    ierr = VecRestoreArray(vv,&larray);CHKERRQ(ierr);

    /*
     Create scatter context for scattering (updating) ghost values
     */
    ierr = ISCreateGeneral(comm,nghost,ghosts,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);CHKERRQ(ierr);
    ierr = VecScatterCreate(vv,from,w->localrep,to,&w->localupdate);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(vv,w->localupdate);CHKERRQ(ierr);
    ierr = ISDestroy(&to);CHKERRQ(ierr);
    ierr = ISDestroy(&from);CHKERRQ(ierr);

    /* set local to global mapping for ghosted vector */
    ierr = PetscMalloc((n+nghost)*sizeof(PetscInt),&indices);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(vv,&rstart,PETSC_NULL);CHKERRQ(ierr);
    for (i=0; i<n; i++) {
      indices[i] = rstart + i;
    }
    for (i=0; i<nghost; i++) {
      indices[n+i] = ghosts[i];
    }
    ierr = ISLocalToGlobalMappingCreate(comm,n+nghost,indices,PETSC_OWN_POINTER,&ltog);CHKERRQ(ierr);
    ierr = VecSetLocalToGlobalMapping(vv,ltog);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingDestroy(&ltog);CHKERRQ(ierr);
  } else if (vv->ops->create == VecCreate_MPI) SETERRQ(((PetscObject)vv)->comm,PETSC_ERR_ARG_WRONGSTATE,"Must set local or global size before setting ghosting");
  else if (!((PetscObject)vv)->type_name) SETERRQ(((PetscObject)vv)->comm,PETSC_ERR_ARG_WRONGSTATE,"Must set type to VECMPI before ghosting");
  PetscFunctionReturn(0);
}
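A hedged creation sketch matching the ordering requirement in the notes (type and sizes set before ghosting; n and the ghost indices hypothetical):

Vec      vv,lv;
PetscInt ghosts[2] = {0,1}; /* hypothetical global indices owned by other ranks */
ierr = VecCreate(PETSC_COMM_WORLD,&vv);CHKERRQ(ierr);
ierr = VecSetSizes(vv,n,PETSC_DECIDE);CHKERRQ(ierr);
ierr = VecSetType(vv,VECMPI);CHKERRQ(ierr);
ierr = VecMPISetGhost(vv,2,ghosts);CHKERRQ(ierr);
ierr = VecGhostGetLocalForm(vv,&lv);CHKERRQ(ierr);
/* local entries [0,n) then ghost entries [n,n+2) are addressable in lv */
ierr = VecGhostRestoreLocalForm(vv,&lv);CHKERRQ(ierr);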