Example 1
/*
      DMDAGetFaceInterpolation - Gets the interpolation for a face-based coarse space

*/
PetscErrorCode DMDAGetFaceInterpolation(DM da,PC_Exotic *exotic,Mat Aglobal,MatReuse reuse,Mat *P)
{
    PetscErrorCode         ierr;
    PetscInt               dim,i,j,k,m,n,p,dof,Nint,Nface,Nwire,Nsurf,*Iint,*Isurf,cint = 0,csurf = 0,istart,jstart,kstart,*II,N,c = 0;
    PetscInt               mwidth,nwidth,pwidth,cnt,mp,np,pp,Ntotal,gl[6],*globals,Ng,*IIint,*IIsurf,Nt;
    Mat                    Xint, Xsurf,Xint_tmp;
    IS                     isint,issurf,is,row,col;
    ISLocalToGlobalMapping ltg;
    MPI_Comm               comm;
    Mat                    A,Aii,Ais,Asi,*Aholder,iAii;
    MatFactorInfo          info;
    PetscScalar            *xsurf,*xint;
#if defined(PETSC_USE_DEBUG_foo)
    PetscScalar            tmp;
#endif
    PetscTable             ht;

    PetscFunctionBegin;
    ierr = DMDAGetInfo(da,&dim,0,0,0,&mp,&np,&pp,&dof,0,0,0,0,0);CHKERRQ(ierr);
    if (dof != 1) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Only for single field problems");
    if (dim != 3) SETERRQ(PetscObjectComm((PetscObject)da),PETSC_ERR_SUP,"Only coded for 3d problems");
    ierr = DMDAGetCorners(da,0,0,0,&m,&n,&p);CHKERRQ(ierr);
    ierr = DMDAGetGhostCorners(da,&istart,&jstart,&kstart,&mwidth,&nwidth,&pwidth);CHKERRQ(ierr);
    /* istart etc. become -1 when this subdomain does not touch the low global boundary in that
       direction, so that (m-istart) etc. below count the extra ghost layer on that side */
    istart = istart ? -1 : 0;
    jstart = jstart ? -1 : 0;
    kstart = kstart ? -1 : 0;

    /*
      The columns of P are the interpolation of each coarse grid point (one per face of the
      process decomposition) to all the local degrees of freedom (this includes the vertices,
      edges and faces).

      Xint is the subset of the interpolation into the interior

      Xsurf is the interpolation onto the subdomain surface (the faces plus the wirebasket of
      vertices and edges)

      Symbolically, after interchanging rows to match the natural ordering on the domain, one
      could write

                (Xint )
            P = (Xsurf)
    */
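    /*
      In the same splitting the local matrix is A = [Aii Ais; Asi Ass]; requiring each column
      of P to be discrete-harmonic in the interior gives Aii*Xint + Ais*Xsurf = 0, i.e.
      Xint = -inv(Aii)*Ais*Xsurf, which is exactly the small solve performed further below.
    */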
    N     = (m - istart)*(n - jstart)*(p - kstart);   /* all local nodes, including the ghost layer on non-boundary low sides */
    Nint  = (m-2-istart)*(n-2-jstart)*(p-2-kstart);   /* strictly interior nodes */
    Nface = 2*((m-2-istart)*(n-2-jstart) + (m-2-istart)*(p-2-kstart) + (n-2-jstart)*(p-2-kstart)); /* face-interior nodes */
    Nwire = 4*((m-2-istart) + (n-2-jstart) + (p-2-kstart)) + 8; /* edge-interior nodes plus the 8 vertices */
    Nsurf = Nface + Nwire;
    ierr  = MatCreateSeqDense(MPI_COMM_SELF,Nint,6,NULL,&Xint);CHKERRQ(ierr);
    ierr  = MatCreateSeqDense(MPI_COMM_SELF,Nsurf,6,NULL,&Xsurf);CHKERRQ(ierr);
    ierr  = MatDenseGetArray(Xsurf,&xsurf);CHKERRQ(ierr);

    /*
       Require that all 12 edges and 6 faces have at least one grid point. Otherwise some of the columns of
       Xsurf will be all zero (thus making the coarse matrix singular).
    */
    if (m-istart < 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of grid points per process in X direction must be at least 3");
    if (n-jstart < 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of grid points per process in Y direction must be at least 3");
    if (p-kstart < 3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Number of grid points per process in Z direction must be at least 3");
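    /* With fewer than 3 points in some direction there are no face-interior points in that
       direction, so the corresponding columns of Xsurf would be identically zero */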

    /* Fill the six columns of Xsurf, one per face of the local box, traversing the surface
       nodes in their natural ordering */
    cnt = 0;
    for (j=1; j<n-1-jstart; j++) {
        for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 0*Nsurf] = 1; /* k=0 face */
    }

    for (k=1; k<p-1-kstart; k++) {
        for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 1*Nsurf] = 1; /* j=0 face */
        for (j=1; j<n-1-jstart; j++) {
            xsurf[cnt++ + 2*Nsurf] = 1;                          /* i=0 face */
            /* the interior nodes of this grid line lie between these two face points */
            xsurf[cnt++ + 3*Nsurf] = 1;                          /* i=max face */
        }
        for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 4*Nsurf] = 1; /* j=max face */
    }
    for (j=1; j<n-1-jstart; j++) {
        for (i=1; i<m-istart-1; i++) xsurf[cnt++ + 5*Nsurf] = 1; /* k=max face */
    }

#if defined(PETSC_USE_DEBUG_foo)
    for (i=0; i<Nsurf; i++) {
        tmp = 0.0;
        for (j=0; j<6; j++) tmp += xsurf[i+j*Nsurf];

        if (PetscAbsScalar(tmp-1.0) > 1.e-10) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Wrong Xsurf interpolation at i %D value %g",i,(double)PetscAbsScalar(tmp));
    }
#endif
    ierr = MatDenseRestoreArray(Xsurf,&xsurf);CHKERRQ(ierr);
    /* ierr = MatView(Xsurf,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); */


    /*
         II are the indices for all the needed vertices (in global numbering)
         Iint are the indices for the interior values, Isurf for the surface values
              (These index the part of the global matrix obtained with MatGetSubMatrix(); they
               are NOT the local DMDA ordering.)
         IIint and IIsurf are the same as Iint and Isurf except they are in the global numbering
    */
/* Endpoint(a,start,b) is true when index a lies on the boundary of the local (ghosted) box */
#define Endpoint(a,start,b) (a == 0 || a == (b-1-start))
    ierr = PetscMalloc3(N,&II,Nint,&Iint,Nsurf,&Isurf);CHKERRQ(ierr);
    ierr = PetscMalloc2(Nint,&IIint,Nsurf,&IIsurf);CHKERRQ(ierr);
    for (k=0; k<p-kstart; k++) {
        for (j=0; j<n-jstart; j++) {
            for (i=0; i<m-istart; i++) {
                II[c++] = i + j*mwidth + k*mwidth*nwidth;

                if (!Endpoint(i,istart,m) && !Endpoint(j,jstart,n) && !Endpoint(k,kstart,p)) {
                    IIint[cint]  = i + j*mwidth + k*mwidth*nwidth;
                    Iint[cint++] = i + j*(m-istart) + k*(m-istart)*(n-jstart);
                } else {
                    IIsurf[csurf]  = i + j*mwidth + k*mwidth*nwidth;
                    Isurf[csurf++] = i + j*(m-istart) + k*(m-istart)*(n-jstart);
                }
            }
        }
    }
    if (c != N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"c != N");
    if (cint != Nint) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"cint != Nint");
    if (csurf != Nsurf) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"csurf != Nsurf");
    ierr = DMGetLocalToGlobalMapping(da,&ltg);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApply(ltg,N,II,II);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApply(ltg,Nint,IIint,IIint);CHKERRQ(ierr);
    ierr = ISLocalToGlobalMappingApply(ltg,Nsurf,IIsurf,IIsurf);CHKERRQ(ierr);
    ierr = PetscObjectGetComm((PetscObject)da,&comm);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm,N,II,PETSC_COPY_VALUES,&is);CHKERRQ(ierr);
    ierr = ISCreateGeneral(PETSC_COMM_SELF,Nint,Iint,PETSC_COPY_VALUES,&isint);CHKERRQ(ierr);
    ierr = ISCreateGeneral(PETSC_COMM_SELF,Nsurf,Isurf,PETSC_COPY_VALUES,&issurf);CHKERRQ(ierr);
    ierr = PetscFree3(II,Iint,Isurf);CHKERRQ(ierr);

    ierr = ISSort(is);CHKERRQ(ierr);
    ierr = MatGetSubMatrices(Aglobal,1,&is,&is,MAT_INITIAL_MATRIX,&Aholder);CHKERRQ(ierr);
    A    = *Aholder;
    ierr = PetscFree(Aholder);CHKERRQ(ierr);

    ierr = MatGetSubMatrix(A,isint,isint,MAT_INITIAL_MATRIX,&Aii);CHKERRQ(ierr);
    ierr = MatGetSubMatrix(A,isint,issurf,MAT_INITIAL_MATRIX,&Ais);CHKERRQ(ierr);
    ierr = MatGetSubMatrix(A,issurf,isint,MAT_INITIAL_MATRIX,&Asi);CHKERRQ(ierr);

    /*
       Solve for the interpolation onto the interior Xint
    */
    ierr = MatMatMult(Ais,Xsurf,MAT_INITIAL_MATRIX,PETSC_DETERMINE,&Xint_tmp);CHKERRQ(ierr);
    ierr = MatScale(Xint_tmp,-1.0);CHKERRQ(ierr); /* Xint_tmp = -Ais*Xsurf, the right-hand sides of the interior solves */

    if (exotic->directSolve) {
        ierr = MatGetFactor(Aii,MATSOLVERPETSC,MAT_FACTOR_LU,&iAii);CHKERRQ(ierr);
        ierr = MatFactorInfoInitialize(&info);CHKERRQ(ierr);
        ierr = MatGetOrdering(Aii,MATORDERINGND,&row,&col);CHKERRQ(ierr);
        ierr = MatLUFactorSymbolic(iAii,Aii,row,col,&info);CHKERRQ(ierr);
        ierr = ISDestroy(&row);CHKERRQ(ierr);
        ierr = ISDestroy(&col);CHKERRQ(ierr);
        ierr = MatLUFactorNumeric(iAii,Aii,&info);CHKERRQ(ierr);
        ierr = MatMatSolve(iAii,Xint_tmp,Xint);CHKERRQ(ierr);
        ierr = MatDestroy(&iAii);CHKERRQ(ierr);
    } else {
        Vec         b,x;
        PetscScalar *xint_tmp;

        ierr = MatDenseGetArray(Xint,&xint);CHKERRQ(ierr);
        ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,Nint,NULL,&x);CHKERRQ(ierr);
        ierr = MatDenseGetArray(Xint_tmp,&xint_tmp);CHKERRQ(ierr);
        ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,1,Nint,NULL,&b);CHKERRQ(ierr);
        ierr = KSPSetOperators(exotic->ksp,Aii,Aii);CHKERRQ(ierr);
        for (i=0; i<6; i++) {
            /* solve Aii x = b column by column, writing the result directly into Xint */
            ierr = VecPlaceArray(x,xint+i*Nint);CHKERRQ(ierr);
            ierr = VecPlaceArray(b,xint_tmp+i*Nint);CHKERRQ(ierr);
            ierr = KSPSolve(exotic->ksp,b,x);CHKERRQ(ierr);
            ierr = VecResetArray(x);CHKERRQ(ierr);
            ierr = VecResetArray(b);CHKERRQ(ierr);
        }
        ierr = MatDenseRestoreArray(Xint,&xint);CHKERRQ(ierr);
        ierr = MatDenseRestoreArray(Xint_tmp,&xint_tmp);CHKERRQ(ierr);
        ierr = VecDestroy(&x);CHKERRQ(ierr);
        ierr = VecDestroy(&b);CHKERRQ(ierr);
    }
    ierr = MatDestroy(&Xint_tmp);CHKERRQ(ierr);

#if defined(PETSC_USE_DEBUG_foo)
    ierr = MatDenseGetArray(Xint,&xint);CHKERRQ(ierr);
    for (i=0; i<Nint; i++) {
        tmp = 0.0;
        for (j=0; j<6; j++) tmp += xint[i+j*Nint];

        if (PetscAbsScalar(tmp-1.0) > 1.e-10) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Wrong Xint interpolation at i %D value %g",i,(double)PetscAbsScalar(tmp));
    }
    ierr = MatDenseRestoreArray(Xint,&xint);CHKERRQ(ierr);
    /* ierr = MatView(Xint,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr); */
#endif


    /* total number of coarse grid faces over the whole process grid */
    Ntotal = mp*np*(pp+1) + mp*pp*(np+1) + np*pp*(mp+1);
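    /* e.g., a 2x2x2 process grid has mp*np*(pp+1) = 12 faces normal to z and 36 faces in total */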

    /*
        For each of the six faces of the local box (in the same ordering as used above)
        determine the local index (including ghost points) of a representative face point
    */
    cnt = 0;
    gl[cnt++] = mwidth+1;                              /* k=0 face */
    gl[cnt++] = mwidth*nwidth+1;                       /* j=0 face */
    gl[cnt++] = mwidth*nwidth + mwidth;                /* i=0 face; the interior nodes lie between this point and the next */
    gl[cnt++] = mwidth*nwidth + mwidth+m-istart-1;     /* i=max face */
    gl[cnt++] = mwidth*nwidth+mwidth*(n-jstart-1)+1;   /* j=max face */
    gl[cnt++] = mwidth*nwidth*(p-kstart-1) + mwidth+1; /* k=max face */

    /* PetscIntView(6,gl,PETSC_VIEWER_STDOUT_WORLD); */
    /* convert that to global numbering and get them on all processes */
    ierr = ISLocalToGlobalMappingApply(ltg,6,gl,gl);CHKERRQ(ierr);
    /* PetscIntView(6,gl,PETSC_VIEWER_STDOUT_WORLD); */
    ierr = PetscMalloc1(6*mp*np*pp,&globals);CHKERRQ(ierr);
    ierr = MPI_Allgather(gl,6,MPIU_INT,globals,6,MPIU_INT,PetscObjectComm((PetscObject)da));CHKERRQ(ierr);

    /* Number the coarse grid points from 0 to Ntotal: keys are stored 1-based (PetscTable cannot
       hold a zero key) and PetscTableAddCount() assigns each new key the next consecutive slot */
    ierr = MatGetSize(Aglobal,&Nt,NULL);CHKERRQ(ierr);
    ierr = PetscTableCreate(Ntotal/3,Nt+1,&ht);CHKERRQ(ierr);
    for (i=0; i<6*mp*np*pp; i++) {
        ierr = PetscTableAddCount(ht,globals[i]+1);CHKERRQ(ierr);
    }
    ierr = PetscTableGetCount(ht,&cnt);CHKERRQ(ierr);
    if (cnt != Ntotal) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Hash table size %D not equal to total number of coarse grid points %D",cnt,Ntotal);
    ierr = PetscFree(globals);CHKERRQ(ierr);
    for (i=0; i<6; i++) {
        ierr = PetscTableFind(ht,gl[i]+1,&gl[i]);CHKERRQ(ierr);
        gl[i]--;                  /* convert the 1-based table slot to a 0-based coarse index */
    }
    ierr = PetscTableDestroy(&ht);CHKERRQ(ierr);
    /* PetscIntView(6,gl,PETSC_VIEWER_STDOUT_WORLD); */

    /* construct global interpolation matrix */
    ierr = MatGetLocalSize(Aglobal,&Ng,NULL);CHKERRQ(ierr);
    if (reuse == MAT_INITIAL_MATRIX) {
        ierr = MatCreateAIJ(PetscObjectComm((PetscObject)da),Ng,PETSC_DECIDE,PETSC_DECIDE,Ntotal,Nint+Nsurf,NULL,Nint,NULL,P);CHKERRQ(ierr);
    } else {
        ierr = MatZeroEntries(*P);CHKERRQ(ierr);
    }
    ierr = MatSetOption(*P,MAT_ROW_ORIENTED,PETSC_FALSE);CHKERRQ(ierr);
    ierr = MatDenseGetArray(Xint,&xint);CHKERRQ(ierr);
    ierr = MatSetValues(*P,Nint,IIint,6,gl,xint,INSERT_VALUES);CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(Xint,&xint);CHKERRQ(ierr);
    ierr = MatDenseGetArray(Xsurf,&xsurf);CHKERRQ(ierr);
    ierr = MatSetValues(*P,Nsurf,IIsurf,6,gl,xsurf,INSERT_VALUES);CHKERRQ(ierr);
    ierr = MatDenseRestoreArray(Xsurf,&xsurf);CHKERRQ(ierr);
    ierr = MatAssemblyBegin(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = PetscFree2(IIint,IIsurf);CHKERRQ(ierr);


#if defined(PETSC_USE_DEBUG_foo)
    {
        Vec         x,y;
        PetscScalar *yy;
        ierr = VecCreateMPI(PetscObjectComm((PetscObject)da),Ng,PETSC_DETERMINE,&y);CHKERRQ(ierr);
        ierr = VecCreateMPI(PetscObjectComm((PetscObject)da),PETSC_DETERMINE,Ntotal,&x);CHKERRQ(ierr);
        ierr = VecSet(x,1.0);CHKERRQ(ierr);
        ierr = MatMult(*P,x,y);CHKERRQ(ierr);
        ierr = VecGetArray(y,&yy);CHKERRQ(ierr);
        for (i=0; i<Ng; i++) {
            if (PetscAbsScalar(yy[i]-1.0) > 1.e-10) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Wrong P interpolation at i %D value %g",i,(double)PetscAbsScalar(yy[i]));
        }
        ierr = VecRestoreArray(y,&yy);CHKERRQ(ierr);
        ierr = VecDestroy(&x);CHKERRQ(ierr); /* VecDestroy() takes the address of the Vec */
        ierr = VecDestroy(&y);CHKERRQ(ierr);
    }
#endif

    ierr = MatDestroy(&Aii);CHKERRQ(ierr);
    ierr = MatDestroy(&Ais);CHKERRQ(ierr);
    ierr = MatDestroy(&Asi);CHKERRQ(ierr);
    ierr = MatDestroy(&A);CHKERRQ(ierr);
    ierr = ISDestroy(&is);CHKERRQ(ierr);
    ierr = ISDestroy(&isint);CHKERRQ(ierr);
    ierr = ISDestroy(&issurf);CHKERRQ(ierr);
    ierr = MatDestroy(&Xint);CHKERRQ(ierr);
    ierr = MatDestroy(&Xsurf);CHKERRQ(ierr);
    PetscFunctionReturn(0);
}
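
/*
  For context, a minimal sketch (not part of the routine above) of how such an interpolation
  matrix is typically consumed: PCEXOTIC is documented as a two-level method, so P can be
  handed to a PCMG cycle as the coarse-to-fine interpolation.  The helper name
  UseFaceInterpolation and its argument list are illustrative assumptions; the PC is assumed
  to have already been set to PCMG.
*/
static PetscErrorCode UseFaceInterpolation(DM da,PC_Exotic *exotic,Mat Aglobal,PC pc)
{
  PetscErrorCode ierr;
  Mat            P;

  PetscFunctionBegin;
  ierr = DMDAGetFaceInterpolation(da,exotic,Aglobal,MAT_INITIAL_MATRIX,&P);CHKERRQ(ierr);
  ierr = PCMGSetLevels(pc,2,NULL);CHKERRQ(ierr);      /* two-level method: coarse face space plus the fine grid */
  ierr = PCMGSetInterpolation(pc,1,P);CHKERRQ(ierr);  /* PCMG takes its own reference to P */
  ierr = MatDestroy(&P);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}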
Example 2
/*@C
   PetscSFSetGraph - Set a parallel star forest

   Collective

   Input Arguments:
+  sf - star forest
+  nroots - number of root vertices on the current process (these are possible targets for other processes to attach leaves)
.  nleaves - number of leaf vertices on the current process, each of these references a root on any process
.  ilocal - locations of leaves in leafdata buffers, pass NULL for contiguous storage
.  localmode - copy mode for ilocal
.  iremote - remote locations of root vertices for each leaf on the current process
-  remotemode - copy mode for iremote

   Level: intermediate

.seealso: PetscSFCreate(), PetscSFView(), PetscSFGetGraph()
@*/
PetscErrorCode PetscSFSetGraph(PetscSF sf,PetscInt nroots,PetscInt nleaves,const PetscInt *ilocal,PetscCopyMode localmode,const PetscSFNode *iremote,PetscCopyMode remotemode)
{
  PetscErrorCode     ierr;
  PetscTable         table;
  PetscTablePosition pos;
  PetscMPIInt        size;
  PetscInt           i,*rcount,*ranks;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(sf,PETSCSF_CLASSID,1);
  ierr = PetscLogEventBegin(PETSCSF_SetGraph,sf,0,0,0);CHKERRQ(ierr);
  if (nleaves && ilocal) PetscValidIntPointer(ilocal,4);
  if (nleaves) PetscValidPointer(iremote,6);
  if (nroots < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"nroots %D, cannot be negative",nroots);
  if (nleaves < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"nleaves %D, cannot be negative",nleaves);
  ierr        = PetscSFReset(sf);CHKERRQ(ierr);
  sf->nroots  = nroots;
  sf->nleaves = nleaves;
  if (ilocal) {
    switch (localmode) {
    case PETSC_COPY_VALUES:
      ierr     = PetscMalloc1(nleaves,&sf->mine_alloc);CHKERRQ(ierr);
      sf->mine = sf->mine_alloc;
      ierr     = PetscMemcpy(sf->mine,ilocal,nleaves*sizeof(*sf->mine));CHKERRQ(ierr);
      break;
    case PETSC_OWN_POINTER:
      sf->mine_alloc = (PetscInt*)ilocal;
      sf->mine       = sf->mine_alloc;
      break;
    case PETSC_USE_POINTER:
      sf->mine = (PetscInt*)ilocal;
      break;
    default: SETERRQ(PetscObjectComm((PetscObject)sf),PETSC_ERR_ARG_OUTOFRANGE,"Unknown localmode");
    }
    /* Compute the leaf index bounds from ilocal for every copy mode, not only PETSC_COPY_VALUES */
    sf->minleaf = PETSC_MAX_INT;
    sf->maxleaf = PETSC_MIN_INT;
    for (i=0; i<nleaves; i++) {
      sf->minleaf = PetscMin(sf->minleaf,ilocal[i]);
      sf->maxleaf = PetscMax(sf->maxleaf,ilocal[i]);
    }
  }
  if (!ilocal || !nleaves) {
    /* Contiguous leaves (or an empty graph): the bounds are trivial; note the original guard
       (nleaves > 0) would have clobbered the bounds just computed above */
    sf->minleaf = 0;
    sf->maxleaf = nleaves - 1;
  }
  switch (remotemode) {
  case PETSC_COPY_VALUES:
    ierr       = PetscMalloc1(nleaves,&sf->remote_alloc);CHKERRQ(ierr);
    sf->remote = sf->remote_alloc;
    ierr       = PetscMemcpy(sf->remote,iremote,nleaves*sizeof(*sf->remote));CHKERRQ(ierr);
    break;
  case PETSC_OWN_POINTER:
    sf->remote_alloc = (PetscSFNode*)iremote;
    sf->remote       = sf->remote_alloc;
    break;
  case PETSC_USE_POINTER:
    sf->remote = (PetscSFNode*)iremote;
    break;
  default: SETERRQ(PetscObjectComm((PetscObject)sf),PETSC_ERR_ARG_OUTOFRANGE,"Unknown remotemode");
  }

  /* Count the distinct ranks referenced by the leaves, then group the leaves by owning rank
     into the CSR-like arrays roffset/rmine/rremote built below */
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)sf),&size);CHKERRQ(ierr);
  ierr = PetscTableCreate(10,size,&table);CHKERRQ(ierr);
  for (i=0; i<nleaves; i++) {
    /* Log the 1-based rank: PetscTable cannot store a zero key */
    ierr = PetscTableAdd(table,iremote[i].rank+1,1,ADD_VALUES);CHKERRQ(ierr);
  }
  ierr = PetscTableGetCount(table,&sf->nranks);CHKERRQ(ierr);
  ierr = PetscMalloc4(sf->nranks,&sf->ranks,sf->nranks+1,&sf->roffset,nleaves,&sf->rmine,nleaves,&sf->rremote);CHKERRQ(ierr);
  ierr = PetscMalloc2(sf->nranks,&rcount,sf->nranks,&ranks);CHKERRQ(ierr);
  ierr = PetscTableGetHeadPosition(table,&pos);CHKERRQ(ierr);
  for (i=0; i<sf->nranks; i++) {
    ierr = PetscTableGetNext(table,&pos,&ranks[i],&rcount[i]);CHKERRQ(ierr);
    ranks[i]--;             /* Convert back to 0-based */
  }
  ierr = PetscTableDestroy(&table);CHKERRQ(ierr);
  ierr = PetscSortIntWithArray(sf->nranks,ranks,rcount);CHKERRQ(ierr);
  sf->roffset[0] = 0;
  for (i=0; i<sf->nranks; i++) {
    ierr = PetscMPIIntCast(ranks[i],sf->ranks+i);CHKERRQ(ierr);
    sf->roffset[i+1] = sf->roffset[i] + rcount[i];
    rcount[i]        = 0;
  }
  for (i=0; i<nleaves; i++) {
    PetscInt lo,hi,irank;
    /* Search for index of iremote[i].rank in sf->ranks */
    lo = 0; hi = sf->nranks;
    while (hi - lo > 1) {
      PetscInt mid = lo + (hi - lo)/2;
      if (iremote[i].rank < sf->ranks[mid]) hi = mid;
      else                                  lo = mid;
    }
    if (hi - lo == 1 && iremote[i].rank == sf->ranks[lo]) irank = lo;
    else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Could not find rank %D in array",iremote[i].rank);
    sf->rmine[sf->roffset[irank] + rcount[irank]]   = ilocal ? ilocal[i] : i;
    sf->rremote[sf->roffset[irank] + rcount[irank]] = iremote[i].index;
    rcount[irank]++;
  }
  ierr = PetscFree2(rcount,ranks);CHKERRQ(ierr);
#if !defined(PETSC_USE_64BIT_INDICES)
  if (nroots == PETSC_DETERMINE) {
    /* Jed, if you have a better way to do this, put it in */
    /* This branch passes PetscInt buffers directly as MPI count/displacement arrays, which is
       only valid when PetscInt is a plain int, hence the 64-bit-indices guard above */
    PetscInt *numRankLeaves, *leafOff, *leafIndices, *numRankRoots, *rootOff, *rootIndices, maxRoots = 0;

    /* All-to-all to determine the number of leaf indices coming from each rank (one could also do this with a Scan and asynchronous messages) */
    ierr = PetscMalloc4(size,&numRankLeaves,size+1,&leafOff,size,&numRankRoots,size+1,&rootOff);CHKERRQ(ierr);
    ierr = PetscMemzero(numRankLeaves, size * sizeof(PetscInt));CHKERRQ(ierr);
    for (i = 0; i < nleaves; ++i) ++numRankLeaves[iremote[i].rank];
    ierr = MPI_Alltoall(numRankLeaves, 1, MPIU_INT, numRankRoots, 1, MPIU_INT, PetscObjectComm((PetscObject)sf));CHKERRQ(ierr);
    /* Could set nroots to this maximum */
    for (i = 0; i < size; ++i) maxRoots += numRankRoots[i];

    /* Gather all indices */
    ierr = PetscMalloc2(nleaves,&leafIndices,maxRoots,&rootIndices);CHKERRQ(ierr);
    leafOff[0] = 0;
    for (i = 0; i < size; ++i) leafOff[i+1] = leafOff[i] + numRankLeaves[i];
    for (i = 0; i < nleaves; ++i) leafIndices[leafOff[iremote[i].rank]++] = iremote[i].index;
    /* The fill loop above advanced leafOff, so rebuild the send displacements for the Alltoallv */
    leafOff[0] = 0;
    for (i = 0; i < size; ++i) leafOff[i+1] = leafOff[i] + numRankLeaves[i];
    rootOff[0] = 0;
    for (i = 0; i < size; ++i) rootOff[i+1] = rootOff[i] + numRankRoots[i];
    ierr = MPI_Alltoallv(leafIndices, numRankLeaves, leafOff, MPIU_INT, rootIndices, numRankRoots, rootOff, MPIU_INT, PetscObjectComm((PetscObject)sf));CHKERRQ(ierr);
    /* Sort and reduce */
    ierr       = PetscSortRemoveDupsInt(&maxRoots, rootIndices);CHKERRQ(ierr);
    ierr       = PetscFree2(leafIndices,rootIndices);CHKERRQ(ierr);
    ierr       = PetscFree4(numRankLeaves,leafOff,numRankRoots,rootOff);CHKERRQ(ierr);
    sf->nroots = maxRoots;
  }
#endif

  sf->graphset = PETSC_TRUE;
  ierr = PetscLogEventEnd(PETSCSF_SetGraph,sf,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
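
/*
  A minimal usage sketch (an illustration, not taken from the source above): a star forest on
  two or more processes in which each nonzero rank has a single leaf referencing root 0 on
  rank 0.  The helper name BuildExampleSF is an assumption for this example.
*/
#include <petscsf.h>

static PetscErrorCode BuildExampleSF(MPI_Comm comm,PetscSF *newsf)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscSF        sf;
  PetscSFNode    remote;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
  remote.rank  = 0;   /* every leaf points at root 0 ... */
  remote.index = 0;   /* ... which is owned by rank 0 */
  /* rank 0 owns one root and has no leaves; every other rank has one leaf.
     ilocal = NULL requests contiguous leaf storage, as the docstring describes. */
  ierr = PetscSFSetGraph(sf,rank ? 0 : 1,rank ? 1 : 0,NULL,PETSC_COPY_VALUES,&remote,PETSC_COPY_VALUES);CHKERRQ(ierr);
  *newsf = sf;
  PetscFunctionReturn(0);
}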