Example #1
static PetscErrorCode DMDAGetElements_2D(DM dm,PetscInt *nel,PetscInt *nen,const PetscInt *e[])
{
  PetscErrorCode ierr;
  DM_DA          *da = (DM_DA*)dm->data;
  PetscInt       i,xs,xe,Xs,Xe;
  PetscInt       j,ys,ye,Ys,Ye;
  PetscInt       cnt=0, cell[4], ns=2, nn=3;
  PetscInt       c, split[] = {0,1,3,
                               2,3,1};
  PetscFunctionBegin;
  if (!da->e) {
    if (da->elementtype == DMDA_ELEMENT_P1) {ns=2; nn=3;}
    if (da->elementtype == DMDA_ELEMENT_Q1) {ns=1; nn=4;}
    ierr = DMDAGetCorners(dm,&xs,&ys,0,&xe,&ye,0);CHKERRQ(ierr);
    ierr = DMDAGetGhostCorners(dm,&Xs,&Ys,0,&Xe,&Ye,0);CHKERRQ(ierr);
    xe += xs; Xe += Xs; if (xs != Xs) xs -= 1;
    ye += ys; Ye += Ys; if (ys != Ys) ys -= 1;
    da->ne = ns*(xe - xs - 1)*(ye - ys - 1);
    ierr = PetscMalloc((1 + nn*da->ne)*sizeof(PetscInt),&da->e);CHKERRQ(ierr);
    for (j=ys; j<ye-1; j++) {
      for (i=xs; i<xe-1; i++) {
        cell[0] = (i-Xs  ) + (j-Ys  )*(Xe-Xs);
        cell[1] = (i-Xs+1) + (j-Ys  )*(Xe-Xs);
        cell[2] = (i-Xs+1) + (j-Ys+1)*(Xe-Xs);
        cell[3] = (i-Xs  ) + (j-Ys+1)*(Xe-Xs);
        if (da->elementtype == DMDA_ELEMENT_P1) {
          for (c=0; c<ns*nn; c++)
            da->e[cnt++] = cell[split[c]];
        }
        if (da->elementtype == DMDA_ELEMENT_Q1) {
          for (c=0; c<ns*nn; c++)
            da->e[cnt++] = cell[c];
        }
      }
    }
  }
  *nel = da->ne;
  *nen = nn;
  *e   = da->e;
  PetscFunctionReturn(0);
}
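
A minimal usage sketch of the public entry point that reaches the routine above. It assumes dm is an already-created 2D DMDA (creation calls differ between PETSc versions, so they are omitted), and the helper name ReportElements is made up for illustration.

static PetscErrorCode ReportElements(DM dm)
{
  PetscErrorCode ierr;
  PetscInt       nel,nen;
  const PetscInt *e;

  PetscFunctionBegin;
  ierr = DMDAGetElements(dm,&nel,&nen,&e);CHKERRQ(ierr);
  /* element i lists its nen corner nodes (local, ghosted numbering) in e[i*nen .. i*nen+nen-1] */
  ierr = PetscPrintf(PetscObjectComm((PetscObject)dm),"%D local elements, %D corners per element\n",nel,nen);CHKERRQ(ierr);
  ierr = DMDARestoreElements(dm,&nel,&nen,&e);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}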
Example #2
PetscErrorCode InitializeVectors( UserContext* uc)
{
  PetscErrorCode ierr;
  
  
  PetscFunctionBegin;

  ierr = VecCreate(PETSC_COMM_WORLD, &uc->b); CHKERRQ(ierr);
  ierr = VecSetSizes(uc->b, uc->numNodes, uc->numNodes); CHKERRQ(ierr);
  ierr = VecSetType(uc->b, VECSEQ); CHKERRQ(ierr);
  
  ierr = VecDuplicate(uc->b,&uc->p);CHKERRQ(ierr);
  ierr = VecDuplicate(uc->b,&uc->u);CHKERRQ(ierr);
  ierr = VecDuplicate(uc->b,&uc->v);CHKERRQ(ierr);
  ierr = VecDuplicate(uc->b,&uc->px);CHKERRQ(ierr);
  ierr = VecDuplicate(uc->b,&uc->py);CHKERRQ(ierr);
  ierr = VecDuplicate(uc->b,&uc->c);CHKERRQ(ierr);
  ierr = PetscMalloc(uc->n * sizeof(PetscReal), &uc->imageResult ); CHKERRQ(ierr);
    
  PetscFunctionReturn(0);
}
Example #3
void insertnode(LIST *ilist, PetscInt Node)
{
  node *_new;
  node *current;
  current = ilist->head;

  PetscTruth Exist = PETSC_FALSE;
  while(current) {
    if (Node == current->Node) {
      Exist = PETSC_TRUE;
    }
    if (Exist) break;
    current = current->next;
  }
  if (!Exist) {
    PetscMalloc(sizeof(node), &_new);
    _new->next = ilist->head;
    _new->Node = Node;
    ilist->head = _new;
  }
}
Example #4
/*@C
   SNESMonitorSetRatio - Sets SNES to use a monitor that prints the
   ratio of the function norm at each iteration.

   Collective on SNES

   Input Parameters:
+   snes - the SNES context
-   viewer - ASCII viewer to print output

   Level: intermediate

.keywords: SNES, nonlinear, monitor, norm

.seealso: SNESMonitorSet(), SNESMonitorSolution(), SNESMonitorDefault()
@*/
PetscErrorCode  SNESMonitorSetRatio(SNES snes,PetscViewer viewer)
{
  PetscErrorCode          ierr;
  SNESMonitorRatioContext *ctx;
  PetscReal               *history;

  PetscFunctionBegin;
  if (!viewer) {
    ierr = PetscViewerASCIIOpen(((PetscObject)snes)->comm,"stdout",&viewer);CHKERRQ(ierr);
    ierr = PetscObjectReference((PetscObject)viewer);CHKERRQ(ierr);
  }
  ierr = PetscNewLog(snes,SNESMonitorRatioContext,&ctx);CHKERRQ(ierr);
  ierr = SNESGetConvergenceHistory(snes,&history,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr);
  if (!history) {
    ierr = PetscMalloc(100*sizeof(PetscReal),&ctx->history);CHKERRQ(ierr);
    ierr = SNESSetConvergenceHistory(snes,ctx->history,0,100,PETSC_TRUE);CHKERRQ(ierr);
  }
  ctx->viewer = viewer;
  ierr = SNESMonitorSet(snes,SNESMonitorRatio,ctx,SNESMonitorRatioDestroy);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
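
A small, hypothetical helper showing how the monitor above is typically attached; it assumes the SNESMonitorSetRatio() interface shown here (older PETSc releases), where passing a NULL viewer lets the routine open an ASCII viewer on stdout itself.

static PetscErrorCode AttachRatioMonitor(SNES snes)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* NULL viewer: the routine above opens an ASCII viewer on stdout */
  ierr = SNESMonitorSetRatio(snes,NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}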
Example #5
static PetscErrorCode VecDuplicate_Nest(Vec x,Vec *y)
{
  Vec_Nest       *bx = (Vec_Nest*)x->data;
  Vec            Y;
  Vec            *sub;
  PetscInt       i;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscMalloc(sizeof(Vec)*bx->nb,&sub);CHKERRQ(ierr);
  for (i=0; i<bx->nb; i++) {
    ierr = VecDuplicate(bx->v[i],&sub[i]);CHKERRQ(ierr);
  }
  ierr = VecCreateNest(PetscObjectComm((PetscObject)x),bx->nb,bx->is,sub,&Y);CHKERRQ(ierr);
  for (i=0; i<bx->nb; i++) {
    ierr = VecDestroy(&sub[i]);CHKERRQ(ierr);
  }
  ierr = PetscFree(sub);CHKERRQ(ierr);
  *y   = Y;
  PetscFunctionReturn(0);
}
Example #6
PetscErrorCode PetscCDGetMIS(PetscCoarsenData *ail, IS *a_mis)
{
  PetscErrorCode ierr;
  PetscCDIntNd   *n;
  PetscInt       ii,kk;
  PetscInt       *permute;

  PetscFunctionBegin;
  for (ii=kk=0;ii<ail->size;ii++){
    n = ail->array[ii];
    if (n) kk++;
  }
  ierr = PetscMalloc(kk*sizeof(PetscInt), &permute);CHKERRQ(ierr);
  for (ii=kk=0;ii<ail->size;ii++){
    n = ail->array[ii];
    if (n) permute[kk++] = ii;
  }
  ierr = ISCreateGeneral(PETSC_COMM_SELF, kk, permute, PETSC_OWN_POINTER, a_mis);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
Example #7
PetscErrorCode DMLibMeshGetVariables(DM dm, PetscInt *n, char*** varnames)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscBool islibmesh;
  PetscInt i;
  ierr = PetscObjectTypeCompare((PetscObject)dm, DMLIBMESH,&islibmesh);CHKERRQ(ierr);
  if(!islibmesh) SETERRQ2(((PetscObject)dm)->comm, PETSC_ERR_ARG_WRONG, "Got DM of type %s, not of type %s", ((PetscObject)dm)->type_name, DMLIBMESH);
  DM_libMesh *dlm = (DM_libMesh *)(dm->data);
  PetscValidPointer(n,2);
  *n = dlm->varids->size();
  if(!varnames) PetscFunctionReturn(0);
  ierr = PetscMalloc(*n*sizeof(char*), varnames); CHKERRQ(ierr);
  i = 0;
  for(std::map<std::string, unsigned int>::const_iterator it = dlm->varids->begin(); it != dlm->varids->end(); ++it){
    ierr = PetscStrallocpy(it->first.c_str(), *varnames+i); CHKERRQ(ierr);
    ++i;
  }
  PetscFunctionReturn(0);
}
Example #8
static PetscErrorCode DMCreateMatrix_Composite_Nest(DM dm,MatType mtype,Mat *J)
{
  const DM_Composite           *com = (DM_Composite*)dm->data;
  const struct DMCompositeLink *rlink,*clink;
  PetscErrorCode               ierr;
  IS                           *isg;
  Mat                          *submats;
  PetscInt                     i,j,n;

  PetscFunctionBegin;
  n = com->nDM;                 /* Total number of entries */

  /* Explicit index sets are not required for MatCreateNest, but getting them here allows MatNest to do consistency
   * checking and allows ISEqual to compare by identity instead of by contents. */
  ierr = DMCompositeGetGlobalISs(dm,&isg);CHKERRQ(ierr);

  /* Get submatrices */
  ierr = PetscMalloc(n*n*sizeof(Mat),&submats);CHKERRQ(ierr);
  for (i=0,rlink=com->next; rlink; i++,rlink=rlink->next) {
    for (j=0,clink=com->next; clink; j++,clink=clink->next) {
      Mat sub = NULL;
      if (i == j) {
        ierr = DMCreateMatrix(rlink->dm,NULL,&sub);CHKERRQ(ierr);
      } else if (com->FormCoupleLocations) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"Cannot manage off-diagonal parts yet");
      submats[i*n+j] = sub;
    }
  }

  ierr = MatCreateNest(PetscObjectComm((PetscObject)dm),n,isg,n,isg,submats,J);CHKERRQ(ierr);

  /* Disown references */
  for (i=0; i<n; i++) {ierr = ISDestroy(&isg[i]);CHKERRQ(ierr);}
  ierr = PetscFree(isg);CHKERRQ(ierr);

  for (i=0; i<n*n; i++) {
    if (submats[i]) {ierr = MatDestroy(&submats[i]);CHKERRQ(ierr);}
  }
  ierr = PetscFree(submats);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
Example #9
/*@C 
     PetscVSNPrintf - The PETSc version of vsnprintf(). Converts a PETSc format string into a standard C format string and then puts all the 
       function arguments into a string using the format statement.

   Input Parameters:
+   str - location to put result
.   len - the amount of space in str
.   format - the PETSc format string
-   fullLength - the amount of space in str actually used

    Developer Notes: this function may be called from an error handler; if an error occurs while it is being called by the error handler then
      a recursion will likely occur, possibly causing a crash.

 Level: developer

@*/
PetscErrorCode  PetscVSNPrintf(char *str,size_t len,const char *format,size_t *fullLength,va_list Argp)
{
  char          *newformat;
  char           formatbuf[8*1024];
  size_t         oldLength,length;
  int            fullLengthInt;
  PetscErrorCode ierr;

  PetscFunctionBegin; 
  ierr = PetscStrlen(format, &oldLength);CHKERRQ(ierr);
  if (oldLength < 8*1024) {
    newformat = formatbuf;
    oldLength = 8*1024-1;
  } else {
    oldLength = PETSC_MAX_LENGTH_FORMAT(oldLength);
    ierr = PetscMalloc(oldLength * sizeof(char), &newformat);CHKERRQ(ierr);
  }
  PetscFormatConvert(format,newformat,oldLength);
  ierr = PetscStrlen(newformat, &length);CHKERRQ(ierr);
#if 0
  if (length > len) {
    newformat[len] = '\0';
  }
#endif
#if defined(PETSC_HAVE_VSNPRINTF_CHAR)
  fullLengthInt = vsnprintf(str,len,newformat,(char *)Argp);
#elif defined(PETSC_HAVE_VSNPRINTF)
  fullLengthInt = vsnprintf(str,len,newformat,Argp);
#elif defined(PETSC_HAVE__VSNPRINTF)
  fullLengthInt = _vsnprintf(str,len,newformat,Argp);
#else
#error "vsnprintf not found"
#endif
  if (fullLengthInt < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SYS,"vsnprintf() failed");
  if (fullLength) *fullLength = (size_t)fullLengthInt;
  if (oldLength >= 8*1024) {
    ierr = PetscFree(newformat);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
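
A sketch of the intended calling pattern: PetscVSNPrintf() expects the caller to own the va_list, so a variadic wrapper (the name MyFormatMessage is made up here) starts and ends the list around the call.

static PetscErrorCode MyFormatMessage(char *buf,size_t len,const char *format,...)
{
  PetscErrorCode ierr;
  size_t         fullLength;
  va_list        Argp;

  PetscFunctionBegin;
  va_start(Argp,format);
  ierr = PetscVSNPrintf(buf,len,format,&fullLength,Argp);
  va_end(Argp);
  CHKERRQ(ierr);
  PetscFunctionReturn(0);
}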
Example #10
/*@C
    PetscSynchronizedPrintf - Prints synchronized output from several processors.
    Output of the first processor is followed by that of the second, etc.

    Not Collective

    Input Parameters:
+   comm - the communicator
-   format - the usual printf() format string 

   Level: intermediate

    Notes:
    REQUIRES an intervening call to PetscSynchronizedFlush() for the information
    from all the processors to be printed.

    Fortran Note:
    The call sequence is PetscSynchronizedPrintf(MPI_Comm, character(*), PetscErrorCode ierr) from Fortran. 
    That is, you can only pass a single character string from Fortran.

.seealso: PetscSynchronizedFlush(), PetscSynchronizedFPrintf(), PetscFPrintf(), 
          PetscPrintf(), PetscViewerASCIIPrintf(), PetscViewerASCIISynchronizedPrintf()
@*/
PetscErrorCode  PetscSynchronizedPrintf(MPI_Comm comm,const char format[],...)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  
  /* First processor prints immediately to stdout */
  if (!rank) {
    va_list Argp;
    va_start(Argp,format);
    ierr = (*PetscVFPrintf)(PETSC_STDOUT,format,Argp);CHKERRQ(ierr);
    if (petsc_history) {
      va_start(Argp,format);
      ierr = (*PetscVFPrintf)(petsc_history,format,Argp);CHKERRQ(ierr);
    }
    va_end(Argp);
  } else { /* other processors add to local queue */
    va_list     Argp;
    PrintfQueue next;
    size_t      fullLength = 8191;

    ierr = PetscNew(struct _PrintfQueue,&next);CHKERRQ(ierr);
    if (petsc_printfqueue) {petsc_printfqueue->next = next; petsc_printfqueue = next; petsc_printfqueue->next = 0;}
    else                   {petsc_printfqueuebase   = petsc_printfqueue = next;}
    petsc_printfqueuelength++;
    next->size = -1;
    while((PetscInt)fullLength >= next->size) {
      next->size = fullLength+1;
      ierr = PetscMalloc(next->size * sizeof(char), &next->string);CHKERRQ(ierr);
      va_start(Argp,format);
      ierr = PetscMemzero(next->string,next->size);CHKERRQ(ierr);
      ierr = PetscVSNPrintf(next->string,next->size,format, &fullLength,Argp);CHKERRQ(ierr);
      va_end(Argp);
    }
  }
    
  PetscFunctionReturn(0);
}
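
A sketch of the rank-ordered printing pattern the Notes above describe; the helper name is made up, and note that newer PETSc releases give PetscSynchronizedFlush() an extra FILE* argument (e.g. PETSC_STDOUT), while the older interface used here takes only the communicator.

static PetscErrorCode ReportLocalSize(MPI_Comm comm,Vec x)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscInt       nlocal;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = VecGetLocalSize(x,&nlocal);CHKERRQ(ierr);
  ierr = PetscSynchronizedPrintf(comm,"[%d] local size %D\n",rank,nlocal);CHKERRQ(ierr);
  /* nothing is printed by ranks > 0 until the flush below */
  ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}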
Example #11
/*
*) Zeros out pack data counters
*) Ensures message length is set
*) Checks send counts are properly initialized
*) Allocates space for pack data
*/
PetscErrorCode DataExPackInitialize(DataEx de,size_t unit_message_size)
{
  PetscMPIInt    i,np;
  PetscInt       total;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (de->topology_status != DEOBJECT_FINALIZED) SETERRQ( de->comm, PETSC_ERR_ORDER, "Topology not finalized" );
  if (de->message_lengths_status != DEOBJECT_FINALIZED) SETERRQ( de->comm, PETSC_ERR_ORDER, "Message lengths not finalized" );
  de->packer_status = DEOBJECT_INITIALIZED;
  ierr = _DataExInitializeTmpStorage(de);CHKERRQ(ierr);
  np = de->n_neighbour_procs;
  de->unit_message_size = unit_message_size;
  total = 0;
  for (i = 0; i < np; ++i) {
    if (de->messages_to_be_sent[i] == -1) {
      PetscMPIInt proc_neighour = de->neighbour_procs[i];
      SETERRQ1( PETSC_COMM_SELF, PETSC_ERR_ORDER, "Messages_to_be_sent[neighbour_proc=%d] is un-initialised. Call DataExSetSendCount() first", (int)proc_neighour );
    }
    total = total + de->messages_to_be_sent[i];
  }
  /* create space for the data to be sent */
  ierr = PetscMalloc(unit_message_size * (total + 1), &de->send_message);CHKERRQ(ierr);
  /* initialize memory */
  ierr = PetscMemzero(de->send_message, unit_message_size * (total + 1));CHKERRQ(ierr);
  /* set total items to send */
  de->send_message_length = total;
  de->message_offsets[0] = 0;
  total = de->messages_to_be_sent[0];
  for (i = 1; i < np; ++i) {
    de->message_offsets[i] = total;
    total = total + de->messages_to_be_sent[i];
  }
  /* init the packer counters */
  de->total_pack_cnt = 0;
  for (i = 0; i < np; ++i) {
    de->pack_cnt[i] = 0;
  }
  PetscFunctionReturn(0);
}
Example #12
int main(int argc,char **argv)
{
  PetscInt       i,n = 1000,*values;
  int            event;
  PetscRandom    rand;
  PetscScalar    value;
  PetscErrorCode ierr;

  PetscInitialize(&argc,&argv,(char *)0,help);
  ierr = PetscOptionsGetInt(PETSC_NULL,"-n",&n,PETSC_NULL);CHKERRQ(ierr);
  
  ierr = PetscRandomCreate(PETSC_COMM_SELF,&rand);CHKERRQ(ierr);
  ierr = PetscRandomSetFromOptions(rand);CHKERRQ(ierr);

  ierr = PetscMalloc(n*sizeof(PetscInt),&values);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    ierr = PetscRandomGetValue(rand,&value);CHKERRQ(ierr);
    values[i] = (PetscInt)(n*PetscRealPart(value) + 2.0);
  }
  ierr = PetscSortInt(n,values);CHKERRQ(ierr);

  ierr = PetscLogEventRegister("Sort",0,&event);CHKERRQ(ierr);
  ierr = PetscLogEventBegin(event,0,0,0,0);CHKERRQ(ierr);

  for (i=0; i<n; i++) {
    ierr = PetscRandomGetValue(rand,&value);CHKERRQ(ierr);
    values[i] = (PetscInt)(n*PetscRealPart(value) + 2.0);
  }
  ierr = PetscSortInt(n,values);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(event,0,0,0,0);CHKERRQ(ierr);

  for (i=1; i<n; i++) {
    if (values[i] < values[i-1]) SETERRQ(1,"Values not sorted");
  }
  ierr = PetscFree(values);CHKERRQ(ierr);
  ierr = PetscRandomDestroy(rand);CHKERRQ(ierr);

  ierr = PetscFinalize();CHKERRQ(ierr);
  return 0;
}
Example #13
/*@
      MatCreateFFT - Creates a matrix object that provides FFT via an external package

   Collective on MPI_Comm

   Input Parameters:
+   comm - MPI communicator
.   ndim - the dimension of the transform
.   dim - array of size ndim, dim[i] contains the vector length in the i-th dimension
-   mattype - package type, e.g., FFTW or FFTCU

   Output Parameter:
.   A  - the matrix

  Options Database Keys:
+ -mat_fft_type - set FFT type

   Level: intermediate

@*/
PetscErrorCode MatCreateFFT(MPI_Comm comm,PetscInt ndim,const PetscInt dim[],MatType mattype,Mat *A)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;
  Mat            FFT;
  PetscInt       N,i;
  Mat_FFT        *fft;

  PetscFunctionBegin;
  if (ndim < 1) SETERRQ1(comm,PETSC_ERR_USER,"ndim %d must be > 0",ndim);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);

  ierr      = MatCreate(comm,&FFT);CHKERRQ(ierr);
  ierr      = PetscNewLog(FFT,Mat_FFT,&fft);CHKERRQ(ierr);
  FFT->data = (void*)fft;
  N         = 1;
  for (i=0; i<ndim; i++) {
    if (dim[i] < 1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_USER,"dim[%d]=%d must be > 0",i,dim[i]);
    N *= dim[i];
  }

  ierr = PetscMalloc(ndim*sizeof(PetscInt),&fft->dim);CHKERRQ(ierr);
  ierr = PetscMemcpy(fft->dim,dim,ndim*sizeof(PetscInt));CHKERRQ(ierr);

  fft->ndim = ndim;
  fft->n    = PETSC_DECIDE;
  fft->N    = N;
  fft->data = NULL;

  ierr = MatSetType(FFT,mattype);CHKERRQ(ierr);

  FFT->ops->destroy = MatDestroy_FFT;

  /* get runtime options */
  ierr = PetscOptionsBegin(PetscObjectComm((PetscObject)FFT),((PetscObject)FFT)->prefix,"FFT Options","Mat");CHKERRQ(ierr);
  PetscOptionsEnd();

  *A = FFT;
  PetscFunctionReturn(0);
}
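
A minimal sketch of calling the routine above for a 64x64 two-dimensional transform; it assumes PETSc was configured with FFTW so that the MATFFTW type is available, and the wrapper name is hypothetical.

static PetscErrorCode CreateFFT2D(MPI_Comm comm,Mat *A)
{
  PetscErrorCode ierr;
  PetscInt       dim[2] = {64,64};   /* vector length in each of the two dimensions */

  PetscFunctionBegin;
  ierr = MatCreateFFT(comm,2,dim,MATFFTW,A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}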
Example #14
static PetscErrorCode PetscCommBuildTwoSided_Allreduce(MPI_Comm comm,PetscMPIInt count,MPI_Datatype dtype,PetscMPIInt nto,const PetscMPIInt *toranks,const void *todata,PetscMPIInt *nfrom,PetscMPIInt **fromranks,void *fromdata)
{
  PetscErrorCode ierr;
  PetscMPIInt    size,*iflags,nrecvs,tag,*franks,i;
  MPI_Aint       lb,unitbytes;
  char           *tdata,*fdata;
  MPI_Request    *reqs,*sendreqs;
  MPI_Status     *statuses;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = PetscCalloc1(size,&iflags);CHKERRQ(ierr);
  for (i=0; i<nto; i++) iflags[toranks[i]] = 1;
  ierr = PetscGatherNumberOfMessages(comm,iflags,NULL,&nrecvs);CHKERRQ(ierr);
  ierr = PetscFree(iflags);CHKERRQ(ierr);

  ierr     = PetscCommDuplicate(comm,&comm,&tag);CHKERRQ(ierr);
  ierr     = MPI_Type_get_extent(dtype,&lb,&unitbytes);CHKERRQ(ierr);
  if (lb != 0) SETERRQ1(comm,PETSC_ERR_SUP,"Datatype with nonzero lower bound %ld\n",(long)lb);
  ierr     = PetscMalloc(nrecvs*count*unitbytes,&fdata);CHKERRQ(ierr);
  tdata    = (char*)todata;
  ierr     = PetscMalloc2(nto+nrecvs,&reqs,nto+nrecvs,&statuses);CHKERRQ(ierr);
  sendreqs = reqs + nrecvs;
  for (i=0; i<nrecvs; i++) {
    ierr = MPI_Irecv((void*)(fdata+count*unitbytes*i),count,dtype,MPI_ANY_SOURCE,tag,comm,reqs+i);CHKERRQ(ierr);
  }
  for (i=0; i<nto; i++) {
    ierr = MPI_Isend((void*)(tdata+count*unitbytes*i),count,dtype,toranks[i],tag,comm,sendreqs+i);CHKERRQ(ierr);
  }
  ierr = MPI_Waitall(nto+nrecvs,reqs,statuses);CHKERRQ(ierr);
  ierr = PetscMalloc1(nrecvs,&franks);CHKERRQ(ierr);
  for (i=0; i<nrecvs; i++) franks[i] = statuses[i].MPI_SOURCE;
  ierr = PetscFree2(reqs,statuses);CHKERRQ(ierr);
  ierr = PetscCommDestroy(&comm);CHKERRQ(ierr);

  *nfrom            = nrecvs;
  *fromranks        = franks;
  *(void**)fromdata = fdata;
  PetscFunctionReturn(0);
}
Example #15
PetscErrorCode CharacteristicSetUp_DA(Characteristic c)
{
  PetscMPIInt    blockLen[2];
  MPI_Aint       indices[2];
  MPI_Datatype   oldtypes[2]; 
  PetscInt       dim, numValues;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DAGetInfo(c->velocityDA, &dim, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0);CHKERRQ(ierr);
  if (c->structured) {
    c->numIds = dim;
  } else {
    c->numIds = 3;
  }
  if (c->numFieldComp > MAX_COMPONENTS) {
    SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE, "The maximum number of fields allowed is %d, you have %d. You can recompile after increasing MAX_COMPONENTS.", MAX_COMPONENTS, c->numFieldComp);
  }
  numValues = 4 + MAX_COMPONENTS;

  /* Create new MPI datatype for communication of characteristic point structs */
  blockLen[0] = 1+c->numIds; indices[0] = 0;                              oldtypes[0] = MPIU_INT;
  blockLen[1] = numValues;   indices[1] = (1+c->numIds)*sizeof(PetscInt); oldtypes[1] = MPIU_SCALAR;
  ierr = MPI_Type_struct(2, blockLen, indices, oldtypes, &c->itemType);CHKERRQ(ierr);
  ierr = MPI_Type_commit(&c->itemType);CHKERRQ(ierr);

  /* Initialize the local queue for char foot values */
  ierr = VecGetLocalSize(c->velocity, &c->queueMax);CHKERRQ(ierr);
  ierr = PetscMalloc(c->queueMax * sizeof(CharacteristicPointDA2D), &c->queue);CHKERRQ(ierr);
  c->queueSize = 0;

  /* Allocate communication structures */
  if (c->numNeighbors <= 0) {
    SETERRQ1(PETSC_ERR_ARG_WRONGSTATE, "Invalid number of neighbors %d. Call CharacteristicSetNeighbors() before setup.", c->numNeighbors);
  }
  ierr = PetscMalloc(c->numNeighbors * sizeof(PetscInt), &c->needCount);CHKERRQ(ierr);
  ierr = PetscMalloc(c->numNeighbors * sizeof(PetscInt), &c->localOffsets);CHKERRQ(ierr);
  ierr = PetscMalloc(c->numNeighbors * sizeof(PetscInt), &c->fillCount);CHKERRQ(ierr);
  ierr = PetscMalloc(c->numNeighbors * sizeof(PetscInt), &c->remoteOffsets);CHKERRQ(ierr);
  ierr = PetscMalloc((c->numNeighbors-1) * sizeof(MPI_Request), &c->request);CHKERRQ(ierr);
  ierr = PetscMalloc((c->numNeighbors-1) * sizeof(MPI_Status),  &c->status);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
Example #16
PetscErrorCode Monitor(TS ts,PetscInt step,PetscReal time,Vec global,void *ctx)
{
  VecScatter     scatter;
  IS             from,to;
  PetscInt       i,n,*idx;
  Vec            tmp_vec;
  PetscErrorCode ierr;
  PetscScalar    *tmp;

  /* Get the size of the vector */
  ierr = VecGetSize(global,&n);CHKERRQ(ierr);

  /* Set the index sets */
  ierr = PetscMalloc(n*sizeof(PetscInt),&idx);CHKERRQ(ierr);
  for(i=0; i<n; i++) idx[i]=i;
 
  /* Create local sequential vectors */
  ierr = VecCreateSeq(PETSC_COMM_SELF,n,&tmp_vec);CHKERRQ(ierr);

  /* Create scatter context */
  ierr = ISCreateGeneral(PETSC_COMM_SELF,n,idx,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
  ierr = ISCreateGeneral(PETSC_COMM_SELF,n,idx,PETSC_COPY_VALUES,&to);CHKERRQ(ierr);
  ierr = VecScatterCreate(global,from,tmp_vec,to,&scatter);CHKERRQ(ierr);
  ierr = VecScatterBegin(scatter,global,tmp_vec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(scatter,global,tmp_vec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  ierr = VecGetArray(tmp_vec,&tmp);CHKERRQ(ierr);
  ierr = PetscPrintf(PETSC_COMM_WORLD,"At t =%14.6e u = %14.6e  %14.6e  %14.6e \n",
                     time,PetscRealPart(tmp[0]),PetscRealPart(tmp[1]),PetscRealPart(tmp[2]));CHKERRQ(ierr);
  ierr = PetscPrintf(PETSC_COMM_WORLD,"At t =%14.6e errors = %14.6e  %14.6e  %14.6e \n",
                     time,PetscRealPart(tmp[0]-solx(time)),PetscRealPart(tmp[1]-soly(time)),PetscRealPart(tmp[2]-solz(time)));CHKERRQ(ierr);
  ierr = VecRestoreArray(tmp_vec,&tmp);CHKERRQ(ierr);
  ierr = VecScatterDestroy(&scatter);CHKERRQ(ierr);
  ierr = ISDestroy(&from);CHKERRQ(ierr);
  ierr = ISDestroy(&to);CHKERRQ(ierr);
  ierr = PetscFree(idx);CHKERRQ(ierr);
  ierr = VecDestroy(&tmp_vec);CHKERRQ(ierr);
  return 0;
}
Example #17
/*@
  DMPlexDistributeData - Distribute field data to match a given PetscSF, usually the SF from mesh distribution

  Collective on DM

  Input Parameters:
+ dm - The DMPlex object
. pointSF - The PetscSF describing the communication pattern
. originalSection - The PetscSection for existing data layout
. datatype - The type of data
- originalData - The existing data

  Output Parameters:
+ newSection - The PetscSection describing the new data layout
- newData - The new data

  Level: developer

.seealso: DMPlexDistribute(), DMPlexDistributeField()
@*/
PetscErrorCode DMPlexDistributeData(DM dm, PetscSF pointSF, PetscSection originalSection, MPI_Datatype datatype, void *originalData, PetscSection newSection, void **newData)
{
  PetscSF        fieldSF;
  PetscInt      *remoteOffsets, fieldSize;
  PetscMPIInt    dataSize;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
  ierr = PetscSFDistributeSection(pointSF, originalSection, &remoteOffsets, newSection);CHKERRQ(ierr);

  ierr = PetscSectionGetStorageSize(newSection, &fieldSize);CHKERRQ(ierr);
  ierr = MPI_Type_size(datatype, &dataSize);CHKERRQ(ierr);
  ierr = PetscMalloc(fieldSize * dataSize, newData);CHKERRQ(ierr);

  ierr = PetscSFCreateSectionSF(pointSF, originalSection, remoteOffsets, newSection, &fieldSF);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(fieldSF, datatype, originalData, *newData);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&fieldSF);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(DMPLEX_DistributeData,dm,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
Example #18
/*@C
   PetscViewersGetViewer - Gets a PetscViewer from a PetscViewer collection

   Not Collective, but the PetscViewer will be a collective object on the communicator of the PetscViewers

   Input Parameters:
+   viewers - object created with PetscViewersCreate()
-   n - index of the PetscViewer you want

   Output Parameter:
.  viewer - the PetscViewer

   Level: intermediate

   Concepts: PetscViewer^array of

.seealso: PetscViewersCreate(), PetscViewersDestroy()

@*/
PetscErrorCode  PetscViewersGetViewer(PetscViewers viewers,PetscInt n,PetscViewer *viewer)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (n < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Cannot access using a negative index - %d\n",n);
  if (n >= viewers->n) {
    PetscViewer *v;
    int    newn = n + 64; /* add 64 new ones at a time */

    ierr = PetscMalloc(newn*sizeof(PetscViewer),&v);CHKERRQ(ierr);
    ierr = PetscMemzero(v,newn*sizeof(PetscViewer));CHKERRQ(ierr);
    ierr = PetscMemcpy(v,viewers->viewer,viewers->n*sizeof(PetscViewer));CHKERRQ(ierr);
    ierr = PetscFree(viewers->viewer);CHKERRQ(ierr);
    viewers->viewer = v;
  }
  if (!viewers->viewer[n]) {
    ierr = PetscViewerCreate(viewers->comm,&viewers->viewer[n]);CHKERRQ(ierr);
  }
  *viewer = viewers->viewer[n];
  PetscFunctionReturn(0);
}
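
A small sketch of the collection above: the container is created with PetscViewersCreate() and individual viewers are created lazily the first time an index is requested. The helper name is made up.

static PetscErrorCode GetThirdViewer(MPI_Comm comm,PetscViewers *viewers,PetscViewer *third)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscViewersCreate(comm,viewers);CHKERRQ(ierr);
  /* index 2 is the third viewer; it is created on first access */
  ierr = PetscViewersGetViewer(*viewers,2,third);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}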
Example #19
/*@
   PetscSequentialPhaseBegin - Begins a sequential section of code.  

   Collective on MPI_Comm

   Input Parameters:
+  comm - Communicator to sequentialize.  
-  ng   - Number in processor group.  This many processes are allowed to execute
   at the same time (usually 1)

   Level: intermediate

   Notes:
   PetscSequentialPhaseBegin() and PetscSequentialPhaseEnd() provide a
   way to force a section of code to be executed by the processes in
   rank order.  Typically, this is done with
.vb
      PetscSequentialPhaseBegin(comm, 1);
      <code to be executed sequentially>
      PetscSequentialPhaseEnd(comm, 1);
.ve

   Often, the sequential code contains output statements (e.g., printf) to
   be executed.  Note that you may need to flush the I/O buffers before
   calling PetscSequentialPhaseEnd().  Also, note that some systems do
   not propagate I/O in any order to the controlling terminal (in other words,
   even if you flush the output, you may not get the data in the order
   that you want).

.seealso: PetscSequentialPhaseEnd()

   Concepts: sequential stage

@*/
PetscErrorCode  PetscSequentialPhaseBegin(MPI_Comm comm,int ng)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;
  MPI_Comm       local_comm,*addr_local_comm;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (size == 1) PetscFunctionReturn(0);

  /* Get the private communicator for the sequential operations */
  if (Petsc_Seq_keyval == MPI_KEYVAL_INVALID) {
    ierr = MPI_Keyval_create(MPI_NULL_COPY_FN,MPI_NULL_DELETE_FN,&Petsc_Seq_keyval,0);CHKERRQ(ierr);
  }

  ierr = MPI_Comm_dup(comm,&local_comm);CHKERRQ(ierr);
  ierr = PetscMalloc(sizeof(MPI_Comm),&addr_local_comm);CHKERRQ(ierr);
  *addr_local_comm = local_comm;
  ierr = MPI_Attr_put(comm,Petsc_Seq_keyval,(void*)addr_local_comm);CHKERRQ(ierr);
  ierr = PetscSequentialPhaseBegin_Private(local_comm,ng);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
Example #20
/*@
    ISAllGather - Given an index set (IS) on each processor, generates a large
    index set (same on each processor) by concatenating together each
    processor's index set.

    Collective on IS

    Input Parameter:
.   is - the distributed index set

    Output Parameter:
.   isout - the concatenated index set (same on all processors)

    Notes:
    ISAllGather() is clearly not scalable for large index sets.

    The IS created on each processor must be created with a common
    communicator (e.g., PETSC_COMM_WORLD). If the index sets were created
    with PETSC_COMM_SELF, this routine will not work as expected, since
    each process will generate its own new IS that consists only of
    itself.

    The communicator for this new IS is PETSC_COMM_SELF

    Level: intermediate

    Concepts: gather^index sets
    Concepts: index sets^gathering to all processors
    Concepts: IS^gathering to all processors

.seealso: ISCreateGeneral(), ISCreateStride(), ISCreateBlock()
@*/
PetscErrorCode  ISAllGather(IS is,IS *isout)
{
  PetscErrorCode ierr;
  PetscInt       *indices,n,i,N,step,first;
  const PetscInt *lindices;
  MPI_Comm       comm;
  PetscMPIInt    size,*sizes = NULL,*offsets = NULL,nn;
  PetscBool      stride;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(is,IS_CLASSID,1);
  PetscValidPointer(isout,2);

  ierr = PetscObjectGetComm((PetscObject)is,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)is,ISSTRIDE,&stride);CHKERRQ(ierr);
  if (size == 1 && stride) { /* should handle parallel ISStride also */
    ierr = ISStrideGetInfo(is,&first,&step);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,n,first,step,isout);CHKERRQ(ierr);
  } else {
    ierr = PetscMalloc2(size,PetscMPIInt,&sizes,size,PetscMPIInt,&offsets);CHKERRQ(ierr);

    ierr       = PetscMPIIntCast(n,&nn);CHKERRQ(ierr);
    ierr       = MPI_Allgather(&nn,1,MPI_INT,sizes,1,MPI_INT,comm);CHKERRQ(ierr);
    offsets[0] = 0;
    for (i=1; i<size; i++) offsets[i] = offsets[i-1] + sizes[i-1];
    N = offsets[size-1] + sizes[size-1];

    ierr = PetscMalloc(N*sizeof(PetscInt),&indices);CHKERRQ(ierr);
    ierr = ISGetIndices(is,&lindices);CHKERRQ(ierr);
    ierr = MPI_Allgatherv((void*)lindices,nn,MPIU_INT,indices,sizes,offsets,MPIU_INT,comm);CHKERRQ(ierr);
    ierr = ISRestoreIndices(is,&lindices);CHKERRQ(ierr);
    ierr = PetscFree2(sizes,offsets);CHKERRQ(ierr);

    ierr = ISCreateGeneral(PETSC_COMM_SELF,N,indices,PETSC_OWN_POINTER,isout);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
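
A sketch of the gather pattern described in the Notes: each process contributes a stride IS built on a common communicator, and the concatenated result lives on PETSC_COMM_SELF. The helper name and the choice of a unit stride are illustrative.

static PetscErrorCode GatherOwnedIndices(MPI_Comm comm,PetscInt nlocal,PetscInt first,IS *all)
{
  PetscErrorCode ierr;
  IS             local;

  PetscFunctionBegin;
  ierr = ISCreateStride(comm,nlocal,first,1,&local);CHKERRQ(ierr);
  ierr = ISAllGather(local,all);CHKERRQ(ierr);   /* *all is sequential and identical on every process */
  ierr = ISDestroy(&local);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}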
Example #21
PetscErrorCode  DMCoarsen_ADDA(DM dm, MPI_Comm comm,DM *dmc)
{
  PetscErrorCode ierr;
  PetscInt       *nodesc;
  PetscInt       dofc;
  PetscInt       i;
  DM_ADDA        *dd = (DM_ADDA*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(dmc, 3);
  ierr = PetscMalloc(dd->dim*sizeof(PetscInt), &nodesc);CHKERRQ(ierr);
  for (i=0; i<dd->dim; i++) {
    nodesc[i] = (dd->nodes[i] % dd->refine[i]) ? dd->nodes[i] / dd->refine[i] + 1 : dd->nodes[i] / dd->refine[i];
  }
  dofc = (dd->dof % dd->dofrefine) ? dd->dof / dd->dofrefine + 1 : dd->dof / dd->dofrefine;
  ierr = DMADDACreate(PetscObjectComm((PetscObject)dm), dd->dim, nodesc, dd->procs, dofc, dd->periodic, dmc);CHKERRQ(ierr);
  ierr = PetscFree(nodesc);CHKERRQ(ierr);
  /* copy refinement factors */
  ierr = DMADDASetRefinement(*dmc, dd->refine, dd->dofrefine);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
Example #22
/*@
    ISComplement - Given an index set (IS) generates the complement index set. That is,
       all indices that are NOT in the given set.

    Collective on IS

    Input Parameters:
+   is - the index set
.   nmin - the first index desired in the local part of the complement
-   nmax - the largest index desired in the local part of the complement (note that all indices in is must be greater than or equal to nmin and less than nmax)

    Output Parameter:
.   isout - the complement

    Notes:  The communicator for this new IS is the same as for the input IS

      For a parallel IS, this will generate the local part of the complement on each process

      To generate the entire complement (on each process) of a parallel IS, first call ISAllGather() and then
    call this routine.

    Level: intermediate

    Concepts: gather^index sets
    Concepts: index sets^gathering to all processors
    Concepts: IS^gathering to all processors

.seealso: ISCreateGeneral(), ISCreateStride(), ISCreateBlock(), ISAllGather()
@*/
PetscErrorCode  ISComplement(IS is,PetscInt nmin,PetscInt nmax,IS *isout)
{
  PetscErrorCode ierr;
  const PetscInt *indices;
  PetscInt       n,i,j,unique,cnt,*nindices;
  PetscBool      sorted;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(is,IS_CLASSID,1);
  PetscValidPointer(isout,3);
  if (nmin < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"nmin %D cannot be negative",nmin);
  if (nmin > nmax) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"nmin %D cannot be greater than nmax %D",nmin,nmax);
  ierr = ISSorted(is,&sorted);CHKERRQ(ierr);
  if (!sorted) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Index set must be sorted");

  ierr = ISGetLocalSize(is,&n);CHKERRQ(ierr);
  ierr = ISGetIndices(is,&indices);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
  for (i=0; i<n; i++) {
    if (indices[i] <  nmin) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index %D's value %D is smaller than minimum given %D",i,indices[i],nmin);
    if (indices[i] >= nmax) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index %D's value %D is larger than maximum given %D",i,indices[i],nmax);
  }
#endif
  /* Count number of unique entries */
  unique = (n>0);
  for (i=0; i<n-1; i++) {
    if (indices[i+1] != indices[i]) unique++;
  }
  ierr = PetscMalloc((nmax-nmin-unique)*sizeof(PetscInt),&nindices);CHKERRQ(ierr);
  cnt  = 0;
  for (i=nmin,j=0; i<nmax; i++) {
    if (j<n && i==indices[j]) do { j++; } while (j<n && i==indices[j]);
    else nindices[cnt++] = i;
  }
  if (cnt != nmax-nmin-unique) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Number of entries found in complement %D does not match expected %D",cnt,nmax-nmin-unique);
  ierr = ISCreateGeneral(PetscObjectComm((PetscObject)is),cnt,nindices,PETSC_OWN_POINTER,isout);CHKERRQ(ierr);
  ierr = ISRestoreIndices(is,&indices);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
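
A sketch of calling the routine above on a local ownership range [rstart, rend): the input must be sorted, so the helper (whose name is made up) sorts it first.

static PetscErrorCode MissingIndices(IS owned,PetscInt rstart,PetscInt rend,IS *ghosted)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = ISSort(owned);CHKERRQ(ierr);                           /* ISComplement() requires a sorted IS */
  ierr = ISComplement(owned,rstart,rend,ghosted);CHKERRQ(ierr); /* indices in [rstart,rend) not in owned */
  PetscFunctionReturn(0);
}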
Example #23
/*@C
   PetscViewerBinaryWriteStringArray - writes an array of strings to a binary file, only from the first process

   Collective on MPI_Comm

   Input Parameters:
+  viewer - the binary viewer
-  data - location of the array of strings


   Level: intermediate

   Concepts: binary files

    Notes: array of strings is null terminated

.seealso: PetscViewerASCIIOpen(), PetscViewerSetFormat(), PetscViewerDestroy(),
          VecView(), MatView(), VecLoad(), MatLoad(), PetscViewerBinaryGetDescriptor(),
          PetscViewerBinaryGetInfoPointer(), PetscFileMode, PetscViewer, PetscBinaryViewerRead()
@*/
PetscErrorCode  PetscViewerBinaryWriteStringArray(PetscViewer viewer,char **data)
{
  PetscErrorCode     ierr;
  PetscInt           i,n = 0,*sizes;

  PetscFunctionBegin;
  /* count number of strings */
  while (data[n++]);
  n--;
  ierr = PetscMalloc((n+1)*sizeof(PetscInt),&sizes);CHKERRQ(ierr);
  sizes[0] = n;
  for (i=0; i<n; i++) {
    size_t tmp;
    ierr       = PetscStrlen(data[i],&tmp);CHKERRQ(ierr);
    sizes[i+1] = tmp + 1;   /* size includes space for the null terminator */
  }
  ierr = PetscViewerBinaryWrite(viewer,sizes,n+1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    ierr = PetscViewerBinaryWrite(viewer,data[i],sizes[i+1],PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr);
  }
  ierr = PetscFree(sizes);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
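
A quick sketch of feeding the routine above a NULL-terminated array of strings through a binary viewer; the file name and helper name are illustrative.

static PetscErrorCode WriteLabels(MPI_Comm comm)
{
  PetscErrorCode ierr;
  PetscViewer    viewer;
  char           *labels[] = {(char*)"pressure",(char*)"velocity",NULL};  /* NULL-terminated array */

  PetscFunctionBegin;
  ierr = PetscViewerBinaryOpen(comm,"labels.dat",FILE_MODE_WRITE,&viewer);CHKERRQ(ierr);
  ierr = PetscViewerBinaryWriteStringArray(viewer,labels);CHKERRQ(ierr);
  ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}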
Example #24
/*
*) Ensures all data has been packed
*/
PetscErrorCode DataExPackFinalize(DataEx de)
{
  PetscMPIInt i,np;
  PetscInt    total;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (de->packer_status != DEOBJECT_INITIALIZED) SETERRQ( de->comm, PETSC_ERR_ORDER, "Packer has not been initialized. Must call DataExPackInitialize() first." );
  np = de->n_neighbour_procs;
  for (i = 0; i < np; ++i) {
    if (de->pack_cnt[i] != de->messages_to_be_sent[i]) SETERRQ3( PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "Not all messages for neighbour[%d] have been packed. Expected %D : Inserted %D",
                (int)de->neighbour_procs[i], de->messages_to_be_sent[i], de->pack_cnt[i] );
  }
  /* init */
  for (i = 0; i < np; ++i) {
    de->messages_to_be_recvieved[i] = -1;
  }
  /* figure out the recv counts here */
  for (i = 0; i < np; ++i) {
    ierr = MPI_Isend(&de->messages_to_be_sent[i], 1, MPIU_INT, de->neighbour_procs[i], de->send_tags[i], de->comm, &de->_requests[i]);CHKERRQ(ierr);
  }
  for (i = 0; i < np; ++i) {
    ierr = MPI_Irecv(&de->messages_to_be_recvieved[i], 1, MPIU_INT, de->neighbour_procs[i], de->recv_tags[i], de->comm, &de->_requests[np+i]);CHKERRQ(ierr);
  }
  ierr = MPI_Waitall(2*np, de->_requests, de->_stats);CHKERRQ(ierr);
  /* create space for the data to be received */
  total = 0;
  for (i = 0; i < np; ++i) {
    total = total + de->messages_to_be_recvieved[i];
  }
  ierr = PetscMalloc(de->unit_message_size * (total + 1), &de->recv_message);CHKERRQ(ierr);
  /* initialize memory */
  ierr = PetscMemzero(de->recv_message, de->unit_message_size * (total + 1));CHKERRQ(ierr);
  /* set total items to receive */
  de->recv_message_length = total;
  de->packer_status = DEOBJECT_FINALIZED;
  de->communication_status = DEOBJECT_INITIALIZED;
  PetscFunctionReturn(0);
}
Example #25
// gridsizes is array of length nsamples, to be PetscFree'd by caller
PetscErrorCode SampleGridRangeCreate(PetscMPIInt nranks,PetscInt minlocal,PetscInt maxlocal,PetscInt maxsamples,PetscInt *nsamples,PetscInt **gridsizes) {
  PetscErrorCode ierr;
  int64_t target;
  PetscInt gsize[100][3],n;

  PetscFunctionBegin;
  if (maxsamples < 2) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_ARG_INCOMP,"The max number of samples must be at least 2");

  // Build a list of compatible grid sizes in descending order
  for (target=maxlocal,n=0; target>=minlocal; n++) {
    ierr = FindCompatibleProblemSize(nranks,target,gsize[n]);CHKERRQ(ierr);
    if (SampleGridNumElements(gsize[n]) < minlocal) {
      if (!n) n = 1; // Keep whatever we found if it's the only one
      break;
    }
    target = (SampleGridNumElements(gsize[n]) - 1)/nranks;
  }

  // Filter the list by greedily removing interior sample locations whose removal would leave behind the smallest
  // possible ratio between successive sizes
  while (n > maxsamples) {
    PetscInt loc = -1;
    double   ratio = 1e10;
    for (PetscInt i=1; i<n-1; i++) {
      double r = (double)SampleGridNumElements(gsize[i-1]) / SampleGridNumElements(gsize[i+1]);
      if (r < ratio) {
        loc = i;
        ratio = r;
      }
    }
    ierr = PetscMemmove(gsize[loc],gsize[loc+1],(char*)gsize[n]-(char*)gsize[loc+1]);CHKERRQ(ierr);
    n--;
  }

  *nsamples = n;
  ierr = PetscMalloc(n*sizeof gsize[0],gridsizes);CHKERRQ(ierr);
  ierr = PetscMemcpy(*gridsizes,gsize[0],n*sizeof gsize[0]);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
Example #26
PetscErrorCode iniStepTimer( const char pre[], PetscInt Iter0, StepTimer *thetimer )
{

    PetscErrorCode ierr;
    PetscBool flg;
    PetscInt it;
    PetscInt tmparr[MAX_NUM_INTERVALS];

/*  read time step data */
    thetimer->startTimeStep = Iter0 + 1; /* by default we start at first time step */
    ierr = PetscOptionsGetInt(pre,"-start_time_step",&thetimer->startTimeStep,&flg);CHKERRQ(ierr);

    thetimer->maxNumIntervals=MAX_NUM_INTERVALS;
    ierr = PetscOptionsGetIntArray(pre,"-time_steps",tmparr,&thetimer->maxNumIntervals,&flg);CHKERRQ(ierr);
    if (!flg) SETERRQ1(PETSC_COMM_WORLD,1,"Must indicate number of step timer time steps with the -%stime_steps flag",pre);

    if (thetimer->maxNumIntervals==1) {
      thetimer->fixedStep=PETSC_TRUE;
      thetimer->currInterval=0; /* Not used but we set it anyway to be safe */
      thetimer->numTimeSteps=tmparr[0];
      ierr = PetscPrintf(PETSC_COMM_WORLD,"Fixed interval of %d specified for StepTimer object %s\n", thetimer->numTimeSteps, pre);CHKERRQ(ierr);
    } else {
      thetimer->fixedStep=PETSC_FALSE;
      ierr = PetscMalloc(thetimer->maxNumIntervals*sizeof(PetscInt), &thetimer->timeIntervals);CHKERRQ(ierr);
      ierr = PetscPrintf(PETSC_COMM_WORLD,"Variable number of intervals specified for StepTimer object %s\n", pre);CHKERRQ(ierr);
      for (it=0; it<thetimer->maxNumIntervals; it++) {
        thetimer->timeIntervals[it] = tmparr[it];
        ierr = PetscPrintf(PETSC_COMM_WORLD,"  Interval #%d=%d\n", it+1,thetimer->timeIntervals[it]);CHKERRQ(ierr);
      }
      thetimer->currInterval=0;
      thetimer->numTimeSteps=thetimer->timeIntervals[thetimer->currInterval];
    }

    thetimer->count=0;

    return 0;
}
Example #27
    VectorPetsc( Vector<value_type> const& v, std::vector<int> const& index )
        :
        super(),
        //super(v,index),
        M_destroy_vec_on_exit( false )
    {
#if defined(__clang__)
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wunsequenced"
#endif

#if (PETSC_VERSION_MAJOR == 3) && (PETSC_VERSION_MINOR >= 2)

        VectorPetsc<T> const* V = dynamic_cast<VectorPetsc<T> const*> ( &v );
        int ierr=0;
        IS is;
        PetscInt *map;
        int n = index.size();
        PetscMalloc(n*sizeof(PetscInt),&map);
        for (int i=0; i<n; i++) map[i] = index[i];
        ierr = ISCreateGeneral(Environment::worldComm(),n,map,PETSC_COPY_VALUES,&is);
        CHKERRABORT( this->comm(),ierr );
        PetscFree(map);

        datamap_ptrtype dm( new datamap_type(n, n, V->comm()) );
        this->setMap(dm);
        /* init */
        ierr = VecGetSubVector(V->vec(), is, &this->M_vec);
        CHKERRABORT( this->comm(),ierr );
        this->M_is_initialized = true;
        /* close */
        this->close(); /* no // assembly required */
#endif

#if defined(__clang__)
#pragma clang diagnostic pop
#endif
    }
Example #28
/*
   Gets the natural number for each global number on the process.

   Used by DMDAGetAO() and DMDAGlobalToNatural_Create()
*/
PetscErrorCode DMDAGetNatural_Private(DM da,PetscInt *outNlocal,IS *isnatural)
{
  PetscErrorCode ierr;
  PetscInt       Nlocal,i,j,k,*lidx,lict = 0;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  Nlocal = (dd->xe-dd->xs);
  if (dd->dim > 1) Nlocal *= (dd->ye-dd->ys);
  if (dd->dim > 2) Nlocal *= (dd->ze-dd->zs);

  ierr = PetscMalloc(Nlocal*sizeof(PetscInt),&lidx);CHKERRQ(ierr);

  if (dd->dim == 1) {
    for (i=dd->xs; i<dd->xe; i++) {
      /*  global number in natural ordering */
      lidx[lict++] = i;
    }
  } else if (dd->dim == 2) {
    for (j=dd->ys; j<dd->ye; j++) {
      for (i=dd->xs; i<dd->xe; i++) {
        /*  global number in natural ordering */
        lidx[lict++] = i + j*dd->M*dd->w;
      }
    }
  } else if (dd->dim == 3) {
    for (k=dd->zs; k<dd->ze; k++) {
      for (j=dd->ys; j<dd->ye; j++) {
        for (i=dd->xs; i<dd->xe; i++) {
          lidx[lict++] = i + j*dd->M*dd->w + k*dd->M*dd->N*dd->w;
        }
      }
    }
  }
  *outNlocal = Nlocal;
  ierr       = ISCreateGeneral(PetscObjectComm((PetscObject)da),Nlocal,lidx,PETSC_OWN_POINTER,isnatural);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
Example #29
PetscErrorCode KSPBuildSolution_PGMRES(KSP ksp,Vec ptr,Vec *result)
{
  KSP_PGMRES     *pgmres = (KSP_PGMRES*)ksp->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!ptr) {
    if (!pgmres->sol_temp) {
      ierr = VecDuplicate(ksp->vec_sol,&pgmres->sol_temp);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(ksp,pgmres->sol_temp);CHKERRQ(ierr);
    }
    ptr = pgmres->sol_temp;
  }
  if (!pgmres->nrs) {
    /* allocate the work area */
    ierr = PetscMalloc(pgmres->max_k*sizeof(PetscScalar),&pgmres->nrs);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(ksp,pgmres->max_k*sizeof(PetscScalar));CHKERRQ(ierr);
  }

  ierr = KSPPGMRESBuildSoln(pgmres->nrs,ksp->vec_sol,ptr,ksp,pgmres->it);CHKERRQ(ierr);
  if (result) *result = ptr;
  PetscFunctionReturn(0);
}
Example #30
PetscErrorCode  PetscViewerVTKAddField_VTK(PetscViewer viewer,PetscObject dm,PetscErrorCode (*PetscViewerVTKWriteFunction)(PetscObject,PetscViewer),PetscViewerVTKFieldType fieldtype,PetscObject vec)
{
  PetscViewer_VTK          *vtk = (PetscViewer_VTK*)viewer->data;
  PetscViewerVTKObjectLink link, tail = vtk->link;
  PetscErrorCode           ierr;

  PetscFunctionBegin;
  if (vtk->dm) {
    if (dm != vtk->dm) SETERRQ(PetscObjectComm((PetscObject)viewer),PETSC_ERR_ARG_INCOMP,"Cannot write a field from more than one grid to the same VTK file");
  }
  vtk->dm    = dm;
  vtk->write = PetscViewerVTKWriteFunction;
  ierr       = PetscMalloc(sizeof(struct _n_PetscViewerVTKObjectLink),&link);CHKERRQ(ierr);
  link->ft   = fieldtype;
  link->vec  = vec;
  link->next = NULL;
  /* Append to list */
  if (tail) {
    while (tail->next) tail = tail->next;
    tail->next = link;
  } else vtk->link = link;
  PetscFunctionReturn(0);
}