Example No. 1
PetscErrorCode ISView_General_Binary(IS is,PetscViewer viewer)
{
  PetscErrorCode ierr;
  IS_General     *isa = (IS_General*) is->data;
  PetscMPIInt    rank,size,mesgsize,tag = ((PetscObject)viewer)->tag, mesglen;
  PetscInt       n,N,len,j,tr[2];
  int            fdes;
  MPI_Status     status;
  PetscInt       message_count,flowcontrolcount,*values;

  PetscFunctionBegin;
  ierr = PetscLayoutGetLocalSize(is->map, &n);CHKERRQ(ierr);
  ierr = PetscLayoutGetSize(is->map, &N);CHKERRQ(ierr);
  ierr = PetscViewerBinaryGetDescriptor(viewer,&fdes);CHKERRQ(ierr);

  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)is),&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)is),&size);CHKERRQ(ierr);

  tr[0] = IS_FILE_CLASSID;
  tr[1] = N;
  ierr  = PetscViewerBinaryWrite(viewer,tr,2,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
  /* determine an upper bound on the size of any message to arrive (the sum of all local sizes) */
  ierr  = MPI_Reduce(&n,&len,1,MPIU_INT,MPI_SUM,0,PetscObjectComm((PetscObject)is));CHKERRQ(ierr);

  ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr);
  if (!rank) {
    ierr = PetscBinaryWrite(fdes,isa->idx,n,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);

    ierr = PetscMalloc1(len,&values);CHKERRQ(ierr);
    ierr = PetscMPIIntCast(len,&mesgsize);CHKERRQ(ierr);
    /* receive and save messages */
    for (j=1; j<size; j++) {
      ierr = PetscViewerFlowControlStepMaster(viewer,j,&message_count,flowcontrolcount);CHKERRQ(ierr);
      ierr = MPI_Recv(values,mesgsize,MPIU_INT,j,tag,PetscObjectComm((PetscObject)is),&status);CHKERRQ(ierr);
      ierr = MPI_Get_count(&status,MPIU_INT,&mesglen);CHKERRQ(ierr);
      ierr = PetscBinaryWrite(fdes,values,(PetscInt)mesglen,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlowControlEndMaster(viewer,&message_count);CHKERRQ(ierr);
    ierr = PetscFree(values);CHKERRQ(ierr);
  } else {
    ierr = PetscViewerFlowControlStepWorker(viewer,rank,&message_count);CHKERRQ(ierr);
    ierr = PetscMPIIntCast(n,&mesgsize);CHKERRQ(ierr);
    ierr = MPI_Send(isa->idx,mesgsize,MPIU_INT,0,tag,PetscObjectComm((PetscObject)is));CHKERRQ(ierr);
    ierr = PetscViewerFlowControlEndWorker(viewer,&message_count);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
Example No. 2
File: sysio.c  Project: petsc/petsc
/*@C
   PetscBinarySynchronizedWrite - writes to a binary file.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator
.  fd - the file descriptor
.  p - the buffer to write
.  n  - the number of items to write
.  type - the type of items to write (PETSC_INT, PETSC_DOUBLE or PETSC_SCALAR)
-  istemp - PETSC_TRUE if the buffer may be overwritten (for example by byte swapping)

   Level: developer

   Notes:
   Process 0 does a PetscBinaryWrite(); the call is ignored on all other processes.

   PetscBinarySynchronizedWrite() uses byte swapping so the files it produces are portable.
   Integers are stored in the file as 32 bit, regardless of whether they are 32 or 64 bit
   in memory, so the same binary file may be read on any machine.

   Because byte swapping may be performed on the values in the buffer, it cannot be declared const.

   WARNING: This is NOT like PetscSynchronizedFPrintf()! This routine ignores calls on all but process 0,
   while PetscSynchronizedFPrintf() has all processes print their strings in order.

   Concepts: files^synchronized writing of binary files
   Concepts: binary files^writing, synchronized

.seealso: PetscBinaryWrite(), PetscBinaryOpen(), PetscBinaryClose(), PetscBinaryRead(), PetscBinarySynchronizedRead(),
          PetscBinarySynchronizedSeek()
@*/
PetscErrorCode  PetscBinarySynchronizedWrite(MPI_Comm comm,int fd,void *p,PetscInt n,PetscDataType type,PetscBool istemp)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  if (!rank) {
    ierr = PetscBinaryWrite(fd,p,n,type,istemp);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
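A minimal usage sketch (not part of the original source; the file name and data are made up for illustration). Every process makes the collective call, but only process 0 opens the file and writes; the descriptor is ignored on the other processes:

#include <petscsys.h>

int main(int argc,char **argv)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  int            fd = -1;           /* only meaningful on process 0 */
  PetscInt       data[3] = {1,2,3};

  ierr = PetscInitialize(&argc,&argv,NULL,NULL);if (ierr) return ierr;
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
  if (!rank) {
    ierr = PetscBinaryOpen("example.dat",FILE_MODE_WRITE,&fd);CHKERRQ(ierr);
  }
  /* collective call; on ranks other than 0 it does nothing with fd */
  ierr = PetscBinarySynchronizedWrite(PETSC_COMM_WORLD,fd,data,3,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
  if (!rank) {
    ierr = PetscBinaryClose(fd);CHKERRQ(ierr);
  }
  ierr = PetscFinalize();
  return ierr;
}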
Example No. 3
/* totalNumProfiles, lNumProfiles and gNumProfiles are globals defined elsewhere in this project */
PetscErrorCode writeProfileSurfaceScalarData(char *fileName, PetscScalar *arr, PetscInt numValsPerProfile, PetscTruth appendToFile)
{
  PetscErrorCode ierr;
  PetscScalar *tmpArr;
  PetscMPIInt *displs, *rcounts; /* MPI expects int counts and displacements */
  PetscInt cumpro;
  PetscInt ipro;
  size_t m1, m2;
  PetscViewer fd;
  int fp;
  PetscMPIInt numProcessors, myId;

  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&myId);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PETSC_COMM_WORLD,&numProcessors);CHKERRQ(ierr);

  /* Allocate memory for temporary arrays */
  m1 = numValsPerProfile*totalNumProfiles*sizeof(PetscScalar);
  m2 = numProcessors*sizeof(PetscMPIInt);
  ierr = PetscMalloc(m1,&tmpArr);CHKERRQ(ierr);
  ierr = PetscMalloc(m2,&displs);CHKERRQ(ierr);
  ierr = PetscMalloc(m2,&rcounts);CHKERRQ(ierr);

  cumpro=0;
  for (ipro=1; ipro<=numProcessors; ipro++) {
    displs[ipro-1]=numValsPerProfile*cumpro;
    rcounts[ipro-1]=numValsPerProfile*gNumProfiles[ipro-1];
    cumpro = cumpro + gNumProfiles[ipro-1];
  }

  /* MPIU_SCALAR (rather than MPI_DOUBLE) stays correct when PetscScalar is complex or single precision */
  ierr = MPI_Gatherv(arr,numValsPerProfile*lNumProfiles,MPIU_SCALAR,tmpArr,rcounts,displs,MPIU_SCALAR,0,PETSC_COMM_WORLD);CHKERRQ(ierr);

  if (myId==0) { /* this shouldn't really be necessary, but without it, all processors seem to be writing in append mode */
    if (appendToFile) {
      ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,fileName,FILE_MODE_APPEND,&fd);CHKERRQ(ierr);
    } else {
      ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,fileName,FILE_MODE_WRITE,&fd);CHKERRQ(ierr);
    }

    ierr = PetscViewerBinaryGetDescriptor(fd,&fp);CHKERRQ(ierr);
    ierr = PetscBinaryWrite(fp,tmpArr,numValsPerProfile*totalNumProfiles,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr);
    ierr = PetscViewerDestroy(fd);CHKERRQ(ierr);
  }

  ierr = PetscFree(tmpArr);CHKERRQ(ierr);
  ierr = PetscFree(displs);CHKERRQ(ierr);
  ierr = PetscFree(rcounts);CHKERRQ(ierr);

  return 0;
}
Example No. 4
/*@C
     PetscHTTPRequest - Send a request to an HTTP server

   Input Parameters:
+   type - either "POST" or "GET"
.   url -  URL of request host/path
.   header - additional header information, may be NULL
.   ctype - data type of body, for example application/json
.   body - data to send to server
.   sock - obtained with PetscOpenSocket()
-   buffsize - size of buffer

   Output Parameter:
.   buff - everything returned from server

    Level: advanced

.seealso: PetscHTTPSRequest(), PetscOpenSocket(), PetscHTTPSConnect(), PetscPullJSONValue()
@*/
PetscErrorCode PetscHTTPRequest(const char type[],const char url[],const char header[],const char ctype[],const char body[],int sock,char buff[],size_t buffsize)
{
  char           *request;
  size_t         request_len;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscHTTPBuildRequest(type,url,header,ctype,body,&request);CHKERRQ(ierr);
  ierr = PetscStrlen(request,&request_len);CHKERRQ(ierr);

  ierr = PetscBinaryWrite(sock,request,request_len,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr);
  ierr = PetscFree(request);CHKERRQ(ierr);
  /* the return code is deliberately not checked: the server may close the connection before buffsize bytes arrive */
  PetscBinaryRead(sock,buff,buffsize,PETSC_CHAR);
  buff[buffsize-1] = 0; /* make sure the reply is null terminated */
  ierr = PetscInfo1(NULL,"HTTP result follows: \n%s\n",buff);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
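A minimal usage sketch (not part of the original source); the host, port, and path are placeholders:

#include <petscwebclient.h> /* declares PetscHTTPRequest(); PetscOpenSocket() comes from petscsys.h */
#include <unistd.h>

int main(int argc,char **argv)
{
  PetscErrorCode ierr;
  int            sock;
  char           buff[4096];

  ierr = PetscInitialize(&argc,&argv,NULL,NULL);if (ierr) return ierr;
  ierr = PetscOpenSocket("example.com",80,&sock);CHKERRQ(ierr); /* placeholder host and port */
  ierr = PetscHTTPRequest("GET","example.com/index.html",NULL,"application/json",NULL,sock,buff,sizeof(buff));CHKERRQ(ierr);
  ierr = PetscPrintf(PETSC_COMM_SELF,"%s\n",buff);CHKERRQ(ierr);
  close(sock); /* the socket is a plain file descriptor */
  ierr = PetscFinalize();
  return ierr;
}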
Example No. 5
PetscErrorCode writeBinaryScalarData(char *fileName, PetscScalar *arr, PetscInt N, PetscTruth appendToFile)
{
  PetscErrorCode ierr;
  PetscViewer fd;
  int fp;
  PetscMPIInt myId;

  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&myId);CHKERRQ(ierr);

  if (myId==0) { /* this shouldn't really be necessary, but without it, all processors seem to be writing in append mode */
    if (appendToFile) {
      ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,fileName,FILE_MODE_APPEND,&fd);CHKERRQ(ierr);
    } else {
      ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,fileName,FILE_MODE_WRITE,&fd);CHKERRQ(ierr);
    }

    ierr = PetscViewerBinaryGetDescriptor(fd,&fp);CHKERRQ(ierr);
    ierr = PetscBinaryWrite(fp,arr,N,PETSC_SCALAR,PETSC_FALSE);CHKERRQ(ierr);
    ierr = PetscViewerDestroy(fd);CHKERRQ(ierr);
  }

  return 0;
}
Example No. 6
File: ex6.c  Project: Kun-Qu/petsc
static char help[] = "Writes an array to a binary file and reads it back into a vector.\n\n"; /* the original file defines a help string; this text is a reconstruction */

int main(int argc,char **args)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;
  int            fd;
  PetscInt       i,m = 10,sz;
  PetscScalar    *avec,*array;
  Vec            vec;
  PetscViewer    view_out,view_in;

  PetscInitialize(&argc,&args,(char *)0,help);
  ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);CHKERRQ(ierr);
  if (size != 1) SETERRQ(PETSC_COMM_SELF,1,"This is a uniprocessor example only!");
  
  ierr = PetscOptionsGetInt(PETSC_NULL,"-m",&m,PETSC_NULL);CHKERRQ(ierr);

  /* ---------------------------------------------------------------------- */
  /*          PART 1: Write some data to a file in binary format            */
  /* ---------------------------------------------------------------------- */

  /* Allocate array and set values */
  ierr = PetscMalloc(m*sizeof(PetscScalar),&array);CHKERRQ(ierr);
  for (i=0; i<m; i++) {
    array[i] = i*10.0;
  }

  /* Open viewer for binary output */
  ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,"input.dat",FILE_MODE_WRITE,&view_out);CHKERRQ(ierr);
  ierr = PetscViewerBinaryGetDescriptor(view_out,&fd);CHKERRQ(ierr);

  /* Write binary output */
  ierr = PetscBinaryWrite(fd,&m,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
  ierr = PetscBinaryWrite(fd,array,m,PETSC_SCALAR,PETSC_FALSE);CHKERRQ(ierr);

  /* Destroy the output viewer and work array */
  ierr = PetscViewerDestroy(&view_out);CHKERRQ(ierr);
  ierr = PetscFree(array);CHKERRQ(ierr);

  /* ---------------------------------------------------------------------- */
  /*          PART 2: Read data from file and form a vector                 */
  /* ---------------------------------------------------------------------- */

  /* Open input binary viewer */
  ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,"input.dat",FILE_MODE_READ,&view_in);CHKERRQ(ierr);
  ierr = PetscViewerBinaryGetDescriptor(view_in,&fd);CHKERRQ(ierr);

  /* Create vector and get pointer to data space */
  ierr = VecCreate(PETSC_COMM_SELF,&vec);CHKERRQ(ierr);
  ierr = VecSetSizes(vec,PETSC_DECIDE,m);CHKERRQ(ierr);
  ierr = VecSetFromOptions(vec);CHKERRQ(ierr);
  ierr = VecGetArray(vec,&avec);CHKERRQ(ierr);

  /* Read data into vector */
  ierr = PetscBinaryRead(fd,&sz,1,PETSC_INT);CHKERRQ(ierr);
  if (sz <= 0) SETERRQ(PETSC_COMM_SELF,1,"Error: Must have array length > 0");

  ierr = PetscPrintf(PETSC_COMM_SELF,"reading data in binary from input.dat, sz =%D ...\n",sz);CHKERRQ(ierr); 
  ierr = PetscBinaryRead(fd,avec,sz,PETSC_SCALAR);CHKERRQ(ierr);

  /* View vector */
  ierr = VecRestoreArray(vec,&avec);CHKERRQ(ierr);
  ierr = VecView(vec,PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);

  /* Free data structures */
  ierr = VecDestroy(&vec);CHKERRQ(ierr);
  ierr = PetscViewerDestroy(&view_in);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return 0;
}
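Since the array length is read with PetscOptionsGetInt(), the default m = 10 can be overridden at run time, e.g. ./ex6 -m 20 (assuming the executable is named ex6).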
Example No. 7
PetscErrorCode VecView_Seq_Binary(Vec xin,PetscViewer viewer)
{
  PetscErrorCode    ierr;
  int               fdes;
  PetscInt          n = xin->map->n,classid=VEC_FILE_CLASSID;
  FILE              *file;
  const PetscScalar *xv;
#if defined(PETSC_HAVE_MPIIO)
  PetscBool         isMPIIO;
#endif
  PetscBool         skipHeader;
  PetscViewerFormat format;

  PetscFunctionBegin;
  /* Write vector header */
  ierr = PetscViewerBinaryGetSkipHeader(viewer,&skipHeader);CHKERRQ(ierr);
  if (!skipHeader) {
    ierr = PetscViewerBinaryWrite(viewer,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
    ierr = PetscViewerBinaryWrite(viewer,&n,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
  }

  /* Write vector contents */
#if defined(PETSC_HAVE_MPIIO)
  ierr = PetscViewerBinaryGetUseMPIIO(viewer,&isMPIIO);CHKERRQ(ierr);
  if (!isMPIIO) {
#endif
    ierr = PetscViewerBinaryGetDescriptor(viewer,&fdes);CHKERRQ(ierr);
    ierr = VecGetArrayRead(xin,&xv);CHKERRQ(ierr);
    ierr = PetscBinaryWrite(fdes,(void*)xv,n,PETSC_SCALAR,PETSC_FALSE);CHKERRQ(ierr);
    ierr = VecRestoreArrayRead(xin,&xv);CHKERRQ(ierr);
    ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
    if (format == PETSC_VIEWER_BINARY_MATLAB) {
      MPI_Comm   comm;
      FILE       *info;
      const char *name;

      ierr = PetscObjectGetName((PetscObject)xin,&name);CHKERRQ(ierr);
      ierr = PetscObjectGetComm((PetscObject)viewer,&comm);CHKERRQ(ierr);
      ierr = PetscViewerBinaryGetInfoPointer(viewer,&info);CHKERRQ(ierr);
      ierr = PetscFPrintf(comm,info,"#--- begin code written by PetscViewerBinary for MATLAB format ---#\n");CHKERRQ(ierr);
      ierr = PetscFPrintf(comm,info,"#$$ Set.%s = PetscBinaryRead(fd);\n",name);CHKERRQ(ierr);
      ierr = PetscFPrintf(comm,info,"#--- end code written by PetscViewerBinary for MATLAB format ---#\n\n");CHKERRQ(ierr);
    }
#if defined(PETSC_HAVE_MPIIO)
  } else {
    MPI_Offset   off;
    MPI_File     mfdes;
    PetscMPIInt  lsize;

    ierr = PetscMPIIntCast(n,&lsize);CHKERRQ(ierr);
    ierr = PetscViewerBinaryGetMPIIODescriptor(viewer,&mfdes);CHKERRQ(ierr);
    ierr = PetscViewerBinaryGetMPIIOOffset(viewer,&off);CHKERRQ(ierr);
    ierr = MPI_File_set_view(mfdes,off,MPIU_SCALAR,MPIU_SCALAR,(char*)"native",MPI_INFO_NULL);CHKERRQ(ierr);
    ierr = VecGetArrayRead(xin,&xv);CHKERRQ(ierr);
    ierr = MPIU_File_write_all(mfdes,(void*)xv,lsize,MPIU_SCALAR,MPI_STATUS_IGNORE);CHKERRQ(ierr);
    ierr = VecRestoreArrayRead(xin,&xv);CHKERRQ(ierr);
    ierr = PetscViewerBinaryAddMPIIOOffset(viewer,n*sizeof(PetscScalar));CHKERRQ(ierr);
  }
#endif

  ierr = PetscViewerBinaryGetInfoPointer(viewer,&file);CHKERRQ(ierr);
  if (file) {
    if (((PetscObject)xin)->prefix) {
      ierr = PetscFPrintf(PETSC_COMM_SELF,file,"-%svecload_block_size %D\n",((PetscObject)xin)->prefix,PetscAbs(xin->map->bs));CHKERRQ(ierr);
    } else {
      ierr = PetscFPrintf(PETSC_COMM_SELF,file,"-vecload_block_size %D\n",PetscAbs(xin->map->bs));CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
Example No. 8
/* auto-generated Fortran binding that forwards to PetscBinaryWrite() */
void PETSC_STDCALL petscbinarywrite_(int *fd,void *p,PetscInt *n,PetscDataType *type,PetscTruth *istemp,int *__ierr)
{
  *__ierr = PetscBinaryWrite(*fd,p,*n,*type,*istemp);
}
Example No. 9
static PetscErrorCode ISView_General_Binary(IS is,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      skipHeader,useMPIIO;
  IS_General     *isa = (IS_General*) is->data;
  PetscMPIInt    rank,size,mesgsize,tag = ((PetscObject)viewer)->tag, mesglen;
  PetscInt       n,N,len,j,tr[2];
  int            fdes;
  MPI_Status     status;
  PetscInt       message_count,flowcontrolcount,*values;

  PetscFunctionBegin;
  /* ierr = ISGetLayout(is,&map);CHKERRQ(ierr); */
  ierr = PetscLayoutGetLocalSize(is->map, &n);CHKERRQ(ierr);
  ierr = PetscLayoutGetSize(is->map, &N);CHKERRQ(ierr);

  tr[0] = IS_FILE_CLASSID;
  tr[1] = N;

  ierr = PetscViewerBinaryGetSkipHeader(viewer,&skipHeader);CHKERRQ(ierr);
  if (!skipHeader) {
    ierr  = PetscViewerBinaryWrite(viewer,tr,2,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
  }

  ierr = PetscViewerBinaryGetUseMPIIO(viewer,&useMPIIO);CHKERRQ(ierr);
#if defined(PETSC_HAVE_MPIIO)
  if (useMPIIO) {
    MPI_File       mfdes;
    MPI_Offset     off;
    PetscMPIInt    lsize;
    PetscInt       rstart;
    const PetscInt *iarray;

    ierr = PetscMPIIntCast(n,&lsize);CHKERRQ(ierr);
    ierr = PetscViewerBinaryGetMPIIODescriptor(viewer,&mfdes);CHKERRQ(ierr);
    ierr = PetscViewerBinaryGetMPIIOOffset(viewer,&off);CHKERRQ(ierr);
    ierr = PetscLayoutGetRange(is->map,&rstart,NULL);CHKERRQ(ierr);
    off += rstart*(MPI_Offset)sizeof(PetscInt); /* off is MPI_Offset, not PetscMPIInt */
    ierr = MPI_File_set_view(mfdes,off,MPIU_INT,MPIU_INT,(char*)"native",MPI_INFO_NULL);CHKERRQ(ierr);
    ierr = ISGetIndices(is,&iarray);CHKERRQ(ierr);
    ierr = MPIU_File_write_all(mfdes,(void*)iarray,lsize,MPIU_INT,MPI_STATUS_IGNORE);CHKERRQ(ierr);
    ierr = ISRestoreIndices(is,&iarray);CHKERRQ(ierr);
    ierr = PetscViewerBinaryAddMPIIOOffset(viewer,N*(MPI_Offset)sizeof(PetscInt));CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }
#endif

  ierr = PetscViewerBinaryGetDescriptor(viewer,&fdes);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)is),&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)is),&size);CHKERRQ(ierr);

  /* determine maximum message to arrive */
  ierr = MPI_Reduce(&n,&len,1,MPIU_INT,MPI_MAX,0,PetscObjectComm((PetscObject)is));CHKERRQ(ierr);

  ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr);
  if (!rank) {
    ierr = PetscBinaryWrite(fdes,isa->idx,n,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);

    ierr = PetscMalloc1(len,&values);CHKERRQ(ierr);
    ierr = PetscMPIIntCast(len,&mesgsize);CHKERRQ(ierr);
    /* receive and save messages */
    for (j=1; j<size; j++) {
      ierr = PetscViewerFlowControlStepMaster(viewer,j,&message_count,flowcontrolcount);CHKERRQ(ierr);
      ierr = MPI_Recv(values,mesgsize,MPIU_INT,j,tag,PetscObjectComm((PetscObject)is),&status);CHKERRQ(ierr);
      ierr = MPI_Get_count(&status,MPIU_INT,&mesglen);CHKERRQ(ierr);
      ierr = PetscBinaryWrite(fdes,values,(PetscInt)mesglen,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr);
    }
    ierr = PetscViewerFlowControlEndMaster(viewer,&message_count);CHKERRQ(ierr);
    ierr = PetscFree(values);CHKERRQ(ierr);
  } else {
    ierr = PetscViewerFlowControlStepWorker(viewer,rank,&message_count);CHKERRQ(ierr);
    ierr = PetscMPIIntCast(n,&mesgsize);CHKERRQ(ierr);
    ierr = MPI_Send(isa->idx,mesgsize,MPIU_INT,0,tag,PetscObjectComm((PetscObject)is));CHKERRQ(ierr);
    ierr = PetscViewerFlowControlEndWorker(viewer,&message_count);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}