/*
  PetscDLLibraryRegister_petscsys - Registration routine invoked when the PETSc sys
  dynamic library is loaded; initializes the basic sys-level packages, and (when PETSc
  is built as a single library) chains to the registration routines of every other
  PETSc component library.

  Returns 0 on success; any failing package initialization propagates via CHKERRQ().
*/
PETSC_EXTERN PetscErrorCode PetscDLLibraryRegister_petscsys(void)
#endif /* NOTE(review): matching #if is above this view — presumably selects the registration symbol name; confirm against the full file */
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* If we got here then PETSc was properly loaded */
  ierr = PetscSysInitializePackage(); CHKERRQ(ierr);
  ierr = PetscDrawInitializePackage(); CHKERRQ(ierr);
  ierr = PetscViewerInitializePackage(); CHKERRQ(ierr);
  ierr = PetscRandomInitializePackage(); CHKERRQ(ierr);
#if defined(PETSC_USE_SINGLE_LIBRARY)
  /* Single-library builds: this one entry point registers every component package */
  ierr = PetscDLLibraryRegister_petscvec(); CHKERRQ(ierr);
  ierr = PetscDLLibraryRegister_petscmat(); CHKERRQ(ierr);
  ierr = PetscDLLibraryRegister_petscdm(); CHKERRQ(ierr);
  ierr = PetscDLLibraryRegister_petscksp(); CHKERRQ(ierr);
  ierr = PetscDLLibraryRegister_petscsnes(); CHKERRQ(ierr);
  ierr = PetscDLLibraryRegister_petscts(); CHKERRQ(ierr);
#endif
  PetscFunctionReturn(0);
}
/*@C
   PetscCommBuildTwoSided - discovers communicating ranks given one-sided information, moving constant-sized data in the process (often message lengths)

   Collective on MPI_Comm

   Input Arguments:
+  comm - communicator
.  count - number of entries to send/receive (must match on all ranks)
.  dtype - datatype to send/receive from each rank (must match on all ranks)
.  nto - number of ranks to send data to
.  toranks - ranks to send to (array of length nto)
-  todata - data to send to each rank (packed)

   Output Arguments:
+  nfrom - number of ranks receiving messages from
.  fromranks - ranks receiving messages from (length nfrom; caller should PetscFree())
-  fromdata - packed data from each rank, each with count entries of type dtype (length nfrom, caller responsible for PetscFree())

   Level: developer

   Options Database Keys:
.  -build_twosided <allreduce|ibarrier|redscatter> - algorithm to set up two-sided communication

   Notes:
   This memory-scalable interface is an alternative to calling PetscGatherNumberOfMessages() and
   PetscGatherMessageLengths(), possibly with a subsequent round of communication to send other constant-size data.

   Basic data types as well as contiguous types are supported, but non-contiguous (e.g., strided) types are not.

   References:
.  1. - Hoefler, Siebert and Lumsdaine, The MPI_Ibarrier implementation uses the algorithm in
   Scalable communication protocols for dynamic sparse data exchange, 2010.

.seealso: PetscGatherNumberOfMessages(), PetscGatherMessageLengths()
@*/
PetscErrorCode PetscCommBuildTwoSided(MPI_Comm comm,PetscMPIInt count,MPI_Datatype dtype,PetscMPIInt nto,const PetscMPIInt *toranks,const void *todata,PetscMPIInt *nfrom,PetscMPIInt **fromranks,void *fromdata)
{
  PetscErrorCode         ierr;
  PetscBuildTwoSidedType buildtype = PETSC_BUILDTWOSIDED_NOTSET;

  PetscFunctionBegin;
  ierr = PetscSysInitializePackage();CHKERRQ(ierr);
  ierr = PetscLogEventBegin(PETSC_BuildTwoSided,0,0,0,0);CHKERRQ(ierr);
  /* Resolve which algorithm to use (options database / communicator default) */
  ierr = PetscCommBuildTwoSidedGetType(comm,&buildtype);CHKERRQ(ierr);
  switch (buildtype) {
  case PETSC_BUILDTWOSIDED_IBARRIER:
    /* Nonblocking-barrier algorithm (Hoefler et al. 2010); needs MPI-3 MPI_Ibarrier */
#if defined(PETSC_HAVE_MPI_IBARRIER) || defined(PETSC_HAVE_MPIX_IBARRIER)
    ierr = PetscCommBuildTwoSided_Ibarrier(comm,count,dtype,nto,toranks,todata,nfrom,fromranks,fromdata);CHKERRQ(ierr);
#else
    SETERRQ(comm,PETSC_ERR_PLIB,"MPI implementation does not provide MPI_Ibarrier (part of MPI-3)");
#endif
    break;
  case PETSC_BUILDTWOSIDED_ALLREDUCE:
    ierr = PetscCommBuildTwoSided_Allreduce(comm,count,dtype,nto,toranks,todata,nfrom,fromranks,fromdata);CHKERRQ(ierr);
    break;
  case PETSC_BUILDTWOSIDED_REDSCATTER:
    /* Reduce-scatter algorithm; needs MPI-2.2 MPI_Reduce_scatter_block */
#if defined(PETSC_HAVE_MPI_REDUCE_SCATTER_BLOCK)
    ierr = PetscCommBuildTwoSided_RedScatter(comm,count,dtype,nto,toranks,todata,nfrom,fromranks,fromdata);CHKERRQ(ierr);
#else
    SETERRQ(comm,PETSC_ERR_PLIB,"MPI implementation does not provide MPI_Reduce_scatter_block (part of MPI-2.2)");
#endif
    break;
  default: SETERRQ(comm,PETSC_ERR_PLIB,"Unknown method for building two-sided communication");
  }
  ierr = PetscLogEventEnd(PETSC_BuildTwoSided,0,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/*@C
   PetscContainerCreate - Creates a PETSc object that has room to hold a single pointer.
   This allows one to attach any type of data (accessible through a pointer) with the
   PetscObjectCompose() function to a PetscObject. The data item itself is attached by a
   call to PetscContainerSetPointer().

   Collective on MPI_Comm

   Input Parameters:
.  comm - MPI communicator that shares the object

   Output Parameters:
.  container - the container created

   Level: advanced

.seealso: PetscContainerDestroy(), PetscContainerSetPointer(), PetscContainerGetPointer()
@*/
PetscErrorCode PetscContainerCreate(MPI_Comm comm,PetscContainer *container)
{
  PetscContainer c;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(container,2);
  /* Make sure the sys package (class ids, events) is registered before creating a header */
  ierr = PetscSysInitializePackage();CHKERRQ(ierr);
  /* Allocate and initialize the object header; the pointer payload is set later via PetscContainerSetPointer() */
  ierr = PetscHeaderCreate(c,PETSC_CONTAINER_CLASSID,"PetscContainer","Container","Sys",comm,PetscContainerDestroy,NULL);CHKERRQ(ierr);
  *container = c;
  PetscFunctionReturn(0);
}
/*@
  PetscConvEstCreate - Create a PetscConvEst object

  Collective on MPI_Comm

  Input Parameter:
. comm - The communicator for the PetscConvEst object

  Output Parameter:
. ce   - The PetscConvEst object

  Level: beginner

.keywords: PetscConvEst, convergence, create
.seealso: PetscConvEstDestroy(), PetscConvEstGetConvRate()
@*/
PetscErrorCode PetscConvEstCreate(MPI_Comm comm, PetscConvEst *ce)
{
  PetscConvEst   est;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(ce, 2);
  /* Sys package must be initialized before any PETSc header can be created */
  ierr = PetscSysInitializePackage();CHKERRQ(ierr);
  /* Build the object locally, set its defaults, then publish it through the output argument */
  ierr = PetscHeaderCreate(est, PETSC_OBJECT_CLASSID, "PetscConvEst", "ConvergenceEstimator", "SNES", comm, PetscConvEstDestroy, PetscConvEstView);CHKERRQ(ierr);
  est->monitor = PETSC_FALSE;  /* no monitoring by default */
  est->Nr      = 4;            /* default number of refinements */
  *ce = est;
  PetscFunctionReturn(0);
}
/*@C PetscCommBuildTwoSidedFReq - discovers communicating ranks given one-sided information, calling user-defined functions during rendezvous, returns requests Collective on MPI_Comm Input Arguments: + comm - communicator . count - number of entries to send/receive in initial rendezvous (must match on all ranks) . dtype - datatype to send/receive from each rank (must match on all ranks) . nto - number of ranks to send data to . toranks - ranks to send to (array of length nto) . todata - data to send to each rank (packed) . ntags - number of tags needed by send/recv callbacks . send - callback invoked on sending process when ready to send primary payload . recv - callback invoked on receiving process after delivery of rendezvous message - ctx - context for callbacks Output Arguments: + nfrom - number of ranks receiving messages from . fromranks - ranks receiving messages from (length nfrom; caller should PetscFree()) . fromdata - packed data from each rank, each with count entries of type dtype (length nfrom, caller responsible for PetscFree()) . toreqs - array of nto*ntags sender requests (caller must wait on these, then PetscFree()) - fromreqs - array of nfrom*ntags receiver requests (caller must wait on these, then PetscFree()) Level: developer Notes: This memory-scalable interface is an alternative to calling PetscGatherNumberOfMessages() and PetscGatherMessageLengths(), possibly with a subsequent round of communication to send other data. Basic data types as well as contiguous types are supported, but non-contiguous (e.g., strided) types are not. References: . 1. - Hoefler, Siebert and Lumsdaine, The MPI_Ibarrier implementation uses the algorithm in Scalable communication protocols for dynamic sparse data exchange, 2010. 
.seealso: PetscCommBuildTwoSided(), PetscCommBuildTwoSidedF(), PetscGatherNumberOfMessages(), PetscGatherMessageLengths() @*/ PetscErrorCode PetscCommBuildTwoSidedFReq(MPI_Comm comm,PetscMPIInt count,MPI_Datatype dtype,PetscMPIInt nto,const PetscMPIInt *toranks,const void *todata, PetscMPIInt *nfrom,PetscMPIInt **fromranks,void *fromdata,PetscMPIInt ntags,MPI_Request **toreqs,MPI_Request **fromreqs, PetscErrorCode (*send)(MPI_Comm,const PetscMPIInt[],PetscMPIInt,PetscMPIInt,void*,MPI_Request[],void*), PetscErrorCode (*recv)(MPI_Comm,const PetscMPIInt[],PetscMPIInt,void*,MPI_Request[],void*),void *ctx) { PetscErrorCode ierr,(*f)(MPI_Comm,PetscMPIInt,MPI_Datatype,PetscMPIInt,const PetscMPIInt[],const void*, PetscMPIInt*,PetscMPIInt**,void*,PetscMPIInt,MPI_Request**,MPI_Request**, PetscErrorCode (*send)(MPI_Comm,const PetscMPIInt[],PetscMPIInt,PetscMPIInt,void*,MPI_Request[],void*), PetscErrorCode (*recv)(MPI_Comm,const PetscMPIInt[],PetscMPIInt,void*,MPI_Request[],void*),void *ctx); PetscBuildTwoSidedType buildtype = PETSC_BUILDTWOSIDED_NOTSET; PetscMPIInt i,size; PetscFunctionBegin; ierr = PetscSysInitializePackage();CHKERRQ(ierr); ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); for (i=0; i<nto; i++) { if (toranks[i] < 0 || size <= toranks[i]) SETERRQ3(comm,PETSC_ERR_ARG_OUTOFRANGE,"toranks[%d] %d not in comm size %d",i,toranks[i],size); } ierr = PetscLogEventBegin(PETSC_BuildTwoSidedF,0,0,0,0);CHKERRQ(ierr); ierr = PetscCommBuildTwoSidedGetType(comm,&buildtype);CHKERRQ(ierr); switch (buildtype) { case PETSC_BUILDTWOSIDED_IBARRIER: #if defined(PETSC_HAVE_MPI_IBARRIER) || defined(PETSC_HAVE_MPIX_IBARRIER) f = PetscCommBuildTwoSidedFReq_Ibarrier; #else SETERRQ(comm,PETSC_ERR_PLIB,"MPI implementation does not provide MPI_Ibarrier (part of MPI-3)"); #endif break; case PETSC_BUILDTWOSIDED_ALLREDUCE: case PETSC_BUILDTWOSIDED_REDSCATTER: f = PetscCommBuildTwoSidedFReq_Reference; break; default: SETERRQ(comm,PETSC_ERR_PLIB,"Unknown method for building two-sided 
communication"); } ierr = (*f)(comm,count,dtype,nto,toranks,todata,nfrom,fromranks,fromdata,ntags,toreqs,fromreqs,send,recv,ctx);CHKERRQ(ierr); ierr = PetscLogEventEnd(PETSC_BuildTwoSidedF,0,0,0,0);CHKERRQ(ierr); PetscFunctionReturn(0); }
/*
  PetscInitialize_DynamicLibraries - Adds the default dynamic link libraries to the
  search path.

  Processes the -dll_prepend and -dll_append options, optionally preloads the PETSc
  component libraries (-dynamic_library_preload), and in thread-safe builds eagerly
  initializes every package since lazy per-thread initialization is not safe.
*/
PETSC_INTERN PetscErrorCode PetscInitialize_DynamicLibraries(void)
{
  char           *libname[32];      /* option string array; each entry allocated by the options call and freed below */
  PetscErrorCode ierr;
  PetscInt       nmax,i;
#if defined(PETSC_USE_DYNAMIC_LIBRARIES) && defined(PETSC_USE_SHARED_LIBRARIES)
  PetscBool      preload;
#endif

  PetscFunctionBegin;
  /* Libraries named with -dll_prepend are searched before the defaults */
  nmax = 32;
  ierr = PetscOptionsGetStringArray(NULL,NULL,"-dll_prepend",libname,&nmax,NULL);CHKERRQ(ierr);
  for (i=0; i<nmax; i++) {
    ierr = PetscDLLibraryPrepend(PETSC_COMM_WORLD,&PetscDLLibrariesLoaded,libname[i]);CHKERRQ(ierr);
    ierr = PetscFree(libname[i]);CHKERRQ(ierr);
  }
#if !defined(PETSC_USE_DYNAMIC_LIBRARIES) || !defined(PETSC_USE_SHARED_LIBRARIES)
  /* This just initializes the most basic PETSc stuff.
     The classes, from PetscDraw to PetscTS, are initialized the first
     time an XXCreate() is called. */
  ierr = PetscSysInitializePackage();CHKERRQ(ierr);
#else
  /* Shared/dynamic build: libraries are normally loaded lazily; -dynamic_library_preload forces them all up front */
  preload = PETSC_FALSE;
  ierr = PetscOptionsGetBool(NULL,NULL,"-dynamic_library_preload",&preload,NULL);CHKERRQ(ierr);
  if (preload) {
    PetscBool found;
#if defined(PETSC_USE_SINGLE_LIBRARY)
    /* Single combined library: an empty suffix names it */
    ierr = PetscLoadDynamicLibrary("",&found);CHKERRQ(ierr);
    if (!found) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_OPEN,"Unable to locate PETSc dynamic library \n You cannot move the dynamic libraries!");
#else
    /* Separate component libraries, loaded in dependency order */
    ierr = PetscLoadDynamicLibrary("sys",&found);CHKERRQ(ierr);
    if (!found) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_OPEN,"Unable to locate PETSc dynamic library \n You cannot move the dynamic libraries!");
    ierr = PetscLoadDynamicLibrary("vec",&found);CHKERRQ(ierr);
    if (!found) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_OPEN,"Unable to locate PETSc Vec dynamic library \n You cannot move the dynamic libraries!");
    ierr = PetscLoadDynamicLibrary("mat",&found);CHKERRQ(ierr);
    if (!found) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_OPEN,"Unable to locate PETSc Mat dynamic library \n You cannot move the dynamic libraries!");
    ierr = PetscLoadDynamicLibrary("dm",&found);CHKERRQ(ierr);
    if (!found) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_OPEN,"Unable to locate PETSc DM dynamic library \n You cannot move the dynamic libraries!");
    ierr = PetscLoadDynamicLibrary("ksp",&found);CHKERRQ(ierr);
    if (!found) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_OPEN,"Unable to locate PETSc KSP dynamic library \n You cannot move the dynamic libraries!");
    ierr = PetscLoadDynamicLibrary("snes",&found);CHKERRQ(ierr);
    if (!found) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_OPEN,"Unable to locate PETSc SNES dynamic library \n You cannot move the dynamic libraries!");
    ierr = PetscLoadDynamicLibrary("ts",&found);CHKERRQ(ierr);
    if (!found) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_OPEN,"Unable to locate PETSc TS dynamic library \n You cannot move the dynamic libraries!");
#endif
  }
#endif
  /* Libraries named with -dll_append are searched after the defaults */
  nmax = 32;
  ierr = PetscOptionsGetStringArray(NULL,NULL,"-dll_append",libname,&nmax,NULL);CHKERRQ(ierr);
  for (i=0; i<nmax; i++) {
    ierr = PetscDLLibraryAppend(PETSC_COMM_WORLD,&PetscDLLibrariesLoaded,libname[i]);CHKERRQ(ierr);
    ierr = PetscFree(libname[i]);CHKERRQ(ierr);
  }
#if defined(PETSC_HAVE_THREADSAFETY)
  /* These must be done here because it is not safe for individual threads to call these initialize routines */
  ierr = AOInitializePackage();CHKERRQ(ierr);
  ierr = PetscSFInitializePackage();CHKERRQ(ierr);
#if !defined(PETSC_USE_COMPLEX)
  ierr = CharacteristicInitializePackage();CHKERRQ(ierr);
#endif
  ierr = ISInitializePackage();CHKERRQ(ierr);
  ierr = VecInitializePackage();CHKERRQ(ierr);
  ierr = MatInitializePackage();CHKERRQ(ierr);
  ierr = DMInitializePackage();CHKERRQ(ierr);
  ierr = PCInitializePackage();CHKERRQ(ierr);
  ierr = KSPInitializePackage();CHKERRQ(ierr);
  ierr = SNESInitializePackage();CHKERRQ(ierr);
  ierr = TSInitializePackage();CHKERRQ(ierr);
  /* Inner communicators are duplicated once here rather than per-thread */
  ierr = PetscCommDuplicate(PETSC_COMM_SELF,&PETSC_COMM_SELF_INNER,NULL);CHKERRQ(ierr);
  ierr = PetscCommDuplicate(PETSC_COMM_WORLD,&PETSC_COMM_WORLD_INNER,NULL);CHKERRQ(ierr);
#endif
  PetscFunctionReturn(0);
}