/*
   ProcessOptions - Fills the AppCtx with defaults, then overrides them from
   the options database.

   Collective on comm.

   Input/Output Parameter:
.  options - the application context to populate (debug level, mesh dimension,
             interpolation flag, refinement limit, filename, partitioner name)

   Defects fixed: the return code of PetscOptionsEnd() was not checked, and
   the function definition ended with a stray semicolon.
*/
PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Defaults, used as the "current value" arguments below */
  options->debug           = 0;
  options->dim             = 2;
  options->interpolate     = PETSC_FALSE;
  options->refinementLimit = 0.0;

  ierr = MPI_Comm_size(comm, &options->numProcs);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &options->rank);CHKERRQ(ierr);

  ierr = PetscOptionsBegin(comm, "", "Mesh Distribution Options", "DMMESH");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-debug", "The debugging level", "ex1.c", options->debug, &options->debug, PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-dim", "The topological mesh dimension", "ex1.c", options->dim, &options->dim, PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-interpolate", "Generate intermediate mesh elements", "ex1.c", options->interpolate, &options->interpolate, PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-refinement_limit", "The largest allowable cell volume", "ex1.c", options->refinementLimit, &options->refinementLimit, PETSC_NULL);CHKERRQ(ierr);
  /* String options read and write the same buffer: default first, then query */
  ierr = PetscStrcpy(options->filename, "");CHKERRQ(ierr);
  ierr = PetscOptionsString("-filename", "The input filename", "ex1.c", options->filename, options->filename, 2048, PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscStrcpy(options->partitioner, "chaco");CHKERRQ(ierr);
  ierr = PetscOptionsString("-partitioner", "The graph partitioner", "ex1.c", options->partitioner, options->partitioner, 2048, PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);

  ierr = PetscLogEventRegister("CreateMesh", DM_CLASSID, &options->createMeshEvent);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_BEGIN #undef __FUNCT__ #define __FUNCT__ "MatPartitioningCreate_Party" PetscErrorCode PETSCMAT_DLLEXPORT MatPartitioningCreate_Party(MatPartitioning part) { PetscErrorCode ierr; MatPartitioning_Party *party; PetscFunctionBegin; ierr = PetscNewLog(part,MatPartitioning_Party, &party);CHKERRQ(ierr); part->data = (void*) party; PetscStrcpy(party->global_method, "gcf,gbf"); PetscStrcpy(party->local_method, "kl"); PetscStrcpy(party->redm, "lam"); PetscStrcpy(party->redo, "w3"); party->nbvtxcoarsed = 200; party->rec = 1; party->output = 1; party->mesg_log = NULL; part->ops->apply = MatPartitioningApply_Party; part->ops->view = MatPartitioningView_Party; part->ops->destroy = MatPartitioningDestroy_Party; part->ops->setfromoptions = MatPartitioningSetFromOptions_Party; PetscFunctionReturn(0); }
/*
   MatPartitioningCreate_Party - Registers the Party package implementation
   with a MatPartitioning context: allocates the private data structure with
   its defaults, installs the virtual function table, and composes the
   Party-specific setter methods on the object.
*/
PETSC_EXTERN PetscErrorCode MatPartitioningCreate_Party(MatPartitioning part)
{
  MatPartitioning_Party *ctx;
  PetscErrorCode        ierr;

  PetscFunctionBegin;
  /* Private context with package defaults */
  ierr = PetscNewLog(part,&ctx);CHKERRQ(ierr);
  part->data = (void*)ctx;
  ierr = PetscStrcpy(ctx->global,"gcf,gbf");CHKERRQ(ierr);
  ierr = PetscStrcpy(ctx->local,"kl");CHKERRQ(ierr);
  ctx->redm         = PETSC_TRUE;
  ctx->redo         = PETSC_TRUE;
  ctx->recursive    = PETSC_TRUE;
  ctx->verbose      = PETSC_FALSE;
  ctx->nbvtxcoarsed = 200;

  /* Virtual function table */
  part->ops->apply          = MatPartitioningApply_Party;
  part->ops->view           = MatPartitioningView_Party;
  part->ops->destroy        = MatPartitioningDestroy_Party;
  part->ops->setfromoptions = MatPartitioningSetFromOptions_Party;

  /* Package-specific setters, reachable via PetscObjectQueryFunction */
  ierr = PetscObjectComposeFunction((PetscObject)part,"MatPartitioningPartySetGlobal_C",MatPartitioningPartySetGlobal_Party);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)part,"MatPartitioningPartySetLocal_C",MatPartitioningPartySetLocal_Party);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)part,"MatPartitioningPartySetCoarseLevel_C",MatPartitioningPartySetCoarseLevel_Party);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)part,"MatPartitioningPartySetMatchOptimization_C",MatPartitioningPartySetMatchOptimization_Party);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)part,"MatPartitioningPartySetBipart_C",MatPartitioningPartySetBipart_Party);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatPartitioningCreate_Scotch"
/*
   MatPartitioningCreate_Scotch - Would create the private context for the
   Scotch partitioning package.

   NOTE: the unconditional SETERRQ below makes this constructor always fail;
   everything after it is intentionally unreachable and kept only as a
   reference for a future port to the current Scotch API.
*/
PetscErrorCode PETSCMAT_DLLEXPORT MatPartitioningCreate_Scotch(MatPartitioning part)
{
  PetscErrorCode ierr;
  MatPartitioning_Scotch *scotch;

  PetscFunctionBegin;
  /* Interface is out of date with the installed Scotch: bail out immediately */
  SETERRQ(PETSC_ERR_SUP,"Sorry, the PETSc interface to scotch has not been updated to the latest Scotch version");
  /* ---- dead code below: former defaults and function-table setup ---- */
  ierr = PetscNewLog(part,MatPartitioning_Scotch, &scotch);CHKERRQ(ierr);
  part->data = (void*) scotch;
  scotch->map = 0;
  scotch->global_method = MP_SCOTCH_GR_GPS;
  scotch->local_method = MP_SCOTCH_KERNIGHAN_LIN;
  PetscStrcpy(scotch->arch, "archgraph.src");
  scotch->nbvtxcoarsed = 200;
  PetscStrcpy(scotch->strategy, "");
  scotch->multilevel = 0;
  scotch->mesg_log = NULL;
  PetscStrcpy(scotch->host_list, "host_list");
  part->ops->apply = MatPartitioningApply_Scotch;
  part->ops->view = MatPartitioningView_Scotch;
  part->ops->destroy = MatPartitioningDestroy_Scotch;
  part->ops->setfromoptions = MatPartitioningSetFromOptions_Scotch;
  PetscFunctionReturn(0);
}
/** * output_singular * ------ * Output the left and right singular vectors. */ PetscErrorCode output_singular(char *output_name, const Vec u, const Vec v) { PetscFunctionBegin; PetscErrorCode ierr; char output_name_prefixed[PETSC_MAX_PATH_LEN]; //const char *prefix = "/out/"; const char *u_extension = ".U"; const char *v_extension = ".V"; //ierr = PetscStrcpy(output_name_prefixed, getenv("FD3D_ROOT")); CHKERRQ(ierr); //ierr = PetscStrcat(output_name_prefixed, prefix); CHKERRQ(ierr); //ierr = PetscStrcat(output_name_prefixed, output_name); CHKERRQ(ierr); ierr = PetscStrcpy(output_name_prefixed, output_name); CHKERRQ(ierr); char u_file[PETSC_MAX_PATH_LEN]; char v_file[PETSC_MAX_PATH_LEN]; ierr = PetscStrcpy(u_file, output_name_prefixed); CHKERRQ(ierr); ierr = PetscStrcat(u_file, u_extension); CHKERRQ(ierr); ierr = PetscStrcpy(v_file, output_name_prefixed); CHKERRQ(ierr); ierr = PetscStrcat(v_file, v_extension); CHKERRQ(ierr); PetscViewer viewer; //viewer = PETSC_VIEWER_STDOUT_WORLD; //ierr = PetscViewerHDF5Open(PETSC_COMM_WORLD, h_file, FILE_MODE_WRITE, &viewer); CHKERRQ(ierr); /** Write the left singular vector u. */ ierr = PetscViewerCreate(PETSC_COMM_WORLD, &viewer); CHKERRQ(ierr); ierr = PetscViewerSetType(viewer, PETSCVIEWERBINARY); CHKERRQ(ierr); ierr = PetscViewerFileSetMode(viewer, FILE_MODE_WRITE); CHKERRQ(ierr); ierr = PetscViewerBinarySkipInfo(viewer); CHKERRQ(ierr); ierr = PetscViewerFileSetName(viewer, u_file); CHKERRQ(ierr); /* ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD, e_file, FILE_MODE_WRITE, &viewer); CHKERRQ(ierr); */ ierr = VecView(u, viewer); CHKERRQ(ierr); /** Write the right singular vector v. 
*/ ierr = PetscViewerDestroy(&viewer); CHKERRQ(ierr); ierr = PetscViewerCreate(PETSC_COMM_WORLD, &viewer); CHKERRQ(ierr); ierr = PetscViewerSetType(viewer, PETSCVIEWERBINARY); CHKERRQ(ierr); ierr = PetscViewerFileSetMode(viewer, FILE_MODE_WRITE); CHKERRQ(ierr); ierr = PetscViewerBinarySkipInfo(viewer); CHKERRQ(ierr); ierr = PetscViewerFileSetName(viewer, v_file); CHKERRQ(ierr); /* ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD, h_file, FILE_MODE_WRITE, &viewer); CHKERRQ(ierr); */ ierr = VecView(v, viewer); CHKERRQ(ierr); ierr = PetscViewerDestroy(&viewer); CHKERRQ(ierr); PetscFunctionReturn(0); }
/*@C
     PetscBoxUpload - Loads a file to the Box Drive

     This routine has not yet been written; it is just copied from Google Drive

     Not collective, only the first process in the MPI_Comm uploads the file

  Input Parameters:
+   comm - MPI communicator
.   access_token - obtained with PetscBoxRefresh(), pass NULL to have PETSc generate one
-   filename - file to upload; if you upload multiple times it will have different names each time on Box Drive

  Options Database:
.  -box_refresh_token   XXX

  Usage Patterns:
    With PETSc option -box_refresh_token XXX given
    PetscBoxUpload(comm,NULL,filename);        will upload file with no user interaction

    Without PETSc option -box_refresh_token XXX given
    PetscBoxUpload(comm,NULL,filename);        for first use will prompt user to authorize access to Box Drive with their processor

    With PETSc option -box_refresh_token XXX given
    PetscBoxRefresh(comm,NULL,access_token,sizeof(access_token));
    PetscBoxUpload(comm,access_token,filename);

    With refresh token entered in some way by the user
    PetscBoxRefresh(comm,refresh_token,access_token,sizeof(access_token));
    PetscBoxUpload(comm,access_token,filename);

    PetscBoxAuthorize(comm,access_token,refresh_token,sizeof(access_token));
    PetscBoxUpload(comm,access_token,filename);

   Level: intermediate

.seealso: PetscURLShorten(), PetscBoxAuthorize(), PetscBoxRefresh()
@*/
PetscErrorCode PetscBoxUpload(MPI_Comm comm,const char access_token[],const char filename[])
{
  SSL_CTX        *ctx;
  SSL            *ssl;
  int            sock;
  PetscErrorCode ierr;
  char           head[1024],buff[8*1024],*body,*title;
  PetscMPIInt    rank;
  struct stat    sb;
  size_t         len,blen,rd;
  FILE           *fd;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  /* Only rank 0 performs the upload; other ranks return immediately */
  if (!rank) {
    /* HTTP headers: bearer authorization plus multipart upload marker */
    ierr = PetscStrcpy(head,"Authorization: Bearer ");CHKERRQ(ierr);
    ierr = PetscStrcat(head,access_token);CHKERRQ(ierr);
    ierr = PetscStrcat(head,"\r\n");CHKERRQ(ierr);
    ierr = PetscStrcat(head,"uploadType: multipart\r\n");CHKERRQ(ierr);

    /* stat() gives the file size used to size the request body below */
    ierr = stat(filename,&sb);
    if (ierr) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_FILE_OPEN,"Unable to stat file: %s",filename);
    /* 1024 bytes of multipart boilerplate + JSON metadata, plus the file itself */
    len  = 1024 + sb.st_size;
    ierr = PetscMalloc1(len,&body);CHKERRQ(ierr);
    /* First multipart section: JSON metadata for the upload */
    ierr = PetscStrcpy(body,"--foo_bar_baz\r\n"
                            "Content-Type: application/json\r\n\r\n"
                            "{");CHKERRQ(ierr);
    ierr = PetscPushJSONValue(body,"title",filename,len);CHKERRQ(ierr);
    ierr = PetscStrcat(body,",");CHKERRQ(ierr);
    ierr = PetscPushJSONValue(body,"mimeType","text.html",len);CHKERRQ(ierr);
    ierr = PetscStrcat(body,",");CHKERRQ(ierr);
    ierr = PetscPushJSONValue(body,"description","a file",len);CHKERRQ(ierr);
    /* Second multipart section: the raw file contents */
    ierr = PetscStrcat(body,"}\r\n\r\n"
                            "--foo_bar_baz\r\n"
                            "Content-Type: text/html\r\n\r\n");CHKERRQ(ierr);
    ierr = PetscStrlen(body,&blen);CHKERRQ(ierr);
    fd = fopen (filename, "r");
    if (!fd) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_FILE_OPEN,"Unable to open file: %s",filename);
    rd = fread (body+blen, sizeof (unsigned char), sb.st_size, fd);
    if (rd != (size_t)sb.st_size) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_FILE_OPEN,"Unable to read entire file: %s %d %d",filename,(int)rd,(int)sb.st_size);
    fclose(fd);
    /* NUL-terminate after the raw bytes so the closing boundary can be appended */
    body[blen + rd] = 0;
    ierr = PetscStrcat(body,"\r\n\r\n"
                            "--foo_bar_baz\r\n");CHKERRQ(ierr);
    /* NOTE(review): host and path below look like Google-Drive placeholders
       ("www.boxapis.com/upload/drive/v2/files/") -- consistent with the header
       saying this routine is unfinished; confirm the real Box endpoint before use */
    ierr = PetscSSLInitializeContext(&ctx);CHKERRQ(ierr);
    ierr = PetscHTTPSConnect("www.boxapis.com",443,ctx,&sock,&ssl);CHKERRQ(ierr);
    ierr = PetscHTTPSRequest("POST","www.boxapis.com/upload/drive/v2/files/",head,"multipart/related; boundary=\"foo_bar_baz\"",body,ssl,buff,sizeof(buff));CHKERRQ(ierr);
    ierr = PetscFree(body);CHKERRQ(ierr);
    ierr = PetscSSLDestroyContext(ctx);CHKERRQ(ierr);
    close(sock);
    /* A "title" field in the response is taken as evidence of success */
    ierr = PetscStrstr(buff,"\"title\"",&title);CHKERRQ(ierr);
    if (!title) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Upload of file %s failed",filename);
  }
  PetscFunctionReturn(0);
}
/*@C PetscGetFileFromPath - Finds a file from a name and a path string. A default can be provided. Not Collective Input Parameters: + path - A string containing "directory:directory:..." (without the quotes, of course). As a special case, if the name is a single FILE, that file is used. . defname - default name . name - file name to use with the directories from env - mode - file mode desired (usually r for readable, w for writable, or e for executable) Output Parameter: . fname - qualified file name Level: developer Developer Notes: Wrongly returns 1 as an error code sometimes. Maybe should have additional flag argument indicating if it found it. Most arguments likely should be const. Concepts: files^finding in path Concepts: path^searching for file @*/ PetscErrorCode PetscGetFileFromPath(char *path,char *defname,char *name,char *fname,char mode) { char *p,*cdir,trial[PETSC_MAX_PATH_LEN],*senv,*env; size_t ln; PetscErrorCode ierr; PetscBool flg; PetscFunctionBegin; /* Setup default */ ierr = PetscGetFullPath(defname,fname,PETSC_MAX_PATH_LEN);CHKERRQ(ierr); if (path) { /* Check to see if the path is a valid regular FILE */ ierr = PetscTestFile(path,mode,&flg);CHKERRQ(ierr); if (flg) { ierr = PetscStrcpy(fname,path);CHKERRQ(ierr); PetscFunctionReturn(1); } /* Make a local copy of path and mangle it */ ierr = PetscStrallocpy(path,&senv);CHKERRQ(ierr); env = senv; while (env) { /* Find next directory in env */ cdir = env; ierr = PetscStrchr(env,PETSC_PATH_SEPARATOR,&p);CHKERRQ(ierr); if (p) { *p = 0; env = p + 1; } else env = 0; /* Form trial file name */ ierr = PetscStrcpy(trial,cdir);CHKERRQ(ierr); ierr = PetscStrlen(trial,&ln);CHKERRQ(ierr); if (trial[ln-1] != '/') trial[ln++] = '/'; ierr = PetscStrcpy(trial + ln,name);CHKERRQ(ierr); ierr = PetscTestFile(path,mode,&flg);CHKERRQ(ierr); if (flg) { /* need PetscGetFullPath rather then copy in case path has . 
in it */ ierr = PetscGetFullPath(trial,fname,PETSC_MAX_PATH_LEN);CHKERRQ(ierr); ierr = PetscFree(senv);CHKERRQ(ierr); PetscFunctionReturn(1); } } ierr = PetscFree(senv);CHKERRQ(ierr); } ierr = PetscTestFile(path,mode,&flg);CHKERRQ(ierr); if (flg) PetscFunctionReturn(1); PetscFunctionReturn(0); }
/*@
   MatPartitioningPartySetMatchOptimization - Activate matching optimization
   for graph reduction.

   Input Parameters:
+  part - the partitioning context
-  opt  - activate optimization

   Level: advanced

   Defect fixed: the PetscStrcpy() return codes were ignored.
@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatPartitioningPartySetMatchOptimization(MatPartitioning part, PetscTruth opt)
{
  MatPartitioning_Party *party = (MatPartitioning_Party *) part->data;
  PetscErrorCode        ierr;

  PetscFunctionBegin;
  /* "w3" selects the matching-optimization reduction; empty disables it */
  if (opt) {
    ierr = PetscStrcpy(party->redo, "w3");CHKERRQ(ierr);
  } else {
    ierr = PetscStrcpy(party->redo, "");CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PetscThreadCommCreate_OpenMP"
/*
   PetscThreadCommCreate_OpenMP - Initializes a thread communicator to use
   the OpenMP backend: sets the type string, installs the kernel-run and
   rank-query callbacks, and (where supported) pins each OpenMP thread to a
   core chosen from tcomm->affinities.
*/
PetscErrorCode PetscThreadCommCreate_OpenMP(PetscThreadComm tcomm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscStrcpy(tcomm->type,OPENMP);CHKERRQ(ierr);
  tcomm->ops->runkernel = PetscThreadCommRunKernel_OpenMP;
  tcomm->ops->getrank = PetscThreadCommGetRank_OpenMP;
  /* Each worker thread sets its own CPU affinity; the mask is built from the
     thread's entry in tcomm->affinities, wrapped modulo the core count.
     NOTE(review): PetscGetNCores() and sched_setaffinity() return values are
     not checked here -- CHKERRQ cannot be used inside a parallel region. */
#pragma omp parallel num_threads(tcomm->nworkThreads) shared(tcomm)
  {
#if defined(PETSC_HAVE_SCHED_CPU_SET_T)
    cpu_set_t mset;
    PetscInt  ncores, icorr,trank;
    PetscGetNCores(&ncores);
    CPU_ZERO(&mset);
    trank = omp_get_thread_num();
    icorr = tcomm->affinities[trank]%ncores;
    CPU_SET(icorr,&mset);
    sched_setaffinity(0,sizeof(cpu_set_t),&mset);
#endif
  }
  PetscFunctionReturn(0);
}
/*
   PCSetApType_PCD_Feelpp - Stores the Ap operator type string in the PCD
   preconditioner's private context.

   Defects fixed: ierr was declared but the PetscStrcpy() return code was
   ignored, and PetscFunctionBegin was missing before PetscFunctionReturn.
*/
static PetscErrorCode PCSetApType_PCD_Feelpp(PC pc, const char * type )
{
  PetscErrorCode ierr;
  PC_PCD_Feelpp  *pcpcd = (PC_PCD_Feelpp*)pc->data;

  PetscFunctionBegin;
  ierr = PetscStrcpy(pcpcd->pcdApType, type);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/*
   PetscLs - Runs "ls <libname>" through a pipe and gathers its complete
   output into the caller-supplied buffer.

   Output Parameters:
+  found - filled with the concatenated output lines of ls
-  flg   - PETSC_TRUE when ls produced any output at all
*/
PetscErrorCode PetscLs(MPI_Comm comm,const char libname[],char found[],size_t tlen,PetscBool *flg)
{
  PetscErrorCode ierr;
  size_t         used;
  char           *got,cmd[PETSC_MAX_PATH_LEN];
  FILE           *pipe;

  PetscFunctionBegin;
  /* Build the shell command */
  ierr = PetscStrcpy(cmd,"ls ");CHKERRQ(ierr);
  ierr = PetscStrcat(cmd,libname);CHKERRQ(ierr);
#if defined(PETSC_HAVE_POPEN)
  ierr = PetscPOpen(comm,PETSC_NULL,cmd,"r",&pipe);CHKERRQ(ierr);
#else
  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP_SYS,"Cannot run external programs on this machine");
#endif
  got  = fgets(found,tlen,pipe);
  *flg = got ? PETSC_TRUE : PETSC_FALSE;
  /* Keep appending until the pipe is drained */
  while (got) {
    ierr = PetscStrlen(found,&used);CHKERRQ(ierr);
    got  = fgets(found+used,tlen-used,pipe);
  }
  if (*flg) {ierr = PetscInfo2(0,"ls on %s gives \n%s\n",libname,found);CHKERRQ(ierr);}
#if defined(PETSC_HAVE_POPEN)
  ierr = PetscPClose(comm,pipe);CHKERRQ(ierr);
#else
  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP_SYS,"Cannot run external programs on this machine");
#endif
  PetscFunctionReturn(0);
}
/*@C
   PetscFOpen - Opens a file on the first process of the communicator; all
   other processes receive a NULL pointer.

   Logically Collective on MPI_Comm

   Input Parameters:
+  comm - the communicator
.  name - the filename; NULL (0), "stdout" or "stderr" select the standard streams
-  mode - the mode for fopen(), usually "w"

   Output Parameter:
.  fp - the file pointer (NULL on all ranks but the first)

   Notes: a name beginning with /dev/null is collapsed to exactly "/dev/null";
   %-patterns in the name are expanded by PetscStrreplace() before opening.

   Level: developer

.seealso: PetscFClose(), PetscSynchronizedFGets(), PetscSynchronizedPrintf(),
          PetscSynchronizedFlush(), PetscFPrintf()
@*/
PetscErrorCode PetscFOpen(MPI_Comm comm,const char name[],const char mode[],FILE **fp)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  FILE           *handle = 0;
  char           fixed[PETSC_MAX_PATH_LEN],expanded[PETSC_MAX_PATH_LEN];

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  if (!rank) {
    PetscBool isstdout,isstderr;

    ierr = PetscStrcmp(name,"stdout",&isstdout);CHKERRQ(ierr);
    ierr = PetscStrcmp(name,"stderr",&isstderr);CHKERRQ(ierr);
    if (isstdout || !name) handle = PETSC_STDOUT;
    else if (isstderr)     handle = PETSC_STDERR;
    else {
      PetscBool devnull;

      /* Expand %-patterns, normalize the name, then special-case /dev/null */
      ierr = PetscStrreplace(PETSC_COMM_SELF,name,expanded,PETSC_MAX_PATH_LEN);CHKERRQ(ierr);
      ierr = PetscFixFilename(expanded,fixed);CHKERRQ(ierr);
      ierr = PetscStrbeginswith(fixed,"/dev/null",&devnull);CHKERRQ(ierr);
      if (devnull) {
        ierr = PetscStrcpy(fixed,"/dev/null");CHKERRQ(ierr);
      }
      ierr = PetscInfo1(0,"Opening file %s\n",fixed);CHKERRQ(ierr);
      handle = fopen(fixed,mode);
      if (!handle) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_FILE_OPEN,"Unable to open file %s\n",fixed);
    }
  }
  *fp = handle;
  PetscFunctionReturn(0);
}
/*@C
   PetscURLShorten - Uses Google's urlshortener REST service to obtain a
   short URL for a long one.

   Input Parameters:
+  url - long URL you want shortened
-  lenshorturl - length of buffer to contain short URL

   Output Parameter:
.  shorturl - the shortened URL

.seealso: PetscGoogleDriveRefresh(), PetscGoogleDriveUpload(), PetscGoogleDriveAuthorize()
@*/
PetscErrorCode PetscURLShorten(const char url[],char shorturl[],size_t lenshorturl)
{
  SSL_CTX        *sslctx;
  SSL            *ssl;
  int            fd;
  PetscErrorCode ierr;
  char           response[1024],request[512];
  PetscBool      found;

  PetscFunctionBegin;
  ierr = PetscSSLInitializeContext(&sslctx);CHKERRQ(ierr);
  ierr = PetscHTTPSConnect("www.googleapis.com",443,sslctx,&fd,&ssl);CHKERRQ(ierr);
  /* Build the JSON request body: {"longUrl":"<url>"} */
  ierr = PetscStrcpy(request,"{");CHKERRQ(ierr);
  ierr = PetscPushJSONValue(request,"longUrl",url,sizeof(request)-2);CHKERRQ(ierr);
  ierr = PetscStrcat(request,"}");CHKERRQ(ierr);
  ierr = PetscHTTPSRequest("POST","www.googleapis.com/urlshortener/v1/url",NULL,"application/json",request,ssl,response,sizeof(response));CHKERRQ(ierr);
  ierr = PetscSSLDestroyContext(sslctx);CHKERRQ(ierr);
  close(fd);
  /* The "id" field of the JSON reply carries the shortened URL */
  ierr = PetscPullJSONValue(response,"id",shorturl,lenshorturl,&found);CHKERRQ(ierr);
  if (!found) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_LIB,"Google drive did not return short URL");
  PetscFunctionReturn(0);
}
PETSC_EXTERN PetscErrorCode PCCreate_PCD_Feelpp(PC pc) { PC_PCD_Feelpp *pcpcd; PetscErrorCode ierr; PetscFunctionBegin; #if PETSC_VERSION_GREATER_OR_EQUAL_THAN( 3,5,0 ) ierr = PetscNewLog(pc,&pcpcd);CHKERRQ(ierr); #else ierr = PetscNewLog(pc,PC_PCD_Feelpp,&pcpcd);CHKERRQ(ierr); #endif pc->data = (void*)pcpcd; pc->ops->apply = PCApply_PCD_Feelpp; pc->ops->applytranspose = 0; pc->ops->setup = PCSetUp_PCD_Feelpp; pc->ops->reset = PCReset_PCD_Feelpp; pc->ops->destroy = PCDestroy_PCD_Feelpp; pc->ops->view = PCView_PCD_Feelpp; pc->ops->applyrichardson = 0; pcpcd->pcdOrder = 1; PetscStrcpy(pcpcd->pcdApType, "Laplacian"); pcpcd->MvDiag = NULL; pcpcd->matApBTBt = NULL; PetscFunctionReturn(0); }
/*
   PetscViewerFileClose_Binary - Closes the binary viewer's data file (and
   the .info file if open), optionally gzip-compressing the data file on
   rank 0, then frees the stored filename.

   Defect fixed: fgets() was given a hard-coded bound of 1024 although buf
   is PETSC_MAX_PATH_LEN bytes; sizeof(buf) keeps the bound in sync.
*/
static PetscErrorCode PetscViewerFileClose_Binary(PetscViewer v)
{
  PetscViewer_Binary *vbinary = (PetscViewer_Binary*)v->data;
  PetscErrorCode     ierr;
  PetscMPIInt        rank;
  int                err;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)v),&rank);CHKERRQ(ierr);
  /* Rank 0 owns the descriptor when writing; every rank has one when reading */
  if ((!rank || vbinary->btype == FILE_MODE_READ) && vbinary->fdes) {
    close(vbinary->fdes);
    if (!rank && vbinary->storecompressed) {
      char par[PETSC_MAX_PATH_LEN],buf[PETSC_MAX_PATH_LEN];
      FILE *fp;
      /* compress the file; any output from gzip is treated as an error */
      ierr = PetscStrcpy(par,"gzip -f ");CHKERRQ(ierr);
      ierr = PetscStrcat(par,vbinary->filename);CHKERRQ(ierr);
#if defined(PETSC_HAVE_POPEN)
      ierr = PetscPOpen(PETSC_COMM_SELF,NULL,par,"r",&fp);CHKERRQ(ierr);
      if (fgets(buf,sizeof(buf),fp)) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error from command %s\n%s",par,buf);
      ierr = PetscPClose(PETSC_COMM_SELF,fp,NULL);CHKERRQ(ierr);
#else
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP_SYS,"Cannot run external programs on this machine");
#endif
    }
  }
  if (vbinary->fdes_info) {
    err = fclose(vbinary->fdes_info);
    if (err) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SYS,"fclose() failed on file");
  }
  ierr = PetscFree(vbinary->filename);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/*@C
     PETSC_VIEWER_BINARY_ - Creates a binary PetscViewer shared by all processors
                     in a communicator.

     Collective on MPI_Comm

     Input Parameter:
.    comm - the MPI communicator to share the binary PetscViewer

     Level: intermediate

   Options Database Keys:
+    -viewer_binary_filename <name>
.    -viewer_binary_skip_info
-    -viewer_binary_skip_options

   Environmental variables:
-   PETSC_VIEWER_BINARY_FILENAME

     Notes:
     Unlike almost all other PETSc routines, PETSC_VIEWER_BINARY_ does not return
     an error code.  The binary PetscViewer is usually used in the form
$       XXXView(XXX object,PETSC_VIEWER_BINARY_(comm));

.seealso: PETSC_VIEWER_BINARY_WORLD, PETSC_VIEWER_BINARY_SELF, PetscViewerBinaryOpen(), PetscViewerCreate(),
          PetscViewerDestroy()
@*/
PetscViewer PETSC_VIEWER_BINARY_(MPI_Comm comm)
{
  PetscErrorCode ierr;
  PetscBool      flg;
  PetscViewer    viewer;
  char           fname[PETSC_MAX_PATH_LEN];
  MPI_Comm       ncomm;

  PetscFunctionBegin;
  /* This function returns a PetscViewer, not an error code, so CHKERRQ
     cannot be used: every call is followed by an explicit PetscError +
     PetscFunctionReturn(0) (i.e. a NULL viewer) on failure. */
  ierr = PetscCommDuplicate(comm,&ncomm,NULL);if (ierr) {PetscError(PETSC_COMM_SELF,__LINE__,"PETSC_VIEWER_BINARY_",__FILE__,PETSC_ERR_PLIB,PETSC_ERROR_INITIAL," ");PetscFunctionReturn(0);}
  /* The shared viewer is cached as an MPI attribute on the communicator */
  if (Petsc_Viewer_Binary_keyval == MPI_KEYVAL_INVALID) {
    ierr = MPI_Keyval_create(MPI_NULL_COPY_FN,MPI_NULL_DELETE_FN,&Petsc_Viewer_Binary_keyval,0);
    if (ierr) {PetscError(PETSC_COMM_SELF,__LINE__,"PETSC_VIEWER_BINARY_",__FILE__,PETSC_ERR_PLIB,PETSC_ERROR_INITIAL," ");PetscFunctionReturn(0);}
  }
  ierr = MPI_Attr_get(ncomm,Petsc_Viewer_Binary_keyval,(void**)&viewer,(int*)&flg);
  if (ierr) {PetscError(PETSC_COMM_SELF,__LINE__,"PETSC_VIEWER_BINARY_",__FILE__,PETSC_ERR_PLIB,PETSC_ERROR_INITIAL," ");PetscFunctionReturn(0);}
  if (!flg) { /* PetscViewer not yet created */
    /* File name comes from the environment, else defaults to "binaryoutput" */
    ierr = PetscOptionsGetenv(ncomm,"PETSC_VIEWER_BINARY_FILENAME",fname,PETSC_MAX_PATH_LEN,&flg);
    if (ierr) {PetscError(PETSC_COMM_SELF,__LINE__,"PETSC_VIEWER_BINARY_",__FILE__,PETSC_ERR_PLIB,PETSC_ERROR_INITIAL," ");PetscFunctionReturn(0);}
    if (!flg) {
      ierr = PetscStrcpy(fname,"binaryoutput");
      if (ierr) {PetscError(PETSC_COMM_SELF,__LINE__,"PETSC_VIEWER_BINARY_",__FILE__,PETSC_ERR_PLIB,PETSC_ERROR_INITIAL," ");PetscFunctionReturn(0);}
    }
    ierr = PetscViewerBinaryOpen(ncomm,fname,FILE_MODE_WRITE,&viewer);
    if (ierr) {PetscError(PETSC_COMM_SELF,__LINE__,"PETSC_VIEWER_BINARY_",__FILE__,PETSC_ERR_PLIB,PETSC_ERROR_INITIAL," ");PetscFunctionReturn(0);}
    /* Register so PetscFinalize() destroys the cached viewer */
    ierr = PetscObjectRegisterDestroy((PetscObject)viewer);
    if (ierr) {PetscError(PETSC_COMM_SELF,__LINE__,"PETSC_VIEWER_BINARY_",__FILE__,PETSC_ERR_PLIB,PETSC_ERROR_INITIAL," ");PetscFunctionReturn(0);}
    ierr = MPI_Attr_put(ncomm,Petsc_Viewer_Binary_keyval,(void*)viewer);
    if (ierr) {PetscError(PETSC_COMM_SELF,__LINE__,"PETSC_VIEWER_BINARY_",__FILE__,PETSC_ERR_PLIB,PETSC_ERROR_INITIAL," ");PetscFunctionReturn(0);}
  }
  ierr = PetscCommDestroy(&ncomm);
  if (ierr) {PetscError(PETSC_COMM_SELF,__LINE__,"PETSC_VIEWER_BINARY_",__FILE__,PETSC_ERR_PLIB,PETSC_ERROR_INITIAL," ");PetscFunctionReturn(0);}
  PetscFunctionReturn(viewer);
}
/*
   PetscViewerFileSetName_MPIIO - Sets the file name on an MPI-IO binary
   viewer: closes any previous file, reads the skip-info/skip-options flags,
   opens the data file with MPI_File_open(), and handles the .info companion
   file (written by rank 0, or read/retrieved by all ranks in read mode).

   Defect fixed: the two MPI_File_open() return codes were discarded while
   CHKERRQ(ierr) checked the stale value left from the preceding call, so an
   open failure was silently ignored.
*/
PetscErrorCode PetscViewerFileSetName_MPIIO(PetscViewer viewer,const char name[])
{
  PetscMPIInt        rank;
  PetscErrorCode     ierr;
  size_t             len;
  PetscViewer_Binary *vbinary = (PetscViewer_Binary*)viewer->data;
  char               *gz;
  PetscBool          found;
  PetscFileMode      type = vbinary->btype;

  PetscFunctionBegin;
  if (type == (PetscFileMode) -1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ORDER,"Must call PetscViewerFileSetMode() before PetscViewerFileSetName()");
  ierr = PetscViewerFileClose_MPIIO(viewer);CHKERRQ(ierr);
  ierr = PetscOptionsGetBool(((PetscObject)viewer)->prefix,"-viewer_binary_skip_info",&vbinary->skipinfo,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsGetBool(((PetscObject)viewer)->prefix,"-viewer_binary_skip_options",&vbinary->skipoptions,NULL);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)viewer),&rank);CHKERRQ(ierr);
  ierr = PetscStrallocpy(name,&vbinary->filename);CHKERRQ(ierr);
  vbinary->storecompressed = PETSC_FALSE;

  /* only first processor opens file if writeable */
  if (type == FILE_MODE_READ) {
    ierr = MPI_File_open(PetscObjectComm((PetscObject)viewer),vbinary->filename,MPI_MODE_RDONLY,MPI_INFO_NULL,&vbinary->mfdes);CHKERRQ(ierr);
  } else if (type == FILE_MODE_WRITE) {
    ierr = MPI_File_open(PetscObjectComm((PetscObject)viewer),vbinary->filename,MPI_MODE_WRONLY | MPI_MODE_CREATE,MPI_INFO_NULL,&vbinary->mfdes);CHKERRQ(ierr);
  }

  /*
      try to open info file: all processors open this file if read only

      Below is identical code to the code for Binary above, should be put in seperate routine
  */
  if (!vbinary->skipinfo && (!rank || type == FILE_MODE_READ)) {
    char infoname[PETSC_MAX_PATH_LEN],iname[PETSC_MAX_PATH_LEN];

    ierr = PetscStrcpy(infoname,name);CHKERRQ(ierr);
    /* remove .gz if it ends library name */
    ierr = PetscStrstr(infoname,".gz",&gz);CHKERRQ(ierr);
    if (gz) {
      ierr = PetscStrlen(gz,&len);CHKERRQ(ierr);
      if (len == 3) *gz = 0;
    }
    ierr = PetscStrcat(infoname,".info");CHKERRQ(ierr);
    ierr = PetscFixFilename(infoname,iname);CHKERRQ(ierr);
    if (type == FILE_MODE_READ) {
      ierr = PetscFileRetrieve(PetscObjectComm((PetscObject)viewer),iname,infoname,PETSC_MAX_PATH_LEN,&found);CHKERRQ(ierr);
      ierr = PetscOptionsInsertFile(PetscObjectComm((PetscObject)viewer),infoname,PETSC_FALSE);CHKERRQ(ierr);
    } else {
      vbinary->fdes_info = fopen(infoname,"w");
      if (!vbinary->fdes_info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_FILE_OPEN,"Cannot open .info file %s for writing",infoname);
    }
  }
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)viewer,"File: %s",name);
#endif
  PetscFunctionReturn(0);
}
/*
   PetscViewerFileClose_ASCII - Closes the ASCII viewer's stream on rank 0
   (unless it is stderr/stdout or the viewer does not own it), optionally
   gzip-compresses the file, then frees the stored filename.

   Defect fixed: fgets() was given a hard-coded bound of 1024 although buf
   is PETSC_MAX_PATH_LEN bytes; sizeof(buf) keeps the bound in sync.
*/
static PetscErrorCode PetscViewerFileClose_ASCII(PetscViewer viewer)
{
  PetscErrorCode    ierr;
  PetscMPIInt       rank;
  PetscViewer_ASCII *vascii = (PetscViewer_ASCII *)viewer->data;
  int               err;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(((PetscObject)viewer)->comm,&rank);CHKERRQ(ierr);
  if (!rank && vascii->fd != stderr && vascii->fd != PETSC_STDOUT) {
    /* closefile distinguishes streams the viewer owns from borrowed ones */
    if (vascii->fd && vascii->closefile) {
      err = fclose(vascii->fd);
      if (err) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SYS,"fclose() failed on file");
    }
    if (vascii->storecompressed) {
      char par[PETSC_MAX_PATH_LEN],buf[PETSC_MAX_PATH_LEN];
      FILE *fp;
      /* compress the file; any output from gzip is treated as an error */
      ierr = PetscStrcpy(par,"gzip ");CHKERRQ(ierr);
      ierr = PetscStrcat(par,vascii->filename);CHKERRQ(ierr);
#if defined(PETSC_HAVE_POPEN)
      ierr = PetscPOpen(PETSC_COMM_SELF,PETSC_NULL,par,"r",&fp);CHKERRQ(ierr);
      if (fgets(buf,sizeof(buf),fp)) {
        SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB,"Error from compression command %s\n%s",par,buf);
      }
      ierr = PetscPClose(PETSC_COMM_SELF,fp);CHKERRQ(ierr);
#else
      SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP_SYS,"Cannot run external programs on this machine");
#endif
    }
  }
  ierr = PetscFree(vascii->filename);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/*@C
   PetscDrawAppendTitle - Appends text to the title of a PetscDraw context.

   Not collective (any processor or all can call this)

   Input Parameters:
+  draw  - the graphics context
-  title - the text to append; NULL is a no-op

   Note:
   A copy of the string is made, so you may destroy the title string after
   calling this routine.

   Level: advanced

.seealso: PetscDrawSetTitle(), PetscDrawGetTitle()
@*/
PetscErrorCode PetscDrawAppendTitle(PetscDraw draw,const char title[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(draw,PETSC_DRAW_CLASSID,1);
  if (!title) PetscFunctionReturn(0);

  if (!draw->title) {
    /* Nothing to append to: just duplicate the incoming string */
    ierr = PetscStrallocpy(title,&draw->title);CHKERRQ(ierr);
  } else {
    size_t addlen,curlen;
    char   *joined;

    /* Concatenate old title + new text into a freshly allocated buffer */
    ierr = PetscStrlen(title,&addlen);CHKERRQ(ierr);
    ierr = PetscStrlen(draw->title,&curlen);CHKERRQ(ierr);
    ierr = PetscMalloc1((addlen + curlen + 1),&joined);CHKERRQ(ierr);
    ierr = PetscStrcpy(joined,draw->title);CHKERRQ(ierr);
    ierr = PetscStrcat(joined,title);CHKERRQ(ierr);
    ierr = PetscFree(draw->title);CHKERRQ(ierr);
    draw->title = joined;
  }
  /* Propagate the new title to the backend, if it supports titles */
  if (draw->ops->settitle) {
    ierr = (*draw->ops->settitle)(draw,draw->title);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
int main(int argc, char **argv) { PetscErrorCode ierr; Vec U,cv,eta; DM da,da2; PetscViewer viewer,view_vtk_cv,view_vtk_eta; char filename[PETSC_MAX_PATH_LEN],cv_filename[PETSC_MAX_PATH_LEN],eta_filename[PETSC_MAX_PATH_LEN]; PetscBool flg,sflg = PETSC_FALSE; PetscInt i,n=10000; PetscInt seed; PetscInitialize(&argc,&argv, (char*)0, help); ierr = PetscOptionsSetValue("-viewer_binary_skip_info","true");CHKERRQ(ierr); ierr = PetscOptionsGetString(PETSC_NULL,"-f",filename,PETSC_MAX_PATH_LEN,&flg);CHKERRQ(ierr); if (!flg) { ierr = PetscOptionsGetInt(PETSC_NULL,"-random_seed",&seed,&sflg);CHKERRQ(ierr); if (!sflg) { ierr = PetscStrcpy(filename,"ex61.data");CHKERRQ(ierr); } else { sprintf(filename,"ex61.data.%d",seed); } } ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD,filename,FILE_MODE_READ,&viewer);CHKERRQ(ierr); /* Get physics and time parameters */ ierr = DMCreate(PETSC_COMM_WORLD,&da);CHKERRQ(ierr); ierr = DMLoad(da,viewer);CHKERRQ(ierr); ierr = DMCreateGlobalVector(da,&U);CHKERRQ(ierr); ierr = DMDAGetReducedDA(da,1,&da2);CHKERRQ(ierr); ierr = DMCreateGlobalVector(da2,&cv);CHKERRQ(ierr); ierr = DMCreateGlobalVector(da2,&eta);CHKERRQ(ierr); for (i=0; i<n; i++) { /* when this fails it simply means the file is finished */ ierr = VecLoad(U,viewer);CHKERRQ(ierr); ierr = VecStrideGather(U,1,cv,INSERT_VALUES);CHKERRQ(ierr); ierr = VecStrideGather(U,4,eta,INSERT_VALUES);CHKERRQ(ierr); sprintf(cv_filename,"%s_cv_%d.vtk",filename,i); sprintf(eta_filename,"%s_eta_%d.vtk",filename,i); ierr = PetscViewerASCIIOpen(PETSC_COMM_WORLD,cv_filename,&view_vtk_cv);CHKERRQ(ierr); ierr = PetscViewerASCIIOpen(PETSC_COMM_WORLD,eta_filename,&view_vtk_eta);CHKERRQ(ierr); ierr = PetscViewerSetFormat(view_vtk_cv, PETSC_VIEWER_ASCII_VTK);CHKERRQ(ierr); ierr = PetscViewerSetFormat(view_vtk_eta, PETSC_VIEWER_ASCII_VTK);CHKERRQ(ierr); ierr = DMView(da2,view_vtk_cv);CHKERRQ(ierr); ierr = DMView(da2,view_vtk_eta);CHKERRQ(ierr); ierr = VecView(cv,view_vtk_cv);CHKERRQ(ierr); ierr = 
VecView(eta,view_vtk_eta);CHKERRQ(ierr); ierr = PetscViewerDestroy(&view_vtk_cv);CHKERRQ(ierr); ierr = PetscViewerDestroy(&view_vtk_eta);CHKERRQ(ierr); } ierr = VecDestroy(&U);CHKERRQ(ierr); ierr = VecDestroy(&cv);CHKERRQ(ierr); ierr = VecDestroy(&eta);CHKERRQ(ierr); ierr = DMDestroy(&da);CHKERRQ(ierr); ierr = DMDestroy(&da2);CHKERRQ(ierr); PetscFinalize(); return 0; }
/*
   PetscGetFullPath - Copies path into fullpath.

   NOTE(review): this is a stripped-down variant that performs no tilde or
   relative-path expansion.  Defect fixed: the flen bound was ignored, so a
   path longer than the caller's buffer would overflow it; the copy is now
   truncated to flen bytes (NUL terminated) with PetscStrncpy().
*/
PetscErrorCode PETSC_DLLEXPORT PetscGetFullPath(const char path[],char fullpath[],size_t flen)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscStrncpy(fullpath,path,flen);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/*@C
   PetscSetDebugTerminal - Sets the terminal to use (instead of xterm) for debugging.

   Not Collective

   Input Parameters:
+  terminal - name of terminal and any flags required to execute a program.
              For example "xterm -e", "urxvt -e".

   Options Database Keys:
   -debug_terminal terminal - use this terminal instead of xterm

   Level: developer

   Notes:
   You can start the debugger for all processes in the same GNU screen session.

     mpirun -n 4 ./myapp -start_in_debugger -debug_terminal "screen -X -S debug screen"

   will open 4 windows in the session named "debug".

   Fortran Note:
   This routine is not supported in Fortran.

  Concepts: debugger^setting

.seealso: PetscSetDebugger()
@*/
PetscErrorCode  PetscSetDebugTerminal(const char terminal[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* NOTE(review): unbounded copy into the file-scope DebugTerminal buffer,
     whose declaration is not visible here; if it is a fixed-size array, a
     long terminal string would overflow it -- consider a bounded copy
     (PetscStrncpy) once the buffer size is confirmed. */
  ierr = PetscStrcpy(DebugTerminal,terminal);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/*
   TestFieldProjection - Projects an analytic function into the FEM space of dm,
   then projects that field through pointwise field functions, viewing both
   results via options. When label is given, the projection is restricted to
   label value 1; when auxdm is given, it is attached to dm as auxiliary data
   for the duration of the test.
*/
static PetscErrorCode TestFieldProjection(DM dm, DM auxdm, DMLabel label, Vec la, const char name[], AppCtx *user)
{
  /* Pointwise analytic functions, one per field */
  PetscErrorCode (**afuncs)(PetscInt, PetscReal, const PetscReal [], PetscInt, PetscScalar *, void *);
  /* Pointwise field-projection kernels (full DMProjectField signature) */
  void (**funcs)(PetscInt, PetscInt, PetscInt, const PetscInt[], const PetscInt[], const PetscScalar[], const PetscScalar[], const PetscScalar[], const PetscInt[], const PetscInt[], const PetscScalar[], const PetscScalar[], const PetscScalar[], PetscReal, const PetscReal[], PetscInt, const PetscScalar[], PetscScalar[]);
  Vec            lx, lu;
  PetscInt       Nf, f;
  PetscInt       val[1] = {1};                /* label value the projection is restricted to */
  char           lname[PETSC_MAX_PATH_LEN];
  PetscErrorCode ierr;

  PetscFunctionBeginUser;
  if (auxdm) {
    /* Attach the auxiliary DM and vector; removed again before returning */
    ierr = PetscObjectCompose((PetscObject) dm, "dmAux", (PetscObject) auxdm);CHKERRQ(ierr);
    ierr = PetscObjectCompose((PetscObject) dm, "A", (PetscObject) la);CHKERRQ(ierr);
  }
  ierr = DMGetNumFields(dm, &Nf);CHKERRQ(ierr);
  ierr = PetscMalloc2(Nf, &funcs, Nf, &afuncs);CHKERRQ(ierr);
  /* Same analytic input for every field; field kernels assume exactly
     field 0 = vector, field 1 = scalar */
  for (f = 0; f < Nf; ++f) afuncs[f] = linear;
  funcs[0] = linear_vector;
  funcs[1] = linear_scalar;
  /* Build the input local vector by projecting the analytic functions */
  ierr = DMGetLocalVector(dm, &lu);CHKERRQ(ierr);
  ierr = PetscStrcpy(lname, "Local Field Input ");CHKERRQ(ierr);
  ierr = PetscStrcat(lname, name);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) lu, lname);CHKERRQ(ierr);
  if (!label) {ierr = DMProjectFunctionLocal(dm, 0.0, afuncs, NULL, INSERT_VALUES, lu);CHKERRQ(ierr);}
  else        {ierr = DMProjectFunctionLabelLocal(dm, 0.0, label, 1, val, 0, NULL, afuncs, NULL, INSERT_VALUES, lu);CHKERRQ(ierr);}
  ierr = VecViewFromOptions(lu, NULL, "-local_input_view");CHKERRQ(ierr);
  /* Project the input field through the pointwise kernels */
  ierr = DMGetLocalVector(dm, &lx);CHKERRQ(ierr);
  ierr = PetscStrcpy(lname, "Local Field ");CHKERRQ(ierr);
  ierr = PetscStrcat(lname, name);CHKERRQ(ierr);
  ierr = PetscObjectSetName((PetscObject) lx, lname);CHKERRQ(ierr);
  if (!label) {ierr = DMProjectFieldLocal(dm, 0.0, lu, funcs, INSERT_VALUES, lx);CHKERRQ(ierr);}
  else        {ierr = DMProjectFieldLabelLocal(dm, 0.0, label, 1, val, 0, NULL, lu, funcs, INSERT_VALUES, lx);CHKERRQ(ierr);}
  ierr = VecViewFromOptions(lx, NULL, "-local_field_view");CHKERRQ(ierr);
  ierr = DMRestoreLocalVector(dm, &lx);CHKERRQ(ierr);
  ierr = DMRestoreLocalVector(dm, &lu);CHKERRQ(ierr);
  ierr = PetscFree2(funcs, afuncs);CHKERRQ(ierr);
  if (auxdm) {
    /* Detach the auxiliary objects so dm is left unchanged */
    ierr = PetscObjectCompose((PetscObject) dm, "dmAux", NULL);CHKERRQ(ierr);
    ierr = PetscObjectCompose((PetscObject) dm, "A", NULL);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
/*@C MatPartitioningPartySetLocal - Set method for local partitioning. Input Parameter: . part - the partitioning context . method - One of MP_PARTY_HELPFUL_SETS, MP_PARTY_KERNIGHAN_LIN, or MP_PARTY_NONE. Check the Party Library Users Manual for details. Level: advanced @*/ PetscErrorCode PETSCMAT_DLLEXPORT MatPartitioningPartySetLocal(MatPartitioning part, const char *local) { MatPartitioning_Party *party = (MatPartitioning_Party *) part->data; PetscFunctionBegin; PetscStrcpy(party->local_method, local); PetscFunctionReturn(0); }
PETSC_EXTERN PetscErrorCode PetscThreadCommCreate_NoThread(PetscThreadComm tcomm) { PetscErrorCode ierr; PetscFunctionBegin; if (tcomm->nworkThreads != 1) SETERRQ1(PETSC_COMM_WORLD,PETSC_ERR_ARG_WRONG,"Cannot have more than 1 thread for the nonthread communicator,threads requested = %D",tcomm->nworkThreads); ierr = PetscStrcpy(tcomm->type,NOTHREAD);CHKERRQ(ierr); PetscFunctionReturn(0); }
/*@
   PetscBagSetFromOptions - Allows setting options from a bag

   Collective on PetscBag

   Input Parameter:
.  bag - the bag of values

   Level: beginner

.seealso: PetscBag, PetscBagSetName(), PetscBagDestroy(), PetscBagLoad(), PetscBagGetData()
          PetscBagRegisterReal(), PetscBagRegisterInt(), PetscBagRegisterBool(), PetscBagRegisterScalar()
          PetscBagSetFromOptions(), PetscBagCreate(), PetscBagGetName(), PetscBagView(), PetscBagRegisterEnum()
@*/
PetscErrorCode PetscBagSetFromOptions(PetscBag bag)
{
  PetscErrorCode ierr;
  PetscBagItem   nitem = bag->bagitems;
  char           name[PETSC_BAG_NAME_LENGTH+1],helpname[PETSC_BAG_NAME_LENGTH+PETSC_BAG_HELP_LENGTH+3];
  PetscInt       n;

  PetscFunctionBegin;
  /* Help banner for this options block: "<bagname> <baghelp>" */
  ierr = PetscStrcpy(helpname,bag->bagname);CHKERRQ(ierr);
  ierr = PetscStrcat(helpname," ");CHKERRQ(ierr);
  ierr = PetscStrcat(helpname,bag->baghelp);CHKERRQ(ierr);
  ierr = PetscOptionsBegin(bag->bagcomm,bag->bagprefix,helpname,0);CHKERRQ(ierr);
  /* Walk the registered items; each item's value lives at bag + offset and is
     dispatched on its registered datatype. */
  while (nitem) {
    /* Option name is "-<item name>" */
    name[0] = '-'; name[1] = 0;
    ierr = PetscStrcat(name,nitem->name);CHKERRQ(ierr);
    if (nitem->dtype == PETSC_CHAR) { /* special handling for fortran required? [due to space padding vs null termination] */
      char *value = (char*)(((char*)bag) + nitem->offset);
      ierr = PetscOptionsString(name,nitem->help,"",value,value,nitem->msize,NULL);CHKERRQ(ierr);
    } else if (nitem->dtype == PETSC_REAL) {
      PetscReal *value = (PetscReal*)(((char*)bag) + nitem->offset);
      /* msize > 1 means the item is an array of values */
      if (nitem->msize == 1) {
        ierr = PetscOptionsReal(name,nitem->help,"",*value,value,NULL);CHKERRQ(ierr);
      } else {
        n    = nitem->msize;
        ierr = PetscOptionsRealArray(name,nitem->help,"",value,&n,NULL);CHKERRQ(ierr);
      }
    } else if (nitem->dtype == PETSC_SCALAR) {
      PetscScalar *value = (PetscScalar*)(((char*)bag) + nitem->offset);
      ierr = PetscOptionsScalar(name,nitem->help,"",*value,value,NULL);CHKERRQ(ierr);
    } else if (nitem->dtype == PETSC_INT) {
      PetscInt *value = (PetscInt*)(((char*)bag) + nitem->offset);
      if (nitem->msize == 1) {
        ierr = PetscOptionsInt(name,nitem->help,"",*value,value,NULL);CHKERRQ(ierr);
      } else {
        n    = nitem->msize;
        ierr = PetscOptionsIntArray(name,nitem->help,"",value,&n,NULL);CHKERRQ(ierr);
      }
    } else if (nitem->dtype == PETSC_ENUM) {
      PetscEnum *value = (PetscEnum*)(((char*)bag) + nitem->offset);
      PetscInt  i = 0;
      /* Count entries up to the NULL terminator; list[i-3] then picks the
         entry used as the option's documentation string — presumably the enum
         type name stored after the value names (NOTE(review): verify against
         the list layout used by PetscBagRegisterEnum). */
      while (nitem->list[i++]) ;
      ierr = PetscOptionsEnum(name,nitem->help,nitem->list[i-3],(const char*const*)nitem->list,*value,value,NULL);CHKERRQ(ierr);
    } else if (nitem->dtype == PETSC_BOOL) {
      PetscBool *value = (PetscBool*)(((char*)bag) + nitem->offset);
      if (nitem->msize == 1) {
        ierr = PetscOptionsBool(name,nitem->help,"",*value,value,NULL);CHKERRQ(ierr);
      } else {
        n    = nitem->msize;
        ierr = PetscOptionsBoolArray(name,nitem->help,"",value,&n,NULL);CHKERRQ(ierr);
      }
    }
    nitem = nitem->next;
  }
  PetscOptionsEnd();
  PetscFunctionReturn(0);
}
/*@C MatPartitioningScotchSetStrategy - Set the strategy to be used by Scotch. This is an alternative way of specifying the global method, the local method, the coarse level and the multilevel option. Input Parameter: . part - the partitioning context . level - the strategy in Scotch format. Check Scotch documentation. Level: advanced .seealso: MatPartitioningScotchSetGlobal(), MatPartitioningScotchSetLocal(), MatPartitioningScotchSetCoarseLevel(), MatPartitioningScotchSetMultilevel(), @*/ PetscErrorCode PETSCMAT_DLLEXPORT MatPartitioningScotchSetStrategy(MatPartitioning part, char *strat) { MatPartitioning_Scotch *scotch = (MatPartitioning_Scotch *) part->data; PetscFunctionBegin; PetscStrcpy(scotch->strategy, strat); PetscFunctionReturn(0); }
/*
   ProcessOptions - Initializes the AppCtx defaults for the Stokes problem and
   overrides them from the options database.

   Input Parameter:
.  comm - the communicator to query options on

   Output Parameter:
.  options - the filled-in application context
*/
PetscErrorCode ProcessOptions(MPI_Comm comm, AppCtx *options)
{
  const char    *bcTypes[2]  = {"neumann", "dirichlet"};
  const char    *runTypes[2] = {"full", "test"};
  PetscInt       bc, run;
  PetscErrorCode ierr;

  PetscFunctionBeginUser;
  options->debug           = 0;
  options->runType         = RUN_FULL;
  options->dim             = 2;
  options->interpolate     = PETSC_FALSE;
  options->refinementLimit = 0.0;
  options->bcType          = DIRICHLET;
  options->numBatches      = 1;
  options->numBlocks       = 1;
  options->jacobianMF      = PETSC_FALSE;
  options->showInitial     = PETSC_FALSE;
  options->showSolution    = PETSC_TRUE;
  /* Wire the pointwise residual/Jacobian kernels into the FEM context; the
     casts adapt the stored function-pointer arrays to the generic signature. */
  options->fem.f0Funcs = (void (**)(const PetscScalar[], const PetscScalar[], const PetscScalar[], const PetscScalar[], const PetscReal[], PetscScalar[])) &options->f0Funcs;
  options->fem.f1Funcs = (void (**)(const PetscScalar[], const PetscScalar[], const PetscScalar[], const PetscScalar[], const PetscReal[], PetscScalar[])) &options->f1Funcs;
  options->fem.g0Funcs = (void (**)(const PetscScalar[], const PetscScalar[], const PetscScalar[], const PetscScalar[], const PetscReal[], PetscScalar[])) &options->g0Funcs;
  options->fem.g1Funcs = (void (**)(const PetscScalar[], const PetscScalar[], const PetscScalar[], const PetscScalar[], const PetscReal[], PetscScalar[])) &options->g1Funcs;
  options->fem.g2Funcs = (void (**)(const PetscScalar[], const PetscScalar[], const PetscScalar[], const PetscScalar[], const PetscReal[], PetscScalar[])) &options->g2Funcs;
  options->fem.g3Funcs = (void (**)(const PetscScalar[], const PetscScalar[], const PetscScalar[], const PetscScalar[], const PetscReal[], PetscScalar[])) &options->g3Funcs;

  ierr = MPI_Comm_size(comm, &options->numProcs);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &options->rank);CHKERRQ(ierr);
  ierr = PetscOptionsBegin(comm, "", "Stokes Problem Options", "DMPLEX");CHKERRQ(ierr);
  ierr = PetscOptionsInt("-debug", "The debugging level", "ex62.c", options->debug, &options->debug, NULL);CHKERRQ(ierr);
  run  = options->runType;
  ierr = PetscOptionsEList("-run_type", "The run type", "ex62.c", runTypes, 2, runTypes[options->runType], &run, NULL);CHKERRQ(ierr);
  options->runType = (RunType) run;
  ierr = PetscOptionsInt("-dim", "The topological mesh dimension", "ex62.c", options->dim, &options->dim, NULL);CHKERRQ(ierr);
  spatialDim = options->dim;
  ierr = PetscOptionsBool("-interpolate", "Generate intermediate mesh elements", "ex62.c", options->interpolate, &options->interpolate, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsReal("-refinement_limit", "The largest allowable cell volume", "ex62.c", options->refinementLimit, &options->refinementLimit, NULL);CHKERRQ(ierr);
  ierr = PetscStrcpy(options->partitioner, "chaco");CHKERRQ(ierr);
  /* Help-file name fixed: the original said "pflotran.cxx", inconsistent with
     every other option in this routine. */
  ierr = PetscOptionsString("-partitioner", "The graph partitioner", "ex62.c", options->partitioner, options->partitioner, 2048, NULL);CHKERRQ(ierr);
  bc   = options->bcType;
  ierr = PetscOptionsEList("-bc_type","Type of boundary condition","ex62.c",bcTypes,2,bcTypes[options->bcType],&bc,NULL);CHKERRQ(ierr);
  options->bcType = (BCType) bc;
  ierr = PetscOptionsInt("-gpu_batches", "The number of cell batches per kernel", "ex62.c", options->numBatches, &options->numBatches, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-gpu_blocks", "The number of concurrent blocks per kernel", "ex62.c", options->numBlocks, &options->numBlocks, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-jacobian_mf", "Calculate the action of the Jacobian on the fly", "ex62.c", options->jacobianMF, &options->jacobianMF, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-show_initial", "Output the initial guess for verification", "ex62.c", options->showInitial, &options->showInitial, NULL);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-show_solution", "Output the solution for verification", "ex62.c", options->showSolution, &options->showSolution, NULL);CHKERRQ(ierr);
  /* Check the return code: the original assigned ierr but never tested it. */
  ierr = PetscOptionsEnd();CHKERRQ(ierr);

  ierr = PetscLogEventRegister("CreateMesh", DM_CLASSID, &options->createMeshEvent);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/*@C
   PetscPOpen - Runs a program on processor zero and sends either its input or output to a file.

   Logically Collective on MPI_Comm, but only process 0 runs the command

   Input Parameters:
+  comm - MPI communicator, only processor zero runs the program
.  machine - machine to run command on or NULL, or string with 0 in first location
.  program - name of program to run
-  mode - either r or w

   Output Parameter:
.  fp - the file pointer where program input or output may be read or NULL if don't care

   Level: intermediate

   Notes:
       Use PetscPClose() to close the file pointer when you are finished with it

       Does not work under Windows

       If machine is not provided will use the value set with PetscPOpenSetMachine() if that was provided, otherwise
       will use the machine running node zero of the communicator

       The program string may contain ${DISPLAY}, ${HOMEDIRECTORY} or ${WORKINGDIRECTORY}; these
    will be replaced with relevant values.

.seealso: PetscFOpen(), PetscFClose(), PetscPClose(), PetscPOpenSetMachine()
@*/
PetscErrorCode PetscPOpen(MPI_Comm comm,const char machine[],const char program[],const char mode[],FILE **fp)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  size_t         i,len,cnt;
  char           commandt[PETSC_MAX_PATH_LEN],command[PETSC_MAX_PATH_LEN];
  FILE           *fd;

  PetscFunctionBegin;
  /* all processors have to do the string manipulation because PetscStrreplace() is a collective operation */
  if (PetscPOpenMachine[0] || (machine && machine[0])) {
    /* Run remotely: command = ssh <machine> " export DISPLAY=...; <program>" */
    ierr = PetscStrcpy(command,"ssh ");CHKERRQ(ierr);
    if (PetscPOpenMachine[0]) {
      ierr = PetscStrcat(command,PetscPOpenMachine);CHKERRQ(ierr);
    } else {
      ierr = PetscStrcat(command,machine);CHKERRQ(ierr);
    }
    ierr = PetscStrcat(command," \" export DISPLAY=${DISPLAY}; ");CHKERRQ(ierr);
    /* Copy program into command but protect the " with a \ in front of it */
    ierr = PetscStrlen(command,&cnt);CHKERRQ(ierr);
    ierr = PetscStrlen(program,&len);CHKERRQ(ierr);
    for (i=0; i<len; i++) {
      /* Bounds check added: the original loop could overflow command[] for a
         long program string. Reserve room for a possible escape character,
         the copied character, and the trailing NUL. */
      if (cnt + 2 >= sizeof(command)) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Program string too long for command buffer of size %d",(int)sizeof(command));
      if (program[i] == '\"') command[cnt++] = '\\';
      command[cnt++] = program[i];
    }
    command[cnt] = 0;
    ierr = PetscStrcat(command,"\"");CHKERRQ(ierr);
  } else {
    /* Run locally: the command is just the program itself */
    ierr = PetscStrcpy(command,program);CHKERRQ(ierr);
  }

  /* Pass the true buffer size instead of the old magic 1024 (commandt is
     PETSC_MAX_PATH_LEN bytes). */
  ierr = PetscStrreplace(comm,command,commandt,sizeof(commandt));CHKERRQ(ierr);

  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  if (!rank) {
    ierr = PetscInfo1(0,"Running command :%s\n",commandt);CHKERRQ(ierr);
    if (!(fd = popen(commandt,mode))) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"Cannot run command %s",commandt);
    if (fp) *fp = fd;
  }
  PetscFunctionReturn(0);
}
/*@C MatPartitioningScotchSetHostList - Specify host list file for mapping. Input Parameter: . part - the partitioning context . file - the name of file Level: advanced Notes: The file must consist in a list of hostnames (one per line). These hosts are the ones referred to in the architecture file (see MatPartitioningScotchSetArch()): the first host corresponds to index 0, the second one to index 1, and so on. If the name is not set, then the default "host_list" is used. .seealso: MatPartitioningScotchSetArch(), MatPartitioningScotchSetMapping() @*/ PetscErrorCode PETSCMAT_DLLEXPORT MatPartitioningScotchSetHostList(MatPartitioning part, const char *filename) { MatPartitioning_Scotch *scotch = (MatPartitioning_Scotch *) part->data; PetscFunctionBegin; PetscStrcpy(scotch->host_list, filename); PetscFunctionReturn(0); }