/*@C
   PETSC_VIEWER_SOCKET_ - Creates a socket viewer shared by all processors in a communicator.

   Collective on MPI_Comm

   Input Parameter:
.  comm - the MPI communicator to share the socket PetscViewer

   Level: intermediate

   Options Database Keys:
   For use with the default PETSC_VIEWER_SOCKET_WORLD, or if NULL is passed for machine or
   PETSC_DEFAULT is passed for port
$    -viewer_socket_machine <machine>
$    -viewer_socket_port <port>

   Environmental variables:
+   PETSC_VIEWER_SOCKET_PORT - port number
-   PETSC_VIEWER_SOCKET_MACHINE - machine name

   Notes:
   Unlike almost all other PETSc routines, PETSC_VIEWER_SOCKET_() does not return
   an error code.  The socket PetscViewer is usually used in the form
$       XXXView(XXX object,PETSC_VIEWER_SOCKET_(comm));

   Currently the only socket client available is MATLAB. See
   src/dm/examples/tests/ex12.c and ex12.m for an example of usage.

   Connects to a waiting socket and stays connected until PetscViewerDestroy() is called.

   Use this for communicating with an interactive MATLAB session; see PETSC_VIEWER_MATLAB_() for writing output to a .mat file.
   Use PetscMatlabEngineCreate() or PETSC_MATLAB_ENGINE_(), PETSC_MATLAB_ENGINE_SELF, or PETSC_MATLAB_ENGINE_WORLD
   for communicating with a MATLAB Engine.

.seealso: PETSC_VIEWER_SOCKET_WORLD, PETSC_VIEWER_SOCKET_SELF, PetscViewerSocketOpen(), PetscViewerCreate(),
          PetscViewerSocketSetConnection(), PetscViewerDestroy(), PETSC_VIEWER_SOCKET_(), PetscViewerBinaryWrite(),
          PetscViewerBinaryRead(), PetscViewerBinaryWriteStringArray(), PetscViewerBinaryGetDescriptor(), PETSC_VIEWER_MATLAB_()
@*/
PetscViewer PETSC_VIEWER_SOCKET_(MPI_Comm comm)
{
  PetscErrorCode ierr;
  PetscBool      flg;
  PetscViewer    viewer;
  MPI_Comm       ncomm;

  PetscFunctionBegin;
  ierr = PetscCommDuplicate(comm,&ncomm,NULL);
  if (ierr) {PetscError(PETSC_COMM_SELF,__LINE__,"PETSC_VIEWER_SOCKET_",__FILE__,PETSC_ERR_PLIB,PETSC_ERROR_INITIAL," ");PetscFunctionReturn(0);}
  if (Petsc_Viewer_Socket_keyval == MPI_KEYVAL_INVALID) {
    ierr = MPI_Comm_create_keyval(MPI_COMM_NULL_COPY_FN,MPI_COMM_NULL_DELETE_FN,&Petsc_Viewer_Socket_keyval,0);
    if (ierr) {PetscError(PETSC_COMM_SELF,__LINE__,"PETSC_VIEWER_SOCKET_",__FILE__,PETSC_ERR_PLIB,PETSC_ERROR_INITIAL," ");PetscFunctionReturn(0);}
  }
  ierr = MPI_Comm_get_attr(ncomm,Petsc_Viewer_Socket_keyval,(void**)&viewer,(int*)&flg);
  if (ierr) {PetscError(PETSC_COMM_SELF,__LINE__,"PETSC_VIEWER_SOCKET_",__FILE__,PETSC_ERR_PLIB,PETSC_ERROR_INITIAL," ");PetscFunctionReturn(0);}
  if (!flg) { /* PetscViewer not yet created */
    ierr = PetscViewerSocketOpen(ncomm,0,0,&viewer);
    if (ierr) {PetscError(PETSC_COMM_SELF,__LINE__,"PETSC_VIEWER_SOCKET_",__FILE__,PETSC_ERR_PLIB,PETSC_ERROR_INITIAL," ");PetscFunctionReturn(0);}
    ierr = PetscObjectRegisterDestroy((PetscObject)viewer);
    if (ierr) {PetscError(PETSC_COMM_SELF,__LINE__,"PETSC_VIEWER_SOCKET_",__FILE__,PETSC_ERR_PLIB,PETSC_ERROR_INITIAL," ");PetscFunctionReturn(0);}
    ierr = MPI_Comm_set_attr(ncomm,Petsc_Viewer_Socket_keyval,(void*)viewer);
    if (ierr) {PetscError(PETSC_COMM_SELF,__LINE__,"PETSC_VIEWER_SOCKET_",__FILE__,PETSC_ERR_PLIB,PETSC_ERROR_INITIAL," ");PetscFunctionReturn(0);}
  }
  ierr = PetscCommDestroy(&ncomm);
  if (ierr) {PetscError(PETSC_COMM_SELF,__LINE__,"PETSC_VIEWER_SOCKET_",__FILE__,PETSC_ERR_PLIB,PETSC_ERROR_INITIAL," ");PetscFunctionReturn(0);}
  PetscFunctionReturn(viewer);
}
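/*
   Illustration (not part of the PETSc source): a minimal sketch of the usage
   pattern described in the Notes above.  It assumes PETSc is initialized and a
   socket client such as MATLAB is already listening on the default machine/port;
   the helper name SendThroughSharedSocket is hypothetical.
*/
#include <petscvec.h>

static PetscErrorCode SendThroughSharedSocket(Vec v)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* The XXXView(XXX object,PETSC_VIEWER_SOCKET_(comm)) form from the Notes;
     the shared viewer is created on first use and destroyed in PetscFinalize(). */
  ierr = VecView(v,PETSC_VIEWER_SOCKET_(PETSC_COMM_WORLD));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}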
int main(int argc,char **args)
{
  PetscErrorCode ierr;
  PetscViewer    socketviewer;
  PetscInt       rank,size;
  Vec            x,y;
  Mat            A;
  PetscScalar    *a;
  int            vsize,portnumber = 5050;
  //PetscViewer_Socket *vmatlab;

  PetscInitialize(&argc,&args,(char *)0,help);
  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);

  // Rank 0 connects to socket (use PETSC_DEFAULT, it does not need to be #5050)
  //PetscViewerSocketOpen(PETSC_COMM_WORLD,0,portnumber,&socketviewer);
  PetscViewerSocketOpen(PETSC_COMM_WORLD,0,PETSC_DEFAULT,&socketviewer);

  // Example for receiving and sending a scalar.
  // It becomes a VECSEQ with size one.
  VecReceive(socketviewer,PETSC_NULL,&x);
  //VecView(x,0);
  // We cannot send a VECSEQ type vector, so use PetscRealView; note that a is a pointer.
  VecGetArray(x,&a);
  PetscRealView(1,a,socketviewer);

  // Example for receiving and sending a SEQ vector.
  // If the size of the receiving vector is larger than 1, it becomes a VECMPI type vector.
  VecReceive(socketviewer,VECSEQ,&x);
  VecGetArray(x,&a);
  VecGetSize(x,&vsize);
  PetscRealView(vsize,a,socketviewer);

  // Example for receiving and sending an MPI vector.
  VecReceive(socketviewer,VECMPI,&y);
  //VecView(y,0);
  // Use VecView to send a VECMPI type vector.
  VecView(y,socketviewer);

  // Example for receiving and sending an MPI matrix.
  MatReceiveTranspose(socketviewer,MATMPIAIJ,&A);
  //MatView(A,0);
  // MatView(A,socketviewer);

  /////////////////////////////////////////////////////////////////////////////////////
  ierr = VecDestroy(x);CHKERRQ(ierr);
  ierr = PetscFinalize();CHKERRQ(ierr);
  return 0;
}
int main(int argc,char **args)
{
  Vec            b;
  PetscViewer    fd;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscInitialize(&argc,&args,(char *)0,help);

  /* "server" indicates we WAIT for someone to connect to our socket */
  ierr = PetscViewerSocketOpen(PETSC_COMM_WORLD,"server",PETSC_DEFAULT,&fd);CHKERRQ(ierr);

  ierr = VecCreateMPI(PETSC_COMM_WORLD,10000,PETSC_DECIDE,&b);CHKERRQ(ierr);
  for (i=0; i<1000; i++) {
    ierr = VecView(b,fd);CHKERRQ(ierr);
    ierr = VecDestroy(&b);CHKERRQ(ierr);
    ierr = VecCreate(PETSC_COMM_WORLD,&b);CHKERRQ(ierr);
    ierr = VecLoad(b,fd);CHKERRQ(ierr);
  }
  ierr = VecDestroy(&b);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return 0;
}
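/*
   Illustration (not part of the example above): a sketch of what the connecting
   side of that server loop could look like.  It assumes the same PETSc release
   as the server example; passing NULL for the machine and PETSC_DEFAULT for the
   port lets -viewer_socket_machine / -viewer_socket_port or the
   PETSC_VIEWER_SOCKET_* environment variables choose the connection.
*/
#include <petscvec.h>

int main(int argc,char **args)
{
  Vec            b;
  PetscViewer    fd;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscInitialize(&argc,&args,(char*)0,NULL);
  /* connect to the waiting "server" side */
  ierr = PetscViewerSocketOpen(PETSC_COMM_WORLD,NULL,PETSC_DEFAULT,&fd);CHKERRQ(ierr);
  for (i=0; i<1000; i++) {
    ierr = VecCreate(PETSC_COMM_WORLD,&b);CHKERRQ(ierr);
    ierr = VecLoad(b,fd);CHKERRQ(ierr);   /* receive the vector the server viewed */
    ierr = VecView(b,fd);CHKERRQ(ierr);   /* send it back */
    ierr = VecDestroy(&b);CHKERRQ(ierr);
  }
  ierr = PetscViewerDestroy(&fd);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return 0;
}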
/*@C
   PETSC_VIEWER_SOCKET_ - Creates a socket viewer shared by all processors in a communicator.
   (Older PETSc release of the routine shown above.)

   Collective on MPI_Comm

   Input Parameter:
.  comm - the MPI communicator to share the socket PetscViewer

   Level: intermediate

   Options Database Keys:
   For use with the default PETSC_VIEWER_SOCKET_WORLD, or if PETSC_NULL is passed for machine or
   PETSC_DEFAULT is passed for port
$    -viewer_socket_machine <machine>
$    -viewer_socket_port <port>

   Environmental variables:
+   PETSC_VIEWER_SOCKET_PORT - port number
-   PETSC_VIEWER_SOCKET_MACHINE - machine name

   Notes:
   Unlike almost all other PETSc routines, PETSC_VIEWER_SOCKET_() does not return
   an error code.  The socket PetscViewer is usually used in the form
$       XXXView(XXX object,PETSC_VIEWER_SOCKET_(comm));

   Currently the only socket client available is MATLAB. See
   src/dm/da/examples/tests/ex12.c and ex12.m for an example of usage.

   Connects to a waiting socket and stays connected until PetscViewerDestroy() is called.

   Use this for communicating with an interactive MATLAB session; see PETSC_VIEWER_MATLAB_() for communicating with the MATLAB engine.

.seealso: PETSC_VIEWER_SOCKET_WORLD, PETSC_VIEWER_SOCKET_SELF, PetscViewerSocketOpen(), PetscViewerCreate(),
          PetscViewerSocketSetConnection(), PetscViewerDestroy(), PETSC_VIEWER_SOCKET_(), PetscViewerBinaryWrite(),
          PetscViewerBinaryRead(), PetscViewerBinaryWriteStringArray(), PetscViewerBinaryGetDescriptor(), PETSC_VIEWER_MATLAB_()
@*/
PetscViewer PETSC_DLLEXPORT PETSC_VIEWER_SOCKET_(MPI_Comm comm)
{
  PetscErrorCode ierr;
  PetscTruth     flg;
  PetscViewer    viewer;

  PetscFunctionBegin;
  if (Petsc_Viewer_Socket_keyval == MPI_KEYVAL_INVALID) {
    ierr = MPI_Keyval_create(MPI_NULL_COPY_FN,MPI_NULL_DELETE_FN,&Petsc_Viewer_Socket_keyval,0);
    if (ierr) {PetscError(__LINE__,"PETSC_VIEWER_SOCKET_",__FILE__,__SDIR__,1,1," ");PetscFunctionReturn(0);}
  }
  ierr = MPI_Attr_get(comm,Petsc_Viewer_Socket_keyval,(void **)&viewer,(int*)&flg);
  if (ierr) {PetscError(__LINE__,"PETSC_VIEWER_SOCKET_",__FILE__,__SDIR__,1,1," ");PetscFunctionReturn(0);}
  if (!flg) { /* PetscViewer not yet created */
    ierr = PetscViewerSocketOpen(comm,0,0,&viewer);
    if (ierr) {PetscError(__LINE__,"PETSC_VIEWER_SOCKET_",__FILE__,__SDIR__,1,1," ");PetscFunctionReturn(0);}
    ierr = PetscObjectRegisterDestroy((PetscObject)viewer);
    if (ierr) {PetscError(__LINE__,"PETSC_VIEWER_SOCKET_",__FILE__,__SDIR__,1,1," ");PetscFunctionReturn(0);}
    ierr = MPI_Attr_put(comm,Petsc_Viewer_Socket_keyval,(void*)viewer);
    if (ierr) {PetscError(__LINE__,"PETSC_VIEWER_SOCKET_",__FILE__,__SDIR__,1,1," ");PetscFunctionReturn(0);}
  }
  PetscFunctionReturn(viewer);
}
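/*
   Illustration (not part of the PETSc source): the .seealso list mentions
   PetscViewerSocketSetConnection(), which can be used instead of the shared
   PETSC_VIEWER_SOCKET_() viewer when an explicit machine and port are wanted.
   This is a sketch only; the helper name, "localhost", and port 5005 are
   placeholders, and error handling is the standard CHKERRQ pattern.
*/
#include <petscviewer.h>

static PetscErrorCode OpenSocketByHand(MPI_Comm comm,PetscViewer *viewer)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscViewerCreate(comm,viewer);CHKERRQ(ierr);
  ierr = PetscViewerSetType(*viewer,"socket");CHKERRQ(ierr);
  /* placeholder machine/port; normally -viewer_socket_machine and
     -viewer_socket_port (or the PETSC_VIEWER_SOCKET_* environment
     variables) are used instead of hard-coded values */
  ierr = PetscViewerSocketSetConnection(*viewer,"localhost",5005);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}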
int main(int argc,char **args)
{
  PetscInt       rank,size,npt;
  PetscErrorCode ierr;
  Vec            x,y0,tempvec, *vinda,*vindb,*vindc;
  PetscInt       i,j,k,l,n,p,m,m2,pmax,puse,Istart,Iend,localsize,niter;
  PetscScalar    dx,dy,dx2,dy2;
  PetscScalar    *Mixnorm;
  PetscInt       iter,*iterind,*nind;
  FILE           *fidoutput;
  char           fname[50];
  PetscViewer    socketviewer;
  PetscInt       withMatlab;
  PetscTruth     Matlabflag;
  PetscLogDouble v1,v2,elapsed_time;

  PetscInitialize(&argc,&args,(char *)0,help);
  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);

  ierr = PetscPrintf(PETSC_COMM_WORLD,"\nPETSC: Petsc Initializes successfully! \n");
  ierr = PetscPrintf(PETSC_COMM_WORLD,"PETSC: comm_size is %d \n", size);

  ierr = PetscOptionsGetInt(PETSC_NULL,"-withMatlab",&withMatlab,&Matlabflag);CHKERRQ(ierr);
  if (Matlabflag == PETSC_FALSE){withMatlab = 0;}else{withMatlab = 1;}

  if(withMatlab==1){
    // Rank 0 connects to socket, use default socket
    PetscViewerSocketOpen(PETSC_COMM_WORLD,0,PETSC_DEFAULT,&socketviewer);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"PETSC: socket opened! \n");CHKERRQ(ierr);
    // Receive n from Matlab
    IntReceive(socketviewer, &nind);
    n = *nind;
    // Receive iter from Matlab
    IntReceive(socketviewer, &iterind);
    iter = *iterind;
  }else{
    ierr = PetscOptionsGetInt(PETSC_NULL,"-ngrid",&n,PETSC_NULL);CHKERRQ(ierr);
    ierr = PetscOptionsGetInt(PETSC_NULL,"-niter",&iter,PETSC_NULL);CHKERRQ(ierr);
  }

  /////////////////////////////////////////////////////////////////////////////////////
  ierr = PetscPrintf(PETSC_COMM_WORLD,"PETSC: number of grid is %d \n", n);
  ierr = PetscPrintf(PETSC_COMM_WORLD,"PETSC: number of iteration is %d \n", iter);

  Mixnorm = malloc(iter*sizeof(PetscScalar));
  dx   = 1.0/n;
  dy   = 1.0/n;
  dx2  = dx/2-dx/1e6;
  dy2  = dy/2-dy/1e6;
  npt  = 5;
  pmax = 4e6;
  puse = pmax;

  ierr = PetscPrintf(PETSC_COMM_WORLD,"PETSC: estimated buffer size (per processor) %f Mbytes \n", pmax*1.0/1e6*8*16 );
  ierr = PetscPrintf(PETSC_COMM_WORLD,"PETSC: estimated variable size %f Mbytes\n", 1.0*n*n/1e6*8*2);

  /////////////////////////////////////////////////////////////////////////////////////
  ierr = VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE ,n,&tempvec);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(tempvec,&Istart,&Iend);CHKERRQ(ierr);
  localsize = Iend-Istart;
  ierr = VecDestroy(tempvec);CHKERRQ(ierr);

  /////////////////////////////////////////////////////////////////////////////////////
  // Create initial vector
  Vec         x0;
  PetscScalar *x0array;
  x0array = malloc((localsize)*n*sizeof(PetscScalar));
  k = 0;
  for(i=Istart;i<Iend;i++){
    for(j=0;j<n;j++){
      *(x0array+k) = cos(2*M_PI*(dx/2+i*dx));
      //*(x0array+k) = cos(2*M_PI*(dy/2+j*dy));
      k++;
    }
  }

  ierr = VecCreateMPIWithArray(PETSC_COMM_WORLD,n*localsize,PETSC_DECIDE,x0array,&x0);CHKERRQ(ierr);
  ierr = VecDuplicate(x0,&x);CHKERRQ(ierr);
  ierr = VecCreateSeq(PETSC_COMM_SELF,pmax*npt,&y0);CHKERRQ(ierr);

  ierr = VecNorm(x0,NORM_2,Mixnorm); CHKERRQ(ierr);
  PetscPrintf(PETSC_COMM_WORLD,"PETSC: initial norm= %f \n",*(Mixnorm+0)/n );

  ///////////////////////////////////////////////////////////////////////////
  // Map Center Points
  PetscInt    *NzindJ,*idx,*idy,*idp;
  PetscScalar *CenterX,*CenterY,*VecVal,*pty;
  PetscScalar *ShiftX,*ShiftY,CX,CY, *yarray;
  IS          isx,isy;
  VecScatter  ctx;

  CenterX = malloc(npt*sizeof(PetscScalar));
  CenterY = malloc(npt*sizeof(PetscScalar));
  ShiftX  = malloc(npt*sizeof(PetscScalar));
  ShiftY  = malloc(npt*sizeof(PetscScalar));
  VecVal  = malloc(npt*sizeof(PetscScalar));
  yarray  = malloc(pmax*sizeof(PetscScalar));
  NzindJ  = malloc(pmax*npt*sizeof(PetscInt));
  idx     = malloc(pmax*npt*sizeof(PetscInt));
  idy     = malloc(pmax*npt*sizeof(PetscInt));
  idp     = malloc(pmax*sizeof(PetscInt));

  *(ShiftX+0) = 0;    *(ShiftY+0) = 0;
  *(ShiftX+1) = -dx2; *(ShiftY+1) = -dy2;
  *(ShiftX+2) = dx2;  *(ShiftY+2) = -dy2;
  *(ShiftX+3) = -dx2; *(ShiftY+3) = dy2;
  *(ShiftX+4) = dy2;  *(ShiftY+4) = dx2;
  //*(ShiftX+5) = 0;    *(ShiftY+5) = -dy2;
  //*(ShiftX+6) = -dx2; *(ShiftY+6) = 0;
  //*(ShiftX+7) = dx2;  *(ShiftY+7) = 0;
  //*(ShiftX+8) = 0;    *(ShiftY+9) = dy2;

  for(i=0;i<npt*pmax;i++){ *(idy+i)=i; }
  ISCreateGeneralWithArray(PETSC_COMM_SELF,npt*pmax,idy,&isy);

  vinda = &x0;
  vindb = &x;

  sprintf(fname, "mixnorm_%d_%d",n,iter);
  ierr = PetscPrintf(PETSC_COMM_WORLD,"\n iter norm time unit time\n");CHKERRQ(ierr);
  ierr = PetscFOpen(PETSC_COMM_WORLD,fname,"w",&fidoutput);CHKERRQ(ierr);

  for(niter=0;niter<iter;niter++){
    ierr = PetscGetTime(&v1);CHKERRQ(ierr);
    l = 0;
    p = 0;
    if (n*localsize-l<=pmax){puse = n*localsize-l;}else{puse=pmax;}
    for(i=Istart;i<Iend;i++){
      for(j=0;j<n;j++){
        CX = dx2+i*dx;
        CY = dy2+j*dy;
        for(k=0;k<npt;k++){
          *(CenterX+k) = CX + *(ShiftX+k);
          *(CenterY+k) = CY + *(ShiftY+k);
          InverseStandardMap((CenterX+k),(CenterY+k));
          *(NzindJ+p*npt +k) = floor(*(CenterX+k)*n)*n + floor(*(CenterY+k)*n);
        }
        *(idp+p) = Istart*n+ l;

        if(p>=puse-1){
          ierr = ISCreateGeneralWithArray(PETSC_COMM_WORLD,npt*puse,NzindJ,&isx);CHKERRQ(ierr);
          for(m=0;m<npt*puse;m++){ *(idy+m)=m; }
          ierr = ISCreateGeneralWithArray(PETSC_COMM_SELF,npt*puse,idy,&isy);CHKERRQ(ierr);
          ierr = VecScatterCreate(*vinda,isx,y0,isy,&ctx);CHKERRQ(ierr);
          ierr = VecScatterBegin(*vinda,y0,INSERT_VALUES,SCATTER_FORWARD,ctx);CHKERRQ(ierr);
          ierr = VecScatterEnd(*vinda,y0,INSERT_VALUES,SCATTER_FORWARD,ctx);CHKERRQ(ierr);
          ierr = VecScatterDestroy(ctx);
          ierr = VecGetArray(y0,&pty);CHKERRQ(ierr);

          for(m=0;m<puse;m++){
            for(m2=0;m2<npt;m2++){
              *(yarray+m) = *(yarray+m)+*(pty+m*npt+m2);
            }
            *(yarray+m) = *(yarray+m)/npt;
          }
          VecRestoreArray(y0,&pty);
          VecSetValues(*vindb,puse,idp,yarray,INSERT_VALUES);
          for(m=0;m<pmax;m++){ *(yarray+m) = 0; }
          p = 0;
          if (n*localsize-l<=pmax){puse = n*localsize-l-1;}else{puse=pmax;}
        }else{p++;}
        l++;
      }
    }
    VecAssemblyBegin(*vindb);
    VecAssemblyEnd(*vindb);

    vindc = vindb;
    vindb = vinda;
    vinda = vindc;
    //ierr = VecCopy(x,x0);CHKERRQ(ierr);

    ierr = VecNorm(*vinda,NORM_2,Mixnorm+niter); CHKERRQ(ierr);
    *(Mixnorm+niter) = *(Mixnorm+niter)/n;

    ierr = PetscGetTime(&v2);CHKERRQ(ierr);
    elapsed_time = v2 - v1;
    PetscPrintf(PETSC_COMM_WORLD," %d %f %f %f \n",niter,*(Mixnorm+niter),elapsed_time,elapsed_time/n/n*1e6 );
    PetscFPrintf(PETSC_COMM_WORLD,fidoutput," %d %f %f %f\n" ,niter,*(Mixnorm+niter),elapsed_time,elapsed_time/n/n*1e6 );
  }
  PetscFClose(PETSC_COMM_WORLD,fidoutput);

  ///////////////////////////////////////////////////////////////////////////
  if(withMatlab==1){
    VecView(x0,socketviewer);
    PetscScalarView(iter,Mixnorm,socketviewer);
  }

  free(CenterX);
  free(CenterY);
  free(ShiftX);
  free(ShiftY);
  free(x0array);
  free(idx);
  free(idy);
  free(idp);
  free(yarray);
  free(NzindJ);
  free(Mixnorm);

  ierr = VecDestroy(x0);CHKERRQ(ierr);
  ierr = VecDestroy(x);CHKERRQ(ierr);
  ierr = VecDestroy(y0);CHKERRQ(ierr);

  PetscPrintf(PETSC_COMM_WORLD,"Done!");

  //////////////////////////////////////////////////////////////////////////////////////
  ierr = PetscFinalize();CHKERRQ(ierr);
  return 0;
}
int main(int argc,char **args)
{
  PetscInt       rank,size,npt;
  PetscScalar    dx,dy,cx,cy;
  PetscErrorCode ierr;
  Vec            x,x0,tempvec, *vinda,*vindb,*vindc;
  PetscInt       i,j,k,n,n2,pmax,puse,Istart,Iend,localsize,niter;
  PetscScalar    **x0array, **aarray,**barray;
  PetscInt       *cacheInt;
  PetscScalar    *cacheScalar;
  DA             myDA;
  PetscScalar    *Mixnorm;
  PetscInt       iter,*iterind,*nind;
  FILE           *fidoutput, *fidtimelog;
  char           fname[50],ftimelog[50];
  PetscViewer    socketviewer;
  PetscInt       withMatlab, doFFT, doSmoothing;
  PetscTruth     Matlabflag, FFTflag, Smoothingflag;
  PetscInt       timelogcount;
  MPI_Status     status;
  PetscLogDouble v1,v2,elapsed_time;

  timelogcount = 0;

  PetscInitialize(&argc,&args,(char *)0,help);
  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);

  ierr = PetscPrintf(PETSC_COMM_WORLD,"\nPETSC: Petsc Initializes successfully! \n");
  ierr = PetscPrintf(PETSC_COMM_WORLD,"PETSC: comm_size is %d \n", size);

  ierr = PetscOptionsGetInt(PETSC_NULL,"-withMatlab",&withMatlab,&Matlabflag);CHKERRQ(ierr);
  if (Matlabflag == PETSC_FALSE){withMatlab = 0;}else{withMatlab = 1;}
  ierr = PetscOptionsGetInt(PETSC_NULL,"-doFFT",&doFFT,&FFTflag);CHKERRQ(ierr);
  if (FFTflag == PETSC_FALSE){doFFT = 0;}else{doFFT = 1;}
  ierr = PetscOptionsGetInt(PETSC_NULL,"-doSmoothing",&doSmoothing,&Smoothingflag);CHKERRQ(ierr);
  if (Smoothingflag == PETSC_FALSE){doSmoothing = 0;}else{doSmoothing = 1;}

  if(withMatlab==1){
    // Rank 0 connects to socket, use default socket
    PetscViewerSocketOpen(PETSC_COMM_WORLD,0,PETSC_DEFAULT,&socketviewer);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"PETSC: socket opened! \n");CHKERRQ(ierr);
    // Receive n from Matlab
    IntReceive(socketviewer, &nind);
    n = *nind;
    // Receive iter from Matlab
    IntReceive(socketviewer, &iterind);
    iter = *iterind;
  }else{
    ierr = PetscOptionsGetInt(PETSC_NULL,"-ngrid",&n,PETSC_NULL);CHKERRQ(ierr);
    ierr = PetscOptionsGetInt(PETSC_NULL,"-niter",&iter,PETSC_NULL);CHKERRQ(ierr);
  }

  /////////////////////////////////////////////////////////////////////////////////////
  ierr = PetscPrintf(PETSC_COMM_WORLD,"PETSC: number of grid is %d \n", n);
  ierr = PetscPrintf(PETSC_COMM_WORLD,"PETSC: number of iteration is %d \n", iter);

  Mixnorm = malloc(iter*sizeof(PetscScalar));
  dx   = 1.0/n;
  dy   = 1.0/n;
  n2   = (PetscInt)(n*0.5);
  npt  = 5;
  pmax = 5e5;
  puse = pmax;

  PetscInt       logmax = 1000;
  PetscScalar    Timelog[logmax];
  PetscLogDouble t1,t2;

  ierr = PetscPrintf(PETSC_COMM_WORLD,"PETSC: estimated buffer size (per processor) %f Mbytes \n", pmax*1.0/1e6*8*17 );
  ierr = PetscPrintf(PETSC_COMM_WORLD,"PETSC: estimated variable size %f Mbytes\n", 1.0*n*n/1e6*8*1);

  /////////////////////////////////////////////////////////////////////////////////////
  // ierr = VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE ,n,&tempvec);CHKERRQ(ierr);
  // ierr = VecGetOwnershipRange(tempvec,&Istart,&Iend);CHKERRQ(ierr);
  // localsize = Iend-Istart;
  // ierr = VecDestroy(tempvec);CHKERRQ(ierr);
  /////////////////////////////////////////////////////////////////////////////////////
  if(doSmoothing==1){
    ierr = PetscPrintf(PETSC_COMM_WORLD,"\n\n\n\n\nPETSC: Now Do DACreate2d \n\n\n\n" );
    ierr = PetscPrintf(PETSC_COMM_WORLD,"\n\n\n\n\nPETSC: %d %d %d\n\n\n\n",n2,n,size);
    DACreate2d(MPI_COMM_WORLD,DA_XYPERIODIC,DA_STENCIL_BOX,n2,n,1,size,1,2,PETSC_NULL,PETSC_NULL,&myDA);
    DACreateGlobalVector(myDA,&x0);
    DAGetCorners(myDA,PETSC_NULL,&Istart,PETSC_NULL,PETSC_NULL,&localsize,PETSC_NULL);
    Iend = Istart+localsize;
  }else{
    ierr = VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE ,n,&tempvec);CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(tempvec,&Istart,&Iend);CHKERRQ(ierr);
    localsize = Iend-Istart;
    ierr = VecDestroy(tempvec);CHKERRQ(ierr);
    VecCreateMPI(PETSC_COMM_WORLD,localsize*n2,PETSC_DETERMINE ,&x0);
  }
  //ierr = PetscPrintf(PETSC_COMM_WORLD,"\n\n\n\n\nPETSC: So far so good\n\n\n\n");

  // Create initial vector
  VecGetArray2d(x0,n2,localsize,0,0,&x0array);
  for(j=0;j<localsize;j++){
    for(i=0;i<n2;i++){
      cx = (Istart+j+0.5)*dx;
      x0array[i][j] = cos(2*M_PI*cx);
    }
  }
  VecRestoreArray2d(x0,n2,localsize,0,0,&x0array);

  ierr = VecDuplicate(x0,&x);CHKERRQ(ierr);
  ierr = VecNorm(x0,NORM_2,Mixnorm); CHKERRQ(ierr);
  PetscPrintf(PETSC_COMM_WORLD,"PETSC: initial norm= %f \n",*(Mixnorm+0)/n );

  vinda = &x0;
  vindb = &x;

  sprintf(fname, "mixnorm_%d_%d",n,iter);
  ierr = PetscPrintf(PETSC_COMM_WORLD,"\n iter norm time unit time\n");CHKERRQ(ierr);
  ierr = PetscFOpen(PETSC_COMM_WORLD,fname,"w",&fidoutput);CHKERRQ(ierr);

  ///////////////////////////////////////////////////////////////////////////////////////////////////
  // Memory allocation for the iteration scheme
  // cacheInt    = malloc(1*pmax*sizeof(PetscInt));
  // cacheScalar = malloc(2*pmax*sizeof(PetscScalar));
  cacheInt    = malloc(2*pmax*sizeof(PetscInt));
  cacheScalar = malloc(2*pmax*sizeof(PetscScalar));

  ///////////////////////////////////////////////////////////////////////////////////////////////////
  // Iteration here!
  for(niter=0;niter<iter;niter++){
    ierr = PetscGetTime(&v1);CHKERRQ(ierr);

    // BackwardAverage(vinda, vindb, cacheInt, cacheScalar, n, npt, pmax, Istart,Iend);
    // BackwardAverageR(vinda, vindb, cacheInt, cacheScalar, n, npt, pmax, Istart,Iend);
    BackwardAverageRL(vinda, vindb, cacheInt, cacheScalar, n, npt, pmax, Istart,Iend);
    vindc = vindb;
    vindb = vinda;
    vinda = vindc;
    // if(doSmoothing==1){Smoothing(vinda, vindb,n, myDA, Istart,Iend);}

    ierr = PetscGetTime(&v2);CHKERRQ(ierr);
    //vindc = vindb;
    //vindb = vinda;
    //vinda = vindc;

    ierr = VecNorm(*vinda,NORM_2,Mixnorm+niter); CHKERRQ(ierr);
    *(Mixnorm+niter) = *(Mixnorm+niter)/n;

    elapsed_time = v2 - v1;
    PetscPrintf(PETSC_COMM_WORLD," %d %f %f %f \n",niter,*(Mixnorm+niter),elapsed_time,elapsed_time/n/n*1e6 );
    PetscFPrintf(PETSC_COMM_WORLD,fidoutput," %d %f %f %f\n" ,niter,*(Mixnorm+niter),elapsed_time,elapsed_time/n/n*1e6 );
  }

  ////////////////////////////////////////////////////////////////////////////////////////////////////
  // Change orientation of vector
  VecGetArray2d(*vinda,n2,localsize,0,0,&aarray);
  VecGetArray2d(*vindb,localsize,n2,0,0,&barray);
  for(j=0;j<localsize;j++){
    for(i=0;i<n2;i++){
      barray[j][i] = aarray[i][j];
    }
  }
  VecRestoreArray2d(*vinda,n2,localsize,0,0,&aarray);
  VecRestoreArray2d(*vindb,localsize,n2,0,0,&barray);
  vindc = vindb;
  vindb = vinda;
  vinda = vindc;

  ////////////////////////////////////////////////////////////////////////////////////////////////////
  // FFT part
  if(doFFT==1){FFT2D(*vinda,*vindb, localsize, n, Istart,Iend, iter,doSmoothing);}

  ////////////////////////////////////////////////////////////////////////////////////////////////////
  /*
  if(rank==0){
    sprintf(ftimelog, "timelog_%d_%d",n,iter);
    fidtimelog = fopen(ftimelog,"w");
    for(i=0;i<timelogcount;i++){ fprintf(fidtimelog,"%f ",Timelog[i]); }
    fprintf(fidtimelog,"\n ");
    for(j = 1;j<size;j++){
      MPI_Recv(Timelog,timelogcount,MPI_DOUBLE,j,j,PETSC_COMM_WORLD,&status);
      for(i=0;i<timelogcount;i++){ fprintf(fidtimelog,"%f ",Timelog[i]); }
      fprintf(fidtimelog,"\n ");
    }
    fclose(fidtimelog);
  }else{
    MPI_Send(Timelog ,timelogcount,MPI_DOUBLE,0,rank,PETSC_COMM_WORLD);
  }
  PetscFClose(PETSC_COMM_WORLD,fidoutput);
  */

  ///////////////////////////////////////////////////////////////////////////
  if(withMatlab==1){
    VecView(*vinda,socketviewer);
    PetscScalarView(iter,Mixnorm,socketviewer);
  }

  // free(x0array);
  free(Mixnorm);
  free(cacheInt);
  free(cacheScalar);

  ierr = VecDestroy(x0);CHKERRQ(ierr);
  ierr = VecDestroy(x);CHKERRQ(ierr);

  PetscPrintf(PETSC_COMM_WORLD,"Done!");

  //////////////////////////////////////////////////////////////////////////////////////
  ierr = PetscFinalize();CHKERRQ(ierr);
  return 0;
}
int main(int argc,char **args)
{
  Vec            sol,x0,y0,z0;
  PetscInt       rank,size,Istart,Iend,Lsize;
  Mat            Dy,Dz,MarkovA,dAdy,dAdz;
  PetscErrorCode ierr;
  PetscViewer    socketviewer;
  CoordInfo      *cdinfo;   // for streamline calculation
  PetscScalar    *xarray, *yarray,*zarray;
  PetscScalar    *x,*y,*z;
  PetscInt       n, withMatlab;
  PetscTruth     Matlabflag,flg;
  PetscInt       ny,nz;
  PetscInt       solmaxp;
  PetscScalar    solmax;

  n  = 5500;
  withMatlab = 0;
  ny = 2;
  nz = 2;

  PetscInitialize(&argc,&args,(char *)0,help);
  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);

  ierr = PetscPrintf(PETSC_COMM_WORLD,"\nPETSC: Petsc Initializes successfully! \n");
  ierr = PetscPrintf(PETSC_COMM_WORLD,"PETSC: comm_size is %d \n", size);

  //////////////////////////////////////////////////////////////////////////////////
  //////////////////////////////////////////////////////////////////////////////////
  // Use Matlab as output
  ierr = PetscOptionsGetInt(PETSC_NULL,"-withMatlab",&withMatlab,&Matlabflag);CHKERRQ(ierr);
  ierr = PetscOptionsGetInt(PETSC_NULL,"-ngridy",&ny,&flg);CHKERRQ(ierr);
  ierr = PetscOptionsGetInt(PETSC_NULL,"-ngridz",&nz,&flg);CHKERRQ(ierr);
  if (Matlabflag == PETSC_FALSE){withMatlab = 0;}else{withMatlab = 1;}

  if(withMatlab==1){
    // Rank 0 connects to socket, use default socket
    PetscViewerSocketOpen(PETSC_COMM_WORLD,0,PETSC_DEFAULT,&socketviewer);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"PETSC: socket opened! \n");CHKERRQ(ierr);

    // receive cdinfo from Matlab
    ierr = CdinfoReceive(socketviewer, &cdinfo);
    // receive the velocity field from Matlab
    ierr = VecReceive(socketviewer,VECSEQ,&sol);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"PETSC: sol is received from Matlab \n");CHKERRQ(ierr);
    //test
    ierr = VecLoadfromFile(PETSC_COMM_WORLD,"sol",VECSEQ,&sol);CHKERRQ(ierr);
    ierr = CdinfoLoad(PETSC_COMM_WORLD,"cdinfofiled", &cdinfo);CHKERRQ(ierr);

    ierr = VecReceive(socketviewer,VECMPI,&x0);CHKERRQ(ierr);
    ierr = VecReceive(socketviewer,VECMPI,&y0);CHKERRQ(ierr);
    ierr = VecReceive(socketviewer,VECMPI,&z0);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD,"PETSC: x0,y0 and z0 are received from Matlab \n");CHKERRQ(ierr);
  }else{
    // Load cdinfo
    ierr = CdinfoLoad(PETSC_COMM_WORLD,"cdinfofiled", &cdinfo);CHKERRQ(ierr);
    ierr = VecLoadfromFile(PETSC_COMM_WORLD,"sol",VECSEQ,&sol);CHKERRQ(ierr);
    // Load x0, y0, z0 as initial points for streamlines
    ierr = VecLoadfromFile(PETSC_COMM_WORLD,"x0filed",VECMPI,&x0);CHKERRQ(ierr);
    ierr = VecLoadfromFile(PETSC_COMM_WORLD,"y0filed",VECMPI,&y0);CHKERRQ(ierr);
    ierr = VecLoadfromFile(PETSC_COMM_WORLD,"z0filed",VECMPI,&z0);CHKERRQ(ierr);
  }

  ierr = VecMax(sol,&solmaxp,&solmax);CHKERRQ(ierr);

  StreamlineMap(x0, y0, z0, sol, cdinfo, &Dy, &Dz,0.01/solmax, n ,10,&x,&y,&z);
  MarkovMap2d(sol, ny, nz, cdinfo,&MarkovA,&dAdy,&dAdz);

  ierr = VecGetOwnershipRange(x0,&Istart,&Iend);CHKERRQ(ierr);
  Lsize = Iend - Istart;

  if(withMatlab==1){
    VecView(x0,socketviewer);
    VecView(y0,socketviewer);
    VecView(z0,socketviewer);
    VecSave(x0,"xefilew");
    VecSave(y0,"yefilew");
    VecSave(z0,"zefilew");
    PetscScalarView(Lsize*n,x,socketviewer);
    PetscScalarView(Lsize*n,y,socketviewer);
    PetscScalarView(Lsize*n,z,socketviewer);
    SparseView(Dy,socketviewer);
    SparseView(Dz,socketviewer);
  }else{
    SparseSave(MarkovA,"A");
    VecSave(x0,"xefilew");
    VecSave(y0,"yefilew");
    VecSave(z0,"zefilew");

    Vec vx,vy,vz;
    ierr = VecCreateMPIWithArray(PETSC_COMM_WORLD,Lsize*n,PETSC_DECIDE,x,&vx);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(PETSC_COMM_WORLD,Lsize*n,PETSC_DECIDE,y,&vy);CHKERRQ(ierr);
    ierr = VecCreateMPIWithArray(PETSC_COMM_WORLD,Lsize*n,PETSC_DECIDE,z,&vz);CHKERRQ(ierr);
    VecSave(vx,"xeallfilew");
    VecSave(vy,"yeallfilew");
    VecSave(vz,"zeallfilew");
    ierr = VecDestroy(vx);CHKERRQ(ierr);
    ierr = VecDestroy(vy);CHKERRQ(ierr);
    ierr = VecDestroy(vz);CHKERRQ(ierr);
  }

  ierr = VecRestoreArray(x0,&xarray);CHKERRQ(ierr);
  ierr = VecRestoreArray(y0,&yarray);CHKERRQ(ierr);
  ierr = VecRestoreArray(z0,&zarray);CHKERRQ(ierr);

  ierr = VecDestroy(x0);CHKERRQ(ierr);
  ierr = VecDestroy(y0);CHKERRQ(ierr);
  ierr = VecDestroy(z0);CHKERRQ(ierr);
  ierr = MatDestroy(Dy);CHKERRQ(ierr);
  ierr = MatDestroy(Dz);CHKERRQ(ierr);
  ierr = MatDestroy(dAdy);CHKERRQ(ierr);
  ierr = MatDestroy(dAdz);CHKERRQ(ierr);
  ierr = MatDestroy(MarkovA);CHKERRQ(ierr);

  ierr = PetscFinalize();CHKERRQ(ierr);
  return 0;
}