int main(int argc, char *argv[])
{
    superlu_options_t options;
    SuperLUStat_t stat;
    SuperMatrix A;
    ScalePermstruct_t ScalePermstruct;
    LUstruct_t LUstruct;
    SOLVEstruct_t SOLVEstruct;
    gridinfo_t grid1, grid2;
    double *berr;
    doublecomplex *a, *b, *xtrue;
    int_t *asub, *xa;
    int_t i, j, m, n;
    int_t nprow, npcol, ldumap, p;
    int_t usermap[6];
    int iam, info, ldb, ldx, nprocs;
    int nrhs = 1;             /* Number of right-hand sides. */
    char **cpp, c;
    FILE *fp, *fopen();

    /* ------------------------------------------------------------
       INITIALIZE MPI ENVIRONMENT.
       ------------------------------------------------------------*/
    MPI_Init( &argc, &argv );
    MPI_Comm_size( MPI_COMM_WORLD, &nprocs );
    if ( nprocs < 10 ) {
        fprintf(stderr, "Requires at least 10 processes\n");
        exit(-1);
    }

    /* Parse command line argv[]. */
    for (cpp = argv+1; *cpp; ++cpp) {
        if ( **cpp == '-' ) {
            c = *(*cpp+1);
            ++cpp;
            switch (c) {
              case 'h':
                  printf("Options:\n");
                  printf("\t-r <int>: process rows (default %d)\n", nprow);
                  printf("\t-c <int>: process columns (default %d)\n", npcol);
                  exit(0);
                  break;
              case 'r': nprow = atoi(*cpp); break;
              case 'c': npcol = atoi(*cpp); break;
            }
        } else { /* Last arg is considered a filename */
            if ( !(fp = fopen(*cpp, "r")) ) {
                ABORT("File does not exist");
            }
            break;
        }
    }

    /* ------------------------------------------------------------
       INITIALIZE THE SUPERLU PROCESS GRID 1.
       ------------------------------------------------------------*/
    nprow = 2; npcol = 3; ldumap = 2;
    p = 0;    /* Grid 1 starts from process 0. */
    for (i = 0; i < nprow; ++i)
        for (j = 0; j < npcol; ++j) usermap[i+j*ldumap] = p++;
    superlu_gridmap(MPI_COMM_WORLD, nprow, npcol, usermap, ldumap, &grid1);

    /* ------------------------------------------------------------
       INITIALIZE THE SUPERLU PROCESS GRID 2.
       ------------------------------------------------------------*/
    nprow = 2; npcol = 2; ldumap = 2;
    p = 6;    /* Grid 2 starts from process 6. */
    for (i = 0; i < nprow; ++i)
        for (j = 0; j < npcol; ++j) usermap[i+j*ldumap] = p++;
    superlu_gridmap(MPI_COMM_WORLD, nprow, npcol, usermap, ldumap, &grid2);

    /* Bail out if I do not belong in any of the 2 grids. */
    MPI_Comm_rank( MPI_COMM_WORLD, &iam );
    if ( iam >= 10 ) goto out;

#if ( DEBUGlevel>=1 )
    CHECK_MALLOC(iam, "Enter main()");
#endif

    if ( iam >= 0 && iam < 6 ) { /* I am in grid 1. */
        iam = grid1.iam;  /* Get the logical number in the new grid. */

        /* ------------------------------------------------------------
           GET THE MATRIX FROM FILE AND SETUP THE RIGHT HAND SIDE.
           ------------------------------------------------------------*/
        zcreate_matrix(&A, nrhs, &b, &ldb, &xtrue, &ldx, fp, &grid1);

        if ( !(berr = doubleMalloc_dist(nrhs)) )
            ABORT("Malloc fails for berr[].");

        /* ------------------------------------------------------------
           NOW WE SOLVE THE LINEAR SYSTEM.
           ------------------------------------------------------------*/

        /* Set the default input options:
             options.Fact = DOFACT;
             options.Equil = YES;
             options.ColPerm = MMD_AT_PLUS_A;
             options.RowPerm = LargeDiag;
             options.ReplaceTinyPivot = YES;
             options.Trans = NOTRANS;
             options.IterRefine = DOUBLE;
             options.SolveInitialized = NO;
             options.RefineInitialized = NO;
             options.PrintStat = YES;
         */
        set_default_options_dist(&options);

        m = A.nrow; n = A.ncol;

        /* Initialize ScalePermstruct and LUstruct. */
        ScalePermstructInit(m, n, &ScalePermstruct);
        LUstructInit(m, n, &LUstruct);

        /* Initialize the statistics variables. */
        PStatInit(&stat);

        /* Call the linear equation solver. */
        pzgssvx(&options, &A, &ScalePermstruct, b, ldb, nrhs, &grid1,
                &LUstruct, &SOLVEstruct, berr, &stat, &info);

        /* Check the accuracy of the solution. */
        pzinf_norm_error(iam, ((NRformat_loc *)A.Store)->m_loc,
                         nrhs, b, ldb, xtrue, ldx, &grid1);

        /* Print the statistics. */
        PStatPrint(&options, &stat, &grid1);

        /* ------------------------------------------------------------
           DEALLOCATE STORAGE.
           ------------------------------------------------------------*/
        PStatFree(&stat);
        Destroy_CompRowLoc_Matrix_dist(&A);
        ScalePermstructFree(&ScalePermstruct);
        Destroy_LU(n, &grid1, &LUstruct);
        LUstructFree(&LUstruct);
        if ( options.SolveInitialized ) {
            zSolveFinalize(&options, &SOLVEstruct);
        }
        SUPERLU_FREE(b);
        SUPERLU_FREE(xtrue);
        SUPERLU_FREE(berr);

    } else { /* I am in grid 2. */
        iam = grid2.iam;  /* Get the logical number in the new grid. */

        /* ------------------------------------------------------------
           GET THE MATRIX FROM FILE AND SETUP THE RIGHT HAND SIDE.
           ------------------------------------------------------------*/
        zcreate_matrix(&A, nrhs, &b, &ldb, &xtrue, &ldx, fp, &grid2);

        if ( !(berr = doubleMalloc_dist(nrhs)) )
            ABORT("Malloc fails for berr[].");

        /* ------------------------------------------------------------
           NOW WE SOLVE THE LINEAR SYSTEM.
           ------------------------------------------------------------*/

        /* Set the default input options:
             options.Fact = DOFACT;
             options.Equil = YES;
             options.ColPerm = MMD_AT_PLUS_A;
             options.RowPerm = LargeDiag;
             options.ReplaceTinyPivot = YES;
             options.Trans = NOTRANS;
             options.IterRefine = DOUBLE;
             options.SolveInitialized = NO;
             options.RefineInitialized = NO;
             options.PrintStat = YES;
         */
        set_default_options_dist(&options);

        m = A.nrow; n = A.ncol;

        /* Initialize ScalePermstruct and LUstruct. */
        ScalePermstructInit(m, n, &ScalePermstruct);
        LUstructInit(m, n, &LUstruct);

        /* Initialize the statistics variables. */
        PStatInit(&stat);

        /* Call the linear equation solver. */
        pzgssvx(&options, &A, &ScalePermstruct, b, ldb, nrhs, &grid2,
                &LUstruct, &SOLVEstruct, berr, &stat, &info);

        /* Check the accuracy of the solution. */
        pzinf_norm_error(iam, ((NRformat_loc *)A.Store)->m_loc,
                         nrhs, b, ldb, xtrue, ldx, &grid2);

        /* Print the statistics. */
        PStatPrint(&options, &stat, &grid2);

        /* ------------------------------------------------------------
           DEALLOCATE STORAGE.
           ------------------------------------------------------------*/
        PStatFree(&stat);
        Destroy_CompRowLoc_Matrix_dist(&A);
        ScalePermstructFree(&ScalePermstruct);
        Destroy_LU(n, &grid2, &LUstruct);
        LUstructFree(&LUstruct);
        if ( options.SolveInitialized ) {
            zSolveFinalize(&options, &SOLVEstruct);
        }
        SUPERLU_FREE(b);
        SUPERLU_FREE(xtrue);
        SUPERLU_FREE(berr);
    }

    /* ------------------------------------------------------------
       RELEASE THE SUPERLU PROCESS GRIDS.
       ------------------------------------------------------------*/
    superlu_gridexit(&grid1);
    superlu_gridexit(&grid2);

out:
    /* ------------------------------------------------------------
       TERMINATE THE MPI EXECUTION ENVIRONMENT.
       ------------------------------------------------------------*/
    MPI_Finalize();

#if ( DEBUGlevel>=1 )
    CHECK_MALLOC(iam, "Exit main()");
#endif
}
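The usermap arrays above are laid out column by column: the world rank assigned to grid position (i, j) is stored in usermap[i + j*ldumap]. With the loops above, grid 1 places MPI ranks 0-5 row by row on a 2 x 3 grid and grid 2 places ranks 6-9 on a 2 x 2 grid. The following standalone snippet is an illustration only, not part of the example; it simply prints the two mappings that are handed to superlu_gridmap:

#include <stdio.h>

/* Illustration only: print the world rank assigned to each (row, column)
   position of the two grids built in the example above. */
int main(void)
{
    int usermap[6], ldumap = 2, i, j, p;

    p = 0;                                   /* grid 1: 2 x 3, ranks 0..5 */
    for (i = 0; i < 2; ++i)
        for (j = 0; j < 3; ++j) usermap[i + j*ldumap] = p++;
    for (i = 0; i < 2; ++i) {
        for (j = 0; j < 3; ++j) printf(" %d", usermap[i + j*ldumap]);
        printf("\n");                        /* prints "0 1 2" then "3 4 5" */
    }

    p = 6;                                   /* grid 2: 2 x 2, ranks 6..9 */
    for (i = 0; i < 2; ++i)
        for (j = 0; j < 2; ++j) usermap[i + j*ldumap] = p++;
    for (i = 0; i < 2; ++i) {
        for (j = 0; j < 2; ++j) printf(" %d", usermap[i + j*ldumap]);
        printf("\n");                        /* prints "6 7" then "8 9" */
    }
    return 0;
}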
int main(int argc, char *argv[])
{
    superlu_options_t options;
    SuperLUStat_t stat;
    SuperMatrix A;
    NRformat_loc *Astore;
    ScalePermstruct_t ScalePermstruct;
    LUstruct_t LUstruct;
    SOLVEstruct_t SOLVEstruct;
    gridinfo_t grid;
    double *berr;
    doublecomplex *b, *b1, *xtrue, *nzval, *nzval1;
    int_t *colind, *colind1, *rowptr, *rowptr1;
    int_t i, j, m, n, nnz_loc, m_loc, fst_row;
    int_t nprow, npcol;
    int iam, info, ldb, ldx, nrhs;
    char **cpp, c;
    FILE *fp, *fopen();

    nprow = 1;  /* Default process rows.       */
    npcol = 1;  /* Default process columns.    */
    nrhs  = 1;  /* Number of right-hand sides. */

    /* ------------------------------------------------------------
       INITIALIZE MPI ENVIRONMENT.
       ------------------------------------------------------------*/
    MPI_Init( &argc, &argv );

    /* Parse command line argv[]. */
    for (cpp = argv+1; *cpp; ++cpp) {
        if ( **cpp == '-' ) {
            c = *(*cpp+1);
            ++cpp;
            switch (c) {
              case 'h':
                  printf("Options:\n");
                  printf("\t-r <int>: process rows (default %d)\n", nprow);
                  printf("\t-c <int>: process columns (default %d)\n", npcol);
                  exit(0);
                  break;
              case 'r': nprow = atoi(*cpp); break;
              case 'c': npcol = atoi(*cpp); break;
            }
        } else { /* Last arg is considered a filename */
            if ( !(fp = fopen(*cpp, "r")) ) {
                ABORT("File does not exist");
            }
            break;
        }
    }

    /* ------------------------------------------------------------
       INITIALIZE THE SUPERLU PROCESS GRID.
       ------------------------------------------------------------*/
    superlu_gridinit(MPI_COMM_WORLD, nprow, npcol, &grid);

    /* Bail out if I do not belong in the grid. */
    iam = grid.iam;
    if ( iam >= nprow * npcol ) goto out;
    if ( !iam ) printf("\tProcess grid\t%d X %d\n", grid.nprow, grid.npcol);

#if ( DEBUGlevel>=1 )
    CHECK_MALLOC(iam, "Enter main()");
#endif

    /* ------------------------------------------------------------
       GET THE MATRIX FROM FILE AND SETUP THE RIGHT HAND SIDE.
       ------------------------------------------------------------*/
    zcreate_matrix(&A, nrhs, &b, &ldb, &xtrue, &ldx, fp, &grid);
    if ( !(b1 = doublecomplexMalloc_dist(ldb * nrhs)) )
        ABORT("Malloc fails for b1[]");
    for (j = 0; j < nrhs; ++j)
        for (i = 0; i < ldb; ++i) b1[i+j*ldb] = b[i+j*ldb];

    if ( !(berr = doubleMalloc_dist(nrhs)) )
        ABORT("Malloc fails for berr[].");

    m = A.nrow; n = A.ncol;

    /* Save a copy of the matrix A. */
    Astore  = (NRformat_loc *) A.Store;
    nnz_loc = Astore->nnz_loc;
    m_loc   = Astore->m_loc;
    fst_row = Astore->fst_row;
    nzval   = Astore->nzval;
    colind  = Astore->colind;
    rowptr  = Astore->rowptr;
    nzval1  = doublecomplexMalloc_dist(nnz_loc);
    colind1 = intMalloc_dist(nnz_loc);
    rowptr1 = intMalloc_dist(m_loc+1);
    for (i = 0; i < nnz_loc; ++i) {
        nzval1[i]  = nzval[i];
        colind1[i] = colind[i];
    }
    for (i = 0; i < m_loc+1; ++i) rowptr1[i] = rowptr[i];

    /* ------------------------------------------------------------
       WE SOLVE THE LINEAR SYSTEM FOR THE FIRST TIME.
       ------------------------------------------------------------*/

    /* Set the default input options:
         options.Fact = DOFACT;
         options.Equil = YES;
         options.ColPerm = MMD_AT_PLUS_A;
         options.RowPerm = LargeDiag;
         options.ReplaceTinyPivot = YES;
         options.Trans = NOTRANS;
         options.IterRefine = DOUBLE;
         options.SolveInitialized = NO;
         options.RefineInitialized = NO;
         options.PrintStat = YES;
     */
    set_default_options_dist(&options);

    /* Initialize ScalePermstruct and LUstruct. */
    ScalePermstructInit(m, n, &ScalePermstruct);
    LUstructInit(m, n, &LUstruct);

    /* Initialize the statistics variables. */
    PStatInit(&stat);

    /* Call the linear equation solver: factorize and solve. */
    pzgssvx(&options, &A, &ScalePermstruct, b, ldb, nrhs, &grid,
            &LUstruct, &SOLVEstruct, berr, &stat, &info);

    /* Check the accuracy of the solution. */
    pzinf_norm_error(iam, m_loc, nrhs, b, ldb, xtrue, ldx, &grid);

    PStatPrint(&options, &stat, &grid);   /* Print the statistics. */
    PStatFree(&stat);
    Destroy_CompRowLoc_Matrix_dist(&A);   /* Deallocate storage of matrix A. */
    SUPERLU_FREE(b);                      /* Free storage of right-hand side. */

    /* ------------------------------------------------------------
       NOW WE SOLVE ANOTHER LINEAR SYSTEM.
       THE MATRIX A HAS THE SAME SPARSITY PATTERN AND SIMILAR
       NUMERICAL VALUES AS THE PREVIOUS SYSTEM.
       ------------------------------------------------------------*/
    options.Fact = SamePattern_SameRowPerm;
    PStatInit(&stat);                     /* Initialize the statistics variables. */

    /* Set up the local A in NR_loc format. */
    zCreate_CompRowLoc_Matrix_dist(&A, m, n, nnz_loc, m_loc, fst_row,
                                   nzval1, colind1, rowptr1,
                                   SLU_NR_loc, SLU_Z, SLU_GE);

    /* Solve the linear system. */
    pzgssvx(&options, &A, &ScalePermstruct, b1, ldb, nrhs, &grid,
            &LUstruct, &SOLVEstruct, berr, &stat, &info);

    /* Check the accuracy of the solution. */
    if ( !iam )
        printf("Solve a system with the same pattern and similar values.\n");
    pzinf_norm_error(iam, m_loc, nrhs, b1, ldb, xtrue, ldx, &grid);

    /* Print the statistics. */
    PStatPrint(&options, &stat, &grid);

    /* ------------------------------------------------------------
       DEALLOCATE STORAGE.
       ------------------------------------------------------------*/
    PStatFree(&stat);
    Destroy_CompRowLoc_Matrix_dist(&A);    /* Deallocate storage of matrix A. */
    Destroy_LU(n, &grid, &LUstruct);       /* Deallocate storage associated with
                                              the L and U matrices. */
    ScalePermstructFree(&ScalePermstruct);
    LUstructFree(&LUstruct);               /* Deallocate the structure of L and U. */
    if ( options.SolveInitialized ) {
        zSolveFinalize(&options, &SOLVEstruct);
    }
    SUPERLU_FREE(b1);                      /* Free storage of right-hand side. */
    SUPERLU_FREE(xtrue);                   /* Free storage of the exact solution. */
    SUPERLU_FREE(berr);

    /* ------------------------------------------------------------
       RELEASE THE SUPERLU PROCESS GRID.
       ------------------------------------------------------------*/
out:
    superlu_gridexit(&grid);

    /* ------------------------------------------------------------
       TERMINATE THE MPI EXECUTION ENVIRONMENT.
       ------------------------------------------------------------*/
    MPI_Finalize();

#if ( DEBUGlevel>=1 )
    CHECK_MALLOC(iam, "Exit main()");
#endif
}
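This example reuses both the sparsity pattern and the row permutation (options.Fact = SamePattern_SameRowPerm). If only the sparsity pattern can be reused, the second factorization would use options.Fact = SamePattern instead, and the L and U factors from the first solve have to be discarded first; this is the same sequence the PETSc routine below follows for that case. The fragment here is a minimal sketch of how the second solve above would change, reusing the example's variable names, and it assumes the solve structure from the first solve should also be released because the row permutation may change:

    /* Sketch only: reuse the sparsity pattern but recompute the row
       permutation before the second factorization. */
    Destroy_LU(n, &grid, &LUstruct);      /* drop L and U from the first solve */
    if ( options.SolveInitialized ) {     /* assumption: release the old solve setup */
        zSolveFinalize(&options, &SOLVEstruct);
    }
    options.Fact = SamePattern;           /* column permutation perm_c is kept */

    PStatInit(&stat);
    zCreate_CompRowLoc_Matrix_dist(&A, m, n, nnz_loc, m_loc, fst_row,
                                   nzval1, colind1, rowptr1,
                                   SLU_NR_loc, SLU_Z, SLU_GE);
    pzgssvx(&options, &A, &ScalePermstruct, b1, ldb, nrhs, &grid,
            &LUstruct, &SOLVEstruct, berr, &stat, &info);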
PetscErrorCode MatLUFactorNumeric_SuperLU_DIST(Mat F,Mat A,const MatFactorInfo *info)
{
  Mat              *tseq,A_seq = NULL;
  Mat_SeqAIJ       *aa,*bb;
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)(F)->spptr;
  PetscErrorCode   ierr;
  PetscInt         M=A->rmap->N,N=A->cmap->N,i,*ai,*aj,*bi,*bj,nz,rstart,*garray,
                   m=A->rmap->n,colA_start,j,jcol,jB,countA,countB,*bjj,*ajj;
  int              sinfo;  /* SuperLU_Dist info flag is always an int even with long long indices */
  PetscMPIInt      size;
  SuperLUStat_t    stat;
  double           *berr=0;
  IS               isrow;
  Mat              F_diag=NULL;
#if defined(PETSC_USE_COMPLEX)
  doublecomplex    *av, *bv;
#else
  double           *av, *bv;
#endif

  PetscFunctionBegin;
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);

  if (lu->MatInputMode == GLOBAL) { /* global mat input */
    if (size > 1) { /* convert mpi A to seq mat A */
      ierr = ISCreateStride(PETSC_COMM_SELF,M,0,1,&isrow);CHKERRQ(ierr);
      ierr = MatGetSubMatrices(A,1,&isrow,&isrow,MAT_INITIAL_MATRIX,&tseq);CHKERRQ(ierr);
      ierr = ISDestroy(&isrow);CHKERRQ(ierr);

      A_seq = *tseq;
      ierr  = PetscFree(tseq);CHKERRQ(ierr);
      aa    = (Mat_SeqAIJ*)A_seq->data;
    } else {
      PetscBool flg;
      ierr = PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&flg);CHKERRQ(ierr);
      if (flg) {
        Mat_MPIAIJ *At = (Mat_MPIAIJ*)A->data;
        A = At->A;
      }
      aa = (Mat_SeqAIJ*)A->data;
    }

    /* Convert Petsc NR matrix to SuperLU_DIST NC.
       Note: memory for lu->val, col and row is allocated by CompRow_to_CompCol_dist()! */
    if (lu->options.Fact != DOFACT) { /* successive numeric factorization, sparsity pattern is reused. */
      PetscStackCall("SuperLU_DIST:Destroy_CompCol_Matrix_dist",Destroy_CompCol_Matrix_dist(&lu->A_sup));
      if (lu->FactPattern == SamePattern_SameRowPerm) {
        lu->options.Fact = SamePattern_SameRowPerm; /* matrix has similar numerical values */
      } else { /* lu->FactPattern == SamePattern */
        PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(N, &lu->grid, &lu->LUstruct));
        lu->options.Fact = SamePattern;
      }
    }
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:zCompRow_to_CompCol_dist",zCompRow_to_CompCol_dist(M,N,aa->nz,(doublecomplex*)aa->a,(int_t*)aa->j,(int_t*)aa->i,&lu->val,&lu->col,&lu->row));
#else
    PetscStackCall("SuperLU_DIST:dCompRow_to_CompCol_dist",dCompRow_to_CompCol_dist(M,N,aa->nz,aa->a,(int_t*)aa->j,(int_t*)aa->i,&lu->val,&lu->col,&lu->row));
#endif

    /* Create compressed column matrix A_sup. */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:zCreate_CompCol_Matrix_dist",zCreate_CompCol_Matrix_dist(&lu->A_sup, M, N, aa->nz, lu->val, lu->col, lu->row, SLU_NC, SLU_Z, SLU_GE));
#else
    PetscStackCall("SuperLU_DIST:dCreate_CompCol_Matrix_dist",dCreate_CompCol_Matrix_dist(&lu->A_sup, M, N, aa->nz, lu->val, lu->col, lu->row, SLU_NC, SLU_D, SLU_GE));
#endif
  } else { /* distributed mat input */
    Mat_MPIAIJ *mat = (Mat_MPIAIJ*)A->data;
    aa = (Mat_SeqAIJ*)(mat->A)->data;
    bb = (Mat_SeqAIJ*)(mat->B)->data;
    ai = aa->i; aj = aa->j;
    bi = bb->i; bj = bb->j;
#if defined(PETSC_USE_COMPLEX)
    av = (doublecomplex*)aa->a;
    bv = (doublecomplex*)bb->a;
#else
    av = aa->a;
    bv = bb->a;
#endif
    rstart = A->rmap->rstart;
    nz     = aa->nz + bb->nz;
    garray = mat->garray;

    if (lu->options.Fact == DOFACT) { /* first numeric factorization */
#if defined(PETSC_USE_COMPLEX)
      PetscStackCall("SuperLU_DIST:zallocateA_dist",zallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row));
#else
      PetscStackCall("SuperLU_DIST:dallocateA_dist",dallocateA_dist(m, nz, &lu->val, &lu->col, &lu->row));
#endif
    } else { /* successive numeric factorization, sparsity pattern and perm_c are reused. */
      /* Destroy_CompRowLoc_Matrix_dist(&lu->A_sup); */ /* this leads to crash! However,
                                                           see SuperLU_DIST_2.5/EXAMPLE/pzdrive2.c */
      if (lu->FactPattern == SamePattern_SameRowPerm) {
        lu->options.Fact = SamePattern_SameRowPerm; /* matrix has similar numerical values */
      } else {
        PetscStackCall("SuperLU_DIST:Destroy_LU",Destroy_LU(N, &lu->grid, &lu->LUstruct)); /* Deallocate storage associated with the L and U matrices. */
        lu->options.Fact = SamePattern;
      }
    }

    nz = 0;
    for (i=0; i<m; i++) {
      lu->row[i] = nz;
      countA     = ai[i+1] - ai[i];
      countB     = bi[i+1] - bi[i];
      ajj        = aj + ai[i];  /* ptr to the beginning of this row */
      bjj        = bj + bi[i];

      /* B part, smaller col index */
      colA_start = rstart + ajj[0];  /* the smallest global col index of A */
      jB         = 0;
      for (j=0; j<countB; j++) {
        jcol = garray[bjj[j]];
        if (jcol > colA_start) {
          jB = j;
          break;
        }
        lu->col[nz]   = jcol;
        lu->val[nz++] = *bv++;
        if (j==countB-1) jB = countB;
      }

      /* A part */
      for (j=0; j<countA; j++) {
        lu->col[nz]   = rstart + ajj[j];
        lu->val[nz++] = *av++;
      }

      /* B part, larger col index */
      for (j=jB; j<countB; j++) {
        lu->col[nz]   = garray[bjj[j]];
        lu->val[nz++] = *bv++;
      }
    }
    lu->row[m] = nz;

#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:zCreate_CompRowLoc_Matrix_dist",zCreate_CompRowLoc_Matrix_dist(&lu->A_sup, M, N, nz, m, rstart, lu->val, lu->col, lu->row, SLU_NR_loc, SLU_Z, SLU_GE));
#else
    PetscStackCall("SuperLU_DIST:dCreate_CompRowLoc_Matrix_dist",dCreate_CompRowLoc_Matrix_dist(&lu->A_sup, M, N, nz, m, rstart, lu->val, lu->col, lu->row, SLU_NR_loc, SLU_D, SLU_GE));
#endif
  }

  /* Factor the matrix. */
  PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat)); /* Initialize the statistics variables. */
  if (lu->MatInputMode == GLOBAL) { /* global mat input */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx_ABglobal",pzgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, M, 0, &lu->grid, &lu->LUstruct, berr, &stat, &sinfo));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx_ABglobal",pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, M, 0, &lu->grid, &lu->LUstruct, berr, &stat, &sinfo));
#endif
  } else { /* distributed mat input */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, m, 0, &lu->grid, &lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &sinfo));
    if (sinfo) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pzgssvx fails, info: %d\n",sinfo);
#else
    PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options, &lu->A_sup, &lu->ScalePermstruct, 0, m, 0, &lu->grid, &lu->LUstruct, &lu->SOLVEstruct, berr, &stat, &sinfo));
    if (sinfo) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pdgssvx fails, info: %d\n",sinfo);
#endif
  }

  if (lu->MatInputMode == GLOBAL && size > 1) {
    ierr = MatDestroy(&A_seq);CHKERRQ(ierr);
  }

  if (lu->options.PrintStat) {
    PStatPrint(&lu->options, &stat, &lu->grid); /* Print the statistics. */
  }
  PStatFree(&stat);
  if (size > 1) {
    F_diag            = ((Mat_MPIAIJ*)(F)->data)->A;
    F_diag->assembled = PETSC_TRUE;
  }
  (F)->assembled    = PETSC_TRUE;
  (F)->preallocated = PETSC_TRUE;

  lu->options.Fact = FACTORED; /* The factored form of A is supplied. Local option used by this func. only */
  PetscFunctionReturn(0);
}
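For orientation, application code normally reaches MatLUFactorNumeric_SuperLU_DIST() through PETSc's KSP/PC layer rather than by calling it directly. The helper below is a minimal sketch, not taken from the source above, written against the PETSc generation of these listings; the function name SolveWithSuperLUDist is made up here, and routine names such as PCFactorSetMatSolverPackage are the ones of that era (later releases rename them):

#include <petscksp.h>

/* Hedged sketch: solve A x = b with the SuperLU_DIST back end, so that
   KSPSolve() ends up driving MatLUFactorNumeric_SuperLU_DIST() and
   MatSolve_SuperLU_DIST() shown in these listings.  A is assumed to be
   an assembled (MPI)AIJ matrix; b and x are conforming vectors. */
static PetscErrorCode SolveWithSuperLUDist(Mat A,Vec b,Vec x)
{
  PetscErrorCode ierr;
  KSP            ksp;
  PC             pc;

  PetscFunctionBegin;
  ierr = KSPCreate(PetscObjectComm((PetscObject)A),&ksp);CHKERRQ(ierr);
  ierr = KSPSetOperators(ksp,A,A);CHKERRQ(ierr);
  ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);    /* direct solve: factor once, then back-substitute */
  ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
  ierr = PCSetType(pc,PCLU);CHKERRQ(ierr);
  ierr = PCFactorSetMatSolverPackage(pc,MATSOLVERSUPERLU_DIST);CHKERRQ(ierr);
  ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);        /* pick up -mat_superlu_dist_* runtime options */
  ierr = KSPSolve(ksp,b,x);CHKERRQ(ierr);
  ierr = KSPDestroy(&ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

With KSPPREONLY and PCLU, a single KSPSolve() performs one numeric factorization (the routine above) followed by one call to MatSolve_SuperLU_DIST(), listed next.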
PetscErrorCode MatMatSolve_SuperLU_DIST(Mat A,Mat B_mpi,Mat X)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->spptr;
  PetscErrorCode   ierr;
  PetscMPIInt      size;
  PetscInt         M=A->rmap->N,m=A->rmap->n,nrhs;
  SuperLUStat_t    stat;
  double           berr[1];
  PetscScalar      *bptr;
  int              info;  /* SuperLU_Dist info code is ALWAYS an int, even with long long indices */
  PetscBool        flg;

  PetscFunctionBegin;
  if (lu->options.Fact != FACTORED) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"SuperLU_DIST options.Fact must equal FACTORED");
  ierr = PetscObjectTypeCompareAny((PetscObject)B_mpi,&flg,MATSEQDENSE,MATMPIDENSE,NULL);CHKERRQ(ierr);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix B must be MATDENSE matrix");
  ierr = PetscObjectTypeCompareAny((PetscObject)X,&flg,MATSEQDENSE,MATMPIDENSE,NULL);CHKERRQ(ierr);
  if (!flg) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONG,"Matrix X must be MATDENSE matrix");

  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
  if (size > 1 && lu->MatInputMode == GLOBAL) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatInputMode=GLOBAL for nproc %d>1 is not supported",size);
  /* size==1 or distributed mat input */
  if (lu->options.SolveInitialized && !lu->matmatsolve_iscalled) {
    /* communication pattern of SOLVEstruct is unlikely created for matmatsolve,
       thus destroy it and create a new SOLVEstruct.
       Otherwise it may result in memory corruption or incorrect solution.
       See src/mat/examples/tests/ex125.c */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct));
#else
    PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct));
#endif
    lu->options.SolveInitialized = NO;
  }
  ierr = MatCopy(B_mpi,X,SAME_NONZERO_PATTERN);CHKERRQ(ierr);

  ierr = MatGetSize(B_mpi,NULL,&nrhs);CHKERRQ(ierr);

  PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat)); /* Initialize the statistics variables. */
  ierr = MatDenseGetArray(X,&bptr);CHKERRQ(ierr);

  if (lu->MatInputMode == GLOBAL) { /* size == 1 */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx_ABglobal",pzgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,(doublecomplex*)bptr, M, nrhs, &lu->grid, &lu->LUstruct, berr, &stat, &info));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx_ABglobal",pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,bptr, M, nrhs, &lu->grid, &lu->LUstruct, berr, &stat, &info));
#endif
  } else { /* distributed mat input */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#endif
  }
  if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pdgssvx fails, info: %d\n",info);
  ierr = MatDenseRestoreArray(X,&bptr);CHKERRQ(ierr);

  if (lu->options.PrintStat) PStatPrint(&lu->options, &stat, &lu->grid); /* Print the statistics. */
  PetscStackCall("SuperLU_DIST:PStatFree",PStatFree(&stat));

  lu->matsolve_iscalled    = PETSC_FALSE;
  lu->matmatsolve_iscalled = PETSC_TRUE;
  PetscFunctionReturn(0);
}
PetscErrorCode MatSolve_SuperLU_DIST(Mat A,Vec b_mpi,Vec x)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->spptr;
  PetscErrorCode   ierr;
  PetscMPIInt      size;
  PetscInt         m=A->rmap->n,M=A->rmap->N,N=A->cmap->N;
  SuperLUStat_t    stat;
  double           berr[1];
  PetscScalar      *bptr;
  PetscInt         nrhs=1;
  Vec              x_seq;
  IS               iden;
  VecScatter       scat;
  int              info;  /* SuperLU_Dist info code is ALWAYS an int, even with long long indices */
  static PetscBool cite = PETSC_FALSE;

  PetscFunctionBegin;
  ierr = PetscCitationsRegister("@article{lidemmel03,\n author = {Xiaoye S. Li and James W. Demmel},\n title = {{SuperLU_DIST}: A Scalable Distributed-Memory Sparse Direct\n Solver for Unsymmetric Linear Systems},\n journal = {ACM Trans. Mathematical Software},\n volume = {29},\n number = {2},\n pages = {110-140},\n year = 2003\n}\n",&cite);CHKERRQ(ierr);

  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);

  if (size > 1 && lu->MatInputMode == GLOBAL) { /* global mat input, convert b to x_seq */
    ierr = VecCreateSeq(PETSC_COMM_SELF,N,&x_seq);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,N,0,1,&iden);CHKERRQ(ierr);
    ierr = VecScatterCreate(b_mpi,iden,x_seq,iden,&scat);CHKERRQ(ierr);
    ierr = ISDestroy(&iden);CHKERRQ(ierr);

    ierr = VecScatterBegin(scat,b_mpi,x_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat,b_mpi,x_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecGetArray(x_seq,&bptr);CHKERRQ(ierr);
  } else { /* size==1 || distributed mat input */
    if (lu->options.SolveInitialized && !lu->matsolve_iscalled) {
      /* see comments in MatMatSolve() */
#if defined(PETSC_USE_COMPLEX)
      PetscStackCall("SuperLU_DIST:zSolveFinalize",zSolveFinalize(&lu->options, &lu->SOLVEstruct));
#else
      PetscStackCall("SuperLU_DIST:dSolveFinalize",dSolveFinalize(&lu->options, &lu->SOLVEstruct));
#endif
      lu->options.SolveInitialized = NO;
    }
    ierr = VecCopy(b_mpi,x);CHKERRQ(ierr);
    ierr = VecGetArray(x,&bptr);CHKERRQ(ierr);
  }

  if (lu->options.Fact != FACTORED) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"SuperLU_DIST options.Fact must equal FACTORED");

  PetscStackCall("SuperLU_DIST:PStatInit",PStatInit(&stat)); /* Initialize the statistics variables. */
  if (lu->MatInputMode == GLOBAL) {
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx_ABglobal",pzgssvx_ABglobal(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,M,nrhs,&lu->grid,&lu->LUstruct,berr,&stat,&info));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx_ABglobal",pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,bptr,M,nrhs,&lu->grid,&lu->LUstruct,berr,&stat,&info));
#endif
  } else { /* distributed mat input */
#if defined(PETSC_USE_COMPLEX)
    PetscStackCall("SuperLU_DIST:pzgssvx",pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#else
    PetscStackCall("SuperLU_DIST:pdgssvx",pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,m,nrhs,&lu->grid,&lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info));
#endif
  }
  if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pdgssvx fails, info: %d\n",info);

  if (lu->options.PrintStat) PStatPrint(&lu->options, &stat, &lu->grid); /* Print the statistics. */
  PetscStackCall("SuperLU_DIST:PStatFree",PStatFree(&stat));

  if (size > 1 && lu->MatInputMode == GLOBAL) { /* convert seq x to mpi x */
    ierr = VecRestoreArray(x_seq,&bptr);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat,x_seq,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat,x_seq,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat);CHKERRQ(ierr);
    ierr = VecDestroy(&x_seq);CHKERRQ(ierr);
  } else {
    ierr = VecRestoreArray(x,&bptr);CHKERRQ(ierr);

    lu->matsolve_iscalled    = PETSC_TRUE;
    lu->matmatsolve_iscalled = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
}
PetscErrorCode MatSolve_SuperLU_DIST(Mat A,Vec b_mpi,Vec x)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->spptr;
  PetscErrorCode   ierr;
  PetscMPIInt      size;
  PetscInt         m=A->rmap->n,M=A->rmap->N,N=A->cmap->N;
  SuperLUStat_t    stat;
  double           berr[1];
  PetscScalar      *bptr;
  PetscInt         nrhs=1;
  Vec              x_seq;
  IS               iden;
  VecScatter       scat;
  int              info;  /* SuperLU_Dist info code is ALWAYS an int, even with long long indices */

  PetscFunctionBegin;
  ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
  if (size > 1 && lu->MatInputMode == GLOBAL) { /* global mat input, convert b to x_seq */
    ierr = VecCreateSeq(PETSC_COMM_SELF,N,&x_seq);CHKERRQ(ierr);
    ierr = ISCreateStride(PETSC_COMM_SELF,N,0,1,&iden);CHKERRQ(ierr);
    ierr = VecScatterCreate(b_mpi,iden,x_seq,iden,&scat);CHKERRQ(ierr);
    ierr = ISDestroy(&iden);CHKERRQ(ierr);

    ierr = VecScatterBegin(scat,b_mpi,x_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat,b_mpi,x_seq,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecGetArray(x_seq,&bptr);CHKERRQ(ierr);
  } else { /* size==1 || distributed mat input */
    if (lu->options.SolveInitialized && !lu->matsolve_iscalled) {
      /* see comments in MatMatSolve() */
#if defined(PETSC_USE_COMPLEX)
      zSolveFinalize(&lu->options, &lu->SOLVEstruct);
#else
      dSolveFinalize(&lu->options, &lu->SOLVEstruct);
#endif
      lu->options.SolveInitialized = NO;
    }
    ierr = VecCopy(b_mpi,x);CHKERRQ(ierr);
    ierr = VecGetArray(x,&bptr);CHKERRQ(ierr);
  }

  if (lu->options.Fact != FACTORED) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"SuperLU_DIST options.Fact must equal FACTORED");

  PStatInit(&stat); /* Initialize the statistics variables. */
  if (lu->MatInputMode == GLOBAL) {
#if defined(PETSC_USE_COMPLEX)
    pzgssvx_ABglobal(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,M,nrhs,
                     &lu->grid,&lu->LUstruct,berr,&stat,&info);
#else
    pdgssvx_ABglobal(&lu->options, &lu->A_sup, &lu->ScalePermstruct,bptr,M,nrhs,
                     &lu->grid,&lu->LUstruct,berr,&stat,&info);
#endif
  } else { /* distributed mat input */
#if defined(PETSC_USE_COMPLEX)
    pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,m,nrhs,&lu->grid,
            &lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info);
#else
    pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,m,nrhs,&lu->grid,
            &lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info);
#endif
  }
  if (info) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_LIB,"pdgssvx fails, info: %d\n",info);

  if (lu->options.PrintStat) {
    PStatPrint(&lu->options, &stat, &lu->grid); /* Print the statistics. */
  }
  PStatFree(&stat);

  if (size > 1 && lu->MatInputMode == GLOBAL) { /* convert seq x to mpi x */
    ierr = VecRestoreArray(x_seq,&bptr);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat,x_seq,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat,x_seq,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat);CHKERRQ(ierr);
    ierr = VecDestroy(&x_seq);CHKERRQ(ierr);
  } else {
    ierr = VecRestoreArray(x,&bptr);CHKERRQ(ierr);

    lu->matsolve_iscalled    = PETSC_TRUE;
    lu->matmatsolve_iscalled = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
}
int main(int argc, char *argv[])
{
    superlu_options_t options;
    SuperLUStat_t stat;
    SuperMatrix A;
    ScalePermstruct_t ScalePermstruct;
    LUstruct_t LUstruct;
    SOLVEstruct_t SOLVEstruct;
    gridinfo_t grid;
    double *berr;
    doublecomplex *b, *xtrue;
    int_t m, n;
    int_t nprow, npcol;
    int iam, info, ldb, ldx, nrhs;
    char **cpp, c;
    FILE *fp, *fopen();
    extern int cpp_defs();

    nprow = 1;  /* Default process rows.       */
    npcol = 1;  /* Default process columns.    */
    nrhs  = 1;  /* Number of right-hand sides. */

    /* ------------------------------------------------------------
       INITIALIZE MPI ENVIRONMENT.
       ------------------------------------------------------------*/
    MPI_Init( &argc, &argv );

    /* Parse command line argv[]. */
    for (cpp = argv+1; *cpp; ++cpp) {
        if ( **cpp == '-' ) {
            c = *(*cpp+1);
            ++cpp;
            switch (c) {
              case 'h':
                  printf("Options:\n");
                  printf("\t-r <int>: process rows (default %d)\n", nprow);
                  printf("\t-c <int>: process columns (default %d)\n", npcol);
                  exit(0);
                  break;
              case 'r': nprow = atoi(*cpp); break;
              case 'c': npcol = atoi(*cpp); break;
            }
        } else { /* Last arg is considered a filename */
            if ( !(fp = fopen(*cpp, "r")) ) {
                ABORT("File does not exist");
            }
            break;
        }
    }

    /* ------------------------------------------------------------
       INITIALIZE THE SUPERLU PROCESS GRID.
       ------------------------------------------------------------*/
    superlu_gridinit(MPI_COMM_WORLD, nprow, npcol, &grid);

    /* Bail out if I do not belong in the grid. */
    iam = grid.iam;
    if ( iam >= nprow * npcol ) goto out;
    if ( !iam ) printf("\tProcess grid\t%d X %d\n", grid.nprow, grid.npcol);

#if ( VAMPIR>=1 )
    VT_traceoff();
#endif

#if ( DEBUGlevel>=1 )
    CHECK_MALLOC(iam, "Enter main()");
#endif

    /* ------------------------------------------------------------
       GET THE MATRIX FROM FILE AND SETUP THE RIGHT HAND SIDE.
       ------------------------------------------------------------*/
    zcreate_matrix(&A, nrhs, &b, &ldb, &xtrue, &ldx, fp, &grid);

    if ( !(berr = doubleMalloc_dist(nrhs)) )
        ABORT("Malloc fails for berr[].");

    /* ------------------------------------------------------------
       NOW WE SOLVE THE LINEAR SYSTEM.
       ------------------------------------------------------------*/

    /* Set the default input options:
         options.Fact = DOFACT;
         options.Equil = YES;
         options.ParSymbFact = NO;
         options.ColPerm = MMD_AT_PLUS_A;
         options.RowPerm = LargeDiag;
         options.ReplaceTinyPivot = YES;
         options.IterRefine = DOUBLE;
         options.Trans = NOTRANS;
         options.SolveInitialized = NO;
         options.RefineInitialized = NO;
         options.PrintStat = YES;
     */
    set_default_options_dist(&options);
#if 0
    options.RowPerm = NOROWPERM;
    options.IterRefine = NOREFINE;
    options.ColPerm = NATURAL;
    options.Equil = NO;
    options.ReplaceTinyPivot = NO;
#endif

    m = A.nrow; n = A.ncol;

    /* Initialize ScalePermstruct and LUstruct. */
    ScalePermstructInit(m, n, &ScalePermstruct);
    LUstructInit(m, n, &LUstruct);

    /* Initialize the statistics variables. */
    PStatInit(&stat);

    /* Call the linear equation solver. */
    pzgssvx(&options, &A, &ScalePermstruct, b, ldb, nrhs, &grid,
            &LUstruct, &SOLVEstruct, berr, &stat, &info);

    /* Check the accuracy of the solution. */
    pzinf_norm_error(iam, ((NRformat_loc *)A.Store)->m_loc,
                     nrhs, b, ldb, xtrue, ldx, &grid);

    PStatPrint(&options, &stat, &grid);   /* Print the statistics. */

    /* ------------------------------------------------------------
       DEALLOCATE STORAGE.
       ------------------------------------------------------------*/
    PStatFree(&stat);
    Destroy_CompRowLoc_Matrix_dist(&A);
    ScalePermstructFree(&ScalePermstruct);
    Destroy_LU(n, &grid, &LUstruct);
    LUstructFree(&LUstruct);
    if ( options.SolveInitialized ) {
        zSolveFinalize(&options, &SOLVEstruct);
    }
    SUPERLU_FREE(b);
    SUPERLU_FREE(xtrue);
    SUPERLU_FREE(berr);

    /* ------------------------------------------------------------
       RELEASE THE SUPERLU PROCESS GRID.
       ------------------------------------------------------------*/
out:
    superlu_gridexit(&grid);

    /* ------------------------------------------------------------
       TERMINATE THE MPI EXECUTION ENVIRONMENT.
       ------------------------------------------------------------*/
    MPI_Finalize();

#if ( DEBUGlevel>=1 )
    CHECK_MALLOC(iam, "Exit main()");
#endif
}