Actual source code: superlu_dist.c
#define PETSCMAT_DLL

/*
        Provides an interface to the SuperLU_DIST_2.0 sparse solver
*/

#include "src/mat/impls/aij/seq/aij.h"
#include "src/mat/impls/aij/mpi/mpiaij.h"
#if defined(PETSC_HAVE_STDLIB_H) /* This is to get around a weird problem with SuperLU on cray */
#include "stdlib.h"
#endif

#if defined(PETSC_USE_COMPLEX)
#include "superlu_zdefs.h"
#else
#include "superlu_ddefs.h"
#endif

typedef enum {GLOBAL,DISTRIBUTED} SuperLU_MatInputMode;
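/*
   Input-mode note (see the drivers called below): with GLOBAL every process
   supplies the entire matrix and the replicated-input drivers
   p[dz]gssvx_ABglobal() are used; with DISTRIBUTED every process supplies
   only its own rows and the distributed-input drivers p[dz]gssvx() are used.
*/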
typedef struct {
  int_t             nprow,npcol,*row,*col;
  gridinfo_t        grid;
  superlu_options_t options;
  SuperMatrix       A_sup;
  ScalePermstruct_t ScalePermstruct;
  LUstruct_t        LUstruct;
  int               StatPrint;
  int               MatInputMode;
  SOLVEstruct_t     SOLVEstruct;
  MatStructure      flg;
  MPI_Comm          comm_superlu;
#if defined(PETSC_USE_COMPLEX)
  doublecomplex     *val;
#else
  double            *val;
#endif
  /* A few function pointers for inheritance */
  PetscErrorCode (*MatDuplicate)(Mat,MatDuplicateOption,Mat*);
  PetscErrorCode (*MatView)(Mat,PetscViewer);
  PetscErrorCode (*MatAssemblyEnd)(Mat,MatAssemblyType);
  PetscErrorCode (*MatLUFactorSymbolic)(Mat,IS,IS,MatFactorInfo*,Mat*);
  PetscErrorCode (*MatDestroy)(Mat);

  /* Flag to clean up (non-global) SuperLU objects during Destroy */
  PetscTruth CleanUpSuperLU_Dist;
} Mat_SuperLU_DIST;

EXTERN PetscErrorCode MatDuplicate_SuperLU_DIST(Mat,MatDuplicateOption,Mat*);
PetscErrorCode PETSCMAT_DLLEXPORT MatConvert_SuperLU_DIST_Base(Mat A,MatType type,MatReuse reuse,Mat *newmat)
{
  PetscErrorCode   ierr;
  Mat              B=*newmat;
  Mat_SuperLU_DIST *lu=(Mat_SuperLU_DIST*)A->spptr;

  PetscFunctionBegin;
  if (reuse == MAT_INITIAL_MATRIX) {
    ierr = MatDuplicate(A,MAT_COPY_VALUES,&B);CHKERRQ(ierr);
  }
  /* Reset the original function pointers */
  B->ops->duplicate        = lu->MatDuplicate;
  B->ops->view             = lu->MatView;
  B->ops->assemblyend      = lu->MatAssemblyEnd;
  B->ops->lufactorsymbolic = lu->MatLUFactorSymbolic;
  B->ops->destroy          = lu->MatDestroy;

  ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_seqaij_superlu_dist_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_superlu_dist_seqaij_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpiaij_superlu_dist_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatConvert_superlu_dist_mpiaij_C","",PETSC_NULL);CHKERRQ(ierr);

  ierr = PetscObjectChangeTypeName((PetscObject)B,type);CHKERRQ(ierr);
  *newmat = B;
  PetscFunctionReturn(0);
}
PetscErrorCode MatDestroy_SuperLU_DIST(Mat A)
{
  PetscErrorCode   ierr;
  PetscMPIInt      size;
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->spptr;

  PetscFunctionBegin;
  if (lu->CleanUpSuperLU_Dist) {
    /* Deallocate SuperLU_DIST storage */
    if (lu->MatInputMode == GLOBAL) {
      Destroy_CompCol_Matrix_dist(&lu->A_sup);
    } else {
      Destroy_CompRowLoc_Matrix_dist(&lu->A_sup);
      if (lu->options.SolveInitialized) {
#if defined(PETSC_USE_COMPLEX)
        zSolveFinalize(&lu->options,&lu->SOLVEstruct);
#else
        dSolveFinalize(&lu->options,&lu->SOLVEstruct);
#endif
      }
    }
    Destroy_LU(A->cmap.N,&lu->grid,&lu->LUstruct);
    ScalePermstructFree(&lu->ScalePermstruct);
    LUstructFree(&lu->LUstruct);

    /* Release the SuperLU_DIST process grid. */
    superlu_gridexit(&lu->grid);

    ierr = MPI_Comm_free(&(lu->comm_superlu));CHKERRQ(ierr);
  }

  /* Convert back to the base type so its destroy routine cleans up the rest */
  ierr = MPI_Comm_size(A->comm,&size);CHKERRQ(ierr);
  if (size == 1) {
    ierr = MatConvert_SuperLU_DIST_Base(A,MATSEQAIJ,MAT_REUSE_MATRIX,&A);CHKERRQ(ierr);
  } else {
    ierr = MatConvert_SuperLU_DIST_Base(A,MATMPIAIJ,MAT_REUSE_MATRIX,&A);CHKERRQ(ierr);
  }
  ierr = (*A->ops->destroy)(A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
PetscErrorCode MatSolve_SuperLU_DIST(Mat A,Vec b_mpi,Vec x)
{
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)A->spptr;
  PetscErrorCode   ierr;
  PetscMPIInt      size;
  PetscInt         m=A->rmap.N,N=A->cmap.N;
  SuperLUStat_t    stat;
  double           berr[1];
  PetscScalar      *bptr;
  PetscInt         info,nrhs=1;
  Vec              x_seq;
  IS               iden;
  VecScatter       scat;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(A->comm,&size);CHKERRQ(ierr);
  if (size > 1) {
    if (lu->MatInputMode == GLOBAL) { /* global mat input, gather b into the sequential x_seq */
      ierr = VecCreateSeq(PETSC_COMM_SELF,N,&x_seq);CHKERRQ(ierr);
      ierr = ISCreateStride(PETSC_COMM_SELF,N,0,1,&iden);CHKERRQ(ierr);
      ierr = VecScatterCreate(b_mpi,iden,x_seq,iden,&scat);CHKERRQ(ierr);
      ierr = ISDestroy(iden);CHKERRQ(ierr);

      ierr = VecScatterBegin(b_mpi,x_seq,INSERT_VALUES,SCATTER_FORWARD,scat);CHKERRQ(ierr);
      ierr = VecScatterEnd(b_mpi,x_seq,INSERT_VALUES,SCATTER_FORWARD,scat);CHKERRQ(ierr);
      ierr = VecGetArray(x_seq,&bptr);CHKERRQ(ierr);
    } else { /* distributed mat input */
      ierr = VecCopy(b_mpi,x);CHKERRQ(ierr);
      ierr = VecGetArray(x,&bptr);CHKERRQ(ierr);
    }
  } else { /* size == 1 */
    ierr = VecCopy(b_mpi,x);CHKERRQ(ierr);
    ierr = VecGetArray(x,&bptr);CHKERRQ(ierr);
  }

  lu->options.Fact = FACTORED; /* The factored form of A is supplied; this option is local to this function only */

  PStatInit(&stat); /* Initialize the statistics variables. */
  if (lu->MatInputMode == GLOBAL) {
#if defined(PETSC_USE_COMPLEX)
    pzgssvx_ABglobal(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,m,nrhs,
                     &lu->grid,&lu->LUstruct,berr,&stat,&info);
#else
    pdgssvx_ABglobal(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,m,nrhs,
                     &lu->grid,&lu->LUstruct,berr,&stat,&info);
#endif
  } else { /* distributed mat input */
#if defined(PETSC_USE_COMPLEX)
    pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,(doublecomplex*)bptr,A->rmap.N,nrhs,&lu->grid,
            &lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info);
    if (info) SETERRQ1(PETSC_ERR_LIB,"pzgssvx fails, info: %d\n",info);
#else
    pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,bptr,A->rmap.N,nrhs,&lu->grid,
            &lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&info);
    if (info) SETERRQ1(PETSC_ERR_LIB,"pdgssvx fails, info: %d\n",info);
#endif
  }
  if (lu->options.PrintStat) {
    PStatPrint(&lu->options,&stat,&lu->grid); /* Print the statistics. */
  }
  PStatFree(&stat);

  if (size > 1) {
    if (lu->MatInputMode == GLOBAL) { /* scatter the sequential x_seq back into the parallel x */
      ierr = VecRestoreArray(x_seq,&bptr);CHKERRQ(ierr);
      ierr = VecScatterBegin(x_seq,x,INSERT_VALUES,SCATTER_REVERSE,scat);CHKERRQ(ierr);
      ierr = VecScatterEnd(x_seq,x,INSERT_VALUES,SCATTER_REVERSE,scat);CHKERRQ(ierr);
      ierr = VecScatterDestroy(scat);CHKERRQ(ierr);
      ierr = VecDestroy(x_seq);CHKERRQ(ierr);
    } else {
      ierr = VecRestoreArray(x,&bptr);CHKERRQ(ierr);
    }
  } else {
    ierr = VecRestoreArray(x,&bptr);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
PetscErrorCode MatLUFactorNumeric_SuperLU_DIST(Mat A,MatFactorInfo *info,Mat *F)
{
  Mat              *tseq,A_seq = PETSC_NULL;
  Mat_SeqAIJ       *aa,*bb;
  Mat_SuperLU_DIST *lu = (Mat_SuperLU_DIST*)(*F)->spptr;
  PetscErrorCode   ierr;
  PetscInt         M=A->rmap.N,N=A->cmap.N,sinfo,i,*ai,*aj,*bi,*bj,nz,rstart,*garray,
                   m=A->rmap.n,irow,colA_start,j,jcol,jB,countA,countB,*bjj,*ajj;
  PetscMPIInt      size,rank;
  SuperLUStat_t    stat;
  double           *berr=0;
  IS               isrow;
  PetscLogDouble   time0,time,time_min,time_max;
  Mat              F_diag=PETSC_NULL;
#if defined(PETSC_USE_COMPLEX)
  doublecomplex    *av,*bv;
#else
  double           *av,*bv;
#endif

  PetscFunctionBegin;
  ierr = MPI_Comm_size(A->comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(A->comm,&rank);CHKERRQ(ierr);

  if (lu->options.PrintStat) { /* collect time for mat conversion */
    ierr = MPI_Barrier(A->comm);CHKERRQ(ierr);
    ierr = PetscGetTime(&time0);CHKERRQ(ierr);
  }

  if (lu->MatInputMode == GLOBAL) { /* global mat input */
    if (size > 1) { /* convert mpi A to seq mat A */
      ierr = ISCreateStride(PETSC_COMM_SELF,M,0,1,&isrow);CHKERRQ(ierr);
      ierr = MatGetSubMatrices(A,1,&isrow,&isrow,MAT_INITIAL_MATRIX,&tseq);CHKERRQ(ierr);
      ierr = ISDestroy(isrow);CHKERRQ(ierr);

      A_seq = *tseq;
      ierr  = PetscFree(tseq);CHKERRQ(ierr);
      aa    = (Mat_SeqAIJ*)A_seq->data;
    } else {
      aa = (Mat_SeqAIJ*)A->data;
    }

    /* Convert the PETSc NR (compressed row) matrix to SuperLU_DIST NC (compressed column) format.
       Note: the memory for lu->val, lu->col and lu->row is allocated by CompRow_to_CompCol_dist()! */
    if (lu->flg == SAME_NONZERO_PATTERN) { /* successive numeric factorization, sparsity pattern is reused */
      Destroy_CompCol_Matrix_dist(&lu->A_sup);
      Destroy_LU(N,&lu->grid,&lu->LUstruct);
      lu->options.Fact = SamePattern;
    }
#if defined(PETSC_USE_COMPLEX)
    zCompRow_to_CompCol_dist(M,N,aa->nz,(doublecomplex*)aa->a,aa->j,aa->i,&lu->val,&lu->col,&lu->row);
#else
    dCompRow_to_CompCol_dist(M,N,aa->nz,aa->a,aa->j,aa->i,&lu->val,&lu->col,&lu->row);
#endif

    /* Create compressed column matrix A_sup. */
#if defined(PETSC_USE_COMPLEX)
    zCreate_CompCol_Matrix_dist(&lu->A_sup,M,N,aa->nz,lu->val,lu->col,lu->row,SLU_NC,SLU_Z,SLU_GE);
#else
    dCreate_CompCol_Matrix_dist(&lu->A_sup,M,N,aa->nz,lu->val,lu->col,lu->row,SLU_NC,SLU_D,SLU_GE);
#endif
  } else { /* distributed mat input */
    Mat_MPIAIJ *mat = (Mat_MPIAIJ*)A->data;
    aa = (Mat_SeqAIJ*)(mat->A)->data;
    bb = (Mat_SeqAIJ*)(mat->B)->data;
    ai = aa->i; aj = aa->j;
    bi = bb->i; bj = bb->j;
#if defined(PETSC_USE_COMPLEX)
    av = (doublecomplex*)aa->a;
    bv = (doublecomplex*)bb->a;
#else
    av = aa->a;
    bv = bb->a;
#endif
    rstart = A->rmap.rstart;
    nz     = aa->nz + bb->nz;
    garray = mat->garray;

    if (lu->flg == DIFFERENT_NONZERO_PATTERN) { /* first numeric factorization */
#if defined(PETSC_USE_COMPLEX)
      zallocateA_dist(m,nz,&lu->val,&lu->col,&lu->row);
#else
      dallocateA_dist(m,nz,&lu->val,&lu->col,&lu->row);
#endif
    } else { /* successive numeric factorization, sparsity pattern and perm_c are reused */
      /* Destroy_CompRowLoc_Matrix_dist(&lu->A_sup); */ /* calling this here crashes! */
      Destroy_LU(N,&lu->grid,&lu->LUstruct);
      lu->options.Fact = SamePattern;
    }
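    /*
       The loop below merges, for each local row, the MPIAIJ diagonal block
       (mat->A, local column indices offset by rstart) and off-diagonal block
       (mat->B, compressed column indices mapped to global ones through
       garray) into a single compressed row with ascending global column
       indices, as required by the SLU_NR_loc format.
    */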
    nz = 0; irow = rstart;
    for (i=0; i<m; i++) {
      lu->row[i] = nz;
      countA     = ai[i+1] - ai[i];
      countB     = bi[i+1] - bi[i];
      ajj        = aj + ai[i]; /* ptr to the beginning of this row */
      bjj        = bj + bi[i];

      /* B part, smaller col index */
      colA_start = rstart + ajj[0]; /* the smallest global col index of A */
      jB         = 0;
      for (j=0; j<countB; j++) {
        jcol = garray[bjj[j]];
        if (jcol > colA_start) {
          jB = j;
          break;
        }
        lu->col[nz]   = jcol;
        lu->val[nz++] = *bv++;
        if (j==countB-1) jB = countB;
      }

      /* A part */
      for (j=0; j<countA; j++) {
        lu->col[nz]   = rstart + ajj[j];
        lu->val[nz++] = *av++;
      }

      /* B part, larger col index */
      for (j=jB; j<countB; j++) {
        lu->col[nz]   = garray[bjj[j]];
        lu->val[nz++] = *bv++;
      }
    }
    lu->row[m] = nz;
#if defined(PETSC_USE_COMPLEX)
    zCreate_CompRowLoc_Matrix_dist(&lu->A_sup,M,N,nz,m,rstart,
                                   lu->val,lu->col,lu->row,SLU_NR_loc,SLU_Z,SLU_GE);
#else
    dCreate_CompRowLoc_Matrix_dist(&lu->A_sup,M,N,nz,m,rstart,
                                   lu->val,lu->col,lu->row,SLU_NR_loc,SLU_D,SLU_GE);
#endif
  }
  if (lu->options.PrintStat) {
    ierr  = PetscGetTime(&time);CHKERRQ(ierr);
    time0 = time - time0;
  }

  /* Factor the matrix. */
  PStatInit(&stat); /* Initialize the statistics variables. */
  if (lu->MatInputMode == GLOBAL) { /* global mat input */
#if defined(PETSC_USE_COMPLEX)
    pzgssvx_ABglobal(&lu->options,&lu->A_sup,&lu->ScalePermstruct,0,M,0,
                     &lu->grid,&lu->LUstruct,berr,&stat,&sinfo);
#else
    pdgssvx_ABglobal(&lu->options,&lu->A_sup,&lu->ScalePermstruct,0,M,0,
                     &lu->grid,&lu->LUstruct,berr,&stat,&sinfo);
#endif
  } else { /* distributed mat input */
#if defined(PETSC_USE_COMPLEX)
    pzgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,0,M,0,&lu->grid,
            &lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&sinfo);
    if (sinfo) SETERRQ1(PETSC_ERR_LIB,"pzgssvx fails, info: %d\n",sinfo);
#else
    pdgssvx(&lu->options,&lu->A_sup,&lu->ScalePermstruct,0,M,0,&lu->grid,
            &lu->LUstruct,&lu->SOLVEstruct,berr,&stat,&sinfo);
    if (sinfo) SETERRQ1(PETSC_ERR_LIB,"pdgssvx fails, info: %d\n",sinfo);
#endif
  }

  if (lu->MatInputMode == GLOBAL && size > 1) {
    ierr = MatDestroy(A_seq);CHKERRQ(ierr);
  }

  if (lu->options.PrintStat) {
    if (size > 1) {
      ierr = MPI_Reduce(&time0,&time_max,1,MPI_DOUBLE,MPI_MAX,0,A->comm);CHKERRQ(ierr);
      ierr = MPI_Reduce(&time0,&time_min,1,MPI_DOUBLE,MPI_MIN,0,A->comm);CHKERRQ(ierr);
      ierr = MPI_Reduce(&time0,&time,1,MPI_DOUBLE,MPI_SUM,0,A->comm);CHKERRQ(ierr);
      time = time/size; /* average time */
      if (!rank) {
        ierr = PetscPrintf(PETSC_COMM_SELF,"  Mat conversion (PETSc->SuperLU_DIST) time (max/min/avg): %g / %g / %g\n",time_max,time_min,time);CHKERRQ(ierr);
      }
    } else {
      ierr = PetscPrintf(PETSC_COMM_SELF,"  Mat conversion (PETSc->SuperLU_DIST) time: %g\n",time0);CHKERRQ(ierr);
    }

    PStatPrint(&lu->options,&stat,&lu->grid); /* Print the statistics. */
  }
  PStatFree(&stat);
  if (size > 1) {
    F_diag            = ((Mat_MPIAIJ*)(*F)->data)->A;
    F_diag->assembled = PETSC_TRUE;
  }
  (*F)->assembled = PETSC_TRUE;
  lu->flg         = SAME_NONZERO_PATTERN;
  PetscFunctionReturn(0);
}
/* Note: the PETSc r and c permutations are ignored */
PetscErrorCode MatLUFactorSymbolic_SuperLU_DIST(Mat A,IS r,IS c,MatFactorInfo *info,Mat *F)
{
  Mat               B;
  Mat_SuperLU_DIST  *lu;
  PetscErrorCode    ierr;
  PetscInt          M=A->rmap.N,N=A->cmap.N,indx;
  PetscMPIInt       size;
  superlu_options_t options;
  PetscTruth        flg;
  const char        *ptype[]  = {"MMD_AT_PLUS_A","NATURAL","MMD_ATA"};
  const char        *prtype[] = {"LargeDiag","NATURAL"};

  PetscFunctionBegin;
  /* Create the factorization matrix */
  ierr = MatCreate(A->comm,&B);CHKERRQ(ierr);
  ierr = MatSetSizes(B,A->rmap.n,A->cmap.n,M,N);CHKERRQ(ierr);
  ierr = MatSetType(B,A->type_name);CHKERRQ(ierr);
  ierr = MatSeqAIJSetPreallocation(B,0,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(B,0,PETSC_NULL,0,PETSC_NULL);CHKERRQ(ierr);

  B->ops->lufactornumeric = MatLUFactorNumeric_SuperLU_DIST;
  B->ops->solve           = MatSolve_SuperLU_DIST;
  B->factor               = FACTOR_LU;

  lu = (Mat_SuperLU_DIST*)(B->spptr);

  /* Set the input options */
  set_default_options_dist(&options);

  lu->MatInputMode = GLOBAL;
  ierr = MPI_Comm_dup(A->comm,&(lu->comm_superlu));CHKERRQ(ierr);

  ierr = MPI_Comm_size(A->comm,&size);CHKERRQ(ierr);
  lu->nprow = size/2;           /* Default process rows. */
  if (!lu->nprow) lu->nprow = 1;
  lu->npcol = size/lu->nprow;   /* Default process columns. */
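  /* e.g. on 6 processes the defaults above give nprow = 3 and npcol = 2,
     a 3x2 process grid; the check below requires nprow*npcol == size */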
  ierr = PetscOptionsBegin(A->comm,A->prefix,"SuperLU_Dist Options","Mat");CHKERRQ(ierr);

  ierr = PetscOptionsInt("-mat_superlu_dist_r","Number of rows in processor partition","None",lu->nprow,&lu->nprow,PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-mat_superlu_dist_c","Number of columns in processor partition","None",lu->npcol,&lu->npcol,PETSC_NULL);CHKERRQ(ierr);
  if (size != lu->nprow * lu->npcol) SETERRQ(PETSC_ERR_ARG_SIZ,"Number of processes should be equal to nprow*npcol");

  ierr = PetscOptionsInt("-mat_superlu_dist_matinput","Matrix input mode (0: GLOBAL; 1: DISTRIBUTED)","None",lu->MatInputMode,&lu->MatInputMode,PETSC_NULL);CHKERRQ(ierr);
  if (lu->MatInputMode == DISTRIBUTED && size == 1) lu->MatInputMode = GLOBAL;

  ierr = PetscOptionsTruth("-mat_superlu_dist_equil","Equilibrate matrix","None",PETSC_TRUE,&flg,0);CHKERRQ(ierr);
  if (!flg) {
    options.Equil = NO;
  }

  ierr = PetscOptionsEList("-mat_superlu_dist_rowperm","Row permutation","None",prtype,2,prtype[0],&indx,&flg);CHKERRQ(ierr);
  if (flg) {
    switch (indx) {
    case 0:
      options.RowPerm = LargeDiag;
      break;
    case 1:
      options.RowPerm = NOROWPERM;
      break;
    }
  }

  ierr = PetscOptionsEList("-mat_superlu_dist_colperm","Column permutation","None",ptype,3,ptype[0],&indx,&flg);CHKERRQ(ierr);
  if (flg) {
    switch (indx) {
    case 0:
      options.ColPerm = MMD_AT_PLUS_A;
      break;
    case 1:
      options.ColPerm = NATURAL;
      break;
    case 2:
      options.ColPerm = MMD_ATA;
      break;
    }
  }

  ierr = PetscOptionsTruth("-mat_superlu_dist_replacetinypivot","Replace tiny pivots","None",PETSC_TRUE,&flg,0);CHKERRQ(ierr);
  if (!flg) {
    options.ReplaceTinyPivot = NO;
  }

  options.IterRefine = NOREFINE;
  ierr = PetscOptionsTruth("-mat_superlu_dist_iterrefine","Use iterative refinement","None",PETSC_FALSE,&flg,0);CHKERRQ(ierr);
  if (flg) {
    options.IterRefine = DOUBLE;
  }

  if (PetscLogPrintInfo) {
    options.PrintStat = YES;
  } else {
    options.PrintStat = NO;
  }
  ierr = PetscOptionsTruth("-mat_superlu_dist_statprint","Print factorization information","None",
                           (PetscTruth)options.PrintStat,(PetscTruth*)&options.PrintStat,0);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);

  /* Initialize the SuperLU process grid. */
  superlu_gridinit(lu->comm_superlu,lu->nprow,lu->npcol,&lu->grid);

  /* Initialize ScalePermstruct and LUstruct. */
  ScalePermstructInit(M,N,&lu->ScalePermstruct);
  LUstructInit(M,N,&lu->LUstruct);

  lu->options             = options;
  lu->flg                 = DIFFERENT_NONZERO_PATTERN;
  lu->CleanUpSuperLU_Dist = PETSC_TRUE;
  *F = B;

  PetscFunctionReturn(0);
}
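/*
   Illustrative command line exercising the options registered above on
   4 MPI processes (./app stands for any application calling
   MatSetFromOptions(); the 2x2 grid must satisfy nprow*npcol == size):

      mpiexec -n 4 ./app -mat_type superlu_dist -mat_superlu_dist_r 2
              -mat_superlu_dist_c 2 -mat_superlu_dist_matinput 1
*/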
PetscErrorCode MatAssemblyEnd_SuperLU_DIST(Mat A,MatAssemblyType mode)
{
  PetscErrorCode   ierr;
  Mat_SuperLU_DIST *lu=(Mat_SuperLU_DIST*)(A->spptr);

  PetscFunctionBegin;
  ierr = (*lu->MatAssemblyEnd)(A,mode);CHKERRQ(ierr);
  lu->MatLUFactorSymbolic  = A->ops->lufactorsymbolic;
  A->ops->lufactorsymbolic = MatLUFactorSymbolic_SuperLU_DIST;
  PetscFunctionReturn(0);
}
PetscErrorCode MatFactorInfo_SuperLU_DIST(Mat A,PetscViewer viewer)
{
  Mat_SuperLU_DIST  *lu=(Mat_SuperLU_DIST*)A->spptr;
  superlu_options_t options;
  PetscErrorCode    ierr;

  PetscFunctionBegin;
  /* Check whether the matrix is of superlu_dist type */
  if (A->ops->solve != MatSolve_SuperLU_DIST) PetscFunctionReturn(0);

  options = lu->options;
  ierr = PetscViewerASCIIPrintf(viewer,"SuperLU_DIST run parameters:\n");CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"  Equilibrate matrix %s \n",PetscTruths[options.Equil != NO]);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"  Matrix input mode %d \n",lu->MatInputMode);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"  Replace tiny pivots %s \n",PetscTruths[options.ReplaceTinyPivot != NO]);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"  Use iterative refinement %s \n",PetscTruths[options.IterRefine == DOUBLE]);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"  Processors in row %d col partition %d \n",lu->nprow,lu->npcol);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"  Row permutation %s \n",(options.RowPerm == NOROWPERM) ? "NATURAL" : "LargeDiag");CHKERRQ(ierr);
  if (options.ColPerm == NATURAL) {
    ierr = PetscViewerASCIIPrintf(viewer,"  Column permutation NATURAL\n");CHKERRQ(ierr);
  } else if (options.ColPerm == MMD_AT_PLUS_A) {
    ierr = PetscViewerASCIIPrintf(viewer,"  Column permutation MMD_AT_PLUS_A\n");CHKERRQ(ierr);
  } else if (options.ColPerm == MMD_ATA) {
    ierr = PetscViewerASCIIPrintf(viewer,"  Column permutation MMD_ATA\n");CHKERRQ(ierr);
  } else {
    SETERRQ(PETSC_ERR_ARG_WRONG,"Unknown column permutation");
  }
  PetscFunctionReturn(0);
}
PetscErrorCode MatView_SuperLU_DIST(Mat A,PetscViewer viewer)
{
  PetscErrorCode    ierr;
  PetscTruth        iascii;
  PetscViewerFormat format;
  Mat_SuperLU_DIST  *lu=(Mat_SuperLU_DIST*)(A->spptr);

  PetscFunctionBegin;
  ierr = (*lu->MatView)(A,viewer);CHKERRQ(ierr);

  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
    if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) {
      ierr = MatFactorInfo_SuperLU_DIST(A,viewer);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
PetscErrorCode PETSCMAT_DLLEXPORT MatConvert_Base_SuperLU_DIST(Mat A,MatType type,MatReuse reuse,Mat *newmat)
{
  /* This routine is only called to convert to MATSUPERLU_DIST,      */
  /* from MATSEQAIJ if A has a single process communicator,          */
  /* or from MATMPIAIJ otherwise, so we can ignore 'MatType type'.   */
  PetscErrorCode   ierr;
  PetscMPIInt      size;
  MPI_Comm         comm;
  Mat              B=*newmat;
  Mat_SuperLU_DIST *lu;

  PetscFunctionBegin;
  if (reuse == MAT_INITIAL_MATRIX) {
    ierr = MatDuplicate(A,MAT_COPY_VALUES,&B);CHKERRQ(ierr);
  }

  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  ierr = PetscNew(Mat_SuperLU_DIST,&lu);CHKERRQ(ierr);

  /* Save the original function pointers, then override them (inheritance) */
  lu->MatDuplicate        = A->ops->duplicate;
  lu->MatView             = A->ops->view;
  lu->MatAssemblyEnd      = A->ops->assemblyend;
  lu->MatLUFactorSymbolic = A->ops->lufactorsymbolic;
  lu->MatDestroy          = A->ops->destroy;
  lu->CleanUpSuperLU_Dist = PETSC_FALSE;

  B->spptr                 = (void*)lu;
  B->ops->duplicate        = MatDuplicate_SuperLU_DIST;
  B->ops->view             = MatView_SuperLU_DIST;
  B->ops->assemblyend      = MatAssemblyEnd_SuperLU_DIST;
  B->ops->lufactorsymbolic = MatLUFactorSymbolic_SuperLU_DIST;
  B->ops->destroy          = MatDestroy_SuperLU_DIST;

  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (size == 1) {
    ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_seqaij_superlu_dist_C",
                                             "MatConvert_Base_SuperLU_DIST",MatConvert_Base_SuperLU_DIST);CHKERRQ(ierr);
    ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_superlu_dist_seqaij_C",
                                             "MatConvert_SuperLU_DIST_Base",MatConvert_SuperLU_DIST_Base);CHKERRQ(ierr);
  } else {
    ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_mpiaij_superlu_dist_C",
                                             "MatConvert_Base_SuperLU_DIST",MatConvert_Base_SuperLU_DIST);CHKERRQ(ierr);
    ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_superlu_dist_mpiaij_C",
                                             "MatConvert_SuperLU_DIST_Base",MatConvert_SuperLU_DIST_Base);CHKERRQ(ierr);
  }
  ierr = PetscInfo(0,"Using SuperLU_DIST for SeqAIJ LU factorization and solves.\n");CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)B,MATSUPERLU_DIST);CHKERRQ(ierr);
  *newmat = B;
  PetscFunctionReturn(0);
}
PetscErrorCode MatDuplicate_SuperLU_DIST(Mat A,MatDuplicateOption op,Mat *M)
{
  PetscErrorCode   ierr;
  Mat_SuperLU_DIST *lu=(Mat_SuperLU_DIST*)A->spptr;

  PetscFunctionBegin;
  ierr = (*lu->MatDuplicate)(A,op,M);CHKERRQ(ierr);
  ierr = PetscMemcpy((*M)->spptr,lu,sizeof(Mat_SuperLU_DIST));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
/*MC
  MATSUPERLU_DIST - MATSUPERLU_DIST = "superlu_dist" - A matrix type providing direct solvers (LU) for parallel matrices
  via the external package SuperLU_DIST.

  If SuperLU_DIST is installed (see the manual for
  instructions on how to declare the existence of external packages),
  a matrix type can be constructed which invokes SuperLU_DIST solvers.
  After calling MatCreate(...,A), simply call MatSetType(A,MATSUPERLU_DIST).

  This matrix inherits from MATSEQAIJ when constructed with a single process communicator,
  and from MATMPIAIJ otherwise. As a result, for single process communicators,
  MatSeqAIJSetPreallocation is supported, and similarly MatMPIAIJSetPreallocation is supported
  for communicators controlling multiple processes. It is recommended that you call both of
  the above preallocation routines for simplicity. One can also call MatConvert for an in-place
  conversion to or from the MATSEQAIJ or MATMPIAIJ type (depending on the communicator size)
  without data copy.

  Options Database Keys:
+ -mat_type superlu_dist - sets the matrix type to "superlu_dist" during a call to MatSetFromOptions()
. -mat_superlu_dist_r <n> - number of rows in processor partition
. -mat_superlu_dist_c <n> - number of columns in processor partition
. -mat_superlu_dist_matinput <0,1> - matrix input mode; 0=global, 1=distributed
. -mat_superlu_dist_equil - equilibrate the matrix
. -mat_superlu_dist_rowperm <LargeDiag,NATURAL> - row permutation
. -mat_superlu_dist_colperm <MMD_AT_PLUS_A,MMD_ATA,NATURAL> - column permutation
. -mat_superlu_dist_replacetinypivot - replace tiny pivots
. -mat_superlu_dist_iterrefine - use iterative refinement
- -mat_superlu_dist_statprint - print factorization information

  Level: beginner

.seealso: PCLU
M*/
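/*
   Illustrative usage sketch of the type documented above, solving a linear
   system through KSP with an LU preconditioner; n, nz, b and x below are
   application-supplied placeholders, and error checking is elided.

      Mat A;
      KSP ksp;
      PC  pc;

      MatCreate(PETSC_COMM_WORLD,&A);
      MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,n,n);
      MatSetType(A,MATSUPERLU_DIST);   <-- or -mat_type superlu_dist with MatSetFromOptions()
      MatSeqAIJSetPreallocation(A,nz,PETSC_NULL);
      MatMPIAIJSetPreallocation(A,nz,PETSC_NULL,nz,PETSC_NULL);
      ... MatSetValues(), MatAssemblyBegin(), MatAssemblyEnd() ...

      KSPCreate(PETSC_COMM_WORLD,&ksp);
      KSPSetOperators(ksp,A,A,DIFFERENT_NONZERO_PATTERN);
      KSPSetType(ksp,KSPPREONLY);
      KSPGetPC(ksp,&pc);
      PCSetType(pc,PCLU);
      KSPSolve(ksp,b,x);
*/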
PetscErrorCode PETSCMAT_DLLEXPORT MatCreate_SuperLU_DIST(Mat A)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;

  PetscFunctionBegin;
  /* Change the type name before calling MatSetType to force proper construction of the SeqAIJ or MPIAIJ */
  /* and SuperLU_DIST types */
  ierr = PetscObjectChangeTypeName((PetscObject)A,MATSUPERLU_DIST);CHKERRQ(ierr);
  ierr = MPI_Comm_size(A->comm,&size);CHKERRQ(ierr);
  if (size == 1) {
    ierr = MatSetType(A,MATSEQAIJ);CHKERRQ(ierr);
  } else {
    ierr = MatSetType(A,MATMPIAIJ);CHKERRQ(ierr);
    /* A_diag = 0x0 ??? -- do we need it?
      Mat A_diag = ((Mat_MPIAIJ*)A->data)->A;
      ierr = MatConvert_Base_SuperLU_DIST(A_diag,MATSUPERLU_DIST,MAT_REUSE_MATRIX,&A_diag);CHKERRQ(ierr);
    */
  }
  ierr = MatConvert_Base_SuperLU_DIST(A,MATSUPERLU_DIST,MAT_REUSE_MATRIX,&A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}