/* Actual source code: dscpack.c */
#define PETSCMAT_DLL

/*
   Provides an interface to the DSCPACK (Domain-Separator Codes) sparse direct solver
*/
#include "src/mat/impls/baij/seq/baij.h"
#include "src/mat/impls/baij/mpi/mpibaij.h"

#include "dscmain.h"
/*
   Private context for the DSCPACK matrix type; stored in Mat->spptr.
   Holds the DSCPACK solver handle, the data describing how the permuted
   matrix is distributed over the DSCPACK processor subset, and the
   function pointers of the BAIJ base class that this type overrides.
*/
typedef struct {
  DSC_Solver My_DSC_Solver;             /* handle to the DSCPACK solver instance */
  /* Ordering/distribution data returned by DSC_Order(); "struc" = supernodal
     structure (block column) in DSCPACK's terminology */
  PetscInt num_local_strucs, *local_struc_old_num,
  num_local_cols, num_local_nonz,
  *global_struc_new_col_num,
  *global_struc_new_num, *global_struc_owner,
  dsc_id,bs,*local_cols_old_num,*replication;  /* dsc_id = -1 on ranks outside the DSCPACK subcommunicator */
  /* User-settable options (see MatCholeskyFactorSymbolic_DSCPACK for defaults) */
  PetscInt order_code,scheme_code,factor_type, stat,
  LBLASLevel,DBLASLevel,max_mem_allowed;
  MatStructure flg;                     /* DIFFERENT_NONZERO_PATTERN until first numeric factorization */
  IS my_cols,iden,iden_dsc;             /* column ownership / identity index sets for the scatters */
  Vec vec_dsc;                          /* sequential work vector in DSCPACK ordering */
  VecScatter scat;                      /* scatter between global vectors and vec_dsc; created lazily in MatSolve */
  MPI_Comm comm_dsc;                    /* duplicated communicator handed to DSCPACK */

  /* A few inheritance details: saved base-class (Seq/MPIBAIJ) methods */
  PetscMPIInt size;
  PetscErrorCode (*MatDuplicate)(Mat,MatDuplicateOption,Mat*);
  PetscErrorCode (*MatView)(Mat,PetscViewer);
  PetscErrorCode (*MatAssemblyEnd)(Mat,MatAssemblyType);
  PetscErrorCode (*MatCholeskyFactorSymbolic)(Mat,IS,MatFactorInfo*,Mat*);
  PetscErrorCode (*MatDestroy)(Mat);
  PetscErrorCode (*MatPreallocate)(Mat,PetscInt,PetscInt,PetscInt*,PetscInt,PetscInt*);

  /* Clean up flag for destructor: set once DSCPACK state has been created */
  PetscTruth CleanUpDSCPACK;
} Mat_DSC;
42: EXTERN PetscErrorCode MatDuplicate_DSCPACK(Mat,MatDuplicateOption,Mat*);
44: EXTERN PetscErrorCode PETSCMAT_DLLEXPORT MatConvert_Base_DSCPACK(Mat,const MatType,MatReuse,Mat*);
47: /* DSC function */
50: void isort2(PetscInt size, PetscInt *list, PetscInt *idx_dsc) {
51: /* in increasing order */
52: /* idx_dsc will contain indices such that */
53: /* list can be accessed in sorted order */
54: PetscInt i, j, x, y;
55:
56: for (i=0; i<size; i++) idx_dsc[i] =i;
58: for (i=1; i<size; i++){
59: y= idx_dsc[i];
60: x=list[idx_dsc[i]];
61: for (j=i-1; ((j>=0) && (x<list[idx_dsc[j]])); j--)
62: idx_dsc[j+1]=idx_dsc[j];
63: idx_dsc[j+1]=y;
64: }
65: }/*end isort2*/
PetscErrorCode BAIJtoMyANonz( PetscInt *AIndex, PetscInt *AStruct, PetscInt bs,
                              RealNumberType *ANonz, PetscInt NumLocalStructs,
                              PetscInt NumLocalNonz, PetscInt *GlobalStructNewColNum,
                              PetscInt *LocalStructOldNum,
                              PetscInt *LocalStructLocalNum,
                              RealNumberType **adr_MyANonz)
/*
   Extract non-zero values of the lower triangular part
   of the permuted matrix that belong to this processor.

   Only output parameter is adr_MyANonz -- it is malloced here and filled;
   the caller owns (and must PetscFree) the returned array.
   The remaining parameters are inputs, left unchanged.

   When LocalStructLocalNum == PETSC_NULL,
     AIndex, AStruct, and ANonz contain the entire original matrix A
     in PETSc SeqBAIJ format (i/j/a arrays),
   otherwise,
     AIndex, AStruct, and ANonz are indices for the submatrix
     of A whose columns (in increasing order) belong to this processor,
     and LocalStructLocalNum maps local struct i to its row in that submatrix.

   The other arguments supply information on ownership of columns
   and the new numbering from the fill-reducing permutation.

   This information is used to set up the lower half of A's nonzeroes
   for the columns owned by this processor, in the column-major order
   (within each bs x bs block) that DSC_NFactor expects.
*/
{
  PetscInt i, j, k, iold,inew, jj, kk, bs2=bs*bs,
           *idx, *NewColNum,
           MyANonz_last, max_struct=0, struct_size;
  RealNumberType *MyANonz;

  /* first pass: find the maximum number of block subscripts over the
     columns assigned to this processor, to size the temporary arrays */
  for (i=0; i <NumLocalStructs; i++) {
    /* for each struct i (local) assigned to this processor */
    if (LocalStructLocalNum){
      iold = LocalStructLocalNum[i];
    } else {
      iold = LocalStructOldNum[i];
    }

    struct_size = AIndex[iold+1] - AIndex[iold];
    if ( max_struct <= struct_size) max_struct = struct_size;
  }

  /* allocate tmp arrays large enough to hold densest struct;
     NOTE(review): PetscMalloc return codes are not checked here -- verify
     against the project's CHKERRQ convention */
  PetscMalloc((2*max_struct+1)*sizeof(PetscInt),&NewColNum);
  idx = NewColNum + max_struct;   /* idx shares the single allocation */

  PetscMalloc(NumLocalNonz*sizeof(RealNumberType),&MyANonz);
  *adr_MyANonz = MyANonz;

  /* second pass: copy the lower-triangular entries into MyANonz */
  MyANonz_last = 0 ; /* points to first empty space in MyANonz */
  for (i=0; i <NumLocalStructs; i++) {

    /* for each struct i (local) assigned to this processor */
    if (LocalStructLocalNum){
      iold = LocalStructLocalNum[i];
    } else {
      iold = LocalStructOldNum[i];
    }

    /* gather the new (permuted) column numbers of this struct's blocks,
       then sort them so entries are emitted in increasing new-row order */
    struct_size = AIndex[iold+1] - AIndex[iold];
    for (k=0, j=AIndex[iold]; j<AIndex[iold+1]; j++){
      NewColNum[k] = GlobalStructNewColNum[AStruct[j]];
      k++;
    }
    isort2(struct_size, NewColNum, idx);

    kk = AIndex[iold]*bs2; /* points to 1st element of iold block col in ANonz */
    inew = GlobalStructNewColNum[LocalStructOldNum[i]];

    /* walk the bs point columns of this block column; keep only entries on
       or below the diagonal in the new numbering (NewColNum[...]+k >= inew) */
    for (jj = 0; jj < bs; jj++) {
      for (j=0; j<struct_size; j++){
        for ( k = 0; k<bs; k++){
          if (NewColNum[idx[j]] + k >= inew)
            MyANonz[MyANonz_last++] = ANonz[kk + idx[j]*bs2 + k*bs + jj];
        }
      }
      inew++;
    }
  } /* end outer loop for i */

  PetscFree(NewColNum);
  /* sanity check: the emitted count must match DSC_Order's prediction */
  if (MyANonz_last != NumLocalNonz) SETERRQ2(PETSC_ERR_PLIB,"MyANonz_last %d != NumLocalNonz %d\n",MyANonz_last, NumLocalNonz);
  return(0);
}
/*
   Converts a DSCPACK matrix back to its base type (SeqBAIJ or MPIBAIJ)
   by restoring the saved base-class function pointers and removing the
   composed converter functions.  With MAT_REUSE_MATRIX the conversion is
   done in place on A itself (B aliases A via *newmat).
*/
PetscErrorCode PETSCMAT_DLLEXPORT MatConvert_DSCPACK_Base(Mat A,const MatType type,MatReuse reuse,Mat *newmat)
{
  Mat B=*newmat;
  Mat_DSC *lu=(Mat_DSC*)A->spptr;
  void (*f)(void);

  if (reuse == MAT_INITIAL_MATRIX) {
    MatDuplicate(A,MAT_COPY_VALUES,&B);
  }
  /* Reset the original function pointers saved by MatConvert_Base_DSCPACK */
  B->ops->duplicate = lu->MatDuplicate;
  B->ops->view = lu->MatView;
  B->ops->assemblyend = lu->MatAssemblyEnd;
  B->ops->choleskyfactorsymbolic = lu->MatCholeskyFactorSymbolic;
  B->ops->destroy = lu->MatDestroy;
  /* the MPIBAIJ preallocation hook is only present for parallel matrices */
  PetscObjectQueryFunction((PetscObject)B,"MatMPIBAIJSetPreallocation_C",&f);
  if (f) {
    PetscObjectComposeFunction((PetscObject)B,"MatMPIBAIJSetPreallocation_C","",(PetscVoidFunction)lu->MatPreallocate);
  }

  /* drop the registered DSCPACK converters so B behaves as a plain BAIJ */
  PetscObjectComposeFunction((PetscObject)B,"MatConvert_seqbaij_dscpack_C","",PETSC_NULL);
  PetscObjectComposeFunction((PetscObject)B,"MatConvert_dscpack_seqbaij_C","",PETSC_NULL);
  PetscObjectComposeFunction((PetscObject)B,"MatConvert_mpibaij_dscpack_C","",PETSC_NULL);
  PetscObjectComposeFunction((PetscObject)B,"MatConvert_dscpack_mpibaij_C","",PETSC_NULL);

  PetscObjectChangeTypeName((PetscObject)B,type);
  *newmat = B;

  return(0);
}
/*
   Destroys a DSCPACK matrix: releases all DSCPACK solver state (only the
   ranks inside the DSCPACK subcommunicator, dsc_id != -1, hold per-rank
   data), converts the matrix back to its BAIJ base type, and finally
   invokes the base destructor.
*/
PetscErrorCode MatDestroy_DSCPACK(Mat A)
{
  Mat_DSC *lu=(Mat_DSC*)A->spptr;

  if (lu->CleanUpDSCPACK) {
    if (lu->dsc_id != -1) {
      /* optionally print factorization/solve statistics before teardown */
      if(lu->stat) DSC_DoStats(lu->My_DSC_Solver);
      DSC_FreeAll(lu->My_DSC_Solver);
      DSC_Close0(lu->My_DSC_Solver);

      PetscFree(lu->local_cols_old_num);
    }
    DSC_End(lu->My_DSC_Solver);

    MPI_Comm_free(&(lu->comm_dsc));
    ISDestroy(lu->my_cols);
    PetscFree(lu->replication);
    VecDestroy(lu->vec_dsc);
    ISDestroy(lu->iden_dsc);
    VecScatterDestroy(lu->scat);
    if (lu->size >1 && lu->iden) {ISDestroy(lu->iden);}
  }
  /* revert to the base type in place, then run the base destructor */
  if (lu->size == 1) {
    MatConvert_DSCPACK_Base(A,MATSEQBAIJ,MAT_REUSE_MATRIX,&A);
  } else {
    MatConvert_DSCPACK_Base(A,MATMPIBAIJ,MAT_REUSE_MATRIX,&A);
  }
  (*A->ops->destroy)(A);
  return(0);
}
236: PetscErrorCode MatSolve_DSCPACK(Mat A,Vec b,Vec x) {
237: Mat_DSC *lu= (Mat_DSC*)A->spptr;
239: RealNumberType *solution_vec,*rhs_vec;
242: /* scatter b into seq vec_dsc */
243: if ( !lu->scat ) {
244: VecScatterCreate(b,lu->my_cols,lu->vec_dsc,lu->iden_dsc,&lu->scat);
245: }
246: VecScatterBegin(b,lu->vec_dsc,INSERT_VALUES,SCATTER_FORWARD,lu->scat);
247: VecScatterEnd(b,lu->vec_dsc,INSERT_VALUES,SCATTER_FORWARD,lu->scat);
249: if (lu->dsc_id != -1){
250: VecGetArray(lu->vec_dsc,&rhs_vec);
251: DSC_InputRhsLocalVec(lu->My_DSC_Solver, rhs_vec, lu->num_local_cols);
252: VecRestoreArray(lu->vec_dsc,&rhs_vec);
253:
254: DSC_Solve(lu->My_DSC_Solver);
255: if (ierr != DSC_NO_ERROR) {
256: DSC_ErrorDisplay(lu->My_DSC_Solver);
257: SETERRQ(PETSC_ERR_LIB,"Error in calling DSC_Solve");
258: }
260: /* get the permuted local solution */
261: VecGetArray(lu->vec_dsc,&solution_vec);
262: DSC_GetLocalSolution(lu->My_DSC_Solver,solution_vec, lu->num_local_cols);
263: VecRestoreArray(lu->vec_dsc,&solution_vec);
265: } /* end of if (lu->dsc_id != -1) */
267: /* put permuted local solution solution_vec into x in the original order */
268: VecScatterBegin(lu->vec_dsc,x,INSERT_VALUES,SCATTER_REVERSE,lu->scat);
269: VecScatterEnd(lu->vec_dsc,x,INSERT_VALUES,SCATTER_REVERSE,lu->scat);
271: return(0);
272: }
276: PetscErrorCode MatCholeskyFactorNumeric_DSCPACK(Mat A,MatFactorInfo *info,Mat *F) {
277: Mat_SeqBAIJ *a_seq;
278: Mat_DSC *lu=(Mat_DSC*)(*F)->spptr;
279: Mat *tseq,A_seq=PETSC_NULL;
280: RealNumberType *my_a_nonz;
282: PetscMPIInt size;
283: PetscInt M=A->rmap.N,Mbs=M/lu->bs,max_mem_estimate,max_single_malloc_blk,
284: number_of_procs,i,j,next,iold,*idx,*iidx=0,*itmp;
285: IS my_cols_sorted;
286: Mat F_diag;
287:
289: MPI_Comm_size(A->comm,&size);
290: if ( lu->flg == DIFFERENT_NONZERO_PATTERN){ /* first numeric factorization */
291: /* convert A to A_seq */
292: if (size > 1) {
293: if (!lu->iden){
294: ISCreateStride(PETSC_COMM_SELF,M,0,1,&lu->iden);
295: }
296: MatGetSubMatrices(A,1,&lu->iden,&lu->iden,MAT_INITIAL_MATRIX,&tseq);
297: A_seq = tseq[0];
298: a_seq = (Mat_SeqBAIJ*)A_seq->data;
299: } else {
300: a_seq = (Mat_SeqBAIJ*)A->data;
301: }
302:
303: PetscMalloc(Mbs*sizeof(PetscInt),&lu->replication);
304: for (i=0; i<Mbs; i++) lu->replication[i] = lu->bs;
306: number_of_procs = DSC_Analyze(Mbs, a_seq->i, a_seq->j, lu->replication);
307:
308: i = size;
309: if ( number_of_procs < i ) i = number_of_procs;
310: number_of_procs = 1;
311: while ( i > 1 ){
312: number_of_procs *= 2; i /= 2;
313: }
315: /* DSC_Solver starts */
316: DSC_Open0( lu->My_DSC_Solver, number_of_procs, &lu->dsc_id, lu->comm_dsc );
318: if (lu->dsc_id != -1) {
319: DSC_Order(lu->My_DSC_Solver,lu->order_code,Mbs,a_seq->i,a_seq->j,lu->replication,
320: &M,&lu->num_local_strucs,
321: &lu->num_local_cols, &lu->num_local_nonz, &lu->global_struc_new_col_num,
322: &lu->global_struc_new_num, &lu->global_struc_owner,
323: &lu->local_struc_old_num);
324: if (ierr != DSC_NO_ERROR) {
325: DSC_ErrorDisplay(lu->My_DSC_Solver);
326: SETERRQ(PETSC_ERR_LIB,"Error when use DSC_Order()");
327: }
329: DSC_SFactor(lu->My_DSC_Solver,&max_mem_estimate,&max_single_malloc_blk,
330: lu->max_mem_allowed, lu->LBLASLevel, lu->DBLASLevel);
331: if (ierr != DSC_NO_ERROR) {
332: DSC_ErrorDisplay(lu->My_DSC_Solver);
333: SETERRQ(PETSC_ERR_LIB,"Error when use DSC_Order");
334: }
336: BAIJtoMyANonz(a_seq->i, a_seq->j, lu->bs, a_seq->a,
337: lu->num_local_strucs, lu->num_local_nonz,
338: lu->global_struc_new_col_num,
339: lu->local_struc_old_num,
340: PETSC_NULL,
341: &my_a_nonz);
342: if (ierr <0) {
343: DSC_ErrorDisplay(lu->My_DSC_Solver);
344: SETERRQ1(PETSC_ERR_LIB,"Error setting local nonzeroes at processor %d \n", lu->dsc_id);
345: }
347: /* get local_cols_old_num and IS my_cols to be used later */
348: PetscMalloc(lu->num_local_cols*sizeof(PetscInt),&lu->local_cols_old_num);
349: for (next = 0, i=0; i<lu->num_local_strucs; i++){
350: iold = lu->bs*lu->local_struc_old_num[i];
351: for (j=0; j<lu->bs; j++)
352: lu->local_cols_old_num[next++] = iold++;
353: }
354: ISCreateGeneral(PETSC_COMM_SELF,lu->num_local_cols,lu->local_cols_old_num,&lu->my_cols);
355:
356: } else { /* lu->dsc_id == -1 */
357: lu->num_local_cols = 0;
358: lu->local_cols_old_num = 0;
359: ISCreateGeneral(PETSC_COMM_SELF,lu->num_local_cols,lu->local_cols_old_num,&lu->my_cols);
360: }
361: /* generate vec_dsc and iden_dsc to be used later */
362: VecCreateSeq(PETSC_COMM_SELF,lu->num_local_cols,&lu->vec_dsc);
363: ISCreateStride(PETSC_COMM_SELF,lu->num_local_cols,0,1,&lu->iden_dsc);
364: lu->scat = PETSC_NULL;
366: if ( size>1 ) {
367: MatDestroyMatrices(1,&tseq);
368: }
369: } else { /* use previously computed symbolic factor */
370: /* convert A to my A_seq */
371: if (size > 1) {
372: if (lu->dsc_id == -1) {
373: itmp = 0;
374: } else {
375: PetscMalloc(2*lu->num_local_strucs*sizeof(PetscInt),&idx);
376: iidx = idx + lu->num_local_strucs;
377: PetscMalloc(lu->num_local_cols*sizeof(PetscInt),&itmp);
378:
379: isort2(lu->num_local_strucs, lu->local_struc_old_num, idx);
380: for (next=0, i=0; i< lu->num_local_strucs; i++) {
381: iold = lu->bs*lu->local_struc_old_num[idx[i]];
382: for (j=0; j<lu->bs; j++){
383: itmp[next++] = iold++; /* sorted local_cols_old_num */
384: }
385: }
386: for (i=0; i< lu->num_local_strucs; i++) {
387: iidx[idx[i]] = i; /* inverse of idx */
388: }
389: } /* end of (lu->dsc_id == -1) */
390: ISCreateGeneral(PETSC_COMM_SELF,lu->num_local_cols,itmp,&my_cols_sorted);
391: MatGetSubMatrices(A,1,&my_cols_sorted,&lu->iden,MAT_INITIAL_MATRIX,&tseq);
392: ISDestroy(my_cols_sorted);
393: A_seq = tseq[0];
394:
395: if (lu->dsc_id != -1) {
396: DSC_ReFactorInitialize(lu->My_DSC_Solver);
398: a_seq = (Mat_SeqBAIJ*)A_seq->data;
399: BAIJtoMyANonz(a_seq->i, a_seq->j, lu->bs, a_seq->a,
400: lu->num_local_strucs, lu->num_local_nonz,
401: lu->global_struc_new_col_num,
402: lu->local_struc_old_num,
403: iidx,
404: &my_a_nonz);
405: if (ierr <0) {
406: DSC_ErrorDisplay(lu->My_DSC_Solver);
407: SETERRQ1(PETSC_ERR_LIB,"Error setting local nonzeroes at processor %d \n", lu->dsc_id);
408: }
409: PetscFree(idx);
410: PetscFree(itmp);
411: } /* end of if(lu->dsc_id != -1) */
412: } else { /* size == 1 */
413: a_seq = (Mat_SeqBAIJ*)A->data;
414:
415: BAIJtoMyANonz(a_seq->i, a_seq->j, lu->bs, a_seq->a,
416: lu->num_local_strucs, lu->num_local_nonz,
417: lu->global_struc_new_col_num,
418: lu->local_struc_old_num,
419: PETSC_NULL,
420: &my_a_nonz);
421: if (ierr <0) {
422: DSC_ErrorDisplay(lu->My_DSC_Solver);
423: SETERRQ1(PETSC_ERR_LIB,"Error setting local nonzeroes at processor %d \n", lu->dsc_id);
424: }
425: }
426: if ( size>1 ) {MatDestroyMatrices(1,&tseq); }
427: }
428:
429: if (lu->dsc_id != -1) {
430: DSC_NFactor(lu->My_DSC_Solver, lu->scheme_code, my_a_nonz, lu->factor_type, lu->LBLASLevel, lu->DBLASLevel);
431: PetscFree(my_a_nonz);
432: }
433:
434: F_diag = ((Mat_MPIBAIJ *)(*F)->data)->A;
435: F_diag->assembled = PETSC_TRUE;
436: (*F)->assembled = PETSC_TRUE;
437: lu->flg = SAME_NONZERO_PATTERN;
439: return(0);
440: }
442: /* Note the Petsc permutation r is ignored */
/* Note the Petsc permutation r is ignored: DSCPACK computes its own
   fill-reducing ordering (selected by -mat_dscpack_order). */
/*
   Symbolic Cholesky factorization: creates the factor matrix F (a BAIJ
   matrix of the same type/sizes as A), reads the DSCPACK runtime options,
   and initializes the DSCPACK solver handle.  The actual symbolic work
   is deferred to the first call of MatCholeskyFactorNumeric_DSCPACK.
*/
PetscErrorCode MatCholeskyFactorSymbolic_DSCPACK(Mat A,IS r,MatFactorInfo *info,Mat *F) {
  Mat B;
  Mat_DSC *lu;
  PetscInt bs,indx;
  PetscTruth flg;
  const char *ftype[]={"LDLT","LLT"},*ltype[]={"LBLAS1","LBLAS2","LBLAS3"},*dtype[]={"DBLAS1","DBLAS2"};

  /* Create the factorization matrix F; both preallocation calls are made
     so the code works for either Seq or MPI base type */
  MatGetBlockSize(A,&bs);
  MatCreate(A->comm,&B);
  MatSetSizes(B,A->rmap.n,A->cmap.n,A->rmap.N,A->cmap.N);
  MatSetType(B,A->type_name);
  MatSeqBAIJSetPreallocation(B,bs,0,PETSC_NULL);
  MatMPIBAIJSetPreallocation(B,bs,0,PETSC_NULL,0,PETSC_NULL);

  lu = (Mat_DSC*)B->spptr;
  B->bs = bs;

  B->ops->choleskyfactornumeric = MatCholeskyFactorNumeric_DSCPACK;
  B->ops->solve = MatSolve_DSCPACK;
  B->factor = FACTOR_CHOLESKY;

  /* Set the default input options */
  lu->order_code = 2;          /* Hybrid with Minimum Degree */
  lu->scheme_code = 1;         /* standard factorization */
  lu->factor_type = 2;
  lu->stat = 0; /* do not display stats */
  lu->LBLASLevel = DSC_LBLAS3;
  lu->DBLASLevel = DSC_DBLAS2;
  lu->max_mem_allowed = 256;   /* Mbytes */
  MPI_Comm_dup(A->comm,&(lu->comm_dsc));
  /* Get the runtime input options */
  PetscOptionsBegin(A->comm,A->prefix,"DSCPACK Options","Mat");

  PetscOptionsInt("-mat_dscpack_order","order_code: \n\
  1 = ND, 2 = Hybrid with Minimum Degree, 3 = Hybrid with Minimum Deficiency", \
                  "None",
                  lu->order_code,&lu->order_code,PETSC_NULL);

  PetscOptionsInt("-mat_dscpack_scheme","scheme_code: \n\
  1 = standard factorization, 2 = factorization + selective inversion", \
                  "None",
                  lu->scheme_code,&lu->scheme_code,PETSC_NULL);

  PetscOptionsEList("-mat_dscpack_factor","factor_type","None",ftype,2,ftype[0],&indx,&flg);
  if (flg) {
    switch (indx) {
    case 0:
      lu->factor_type = DSC_LDLT;
      break;
    case 1:
      lu->factor_type = DSC_LLT;
      break;
    }
  }
  PetscOptionsInt("-mat_dscpack_MaxMemAllowed","in Mbytes","None",
                  lu->max_mem_allowed,&lu->max_mem_allowed,PETSC_NULL);

  PetscOptionsInt("-mat_dscpack_stats","display stats: 0 = no display, 1 = display",
                  "None", lu->stat,&lu->stat,PETSC_NULL);

  PetscOptionsEList("-mat_dscpack_LBLAS","BLAS level used in the local phase","None",ltype,3,ltype[2],&indx,&flg);
  if (flg) {
    switch (indx) {
    case 0:
      lu->LBLASLevel = DSC_LBLAS1;
      break;
    case 1:
      lu->LBLASLevel = DSC_LBLAS2;
      break;
    case 2:
      lu->LBLASLevel = DSC_LBLAS3;
      break;
    }
  }

  PetscOptionsEList("-mat_dscpack_DBLAS","BLAS level used in the distributed phase","None",dtype,2,dtype[1],&indx,&flg);
  if (flg) {
    switch (indx) {
    case 0:
      lu->DBLASLevel = DSC_DBLAS1;
      break;
    case 1:
      lu->DBLASLevel = DSC_DBLAS2;
      break;
    }
  }
  PetscOptionsEnd();

  /* force the full (ordering + symbolic) path on the first numeric call */
  lu->flg = DIFFERENT_NONZERO_PATTERN;

  lu->My_DSC_Solver = DSC_Begin();
  lu->CleanUpDSCPACK = PETSC_TRUE;
  *F = B;
  return(0);
}
548: PetscErrorCode MatAssemblyEnd_DSCPACK(Mat A,MatAssemblyType mode) {
550: Mat_DSC *lu=(Mat_DSC*)A->spptr;
553: (*lu->MatAssemblyEnd)(A,mode);
554: lu->MatCholeskyFactorSymbolic = A->ops->choleskyfactorsymbolic;
555: A->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_DSCPACK;
556: return(0);
557: }
561: PetscErrorCode MatFactorInfo_DSCPACK(Mat A,PetscViewer viewer)
562: {
563: Mat_DSC *lu=(Mat_DSC*)A->spptr;
565: char *s=0;
566:
568: PetscViewerASCIIPrintf(viewer,"DSCPACK run parameters:\n");
570: switch (lu->order_code) {
571: case 1: s = "ND"; break;
572: case 2: s = "Hybrid with Minimum Degree"; break;
573: case 3: s = "Hybrid with Minimum Deficiency"; break;
574: }
575: PetscViewerASCIIPrintf(viewer," order_code: %s \n",s);
577: switch (lu->scheme_code) {
578: case 1: s = "standard factorization"; break;
579: case 2: s = "factorization + selective inversion"; break;
580: }
581: PetscViewerASCIIPrintf(viewer," scheme_code: %s \n",s);
583: switch (lu->stat) {
584: case 0: s = "NO"; break;
585: case 1: s = "YES"; break;
586: }
587: PetscViewerASCIIPrintf(viewer," display stats: %s \n",s);
588:
589: if ( lu->factor_type == DSC_LLT) {
590: s = "LLT";
591: } else if ( lu->factor_type == DSC_LDLT){
592: s = "LDLT";
593: } else {
594: SETERRQ(PETSC_ERR_PLIB,"Unknown factor type");
595: }
596: PetscViewerASCIIPrintf(viewer," factor type: %s \n",s);
598: if ( lu->LBLASLevel == DSC_LBLAS1) {
599: s = "BLAS1";
600: } else if ( lu->LBLASLevel == DSC_LBLAS2){
601: s = "BLAS2";
602: } else if ( lu->LBLASLevel == DSC_LBLAS3){
603: s = "BLAS3";
604: } else {
605: SETERRQ(PETSC_ERR_PLIB,"Unknown local phase BLAS level");
606: }
607: PetscViewerASCIIPrintf(viewer," local phase BLAS level: %s \n",s);
608:
609: if ( lu->DBLASLevel == DSC_DBLAS1) {
610: s = "BLAS1";
611: } else if ( lu->DBLASLevel == DSC_DBLAS2){
612: s = "BLAS2";
613: } else {
614: SETERRQ(PETSC_ERR_PLIB,"Unknown distributed phase BLAS level");
615: }
616: PetscViewerASCIIPrintf(viewer," distributed phase BLAS level: %s \n",s);
617: return(0);
618: }
622: PetscErrorCode MatView_DSCPACK(Mat A,PetscViewer viewer) {
623: PetscErrorCode ierr;
624: PetscMPIInt size;
625: PetscTruth iascii;
626: PetscViewerFormat format;
627: Mat_DSC *lu=(Mat_DSC*)A->spptr;
630: /* This convertion ugliness is because MatView for BAIJ types calls MatConvert to AIJ */
631: size = lu->size;
632: if (size==1) {
633: MatConvert(A,MATSEQBAIJ,MAT_REUSE_MATRIX,&A);
634: } else {
635: MatConvert(A,MATMPIBAIJ,MAT_REUSE_MATRIX,&A);
636: }
638: MatView(A,viewer);
640: MatConvert(A,MATDSCPACK,MAT_REUSE_MATRIX,&A);
642: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);
643: if (iascii) {
644: PetscViewerGetFormat(viewer,&format);
645: if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) {
646: MatFactorInfo_DSCPACK(A,viewer);
647: }
648: }
649: return(0);
650: }
655: PetscErrorCode PETSCMAT_DLLEXPORT MatMPIBAIJSetPreallocation_MPIDSCPACK(Mat B,PetscInt bs,PetscInt d_nz,PetscInt *d_nnz,PetscInt o_nz,PetscInt *o_nnz)
656: {
657: Mat A;
658: Mat_DSC *lu = (Mat_DSC*)B->spptr;
662: /*
663: After performing the MPIBAIJ Preallocation, we need to convert the local diagonal block matrix
664: into DSCPACK type so that the block jacobi preconditioner (for example) can use DSCPACK. I would
665: like this to be done in the MatCreate routine, but the creation of this inner matrix requires
666: block size info so that PETSc can determine the local size properly. The block size info is set
667: in the preallocation routine.
668: */
669: (*lu->MatPreallocate)(B,bs,d_nz,d_nnz,o_nz,o_nnz);
670: A = ((Mat_MPIBAIJ *)B->data)->A;
671: MatConvert_Base_DSCPACK(A,MATDSCPACK,MAT_REUSE_MATRIX,&A);
672: return(0);
673: }
PetscErrorCode PETSCMAT_DLLEXPORT MatConvert_Base_DSCPACK(Mat A,const MatType type,MatReuse reuse,Mat *newmat)
{
  /* This routine is only called to convert to MATDSCPACK */
  /* from MATSEQBAIJ if A has a single process communicator */
  /* or MATMPIBAIJ otherwise, so we will ignore 'MatType type'. */
  MPI_Comm comm;
  Mat B=*newmat;
  Mat_DSC *lu;
  void (*f)(void);

  if (reuse == MAT_INITIAL_MATRIX) {
    MatDuplicate(A,MAT_COPY_VALUES,&B);
  }

  PetscObjectGetComm((PetscObject)A,&comm);
  /* allocate the DSCPACK context; NOTE(review): any pre-existing B->spptr
     is overwritten below without being freed -- confirm B->spptr is NULL
     for plain BAIJ matrices */
  PetscNew(Mat_DSC,&lu);

  /* save the base-class (BAIJ) methods so MatConvert_DSCPACK_Base can restore them */
  lu->MatDuplicate = A->ops->duplicate;
  lu->MatView = A->ops->view;
  lu->MatAssemblyEnd = A->ops->assemblyend;
  lu->MatCholeskyFactorSymbolic = A->ops->choleskyfactorsymbolic;
  lu->MatDestroy = A->ops->destroy;
  lu->CleanUpDSCPACK = PETSC_FALSE;   /* no DSCPACK state created yet */
  lu->bs = A->bs;

  /* install the DSCPACK overrides */
  B->spptr = (void*)lu;
  B->ops->duplicate = MatDuplicate_DSCPACK;
  B->ops->view = MatView_DSCPACK;
  B->ops->assemblyend = MatAssemblyEnd_DSCPACK;
  B->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_DSCPACK;
  B->ops->destroy = MatDestroy_DSCPACK;

  /* register the converters appropriate for the base type */
  MPI_Comm_size(comm,&(lu->size));
  if (lu->size == 1) {
    PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_seqbaij_dscpack_C",
                                      "MatConvert_Base_DSCPACK",MatConvert_Base_DSCPACK);
    PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_dscpack_seqbaij_C",
                                      "MatConvert_DSCPACK_Base",MatConvert_DSCPACK_Base);
  } else {
    /* I really don't like needing to know the tag: MatMPIBAIJSetPreallocation_C */
    PetscObjectQueryFunction((PetscObject)B,"MatMPIBAIJSetPreallocation_C",&f);
    if (f) {
      /* save the base preallocation so the DSCPACK wrapper can call it */
      lu->MatPreallocate = (PetscErrorCode (*)(Mat,PetscInt,PetscInt,PetscInt*,PetscInt,PetscInt*))f;
      PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIBAIJSetPreallocation_C",
                                        "MatMPIBAIJSetPreallocation_MPIDSCPACK",
                                        MatMPIBAIJSetPreallocation_MPIDSCPACK);
    }
    PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_mpibaij_dscpack_C",
                                      "MatConvert_Base_DSCPACK",MatConvert_Base_DSCPACK);
    PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_dscpack_mpibaij_C",
                                      "MatConvert_DSCPACK_Base",MatConvert_DSCPACK_Base);
  }
  PetscObjectChangeTypeName((PetscObject)B,MATDSCPACK);
  *newmat = B;
  return(0);
}
/*
   Duplicates a DSCPACK matrix: runs the inherited BAIJ duplicate, then
   copies the Mat_DSC context into the duplicate's spptr.
   NOTE(review): PetscMemcpy makes a shallow copy -- pointer members
   (My_DSC_Solver, the ordering arrays, my_cols, vec_dsc, scat, ...) are
   shared between A and *M, and CleanUpDSCPACK is copied too, so both
   destructors may try to free the same DSCPACK state.  Verify intended
   ownership semantics.
*/
PetscErrorCode MatDuplicate_DSCPACK(Mat A, MatDuplicateOption op, Mat *M) {
  Mat_DSC *lu=(Mat_DSC *)A->spptr;

  (*lu->MatDuplicate)(A,op,M);
  PetscMemcpy((*M)->spptr,lu,sizeof(Mat_DSC));
  return(0);
}
751: /*MC
752: MATDSCPACK - MATDSCPACK = "dscpack" - A matrix type providing direct solvers (Cholesky) for sequential
753: or distributed matrices via the external package DSCPACK.
755: If DSCPACK is installed (see the manual for
756: instructions on how to declare the existence of external packages),
757: a matrix type can be constructed which invokes DSCPACK solvers.
758: After calling MatCreate(...,A), simply call MatSetType(A,MATDSCPACK).
759: This matrix type is only supported for double precision real.
761: This matrix inherits from MATSEQBAIJ if constructed with a single process communicator,
762: and from MATMPIBAIJ otherwise. As a result, for sequential matrices, MatSeqBAIJSetPreallocation is
763: supported, and similarly MatMPIBAIJSetPreallocation is supported for distributed matrices. It is
764: recommended that you call both of the above preallocation routines for simplicity. Also,
765: MatConvert can be called to perform inplace conversion to and from MATSEQBAIJ or MATMPIBAIJ
766: for sequential or distributed matrices respectively.
768: Options Database Keys:
769: + -mat_type dscpack - sets the matrix type to dscpack during a call to MatSetFromOptions()
770: . -mat_dscpack_order <1,2,3> - DSCPACK ordering, 1:ND, 2:Hybrid with Minimum Degree, 3:Hybrid with Minimum Deficiency
771: . -mat_dscpack_scheme <1,2> - factorization scheme, 1:standard factorization, 2: factorization with selective inversion
772: . -mat_dscpack_factor <LLT,LDLT> - the type of factorization to be performed.
773: . -mat_dscpack_MaxMemAllowed <n> - the maximum memory to be used during factorization
774: . -mat_dscpack_stats <0,1> - display stats of the factorization and solves during MatDestroy(), 0: no display, 1: display
775: . -mat_dscpack_LBLAS <LBLAS1,LBLAS2,LBLAS3> - BLAS level used in the local phase
776: - -mat_dscpack_DBLAS <DBLAS1,DBLAS2> - BLAS level used in the distributed phase
778: Level: beginner
780: .seealso: PCCHOLESKY
781: M*/
786: PetscErrorCode PETSCMAT_DLLEXPORT MatCreate_DSCPACK(Mat A)
787: {
789: PetscMPIInt size;
792: /* Change type name before calling MatSetType to force proper construction of SeqBAIJ or MPIBAIJ */
793: /* and DSCPACK types */
794: PetscObjectChangeTypeName((PetscObject)A,MATDSCPACK);
795: MPI_Comm_size(A->comm,&size);
796: if (size == 1) {
797: MatSetType(A,MATSEQBAIJ);
798: } else {
799: MatSetType(A,MATMPIBAIJ);
800: }
801: MatConvert_Base_DSCPACK(A,MATDSCPACK,MAT_REUSE_MATRIX,&A);
802: return(0);
803: }