Actual source code: mpiadj.c
#define PETSCMAT_DLL

/*
    Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
*/
#include "src/mat/impls/adj/mpi/mpiadj.h"
#include "petscsys.h"
PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
{
  Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode    ierr;
  PetscInt          i,j,m = A->rmap.n;
  const char        *name;
  PetscViewerFormat format;

  PetscObjectGetName((PetscObject)A,&name);
  PetscViewerGetFormat(viewer,&format);
  if (format == PETSC_VIEWER_ASCII_INFO) {
    return(0);
  } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
    SETERRQ(PETSC_ERR_SUP,"Matlab format not supported");
  } else {
    PetscViewerASCIIUseTabs(viewer,PETSC_NO);
    for (i=0; i<m; i++) {
      PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+A->rmap.rstart);
      for (j=a->i[i]; j<a->i[i+1]; j++) {
        PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);
      }
      PetscViewerASCIISynchronizedPrintf(viewer,"\n");
    }
    PetscViewerASCIIUseTabs(viewer,PETSC_YES);
  }
  PetscViewerFlush(viewer);
  return(0);
}
PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
{
  PetscTruth iascii;

  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);
  if (iascii) {
    MatView_MPIAdj_ASCII(A,viewer);
  } else {
    SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
  }
  return(0);
}
PetscErrorCode MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)mat->data;

#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->rmap.n,mat->cmap.n,a->nz);
#endif
  PetscFree(a->diag);
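  /* the i, j, and values arrays are freed here only if the matrix owns them
     (freeaij is set to PETSC_TRUE by MatMPIAdjSetPreallocation_MPIAdj()) */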
  if (a->freeaij) {
    PetscFree(a->i);
    PetscFree(a->j);
    PetscFree(a->values);
  }
  PetscFree(a);
  PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C","",PETSC_NULL);
  return(0);
}
PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;

  switch (op) {
  case MAT_SYMMETRIC:
  case MAT_STRUCTURALLY_SYMMETRIC:
  case MAT_HERMITIAN:
    a->symmetric = PETSC_TRUE;
    break;
  case MAT_NOT_SYMMETRIC:
  case MAT_NOT_STRUCTURALLY_SYMMETRIC:
  case MAT_NOT_HERMITIAN:
    a->symmetric = PETSC_FALSE;
    break;
  case MAT_SYMMETRY_ETERNAL:
  case MAT_NOT_SYMMETRY_ETERNAL:
    break;
  default:
    PetscInfo(A,"Option ignored\n");
    break;
  }
  return(0);
}
/*
    Adds diagonal pointers to sparse matrix structure.
*/
PetscErrorCode MatMarkDiagonal_MPIAdj(Mat A)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
  PetscInt   i,j,*diag,m = A->rmap.n;

  PetscMalloc((m+1)*sizeof(PetscInt),&diag);
  PetscLogObjectMemory(A,(m+1)*sizeof(PetscInt));
  for (i=0; i<A->rmap.n; i++) {
    for (j=a->i[i]; j<a->i[i+1]; j++) {
      if (a->j[j] == i) {
        diag[i] = j;
        break;
      }
    }
  }
  a->diag = diag;
  return(0);
}
PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
  PetscInt   *itmp;
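
  /* convert the global row number into a local row number */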
  row -= A->rmap.rstart;

  if (row < 0 || row >= A->rmap.n) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");

  *nz = a->i[row+1] - a->i[row];
  if (v) *v = PETSC_NULL;
  if (idx) {
    itmp = a->j + a->i[row];
    if (*nz) {
      *idx = itmp;
    } else *idx = 0;
  }
  return(0);
}

PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  return(0);
}
PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
{
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
  PetscTruth flag;

  /* the matrices differ if the local dimensions or numbers of nonzeros do not match */
  if ((A->rmap.n != B->rmap.n) || (a->nz != b->nz)) {
    flag = PETSC_FALSE;
  } else {
    /* otherwise compare the row pointers a->i and the column indices a->j */
    PetscMemcmp(a->i,b->i,(A->rmap.n+1)*sizeof(PetscInt),&flag);
    if (flag) {
      PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);
    }
  }

  /* combine the per-process results; the matrices are equal only if they match everywhere */
  MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);
  return(0);
}
PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscTruth symmetric,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscTruth *done)
{
  PetscMPIInt size;
  PetscInt    i;
  Mat_MPIAdj  *a = (Mat_MPIAdj *)A->data;

  MPI_Comm_size(A->comm,&size);
  if (size > 1) {*done = PETSC_FALSE; return(0);}
  *m    = A->rmap.n;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
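  /* shift the returned ia/ja to one-based (Fortran-style) indexing if requested */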
  if (oshift) {
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  return(0);
}
PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscTruth symmetric,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscTruth *done)
{
  PetscInt   i;
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  if (ia && a->i != *ia) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
  if (ja && a->j != *ja) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  return(0);
}
/* -------------------------------------------------------------------*/
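/*
   Dispatch table for the MPIAdj implementation: only the operations that MPIAdj
   actually provides are filled in; every other slot is left as 0 (not supported).
*/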
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
/* 4*/ 0,
       0,
       0,
       0,
       0,
       0,
/*10*/ 0,
       0,
       0,
       0,
       0,
/*15*/ 0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
/*20*/ 0,
       0,
       0,
       MatSetOption_MPIAdj,
       0,
/*25*/ 0,
       0,
       0,
       0,
       0,
/*30*/ 0,
       0,
       0,
       0,
       0,
/*35*/ 0,
       0,
       0,
       0,
       0,
/*40*/ 0,
       0,
       0,
       0,
       0,
/*45*/ 0,
       0,
       0,
       0,
       0,
/*50*/ 0,
       MatGetRowIJ_MPIAdj,
       MatRestoreRowIJ_MPIAdj,
       0,
       0,
/*55*/ 0,
       0,
       0,
       0,
       0,
/*60*/ 0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       0,
       0,
/*65*/ 0,
       0,
       0,
       0,
       0,
/*70*/ 0,
       0,
       0,
       0,
       0,
/*75*/ 0,
       0,
       0,
       0,
       0,
/*80*/ 0,
       0,
       0,
       0,
       0,
/*85*/ 0,
       0,
       0,
       0,
       0,
/*90*/ 0,
       0,
       0,
       0,
       0,
/*95*/ 0,
       0,
       0,
       0};
PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  Mat_MPIAdj *b = (Mat_MPIAdj *)B->data;
#if defined(PETSC_USE_DEBUG)
  PetscInt   ii;
#endif

  B->preallocated = PETSC_TRUE;
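  /* in debug builds, check that the row offsets start at zero and are nondecreasing,
     and that every column index lies inside the global column range */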
#if defined(PETSC_USE_DEBUG)
  if (i[0] != 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D\n",i[0]);
  for (ii=1; ii<B->rmap.n; ii++) {
    if (i[ii] < 0 || i[ii] < i[ii-1]) {
      SETERRQ4(PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D index is out of range: i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
    }
  }
  for (ii=0; ii<i[B->rmap.n]; ii++) {
    if (j[ii] < 0 || j[ii] >= B->cmap.N) {
      SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Column index j[%D]=%D is out of range\n",ii,j[ii]);
    }
  }
#endif

  b->j         = j;
  b->i         = i;
  b->values    = values;
  b->nz        = i[B->rmap.n];
  b->diag      = 0;
  b->symmetric = PETSC_FALSE;
  b->freeaij   = PETSC_TRUE;

  MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);
  return(0);
}
/*MC
   MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
   intended for use in constructing orderings and partitionings.
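
   A matrix of this type is normally created with MatCreateMPIAdj(). A minimal sketch (not part
   of the original manual page) of the equivalent two-step creation, assuming a communicator comm,
   local sizes m and n, and CSR-style index arrays ii and jj allocated with PetscMalloc(), is
.vb
   MatCreate(comm,&A);
   MatSetSizes(A,m,n,PETSC_DETERMINE,PETSC_DETERMINE);
   MatSetType(A,MATMPIADJ);
   MatMPIAdjSetPreallocation(A,ii,jj,PETSC_NULL);
.ve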

   Level: beginner

.seealso: MatCreateMPIAdj()
M*/

PetscErrorCode PETSCMAT_DLLEXPORT MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj  *b;
  PetscMPIInt size,rank;

  MPI_Comm_size(B->comm,&size);
  MPI_Comm_rank(B->comm,&rank);

  PetscNew(Mat_MPIAdj,&b);
  B->data      = (void*)b;
  PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));
  B->factor    = 0;
  B->mapping   = 0;
  B->assembled = PETSC_FALSE;

  PetscMapInitialize(B->comm,&B->rmap);
  PetscMapInitialize(B->comm,&B->cmap);
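
  /* register the type-specific preallocation routine so that MatMPIAdjSetPreallocation()
     can dispatch to it through PetscObjectQueryFunction() */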
  PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAdjSetPreallocation_C",
                                    "MatMPIAdjSetPreallocation_MPIAdj",
                                    MatMPIAdjSetPreallocation_MPIAdj);
  return(0);
}
/*@C
   MatMPIAdjSetPreallocation - Sets the arrays used for storing the matrix entries

   Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Level: intermediate
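
   Example usage (a minimal sketch that is not part of the original manual page; error checking
   omitted). The arrays must be allocated with PetscMalloc() because the matrix takes ownership
   of them and frees them when it is destroyed:
.vb
   Mat      A;
   PetscInt *ii,*jj;

   /* on one process: row 0 is adjacent to columns 1 and 2, row 1 to column 0 */
   PetscMalloc(3*sizeof(PetscInt),&ii);
   PetscMalloc(3*sizeof(PetscInt),&jj);
   ii[0] = 0; ii[1] = 2; ii[2] = 3;
   jj[0] = 1; jj[1] = 2; jj[2] = 0;

   MatCreate(PETSC_COMM_SELF,&A);
   MatSetSizes(A,2,3,PETSC_DETERMINE,PETSC_DETERMINE);
   MatSetType(A,MATMPIADJ);
   MatMPIAdjSetPreallocation(A,ii,jj,PETSC_NULL);
.ve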

.seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  PetscErrorCode ierr,(*f)(Mat,PetscInt*,PetscInt*,PetscInt*);

  PetscObjectQueryFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",(void (**)(void))&f);
  if (f) {
    (*f)(B,i,j,values);
  }
  return(0);
}
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  n - number of columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j, and values arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
   call from Fortran you need not create the arrays with PetscMalloc().
   The i and j arrays should not include the matrix diagonal entries.

   If you already have a matrix, you can create its adjacency matrix by a call
   to MatConvert(), specifying a type of MATMPIADJ.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC
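
   Example usage (a minimal sketch that is not part of the original manual page; run on one
   process, error checking omitted). The arrays describe a 4-vertex ring graph 0-1-2-3-0 with
   no diagonal entries:
.vb
   Mat      A;
   PetscInt *ii,*jj;

   PetscMalloc(5*sizeof(PetscInt),&ii);
   PetscMalloc(8*sizeof(PetscInt),&jj);
   ii[0] = 0; ii[1] = 2; ii[2] = 4; ii[3] = 6; ii[4] = 8;
   jj[0] = 1; jj[1] = 3;   /* neighbours of vertex 0 */
   jj[2] = 0; jj[3] = 2;   /* neighbours of vertex 1 */
   jj[4] = 1; jj[5] = 3;   /* neighbours of vertex 2 */
   jj[6] = 0; jj[7] = 2;   /* neighbours of vertex 3 */

   MatCreateMPIAdj(PETSC_COMM_SELF,4,4,ii,jj,PETSC_NULL,&A);
   /* ... use with MatGetOrdering() or a MatPartitioning object ... */
   MatDestroy(A);
.ve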

.seealso: MatCreate(), MatConvert(), MatGetOrdering()
@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
{
  MatCreate(comm,A);
  MatSetSizes(*A,m,n,PETSC_DETERMINE,PETSC_DETERMINE);
  MatSetType(*A,MATMPIADJ);
  MatMPIAdjSetPreallocation(*A,i,j,values);
  return(0);
}
PetscErrorCode PETSCMAT_DLLEXPORT MatConvertTo_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
{
  Mat               B;
  PetscErrorCode    ierr;
  PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
  const PetscInt    *rj;
  const PetscScalar *ra;
  MPI_Comm          comm;

  MatGetSize(A,PETSC_NULL,&N);
  MatGetLocalSize(A,&m,PETSC_NULL);
  MatGetOwnershipRange(A,&rstart,PETSC_NULL);

  /* count the number of nonzeros per row */
  for (i=0; i<m; i++) {
    MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);
    for (j=0; j<len; j++) {
      if (rj[j] == i+rstart) {len--; break;} /* don't count diagonal */
    }
    MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);
    nzeros += len;
  }

  /* malloc space for nonzeros */
  PetscMalloc((nzeros+1)*sizeof(PetscInt),&a);
  PetscMalloc((N+1)*sizeof(PetscInt),&ia);
  PetscMalloc((nzeros+1)*sizeof(PetscInt),&ja);

  nzeros = 0;
  ia[0]  = 0;
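  /* second pass: copy the off-diagonal column indices and convert the scalar entries
     to integer edge weights */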
  for (i=0; i<m; i++) {
    MatGetRow(A,i+rstart,&len,&rj,&ra);
    cnt  = 0;
    for (j=0; j<len; j++) {
      if (rj[j] != i+rstart) { /* if not diagonal */
        a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
        ja[nzeros+cnt++] = rj[j];
      }
    }
    MatRestoreRow(A,i+rstart,&len,&rj,&ra);
    nzeros += cnt;
    ia[i+1] = nzeros;
  }

  PetscObjectGetComm((PetscObject)A,&comm);
  MatCreate(comm,&B);
  MatSetSizes(B,m,N,PETSC_DETERMINE,N);
  MatSetType(B,type);
  MatMPIAdjSetPreallocation(B,ia,ja,a);

  if (reuse == MAT_REUSE_MATRIX) {
    MatHeaderReplace(A,B);
  } else {
    *newmat = B;
  }
  return(0);
}