Actual source code: mpiadj.c
#define PETSCMAT_DLL

/*
    Defines the basic matrix operations for the ADJ adjacency list matrix data structure.
*/
#include "src/mat/impls/adj/mpi/mpiadj.h"
#include "petscsys.h"

PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
{
  Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode    ierr;
  PetscInt          i,j,m = A->m;
  const char        *name;
  PetscViewerFormat format;

  PetscObjectGetName((PetscObject)A,&name);
  PetscViewerGetFormat(viewer,&format);
  if (format == PETSC_VIEWER_ASCII_INFO) {
    return(0);
  } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
    SETERRQ(PETSC_ERR_SUP,"Matlab format not supported");
  } else {
    PetscViewerASCIIUseTabs(viewer,PETSC_NO);
    for (i=0; i<m; i++) {
      PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+a->rstart);
      for (j=a->i[i]; j<a->i[i+1]; j++) {
        PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);
      }
      PetscViewerASCIISynchronizedPrintf(viewer,"\n");
    }
    PetscViewerASCIIUseTabs(viewer,PETSC_YES);
  }
  PetscViewerFlush(viewer);
  return(0);
}

PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
{
  PetscTruth iascii;

  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);
  if (iascii) {
    MatView_MPIAdj_ASCII(A,viewer);
  } else {
    SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
  }
  return(0);
}

PetscErrorCode MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)mat->data;

#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->m,mat->n,a->nz);
#endif
  if (a->diag) {PetscFree(a->diag);}
  if (a->freeaij) {
    PetscFree(a->i);
    PetscFree(a->j);
    if (a->values) {PetscFree(a->values);}
  }
  PetscFree(a->rowners);
  PetscFree(a);

  PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C","",PETSC_NULL);
  return(0);
}

PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;

  switch (op) {
  case MAT_SYMMETRIC:
  case MAT_STRUCTURALLY_SYMMETRIC:
  case MAT_HERMITIAN:
    a->symmetric = PETSC_TRUE;
    break;
  case MAT_NOT_SYMMETRIC:
  case MAT_NOT_STRUCTURALLY_SYMMETRIC:
  case MAT_NOT_HERMITIAN:
    a->symmetric = PETSC_FALSE;
    break;
  case MAT_SYMMETRY_ETERNAL:
  case MAT_NOT_SYMMETRY_ETERNAL:
    break;
  default:
    PetscLogInfo((A,"MatSetOption_MPIAdj:Option ignored\n"));
    break;
  }
  return(0);
}

/*
     Adds diagonal pointers to sparse matrix structure.
*/
PetscErrorCode MatMarkDiagonal_MPIAdj(Mat A)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
  PetscInt   i,j,*diag,m = A->m;

  PetscMalloc((m+1)*sizeof(PetscInt),&diag);
  PetscLogObjectMemory(A,(m+1)*sizeof(PetscInt));
  for (i=0; i<A->m; i++) {
    for (j=a->i[i]; j<a->i[i+1]; j++) {
      if (a->j[j] == i) {
        diag[i] = j;
        break;
      }
    }
  }
  a->diag = diag;
  return(0);
}

PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
  PetscInt   *itmp;

  row -= a->rstart;

  if (row < 0 || row >= A->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");

  *nz = a->i[row+1] - a->i[row];
  if (v) *v = PETSC_NULL;
  if (idx) {
    itmp = a->j + a->i[row];
    if (*nz) {
      *idx = itmp;
    } else *idx = 0;
  }
  return(0);
}

PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  return(0);
}

PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
{
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
  PetscTruth flag;

  /* if the local dimensions or the number of nonzeros differ, the matrices cannot be equal */
  if ((A->m != B->m) || (a->nz != b->nz)) {
    flag = PETSC_FALSE;
  } else {
    /* check that the row pointers a->i are the same */
    PetscMemcmp(a->i,b->i,(A->m+1)*sizeof(PetscInt),&flag);
    /* check that the column indices a->j are the same */
    if (flag) {
      PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);
    }
  }
  /* all processes must agree for the matrices to be equal */
  MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);
  return(0);
}

PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscTruth symmetric,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscTruth *done)
{
  PetscMPIInt size;
  PetscInt    i;
  Mat_MPIAdj  *a = (Mat_MPIAdj *)A->data;

  MPI_Comm_size(A->comm,&size);
  if (size > 1) {*done = PETSC_FALSE; return(0);}
  *m    = A->m;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  return(0);
}

PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscTruth symmetric,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscTruth *done)
{
  PetscInt   i;
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  if (ia && a->i != *ia) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
  if (ja && a->j != *ja) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  return(0);
}

/* -------------------------------------------------------------------*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
/* 4*/ 0,
       0,
       0,
       0,
       0,
       0,
/*10*/ 0,
       0,
       0,
       0,
       0,
/*15*/ 0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
/*20*/ 0,
       0,
       0,
       MatSetOption_MPIAdj,
       0,
/*25*/ 0,
       0,
       0,
       0,
       0,
/*30*/ 0,
       0,
       0,
       0,
       0,
/*35*/ 0,
       0,
       0,
       0,
       0,
/*40*/ 0,
       0,
       0,
       0,
       0,
/*45*/ 0,
       0,
       0,
       0,
       0,
/*50*/ 0,
       MatGetRowIJ_MPIAdj,
       MatRestoreRowIJ_MPIAdj,
       0,
       0,
/*55*/ 0,
       0,
       0,
       0,
       0,
/*60*/ 0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       MatGetPetscMaps_Petsc,
       0,
/*65*/ 0,
       0,
       0,
       0,
       0,
/*70*/ 0,
       0,
       0,
       0,
       0,
/*75*/ 0,
       0,
       0,
       0,
       0,
/*80*/ 0,
       0,
       0,
       0,
       0,
/*85*/ 0,
       0,
       0,
       0,
       0,
/*90*/ 0,
       0,
       0,
       0,
       0,
/*95*/ 0,
       0,
       0,
       0};

PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  Mat_MPIAdj *b = (Mat_MPIAdj *)B->data;
#if defined(PETSC_USE_DEBUG)
  PetscInt   ii;
#endif

  B->preallocated = PETSC_TRUE;
#if defined(PETSC_USE_DEBUG)
  if (i[0] != 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D\n",i[0]);
  for (ii=1; ii<B->m; ii++) {
    if (i[ii] < 0 || i[ii] < i[ii-1]) {
      SETERRQ4(PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D index is out of range: i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
    }
  }
  for (ii=0; ii<i[B->m]; ii++) {
    if (j[ii] < 0 || j[ii] >= B->N) {
      SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Column index %D out of range %D\n",ii,j[ii]);
    }
  }
#endif

  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz        = i[B->m];
  b->diag      = 0;
  b->symmetric = PETSC_FALSE;
  b->freeaij   = PETSC_TRUE;

  MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);
  return(0);
}

/*MC
   MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
   intended for use in constructing orderings and partitionings.

  Level: beginner

.seealso: MatCreateMPIAdj()
M*/

PetscErrorCode PETSCMAT_DLLEXPORT MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj  *b;
  PetscInt    ii;
  PetscMPIInt size,rank;

  MPI_Comm_size(B->comm,&size);
  MPI_Comm_rank(B->comm,&rank);

  PetscNew(Mat_MPIAdj,&b);
  B->data      = (void*)b;
  PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));
  B->factor    = 0;
  B->mapping   = 0;
  B->assembled = PETSC_FALSE;

  PetscSplitOwnership(B->comm,&B->m,&B->M);
  B->n = B->N = PetscMax(B->N,B->n);

  /* the information in the maps duplicates the information computed below; eventually
     we should remove the duplicate information that is not contained in the maps */
  PetscMapCreateMPI(B->comm,B->m,B->M,&B->rmap);
  /* we don't know the "local columns" so just use the row information :-( */
  PetscMapCreateMPI(B->comm,B->m,B->M,&B->cmap);

  PetscMalloc((size+1)*sizeof(PetscInt),&b->rowners);
  PetscLogObjectMemory(B,(size+2)*sizeof(PetscInt)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));
  MPI_Allgather(&B->m,1,MPI_INT,b->rowners+1,1,MPI_INT,B->comm);
  b->rowners[0] = 0;
  for (ii=2; ii<=size; ii++) {
    b->rowners[ii] += b->rowners[ii-1];
  }
  b->rstart = b->rowners[rank];
  b->rend   = b->rowners[rank+1];

  PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAdjSetPreallocation_C",
                                    "MatMPIAdjSetPreallocation_MPIAdj",
                                    MatMPIAdjSetPreallocation_MPIAdj);
  return(0);
}

/*@C
   MatMPIAdjSetPreallocation - Sets the arrays used for storing the matrix entries

   Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Level: intermediate

.seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  PetscErrorCode ierr,(*f)(Mat,PetscInt*,PetscInt*,PetscInt*);

  PetscObjectQueryFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",(void (**)(void))&f);
  if (f) {
    (*f)(B,i,j,values);
  }
  return(0);
}

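To make the expected layout of i and j concrete, here is a minimal sketch that builds the adjacency matrix of a hypothetical 3-vertex triangle graph on a single process, going through the MatSetType()/MatMPIAdjSetPreallocation() path; the BuildTriangleAdj() name and the graph itself are invented for illustration, while the PETSc calls are the ones used elsewhere in this file.

/* Sketch: row r's neighbours are stored in jj[ii[r]] .. jj[ii[r+1]-1]; the diagonal
   is omitted and the arrays come from PetscMalloc() because the matrix takes
   ownership of them (freeaij is set to PETSC_TRUE above). */
PetscErrorCode BuildTriangleAdj(Mat *adj)
{
  PetscInt *ii,*jj;

  PetscMalloc(4*sizeof(PetscInt),&ii);        /* 3 rows + 1 */
  PetscMalloc(6*sizeof(PetscInt),&jj);        /* 2 neighbours per row */
  ii[0] = 0; ii[1] = 2; ii[2] = 4; ii[3] = 6;
  jj[0] = 1; jj[1] = 2;                       /* neighbours of vertex 0 */
  jj[2] = 0; jj[3] = 2;                       /* neighbours of vertex 1 */
  jj[4] = 0; jj[5] = 1;                       /* neighbours of vertex 2 */

  MatCreate(PETSC_COMM_SELF,adj);
  MatSetSizes(*adj,3,3,PETSC_DETERMINE,3);
  MatSetType(*adj,MATMPIADJ);
  MatMPIAdjSetPreallocation(*adj,ii,jj,PETSC_NULL);   /* no edge weights */
  return(0);
}
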
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  n - number of columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j, and values arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
   call from Fortran you need not create the arrays with PetscMalloc().
   The adjacency list should not include the matrix diagonal entries.

   If you already have a matrix, you can create its adjacency matrix by a call
   to MatConvert(), specifying a type of MATMPIADJ.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

.seealso: MatCreate(), MatConvert(), MatGetOrdering()
@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
{
  MatCreate(comm,A);
  MatSetSizes(*A,m,n,PETSC_DETERMINE,n);
  MatSetType(*A,MATMPIADJ);
  MatMPIAdjSetPreallocation(*A,i,j,values);
  return(0);
}

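The sketch below shows one way MatCreateMPIAdj() might be called for a hypothetical 4-vertex path graph 0-1-2-3 distributed over exactly two processes, two rows each; the CreatePathAdj() helper and the graph are made up for illustration. Following the notes above, the arrays are obtained with PetscMalloc(), are not freed by the caller, and omit the diagonal.

/* Sketch: build a distributed adjacency matrix for the path 0-1-2-3 on 2 processes.
   Column indices are global; each process supplies only its own rows. */
PetscErrorCode CreatePathAdj(MPI_Comm comm,Mat *adj)
{
  PetscMPIInt rank;
  PetscInt    *ii,*jj;

  MPI_Comm_rank(comm,&rank);
  PetscMalloc(3*sizeof(PetscInt),&ii);   /* 2 local rows + 1 */
  PetscMalloc(3*sizeof(PetscInt),&jj);   /* 3 off-diagonal neighbours on each process */
  if (!rank) {                 /* rows 0 and 1: 0-{1}, 1-{0,2} */
    ii[0] = 0; ii[1] = 1; ii[2] = 3;
    jj[0] = 1; jj[1] = 0; jj[2] = 2;
  } else {                     /* rows 2 and 3: 2-{1,3}, 3-{2} */
    ii[0] = 0; ii[1] = 2; ii[2] = 3;
    jj[0] = 1; jj[1] = 3; jj[2] = 2;
  }
  /* PETSc frees ii and jj when the matrix is destroyed; do not free them here */
  MatCreateMPIAdj(comm,2,4,ii,jj,PETSC_NULL,adj);
  return(0);
}
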
PetscErrorCode PETSCMAT_DLLEXPORT MatConvertTo_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
{
  Mat               B;
  PetscErrorCode    ierr;
  PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
  const PetscInt    *rj;
  const PetscScalar *ra;
  MPI_Comm          comm;

  MatGetSize(A,PETSC_NULL,&N);
  MatGetLocalSize(A,&m,PETSC_NULL);
  MatGetOwnershipRange(A,&rstart,PETSC_NULL);

  /* count the number of nonzeros per row */
  for (i=0; i<m; i++) {
    MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);
    for (j=0; j<len; j++) {
      if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
    }
    MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);
    nzeros += len;
  }

  /* malloc space for nonzeros */
  PetscMalloc((nzeros+1)*sizeof(PetscInt),&a);
  PetscMalloc((N+1)*sizeof(PetscInt),&ia);
  PetscMalloc((nzeros+1)*sizeof(PetscInt),&ja);

  nzeros = 0;
  ia[0]  = 0;
  for (i=0; i<m; i++) {
    MatGetRow(A,i+rstart,&len,&rj,&ra);
    cnt = 0;
    for (j=0; j<len; j++) {
      if (rj[j] != i+rstart) { /* if not diagonal */
        a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
        ja[nzeros+cnt++] = rj[j];
      }
    }
    MatRestoreRow(A,i+rstart,&len,&rj,&ra);
    nzeros += cnt;
    ia[i+1] = nzeros;
  }

  PetscObjectGetComm((PetscObject)A,&comm);
  MatCreate(comm,&B);
  MatSetSizes(B,m,N,PETSC_DETERMINE,N);
  MatSetType(B,type);
  MatMPIAdjSetPreallocation(B,ia,ja,a);

  if (reuse == MAT_REUSE_MATRIX) {
    MatHeaderReplace(A,B);
  } else {
    *newmat = B;
  }
  return(0);
}

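Since the MatCreateMPIAdj() manual page notes that an existing matrix can be turned into its adjacency graph with MatConvert(), here is a hedged sketch of that path feeding a partitioner. The PartitionByAdjacency() name and variables are invented, and the calling conventions (destroy-by-value, A->comm) are assumed to follow the same PETSc generation as the rest of this file.

/* Sketch: obtain the adjacency graph of an existing matrix A and partition it.
   The resulting index set gives the target process of each local row. */
PetscErrorCode PartitionByAdjacency(Mat A,IS *is)
{
  Mat             adj;
  MatPartitioning part;

  MatConvert(A,MATMPIADJ,MAT_INITIAL_MATRIX,&adj);   /* dispatches to MatConvertTo_MPIAdj() above */
  MatPartitioningCreate(A->comm,&part);
  MatPartitioningSetAdjacency(part,adj);
  MatPartitioningSetFromOptions(part);
  MatPartitioningApply(part,is);
  MatPartitioningDestroy(part);
  MatDestroy(adj);
  return(0);
}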