Actual source code: mpiadj.c
1: /*$Id: mpiadj.c,v 1.66 2001/08/07 03:02:59 balay Exp $*/
3: /*
4: Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
5: */
6: #include "src/mat/impls/adj/mpi/mpiadj.h"
7: #include "petscsys.h"
9: int MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
10: {
11: Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
12: int ierr,i,j,m = A->m;
13: char *name;
14: PetscViewerFormat format;
17: PetscObjectGetName((PetscObject)A,&name);
18: PetscViewerGetFormat(viewer,&format);
19: if (format == PETSC_VIEWER_ASCII_INFO) {
20: return(0);
21: } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
22: SETERRQ(PETSC_ERR_SUP,"Matlab format not supported");
23: } else {
24: PetscViewerASCIIUseTabs(viewer,PETSC_NO);
25: for (i=0; i<m; i++) {
26: PetscViewerASCIISynchronizedPrintf(viewer,"row %d:",i+a->rstart);
27: for (j=a->i[i]; j<a->i[i+1]; j++) {
28: PetscViewerASCIISynchronizedPrintf(viewer," %d ",a->j[j]);
29: }
30: PetscViewerASCIISynchronizedPrintf(viewer,"n");
31: }
32: PetscViewerASCIIUseTabs(viewer,PETSC_YES);
33: }
34: PetscViewerFlush(viewer);
35: return(0);
36: }
38: int MatView_MPIAdj(Mat A,PetscViewer viewer)
39: {
40: int ierr;
41: PetscTruth isascii;
44: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&isascii);
45: if (isascii) {
46: MatView_MPIAdj_ASCII(A,viewer);
47: } else {
48: SETERRQ1(1,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
49: }
50: return(0);
51: }
53: int MatDestroy_MPIAdj(Mat mat)
54: {
55: Mat_MPIAdj *a = (Mat_MPIAdj*)mat->data;
56: int ierr;
59: #if defined(PETSC_USE_LOG)
60: PetscLogObjectState((PetscObject)mat,"Rows=%d, Cols=%d, NZ=%d",mat->m,mat->n,a->nz);
61: #endif
62: if (a->diag) {PetscFree(a->diag);}
63: if (a->freeaij) {
64: PetscFree(a->i);
65: PetscFree(a->j);
66: if (a->values) {PetscFree(a->values);}
67: }
68: PetscFree(a->rowners);
69: PetscFree(a);
70: return(0);
71: }
73: int MatSetOption_MPIAdj(Mat A,MatOption op)
74: {
75: Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
78: switch (op) {
79: case MAT_STRUCTURALLY_SYMMETRIC:
80: a->symmetric = PETSC_TRUE;
81: break;
82: case MAT_USE_SINGLE_PRECISION_SOLVES:
83: default:
84: PetscLogInfo(A,"MatSetOption_MPIAdj:Option ignored\n");
85: break;
86: }
87: return(0);
88: }
91: /*
92: Adds diagonal pointers to sparse matrix structure.
93: */
95: int MatMarkDiagonal_MPIAdj(Mat A)
96: {
97: Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
98: int i,j,*diag,m = A->m,ierr;
101: PetscMalloc((m+1)*sizeof(int),&diag);
102: PetscLogObjectMemory(A,(m+1)*sizeof(int));
103: for (i=0; i<A->m; i++) {
104: for (j=a->i[i]; j<a->i[i+1]; j++) {
105: if (a->j[j] == i) {
106: diag[i] = j;
107: break;
108: }
109: }
110: }
111: a->diag = diag;
112: return(0);
113: }
115: int MatGetRow_MPIAdj(Mat A,int row,int *nz,int **idx,PetscScalar **v)
116: {
117: Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
118: int *itmp;
121: row -= a->rstart;
123: if (row < 0 || row >= A->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
125: *nz = a->i[row+1] - a->i[row];
126: if (v) *v = PETSC_NULL;
127: if (idx) {
128: itmp = a->j + a->i[row];
129: if (*nz) {
130: *idx = itmp;
131: }
132: else *idx = 0;
133: }
134: return(0);
135: }
137: int MatRestoreRow_MPIAdj(Mat A,int row,int *nz,int **idx,PetscScalar **v)
138: {
140: return(0);
141: }
143: int MatGetBlockSize_MPIAdj(Mat A,int *bs)
144: {
146: *bs = 1;
147: return(0);
148: }
151: int MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
152: {
153: Mat_MPIAdj *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
154: int ierr;
155: PetscTruth flag;
158: PetscTypeCompare((PetscObject)B,MATMPIADJ,&flag);
159: if (!flag) SETERRQ(PETSC_ERR_ARG_INCOMP,"Matrices must be same type");
161: /* If the matrix dimensions or the number of nonzeros are not equal */
162: if ((A->m != B->m) ||(a->nz != b->nz)) {
163: flag = PETSC_FALSE;
164: }
165:
166: /* if the a->i are the same */
167: PetscMemcmp(a->i,b->i,(A->m+1)*sizeof(int),&flag);
168:
169: /* if a->j are the same */
170: PetscMemcmp(a->j,b->j,(a->nz)*sizeof(int),&flag);
172: MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);
173: return(0);
174: }
176: int MatGetRowIJ_MPIAdj(Mat A,int oshift,PetscTruth symmetric,int *m,int **ia,int **ja,PetscTruth *done)
177: {
178: int ierr,size,i;
179: Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
182: MPI_Comm_size(A->comm,&size);
183: if (size > 1) {*done = PETSC_FALSE; return(0);}
184: *m = A->m;
185: *ia = a->i;
186: *ja = a->j;
187: *done = PETSC_TRUE;
188: if (oshift) {
189: for (i=0; i<(*ia)[*m]; i++) {
190: (*ja)[i]++;
191: }
192: for (i=0; i<=(*m); i++) (*ia)[i]++;
193: }
194: return(0);
195: }
197: int MatRestoreRowIJ_MPIAdj(Mat A,int oshift,PetscTruth symmetric,int *m,int **ia,int **ja,PetscTruth *done)
198: {
199: int i;
200: Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
203: if (ia && a->i != *ia) SETERRQ(1,"ia passed back is not one obtained with MatGetRowIJ()");
204: if (ja && a->j != *ja) SETERRQ(1,"ja passed back is not one obtained with MatGetRowIJ()");
205: if (oshift) {
206: for (i=0; i<=(*m); i++) (*ia)[i]--;
207: for (i=0; i<(*ia)[*m]; i++) {
208: (*ja)[i]--;
209: }
210: }
211: return(0);
212: }
214: /* -------------------------------------------------------------------*/
215: static struct _MatOps MatOps_Values = {0,
216: MatGetRow_MPIAdj,
217: MatRestoreRow_MPIAdj,
218: 0,
219: 0,
220: 0,
221: 0,
222: 0,
223: 0,
224: 0,
225: 0,
226: 0,
227: 0,
228: 0,
229: 0,
230: 0,
231: MatEqual_MPIAdj,
232: 0,
233: 0,
234: 0,
235: 0,
236: 0,
237: 0,
238: MatSetOption_MPIAdj,
239: 0,
240: 0,
241: 0,
242: 0,
243: 0,
244: 0,
245: 0,
246: 0,
247: 0,
248: 0,
249: 0,
250: 0,
251: 0,
252: 0,
253: 0,
254: 0,
255: 0,
256: 0,
257: 0,
258: 0,
259: 0,
260: 0,
261: 0,
262: 0,
263: 0,
264: 0,
265: MatGetBlockSize_MPIAdj,
266: MatGetRowIJ_MPIAdj,
267: MatRestoreRowIJ_MPIAdj,
268: 0,
269: 0,
270: 0,
271: 0,
272: 0,
273: 0,
274: 0,
275: 0,
276: MatDestroy_MPIAdj,
277: MatView_MPIAdj,
278: MatGetPetscMaps_Petsc};
281: EXTERN_C_BEGIN
282: int MatCreate_MPIAdj(Mat B)
283: {
284: Mat_MPIAdj *b;
285: int ii,ierr,size,rank;
288: MPI_Comm_size(B->comm,&size);
289: MPI_Comm_rank(B->comm,&rank);
291: ierr = PetscNew(Mat_MPIAdj,&b);
292: B->data = (void*)b;
293: ierr = PetscMemzero(b,sizeof(Mat_MPIAdj));
294: ierr = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));
295: B->factor = 0;
296: B->lupivotthreshold = 1.0;
297: B->mapping = 0;
298: B->assembled = PETSC_FALSE;
299:
300: MPI_Allreduce(&B->m,&B->M,1,MPI_INT,MPI_SUM,B->comm);
301: B->n = B->N;
303: /* the information in the maps duplicates the information computed below, eventually
304: we should remove the duplicate information that is not contained in the maps */
305: PetscMapCreateMPI(B->comm,B->m,B->M,&B->rmap);
306: /* we don't know the "local columns" so just use the row information :-(*/
307: PetscMapCreateMPI(B->comm,B->m,B->M,&B->cmap);
309: PetscMalloc((size+1)*sizeof(int),&b->rowners);
310: PetscLogObjectMemory(B,(size+2)*sizeof(int)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));
311: MPI_Allgather(&B->m,1,MPI_INT,b->rowners+1,1,MPI_INT,B->comm);
312: b->rowners[0] = 0;
313: for (ii=2; ii<=size; ii++) {
314: b->rowners[ii] += b->rowners[ii-1];
315: }
316: b->rstart = b->rowners[rank];
317: b->rend = b->rowners[rank+1];
319: return(0);
320: }
321: EXTERN_C_END
323: int MatMPIAdjSetPreallocation(Mat B,int *i,int *j,int *values)
324: {
325: Mat_MPIAdj *b = (Mat_MPIAdj *)B->data;
326: int ierr;
327: #if defined(PETSC_USE_BOPT_g)
328: int ii;
329: #endif
332: B->preallocated = PETSC_TRUE;
333: #if defined(PETSC_USE_BOPT_g)
334: if (i[0] != 0) SETERRQ1(1,"First i[] index must be zero, instead it is %d\n",i[0]);
335: for (ii=1; ii<B->m; ii++) {
336: if (i[ii] < 0 || i[ii] < i[ii-1]) {
337: SETERRQ4(1,"i[%d]=%d index is out of range: i[%d]=%d",ii,i[ii],ii-1,i[ii-1]);
338: }
339: }
340: for (ii=0; ii<i[B->m]; ii++) {
341: if (j[ii] < 0 || j[ii] >= B->N) {
342: SETERRQ2(1,"Column index %d out of range %d\n",ii,j[ii]);
343: }
344: }
345: #endif
347: b->j = j;
348: b->i = i;
349: b->values = values;
351: b->nz = i[B->m];
352: b->diag = 0;
353: b->symmetric = PETSC_FALSE;
354: b->freeaij = PETSC_TRUE;
356: MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
357: MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);
358: return(0);
359: }
361: /*@C
362: MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
363: The matrix does not have numerical values associated with it, but is
364: intended for ordering (to reduce bandwidth etc) and partitioning.
366: Collective on MPI_Comm
368: Input Parameters:
369: + comm - MPI communicator
370: . m - number of local rows
371: . n - number of columns
372: . i - the indices into j for the start of each row
373: . j - the column indices for each row (sorted for each row).
374: The indices in i and j start with zero (NOT with one).
375: - values - [optional] edge weights
377: Output Parameter:
378: . A - the matrix
380: Level: intermediate
382: Notes: This matrix object does not support most matrix operations, including
383: MatSetValues().
384: You must NOT free the i, j, and values arrays yourself. PETSc will free them
385: when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
386: call from Fortran you need not create the arrays with PetscMalloc().
387: The i and j arrays should not include the matrix diagonal entries.
389: Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC
391: .seealso: MatCreate(), MatCreateSeqAdj(), MatGetOrdering()
392: @*/
393: int MatCreateMPIAdj(MPI_Comm comm,int m,int n,int *i,int *j,int *values,Mat *A)
394: {
395: int ierr;
398: MatCreate(comm,m,n,PETSC_DETERMINE,n,A);
399: MatSetType(*A,MATMPIADJ);
400: MatMPIAdjSetPreallocation(*A,i,j,values);
401: return(0);
402: }
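/*
   Illustrative usage sketch (not part of mpiadj.c): one plausible way to build the
   i/j arrays for a 3-vertex fully connected graph on a single process and pass them
   to MatCreateMPIAdj(). The communicator, the example graph, and the PETSC_NULL
   weights argument are assumptions chosen for the sketch; the arrays must come from
   PetscMalloc() because the matrix frees them when it is destroyed.

     int *ia,*ja,ierr;
     Mat  A;

     ierr = PetscMalloc(4*sizeof(int),&ia);CHKERRQ(ierr);
     ierr = PetscMalloc(6*sizeof(int),&ja);CHKERRQ(ierr);
     ia[0] = 0; ia[1] = 2; ia[2] = 4; ia[3] = 6;      zero-based row offsets, ia[3] = nz = 6
     ja[0] = 1; ja[1] = 2;                            neighbors of vertex 0, sorted, no diagonal
     ja[2] = 0; ja[3] = 2;                            neighbors of vertex 1
     ja[4] = 0; ja[5] = 1;                            neighbors of vertex 2
     ierr = MatCreateMPIAdj(PETSC_COMM_SELF,3,3,ia,ja,PETSC_NULL,&A);CHKERRQ(ierr);
*/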
404: EXTERN_C_BEGIN
405: int MatConvertTo_MPIAdj(Mat A,MatType type,Mat *B)
406: {
407: int i,ierr,m,N,nzeros = 0,*ia,*ja,*rj,len,rstart,cnt,j,*a;
408: PetscScalar *ra;
409: MPI_Comm comm;
412: MatGetSize(A,PETSC_NULL,&N);
413: MatGetLocalSize(A,&m,PETSC_NULL);
414: MatGetOwnershipRange(A,&rstart,PETSC_NULL);
415:
416: /* count the number of nonzeros per row */
417: for (i=0; i<m; i++) {
418: ierr = MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);
419: for (j=0; j<len; j++) {
420: if (rj[j] == i+rstart) {len--; break;} /* don't count diagonal */
421: }
422: ierr = MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);
423: nzeros += len;
424: }
426: /* malloc space for nonzeros */
427: PetscMalloc((nzeros+1)*sizeof(int),&a);
428: PetscMalloc((N+1)*sizeof(int),&ia);
429: PetscMalloc((nzeros+1)*sizeof(int),&ja);
431: nzeros = 0;
432: ia[0] = 0;
433: for (i=0; i<m; i++) {
434: ierr = MatGetRow(A,i+rstart,&len,&rj,&ra);
435: cnt = 0;
436: for (j=0; j<len; j++) {
437: if (rj[j] != i+rstart) { /* if not diagonal */
438: a[nzeros+cnt] = (int) PetscAbsScalar(ra[j]);
439: ja[nzeros+cnt++] = rj[j];
440: }
441: }
442: ierr = MatRestoreRow(A,i+rstart,&len,&rj,&ra);
443: nzeros += cnt;
444: ia[i+1] = nzeros;
445: }
447: PetscObjectGetComm((PetscObject)A,&comm);
448: MatCreateMPIAdj(comm,m,N,ia,ja,a,B);
450: return(0);
451: }
452: EXTERN_C_END