Actual source code: mesi.c
/*$Id: mesi.c,v 1.1 2001/09/12 03:30:08 bsmith Exp bsmith $*/
/*
   Defines the basic matrix operations for the ESI matrix wrapper,
   which makes an esi::Operator behave as a PETSc matrix.
*/

#include "src/mat/matimpl.h"
#include "petscsys.h"
#include "esi/petsc/vector.h"
#include "esi/petsc/matrix.h"
typedef struct {
  int                                   rstart,rend; /* range of local rows */
  esi::Operator<double,int>             *eop;
  esi::MatrixData<int>                  *emat;
  esi::MatrixRowReadAccess<double,int>  *rmat;
  esi::MatrixRowWriteAccess<double,int> *wmat;
} Mat_ESI;
/*
   Wraps a PETSc matrix so that it looks like an ESI matrix and stashes the wrapper
   inside the PETSc matrix. If the PETSc matrix already has a wrapper, that one is reused.
*/
int MatESIWrap(Mat xin,::esi::Operator<double,int> **v)
{
  esi::petsc::Matrix<double,int> *t;
  int                            ierr;

  if (!xin->esimat) {
    t = new esi::petsc::Matrix<double,int>(xin);
    t->getInterface("esi::Operator",xin->esimat);
  }
  *v = reinterpret_cast<esi::Operator<double,int>*>(xin->esimat);
  return(0);
}
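/*
   A minimal usage sketch (hypothetical calling code, not part of this file):
   any routine holding a PETSc Mat can obtain its cached ESI view.

       Mat                       A;    // an existing PETSc matrix
       esi::Operator<double,int> *op;
       MatESIWrap(A,&op);              // op now applies A; the wrapper is created
                                       // once and cached inside A for later calls
*/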
/*@C
     MatESISetOperator - Takes a PETSc matrix, sets its type to ESI, and
   wraps the given ESI operator so that it behaves as a PETSc matrix.
@*/
int MatESISetOperator(Mat xin,esi::Operator<double,int> *v)
{
  Mat_ESI    *x = (Mat_ESI*)xin->data;
  PetscTruth tesi;
  int        ierr;

  v->getInterface("esi::MatrixData",reinterpret_cast<void*&>(x->emat));
  v->getInterface("esi::MatrixRowReadAccess",reinterpret_cast<void*&>(x->rmat));
  v->getInterface("esi::MatrixRowWriteAccess",reinterpret_cast<void*&>(x->wmat));
  if (!x->emat) SETERRQ(1,"PETSc currently requires esi::Operator to support esi::MatrixData interface");

  PetscTypeCompare((PetscObject)xin,0,&tesi);
  if (tesi) {
    MatSetType(xin,MATESI);
  }
  PetscTypeCompare((PetscObject)xin,MATESI,&tesi);
  if (tesi) {
    int                  m,n,M,N;
    esi::IndexSpace<int> *rmap,*cmap;

    x->emat->getIndexSpaces(rmap,cmap);

    rmap->getGlobalSize(M);
    if (xin->M == -1) xin->M = M;
    else if (xin->M != M) SETERRQ2(1,"Global rows of Mat %d not equal size of esi::MatrixData %d",xin->M,M);

    cmap->getGlobalSize(N);
    if (xin->N == -1) xin->N = N;
    else if (xin->N != N) SETERRQ2(1,"Global columns of Mat %d not equal size of esi::MatrixData %d",xin->N,N);

    rmap->getLocalSize(m);
    if (xin->m == -1) xin->m = m;
    else if (xin->m != m) SETERRQ2(1,"Local rows of Mat %d not equal size of esi::MatrixData %d",xin->m,m);

    cmap->getLocalSize(n);
    if (xin->n == -1) xin->n = n;
    else if (xin->n != n) SETERRQ2(1,"Local columns of Mat %d not equal size of esi::MatrixData %d",xin->n,n);

    x->eop = v;
    v->addReference();
    if (!xin->rmap) {
      PetscMapCreateMPI(xin->comm,m,M,&xin->rmap);
    }
    if (!xin->cmap) {
      PetscMapCreateMPI(xin->comm,n,N,&xin->cmap);
    }
    PetscMapGetLocalRange(xin->rmap,&x->rstart,&x->rend);
    MatStashCreate_Private(xin->comm,1,&xin->stash);
  }
  return(0);
}
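/*
   A minimal usage sketch (hypothetical caller; assumes an ESI operator obtained
   elsewhere, e.g. from another library's factory):

       esi::Operator<double,int> *op = ...;   // existing ESI operator
       Mat                       A;
       MatCreate(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,M,N,&A);
       MatESISetOperator(A,op);               // A now forwards MatMult() etc. to op
*/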
extern PetscFList CCAList;

/*@
     MatESISetType - Given a PETSc matrix of type ESI, loads the ESI operator
   constructor registered under the given name and wraps the resulting ESI
   operator to look like a PETSc matrix.
@*/
int MatESISetType(Mat V,char *name)
{
  int                                  ierr;
  ::esi::Operator<double,int>          *ve;
  ::esi::Operator<double,int>::Factory *f;
  ::esi::Operator<double,int>::Factory *(*r)(void);
  ::esi::IndexSpace<int>               *rmap,*cmap;

  PetscFListFind(V->comm,CCAList,name,(void(**)(void))&r);
  if (!r) SETERRQ1(1,"Unable to load esi::OperatorFactory constructor %s",name);
  f = (*r)();
  if (V->m == PETSC_DECIDE) {
    PetscSplitOwnership(V->comm,&V->m,&V->M);
  }
  ESICreateIndexSpace("MPI",&V->comm,V->m,rmap);
  if (V->n == PETSC_DECIDE) {
    PetscSplitOwnership(V->comm,&V->n,&V->N);
  }
  ESICreateIndexSpace("MPI",&V->comm,V->n,cmap);
  f->create(*rmap,*cmap,ve);
  rmap->deleteReference();
  cmap->deleteReference();
  delete f;
  MatESISetOperator(V,ve);
  ve->deleteReference();
  return(0);
}
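/*
   A minimal usage sketch (the constructor name below is hypothetical; any name
   registered in CCAList would work):

       Mat A;
       MatCreate(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,M,N,&A);
       MatSetType(A,MATESI);
       MatESISetType(A,"esi::petsc::Matrix");  // look up the factory by name,
                                               // create and wrap the operator
*/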
int MatESISetFromOptions(Mat V)
{
  char       string[1024];
  PetscTruth flg;
  int        ierr;

  PetscTypeCompare((PetscObject)V,MATESI,&flg);
  if (flg) {
    PetscOptionsGetString(V->prefix,"-mat_esi_type",string,1024,&flg);
    if (flg) {
      MatESISetType(V,string);
    }
  }
  return(0);
}
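/*
   The same selection can be made at run time from the command line; e.g.
   (factory name hypothetical):

       ./myapp -mat_esi_type esi::petsc::Matrix
*/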
/* ------------------------------------------------------------------------------------*/

int MatSetValues_ESI(Mat mat,int m,int *im,int n,int *in,PetscScalar *v,InsertMode addv)
{
  Mat_ESI *iesi = (Mat_ESI*)mat->data;
  int     ierr,i,j,rstart = iesi->rstart,rend = iesi->rend;

  for (i=0; i<m; i++) {
    if (im[i] < 0) continue;
#if defined(PETSC_USE_BOPT_g)
    if (im[i] >= mat->M) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row too large");
#endif
    if (im[i] >= rstart && im[i] < rend) {
      /* locally owned row: copy the values in one entry at a time */
      for (j=0; j<n; j++) {
        iesi->wmat->copyIntoRow(im[i],&v[i+j*m],&in[j],1);
      }
    } else {
      /* off-process row: cache the values in the stash until assembly */
      MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m);
    }
  }
  return(0);
}
int MatAssemblyBegin_ESI(Mat mat,MatAssemblyType mode)
{
  int        ierr,nstash,reallocs,*rowners;
  InsertMode addv;

  /* make sure all processors are either in INSERT_VALUES or ADD_VALUES mode */
  MPI_Allreduce(&mat->insertmode,&addv,1,MPI_INT,MPI_BOR,mat->comm);
  if (addv == (ADD_VALUES|INSERT_VALUES)) {
    SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Some processors inserted others added");
  }
  mat->insertmode = addv; /* in case this processor had no cached values */

  PetscMapGetGlobalRange(mat->rmap,&rowners);
  MatStashScatterBegin_Private(&mat->stash,rowners);
  MatStashGetInfo_Private(&mat->stash,&nstash,&reallocs);
  PetscLogInfo(0,"MatAssemblyBegin_ESI:Stash has %d entries, uses %d mallocs.\n",nstash,reallocs);
  return(0);
}
int MatAssemblyEnd_ESI(Mat mat,MatAssemblyType mode)
{
  Mat_ESI     *a = (Mat_ESI*)mat->data;
  int         i,j,rstart,ncols,n,ierr,flg;
  int         *row,*col;
  PetscScalar *val;
  InsertMode  addv = mat->insertmode;

  while (1) {
    MatStashScatterGetMesg_Private(&mat->stash,&n,&row,&col,&val,&flg);
    if (!flg) break;
    for (i=0; i<n;) {
      /* identify the consecutive vals belonging to the same row; e.g. rows
         {3,3,3,7} yield two calls below: row 3 with 3 columns, row 7 with 1 */
      for (j=i,rstart=row[j]; j<n; j++) { if (row[j] != rstart) break; }
      if (j < n) ncols = j-i;
      else       ncols = n-i;
      /* now assemble all these values with a single function call */
      MatSetValues_ESI(mat,1,row+i,ncols,col+i,val+i,addv);
      i = j;
    }
  }
  MatStashScatterEnd_Private(&mat->stash);
  a->wmat->loadComplete();
  return(0);
}
int MatMult_ESI(Mat A,Vec xx,Vec yy)
{
  Mat_ESI                 *a = (Mat_ESI*)A->data;
  int                     ierr;
  esi::Vector<double,int> *x,*y;

  VecESIWrap(xx,&x);
  VecESIWrap(yy,&y);
  a->eop->apply(*x,*y);
  return(0);
}
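/*
   Once the operator is wrapped, the standard PETSc interface works unchanged;
   a sketch (x and y are conforming PETSc vectors):

       MatMult(A,x,y);   // dispatches through A->ops to MatMult_ESI(), which
                         // wraps the vectors and calls eop->apply()
*/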
int MatDestroy_ESI(Mat v)
{
  Mat_ESI *vs = (Mat_ESI*)v->data;
  int     ierr;

  if (vs->eop) {
    vs->eop->deleteReference();
  }
  MatStashDestroy_Private(&v->bstash);
  MatStashDestroy_Private(&v->stash);
  PetscFree(vs);
  return(0);
}
int MatView_ESI(Mat A,PetscViewer viewer)
{
  Mat_ESI              *a = (Mat_ESI*)A->data;
  int                  ierr,i,rstart,m,*cols,nz,j;
  PetscTruth           issocket,isascii,isbinary,isdraw;
  esi::IndexSpace<int> *rmap,*cmap;
  PetscScalar          *values;

  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_SOCKET,&issocket);
  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&isascii);
  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);
  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_DRAW,&isdraw);
  if (isascii) {
    ierr   = PetscViewerASCIIUseTabs(viewer,PETSC_NO);
    cols   = new int[100];
    values = new PetscScalar[100];
    ierr   = a->emat->getIndexSpaces(rmap,cmap);
    ierr   = rmap->getLocalPartitionOffset(rstart);
    ierr   = rmap->getLocalSize(m);
    for (i=rstart; i<rstart+m; i++) {
      PetscViewerASCIIPrintf(viewer,"row %d:",i);
      a->rmat->copyOutRow(i,values,cols,100,nz);
      for (j=0; j<nz; j++) {
        PetscViewerASCIIPrintf(viewer," %d %g ",cols[j],values[j]);
      }
      PetscViewerASCIIPrintf(viewer,"\n");
    }
    PetscViewerASCIIUseTabs(viewer,PETSC_YES);
    PetscViewerFlush(viewer);
  } else {
    SETERRQ1(1,"Viewer type %s not supported by ESI matrices",((PetscObject)viewer)->type_name);
  }
  return(0);
}
/* -------------------------------------------------------------------*/
static struct _MatOps MatOps_Values = {
       MatSetValues_ESI,
       0,
       0,
       MatMult_ESI,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatAssemblyBegin_ESI,
       MatAssemblyEnd_ESI,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatDestroy_ESI,
       MatView_ESI,
       0};
EXTERN_C_BEGIN
int MatCreate_ESI(Mat B)
{
  int     ierr;
  Mat_ESI *b;

  ierr    = PetscNew(Mat_ESI,&b);
  B->data = (void*)b;
  ierr    = PetscMemzero(b,sizeof(Mat_ESI));
  ierr    = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));
  B->factor           = 0;
  B->lupivotthreshold = 1.0;
  B->mapping          = 0;
  PetscOptionsGetReal(PETSC_NULL,"-mat_lu_pivotthreshold",&B->lupivotthreshold,PETSC_NULL);

  b->emat = 0;
  return(0);
}
EXTERN_C_END
EXTERN_C_BEGIN
int MatLoad_ESI(PetscViewer viewer,MatType type,Mat *newmat)
{
  Mat         A;
  PetscScalar *vals,*svals;
  MPI_Comm    comm = ((PetscObject)viewer)->comm;
  MPI_Status  status;
  int         i,nz,ierr,j,rstart,rend,fd;
  int         header[4],rank,size,*rowlengths = 0,M,N,m,*rowners,maxnz,*cols;
  int         *ourlens,*sndcounts = 0,*procsnz = 0,*offlens,jj,*mycols,*smycols;
  int         tag = ((PetscObject)viewer)->tag,cend,cstart,n;

  MPI_Comm_size(comm,&size);
  MPI_Comm_rank(comm,&rank);
  if (!rank) {
    /* process 0 reads the header: [cookie, rows, columns, total nonzeros] */
    PetscViewerBinaryGetDescriptor(viewer,&fd);
    PetscBinaryRead(fd,(char *)header,4,PETSC_INT);
    if (header[0] != MAT_FILE_COOKIE) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"not matrix object");
    if (header[3] < 0) {
      SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"Matrix in special format on disk, cannot load as MPIAIJ");
    }
  }

  MPI_Bcast(header+1,3,MPI_INT,0,comm);
  M = header[1]; N = header[2];
  /* determine ownership of all rows */
  m = M/size + ((M % size) > rank);
  PetscMalloc((size+2)*sizeof(int),&rowners);
  MPI_Allgather(&m,1,MPI_INT,rowners+1,1,MPI_INT,comm);
  rowners[0] = 0;
  for (i=2; i<=size; i++) {
    rowners[i] += rowners[i-1];
  }
  rstart = rowners[rank];
  rend   = rowners[rank+1];

  /* distribute row lengths to all processors */
  ierr    = PetscMalloc(2*(rend-rstart+1)*sizeof(int),&ourlens);
  offlens = ourlens + (rend-rstart);
  if (!rank) {
    PetscMalloc(M*sizeof(int),&rowlengths);
    PetscBinaryRead(fd,rowlengths,M,PETSC_INT);
    PetscMalloc(size*sizeof(int),&sndcounts);
    for (i=0; i<size; i++) sndcounts[i] = rowners[i+1] - rowners[i];
    MPI_Scatterv(rowlengths,sndcounts,rowners,MPI_INT,ourlens,rend-rstart,MPI_INT,0,comm);
    PetscFree(sndcounts);
  } else {
    MPI_Scatterv(0,0,0,MPI_INT,ourlens,rend-rstart,MPI_INT,0,comm);
  }

  if (!rank) {
    /* calculate the number of nonzeros on each processor */
    PetscMalloc(size*sizeof(int),&procsnz);
    PetscMemzero(procsnz,size*sizeof(int));
    for (i=0; i<size; i++) {
      for (j=rowners[i]; j<rowners[i+1]; j++) {
        procsnz[i] += rowlengths[j];
      }
    }
    PetscFree(rowlengths);

    /* determine max buffer needed and allocate it */
    maxnz = 0;
    for (i=0; i<size; i++) {
      maxnz = PetscMax(maxnz,procsnz[i]);
    }
    PetscMalloc(maxnz*sizeof(int),&cols);

    /* read in my part of the matrix column indices */
    nz = procsnz[0];
    PetscMalloc(nz*sizeof(int),&mycols);
    PetscBinaryRead(fd,mycols,nz,PETSC_INT);

    /* read in everyone else's part and ship it off */
    for (i=1; i<size; i++) {
      nz = procsnz[i];
      PetscBinaryRead(fd,cols,nz,PETSC_INT);
      MPI_Send(cols,nz,MPI_INT,i,tag,comm);
    }
    PetscFree(cols);
  } else {
    /* determine buffer space needed for message */
    nz = 0;
    for (i=0; i<m; i++) {
      nz += ourlens[i];
    }
    PetscMalloc((nz+1)*sizeof(int),&mycols);

    /* receive message of column indices */
    MPI_Recv(mycols,nz,MPI_INT,0,tag,comm,&status);
    MPI_Get_count(&status,MPI_INT,&maxnz);
    if (maxnz != nz) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"something is wrong with file");
  }

  /* determine column ownership if matrix is not square */
  if (N != M) {
    n      = N/size + ((N % size) > rank);
    ierr   = MPI_Scan(&n,&cend,1,MPI_INT,MPI_SUM,comm);
    cstart = cend - n;
  } else {
    cstart = rstart;
    cend   = rend;
    n      = cend - cstart;
  }

  /* loop over local rows, determining number of off-diagonal entries */
  PetscMemzero(offlens,m*sizeof(int));
  jj = 0;
  for (i=0; i<m; i++) {
    for (j=0; j<ourlens[i]; j++) {
      if (mycols[jj] < cstart || mycols[jj] >= cend) offlens[i]++;
      jj++;
    }
  }

  /* create our matrix */
  for (i=0; i<m; i++) {
    ourlens[i] -= offlens[i];
  }
  MatCreate(comm,m,n,M,N,newmat);
  MatSetType(*newmat,type);
  MatSetFromOptions(*newmat);
  A = *newmat;
  MatSetOption(A,MAT_COLUMNS_SORTED);
  for (i=0; i<m; i++) {
    ourlens[i] += offlens[i];
  }

  if (!rank) {
    PetscMalloc(maxnz*sizeof(PetscScalar),&vals);

    /* read in my part of the matrix numerical values */
    nz = procsnz[0];
    PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);

    /* insert into matrix */
    jj      = rstart;
    smycols = mycols;
    svals   = vals;
    for (i=0; i<m; i++) {
      MatSetValues(A,1,&jj,ourlens[i],smycols,svals,INSERT_VALUES);
      smycols += ourlens[i];
      svals   += ourlens[i];
      jj++;
    }

    /* read in the other processors' parts and ship them out */
    for (i=1; i<size; i++) {
      nz = procsnz[i];
      PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);
      MPI_Send(vals,nz,MPIU_SCALAR,i,A->tag,comm);
    }
    PetscFree(procsnz);
  } else {
    /* receive numeric values */
    PetscMalloc((nz+1)*sizeof(PetscScalar),&vals);

    /* receive message of values */
    MPI_Recv(vals,nz,MPIU_SCALAR,0,A->tag,comm,&status);
    MPI_Get_count(&status,MPIU_SCALAR,&maxnz);
    if (maxnz != nz) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"something is wrong with file");

    /* insert into matrix */
    jj      = rstart;
    smycols = mycols;
    svals   = vals;
    for (i=0; i<m; i++) {
      ierr     = MatSetValues(A,1,&jj,ourlens[i],smycols,svals,INSERT_VALUES);
      smycols += ourlens[i];
      svals   += ourlens[i];
      jj++;
    }
  }
  PetscFree(ourlens);
  PetscFree(vals);
  PetscFree(mycols);
  PetscFree(rowners);

  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
  return(0);
}
EXTERN_C_END
EXTERN_C_BEGIN
int MatCreate_PetscESI(Mat V)
{
  int                            ierr;
  Mat                            v;
  esi::petsc::Matrix<double,int> *ve;

  V->ops->destroy = 0; /* since this is called from MatSetType() we have to make sure it doesn't get destroyed twice */
  MatSetType(V,MATESI);
  MatCreate(V->comm,V->m,V->n,V->M,V->N,&v);
  PetscObjectSetOptionsPrefix((PetscObject)v,"esi_");
  MatSetFromOptions(v);
  ve = new esi::petsc::Matrix<double,int>(v);
  MatESISetOperator(V,ve);
  ve->deleteReference();
  PetscObjectDereference((PetscObject)v);
  return(0);
}
EXTERN_C_END