Actual source code: mmsbaij.c
#define PETSCMAT_DLL

/*
   Support for the parallel SBAIJ matrix vector multiply
*/
#include "src/mat/impls/sbaij/mpi/mpisbaij.h"

EXTERN PetscErrorCode MatSetValues_SeqSBAIJ(Mat,PetscInt,const PetscInt [],PetscInt,const PetscInt [],const PetscScalar [],InsertMode);
PetscErrorCode MatSetUpMultiply_MPISBAIJ(Mat mat)
{
  Mat_MPISBAIJ *sbaij = (Mat_MPISBAIJ*)mat->data;
  Mat_SeqBAIJ  *B = (Mat_SeqBAIJ*)(sbaij->B->data);
  PetscInt     Nbs = sbaij->Nbs,i,j,*indices,*aj = B->j,ec = 0,*garray,*sgarray;
  PetscInt     bs = mat->bs,*stmp,mbs = sbaij->mbs,vec_size,nt;
  IS           from,to;
  Vec          gvec;
  PetscMPIInt  rank = sbaij->rank,lsize,size = sbaij->size;
  PetscInt     *owners = sbaij->rowners,*sowners,*ec_owner,k;
  PetscMap     vecmap;
  PetscScalar  *ptr;
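
  /* release any existing ghost vector and scatter context so they can be
     rebuilt to match the current off-diagonal nonzero pattern */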
  if (sbaij->lvec) {
    VecDestroy(sbaij->lvec);
    sbaij->lvec = 0;
  }
  if (sbaij->Mvctx) {
    VecScatterDestroy(sbaij->Mvctx);
    sbaij->Mvctx = 0;
  }

  /* For the first stab we make an array as long as the number of columns */
  /* mark those columns that are in sbaij->B */
  PetscMalloc((Nbs+1)*sizeof(PetscInt),&indices);
  PetscMemzero(indices,Nbs*sizeof(PetscInt));
  for (i=0; i<mbs; i++) {
    for (j=0; j<B->ilen[i]; j++) {
      if (!indices[aj[B->i[i] + j]]) ec++;
      indices[aj[B->i[i] + j]] = 1;
    }
  }
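  /* ec now counts the distinct block columns of B that contain off-diagonal entries */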

  /* form arrays of columns we need */
  PetscMalloc((ec+1)*sizeof(PetscInt),&garray);
  PetscMalloc((3*ec+1)*sizeof(PetscInt),&sgarray);
  ec_owner = sgarray + 2*ec;
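  /* layout of sgarray: the first 2*ec entries are filled below as scatter indices;
     the trailing ec entries (ec_owner) record which process owns each needed block column */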

  ec = 0;
  for (j=0; j<size; j++) {
    for (i=owners[j]; i<owners[j+1]; i++) {
      if (indices[i]) {
        garray[ec]   = i;
        ec_owner[ec] = j;
        ec++;
      }
    }
  }

  /* make indices now point into garray */
  for (i=0; i<ec; i++) indices[garray[i]] = i;

  /* compact out the extra columns in B */
  for (i=0; i<mbs; i++) {
    for (j=0; j<B->ilen[i]; j++) aj[B->i[i] + j] = indices[aj[B->i[i] + j]];
  }
  B->nbs      = ec;
  sbaij->B->n = ec*mat->bs;
  PetscFree(indices);
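  /* B now uses local column indices 0..ec-1; garray maps them back to global block columns */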

  /* create local vector that is used to scatter into */
  VecCreateSeq(PETSC_COMM_SELF,ec*bs,&sbaij->lvec);

  /* create two temporary index sets for building scatter-gather */
  PetscMalloc((2*ec+1)*sizeof(PetscInt),&stmp);
  for (i=0; i<ec; i++) stmp[i] = bs*garray[i];
  ISCreateBlock(PETSC_COMM_SELF,bs,ec,stmp,&from);

  for (i=0; i<ec; i++) { stmp[i] = bs*i; }
  ISCreateBlock(PETSC_COMM_SELF,bs,ec,stmp,&to);

  /* generate the scatter context
     -- Mvctx and lvec are not used by MatMult_MPISBAIJ(), but are useful for some applications */
  VecCreateMPI(mat->comm,mat->n,mat->N,&gvec);
  VecScatterCreate(gvec,from,sbaij->lvec,to,&sbaij->Mvctx);
  VecScatterPostRecvs(gvec,sbaij->lvec,INSERT_VALUES,SCATTER_FORWARD,sbaij->Mvctx);

  sbaij->garray = garray;
  PetscLogObjectParent(mat,sbaij->Mvctx);
  PetscLogObjectParent(mat,sbaij->lvec);
  PetscLogObjectParent(mat,from);
  PetscLogObjectParent(mat,to);

  ISDestroy(from);
  ISDestroy(to);

  /* create parallel vector that is used by SBAIJ matrix to scatter from/into */
  lsize = (mbs + ec)*bs;
  VecCreateMPI(mat->comm,lsize,PETSC_DETERMINE,&sbaij->slvec0);
  VecDuplicate(sbaij->slvec0,&sbaij->slvec1);
  VecGetSize(sbaij->slvec0,&vec_size);

  VecGetPetscMap(sbaij->slvec0,&vecmap);
  PetscMapGetGlobalRange(vecmap,&sowners);
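  /* slvec0/slvec1 hold the owned part (mbs blocks) followed by the ghost part (ec blocks)
     on each process; sowners gives the global ownership ranges of this combined layout */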

  /* x index in the IS sfrom */
  for (i=0; i<ec; i++) {
    j = ec_owner[i];
    sgarray[i] = garray[i] + (sowners[j]/bs - owners[j]);
  }
  /* b index in the IS sfrom */
  k = sowners[rank]/bs + mbs;
  for (i=ec,j=0; i<2*ec; i++,j++) sgarray[i] = k + j;

  for (i=0; i<2*ec; i++) stmp[i] = bs*sgarray[i];
  ISCreateBlock(PETSC_COMM_SELF,bs,2*ec,stmp,&from);

  /* x index in the IS sto */
  k = sowners[rank]/bs + mbs;
  for (i=0; i<ec; i++) stmp[i] = bs*(k + i);
  /* b index in the IS sto */
  for (i=ec; i<2*ec; i++) stmp[i] = bs*sgarray[i-ec];

  ISCreateBlock(PETSC_COMM_SELF,bs,2*ec,stmp,&to);

  /* generate the SBAIJ scatter context */
  VecScatterCreate(sbaij->slvec0,from,sbaij->slvec1,to,&sbaij->sMvctx);

  /*
     Post the receives for the first matrix vector product. We synchronize after
     this on the chance that the user immediately calls MatMult() after assembling
     the matrix.
  */
  VecScatterPostRecvs(sbaij->slvec0,sbaij->slvec1,INSERT_VALUES,SCATTER_FORWARD,sbaij->sMvctx);

  VecGetLocalSize(sbaij->slvec1,&nt);
  VecGetArray(sbaij->slvec1,&ptr);
  VecCreateSeqWithArray(PETSC_COMM_SELF,bs*mbs,ptr,&sbaij->slvec1a);
  VecCreateSeqWithArray(PETSC_COMM_SELF,nt-bs*mbs,ptr+bs*mbs,&sbaij->slvec1b);
  VecRestoreArray(sbaij->slvec1,&ptr);

  VecGetArray(sbaij->slvec0,&ptr);
  VecCreateSeqWithArray(PETSC_COMM_SELF,nt-bs*mbs,ptr+bs*mbs,&sbaij->slvec0b);
  VecRestoreArray(sbaij->slvec0,&ptr);
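  /* slvec1a/slvec1b and slvec0b are sequential views sharing the arrays of slvec1 and
     slvec0, exposing the owned and ghost segments separately without copying any data */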

  PetscFree(stmp);
  MPI_Barrier(mat->comm);

  PetscLogObjectParent(mat,sbaij->sMvctx);
  PetscLogObjectParent(mat,sbaij->slvec0);
  PetscLogObjectParent(mat,sbaij->slvec1);
  PetscLogObjectParent(mat,sbaij->slvec0b);
  PetscLogObjectParent(mat,sbaij->slvec1a);
  PetscLogObjectParent(mat,sbaij->slvec1b);
  PetscLogObjectParent(mat,from);
  PetscLogObjectParent(mat,to);

  PetscLogObjectMemory(mat,(ec+1)*sizeof(PetscInt));
  ISDestroy(from);
  ISDestroy(to);
  VecDestroy(gvec);
  PetscFree(sgarray);

  return(0);
}
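
/*
   A sketch of how MatMult_MPISBAIJ() is intended to use the objects built above
   (details may differ from the actual implementation): each process packs
   [x_owned ; B^T*x_owned] into slvec0, the transpose product carrying the
   lower-triangular contributions destined for rows stored on other processes.
   sMvctx then scatters slvec0 into slvec1 with ADD_VALUES, leaving
   slvec1 = [A*x_owned + incoming contributions ; gathered ghost values of x],
   so slvec1a and slvec1b supply the two pieces needed to finish y = A*x while
   storing only the upper triangle of the matrix.
*/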

PetscErrorCode MatSetUpMultiply_MPISBAIJ_2comm(Mat mat)
{
  Mat_MPISBAIJ   *baij = (Mat_MPISBAIJ*)mat->data;
  Mat_SeqBAIJ    *B = (Mat_SeqBAIJ*)(baij->B->data);
  PetscErrorCode ierr;
  PetscInt       i,j,*aj = B->j,ec = 0,*garray;
  PetscInt       bs = mat->bs,*stmp;
  IS             from,to;
  Vec            gvec;
#if defined (PETSC_USE_CTABLE)
  PetscTable         gid1_lid1;
  PetscTablePosition tpos;
  PetscInt           gid,lid;
#else
  PetscInt       Nbs = baij->Nbs,*indices;
#endif

#if defined (PETSC_USE_CTABLE)
  /* use a table - Mark Adams */
  PetscTableCreate(B->mbs,&gid1_lid1);
  for (i=0; i<B->mbs; i++) {
    for (j=0; j<B->ilen[i]; j++) {
      PetscInt data,gid1 = aj[B->i[i]+j] + 1;
      PetscTableFind(gid1_lid1,gid1,&data);
      if (!data) {
        /* one based table */
        PetscTableAdd(gid1_lid1,gid1,++ec);
      }
    }
  }
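  /* ec now counts the distinct block columns of B; gid1_lid1 maps each
     (global block column + 1) to a provisional (local index + 1) */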
  /* form array of columns we need */
  PetscMalloc((ec+1)*sizeof(PetscInt),&garray);
  PetscTableGetHeadPosition(gid1_lid1,&tpos);
  while (tpos) {
    PetscTableGetNext(gid1_lid1,&tpos,&gid,&lid);
    gid--; lid--;
    garray[lid] = gid;
  }
  PetscSortInt(ec,garray);
  PetscTableRemoveAll(gid1_lid1);
  for (i=0; i<ec; i++) {
    PetscTableAdd(gid1_lid1,garray[i]+1,i+1);
  }
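  /* the table is rebuilt so each global block column maps to its rank in the sorted garray */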
  /* compact out the extra columns in B */
  for (i=0; i<B->mbs; i++) {
    for (j=0; j<B->ilen[i]; j++) {
      PetscInt gid1 = aj[B->i[i] + j] + 1;
      PetscTableFind(gid1_lid1,gid1,&lid);
      lid--;
      aj[B->i[i]+j] = lid;
    }
  }
  B->nbs     = ec;
  baij->B->n = ec*mat->bs;
  PetscTableDelete(gid1_lid1);
  /* Mark Adams */
#else
  /* For the first stab we make an array as long as the number of columns */
  /* mark those columns that are in baij->B */
  PetscMalloc((Nbs+1)*sizeof(PetscInt),&indices);
  PetscMemzero(indices,Nbs*sizeof(PetscInt));
  for (i=0; i<B->mbs; i++) {
    for (j=0; j<B->ilen[i]; j++) {
      if (!indices[aj[B->i[i] + j]]) ec++;
      indices[aj[B->i[i] + j]] = 1;
    }
  }

  /* form array of columns we need */
  PetscMalloc((ec+1)*sizeof(PetscInt),&garray);
  ec = 0;
  for (i=0; i<Nbs; i++) {
    if (indices[i]) {
      garray[ec++] = i;
    }
  }

  /* make indices now point into garray */
  for (i=0; i<ec; i++) {
    indices[garray[i]] = i;
  }

  /* compact out the extra columns in B */
  for (i=0; i<B->mbs; i++) {
    for (j=0; j<B->ilen[i]; j++) {
      aj[B->i[i] + j] = indices[aj[B->i[i] + j]];
    }
  }
  B->nbs     = ec;
  baij->B->n = ec*mat->bs;
  PetscFree(indices);
#endif
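  /* both branches leave B with compacted local column indices 0..ec-1 and
     garray mapping each local index back to its global block column */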

  /* create local vector that is used to scatter into */
  VecCreateSeq(PETSC_COMM_SELF,ec*bs,&baij->lvec);

  /* create two temporary index sets for building scatter-gather */
  for (i=0; i<ec; i++) {
    garray[i] = bs*garray[i];
  }
  ISCreateBlock(PETSC_COMM_SELF,bs,ec,garray,&from);
  for (i=0; i<ec; i++) {
    garray[i] = garray[i]/bs;
  }
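  /* garray is scaled to scalar indices only while ISCreateBlock() takes its copy,
     then restored to block indices for later use as the column map */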

  PetscMalloc((ec+1)*sizeof(PetscInt),&stmp);
  for (i=0; i<ec; i++) { stmp[i] = bs*i; }
  ISCreateBlock(PETSC_COMM_SELF,bs,ec,stmp,&to);
  PetscFree(stmp);

  /* create temporary global vector to generate scatter context */
  /* this is inefficient, but otherwise we must either
     1) save garray until the first actual scatter when the vector is known or
     2) have another way of generating a scatter context without a vector */
  VecCreateMPI(mat->comm,mat->n,mat->N,&gvec);

  /* generate the scatter context */
  VecScatterCreate(gvec,from,baij->lvec,to,&baij->Mvctx);

  /*
     Post the receives for the first matrix vector product. We synchronize after
     this on the chance that the user immediately calls MatMult() after assembling
     the matrix.
  */
  VecScatterPostRecvs(gvec,baij->lvec,INSERT_VALUES,SCATTER_FORWARD,baij->Mvctx);
  MPI_Barrier(mat->comm);

  PetscLogObjectParent(mat,baij->Mvctx);
  PetscLogObjectParent(mat,baij->lvec);
  PetscLogObjectParent(mat,from);
  PetscLogObjectParent(mat,to);
  baij->garray = garray;
  PetscLogObjectMemory(mat,(ec+1)*sizeof(PetscInt));
  ISDestroy(from);
  ISDestroy(to);
  VecDestroy(gvec);
  return(0);
}

/*
   Takes the local part of an already assembled MPISBAIJ matrix
   and disassembles it. This is to allow new nonzeros into the matrix
   that require more communication in the matrix vector multiply.
   Thus certain data-structures must be rebuilt.

   Kind of slow! But that's what application programmers get when
   they are sloppy.
*/

PetscErrorCode DisAssemble_MPISBAIJ(Mat A)
{
  Mat_MPISBAIJ *baij = (Mat_MPISBAIJ*)A->data;
  Mat          B = baij->B,Bnew;
  Mat_SeqBAIJ  *Bbaij = (Mat_SeqBAIJ*)B->data;
  PetscInt     i,j,mbs = Bbaij->mbs,n = A->N,col,*garray = baij->garray;
  PetscInt     k,bs = A->bs,bs2 = baij->bs2,*rvals,*nz,ec,m = A->m;
  MatScalar    *a = Bbaij->a;
  PetscScalar  *atmp;
#if defined(PETSC_USE_MAT_SINGLE)
  PetscInt     l;
#endif

#if defined(PETSC_USE_MAT_SINGLE)
  PetscMalloc(A->bs*sizeof(PetscScalar),&atmp);
#endif
  /* free stuff related to matrix-vec multiply */
  VecGetSize(baij->lvec,&ec); /* needed for PetscLogObjectMemory below */
  VecDestroy(baij->lvec); baij->lvec = 0;
  VecScatterDestroy(baij->Mvctx); baij->Mvctx = 0;
  if (baij->colmap) {
#if defined (PETSC_USE_CTABLE)
    PetscTableDelete(baij->colmap); baij->colmap = 0;
#else
    PetscFree(baij->colmap);
    baij->colmap = 0;
    PetscLogObjectMemory(A,-Bbaij->nbs*sizeof(PetscInt));
#endif
  }

  /* make sure that B is assembled so we can access its values */
  MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);

  /* invent new B and copy stuff over */
  PetscMalloc(mbs*sizeof(PetscInt),&nz);
  for (i=0; i<mbs; i++) {
    nz[i] = Bbaij->i[i+1]-Bbaij->i[i];
  }
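  /* nz[i] = number of block entries in row i of the old B; used so Bnew is preallocated exactly */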
  MatCreate(PETSC_COMM_SELF,&Bnew);
  MatSetSizes(Bnew,m,n,m,n);
  MatSetType(Bnew,B->type_name);
  MatSeqBAIJSetPreallocation(Bnew,B->bs,0,nz);
  PetscFree(nz);

  PetscMalloc(bs*sizeof(PetscInt),&rvals);
  for (i=0; i<mbs; i++) {
    rvals[0] = bs*i;
    for (j=1; j<bs; j++) { rvals[j] = rvals[j-1] + 1; }
    for (j=Bbaij->i[i]; j<Bbaij->i[i+1]; j++) {
      col = garray[Bbaij->j[j]]*bs;
      for (k=0; k<bs; k++) {
#if defined(PETSC_USE_MAT_SINGLE)
        /* copy column k of the bs x bs block into the wider-precision buffer */
        for (l=0; l<bs; l++) atmp[l] = a[j*bs2 + k*bs + l];
#else
        atmp = a + j*bs2 + k*bs;
#endif
        MatSetValues_SeqSBAIJ(Bnew,bs,rvals,1,&col,atmp,B->insertmode);
        col++;
      }
    }
  }
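  /* Bnew now stores B's entries under global column indices, so the matrix can
     accept new nonzeros that fall outside the old communication pattern */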
#if defined(PETSC_USE_MAT_SINGLE)
  PetscFree(atmp);
#endif
  PetscFree(baij->garray);
  baij->garray = 0;
  PetscFree(rvals);
  PetscLogObjectMemory(A,-ec*sizeof(PetscInt));
  MatDestroy(B);
  PetscLogObjectParent(A,Bnew);
  baij->B = Bnew;
  A->was_assembled = PETSC_FALSE;
  return(0);
}