Actual source code: pdvec.c

  1: #define PETSCVEC_DLL
  2: /*
  3:      Code for some of the parallel vector primitives.
  4: */
  5: #include "src/vec/impls/mpi/pvecimpl.h"
  6: #if defined(PETSC_HAVE_PNETCDF)
  8: #include "pnetcdf.h"
 10: #endif

 14: PetscErrorCode VecDestroy_MPI(Vec v)
 15: {
 16:   Vec_MPI        *x = (Vec_MPI*)v->data;

 20: #if defined(PETSC_USE_LOG)
 21:   PetscLogObjectState((PetscObject)v,"Length=%D",v->N);
 22: #endif  
 23:   if (x->array_allocated) {PetscFree(x->array_allocated);}

 25:   /* Destroy local representation of vector if it exists */
 26:   if (x->localrep) {
 27:     VecDestroy(x->localrep);
 28:     if (x->localupdate) {VecScatterDestroy(x->localupdate);}
 29:   }
 30:   /* Destroy the stashes: note the order - so that the tags are freed properly */
 31:   VecStashDestroy_Private(&v->bstash);
 32:   VecStashDestroy_Private(&v->stash);
 33:   PetscFree(x);
 34:   return(0);
 35: }
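
/*
   Usage sketch: VecDestroy_MPI() is reached through the generic VecDestroy()
   interface, never called directly.  A minimal life cycle, assuming the
   calling sequences of this release and with error checking (ierr/CHKERRQ)
   omitted:

     Vec x;
     VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,100,&x);
     ... fill and use x ...
     VecDestroy(x);    frees the array, the local representation (if any), and both stashes
*/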

 39: PetscErrorCode VecView_MPI_ASCII(Vec xin,PetscViewer viewer)
 40: {
 41:   PetscErrorCode    ierr;
 42:   PetscInt          i,work = xin->n,cnt,len;
 43:   PetscMPIInt       j,n,size,rank,tag = ((PetscObject)viewer)->tag;
 44:   MPI_Status        status;
 45:   PetscScalar       *values,*xarray;
 46:   const char        *name;
 47:   PetscViewerFormat format;

 50:   VecGetArray(xin,&xarray);
 51:   /* determine maximum message length to arrive */
 52:   MPI_Comm_rank(xin->comm,&rank);
 53:   MPI_Reduce(&work,&len,1,MPIU_INT,MPI_MAX,0,xin->comm);
 54:   MPI_Comm_size(xin->comm,&size);

 56:   if (!rank) {
 57:     PetscMalloc((len+1)*sizeof(PetscScalar),&values);
 58:     PetscViewerGetFormat(viewer,&format);
 59:     /*
 60:         Matlab format and ASCII format are very similar except 
 61:         Matlab uses %18.16e format while ASCII uses %g
 62:     */
 63:     if (format == PETSC_VIEWER_ASCII_MATLAB) {
 64:       PetscObjectGetName((PetscObject)xin,&name);
 65:       PetscViewerASCIIPrintf(viewer,"%s = [\n",name);
 66:       for (i=0; i<xin->n; i++) {
 67: #if defined(PETSC_USE_COMPLEX)
 68:         if (PetscImaginaryPart(xarray[i]) > 0.0) {
 69:           PetscViewerASCIIPrintf(viewer,"%18.16e + %18.16ei\n",PetscRealPart(xarray[i]),PetscImaginaryPart(xarray[i]));
 70:         } else if (PetscImaginaryPart(xarray[i]) < 0.0) {
 71:           PetscViewerASCIIPrintf(viewer,"%18.16e - %18.16ei\n",PetscRealPart(xarray[i]),-PetscImaginaryPart(xarray[i]));
 72:         } else {
 73:           PetscViewerASCIIPrintf(viewer,"%18.16e\n",PetscRealPart(xarray[i]));
 74:         }
 75: #else
 76:         PetscViewerASCIIPrintf(viewer,"%18.16e\n",xarray[i]);
 77: #endif
 78:       }
 79:       /* receive and print messages */
 80:       for (j=1; j<size; j++) {
 81:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,xin->comm,&status);
 82:         MPI_Get_count(&status,MPIU_SCALAR,&n);
 83:         for (i=0; i<n; i++) {
 84: #if defined(PETSC_USE_COMPLEX)
 85:           if (PetscImaginaryPart(values[i]) > 0.0) {
 86:             PetscViewerASCIIPrintf(viewer,"%18.16e + %18.16e i\n",PetscRealPart(values[i]),PetscImaginaryPart(values[i]));
 87:           } else if (PetscImaginaryPart(values[i]) < 0.0) {
 88:             PetscViewerASCIIPrintf(viewer,"%18.16e - %18.16e i\n",PetscRealPart(values[i]),-PetscImaginaryPart(values[i]));
 89:           } else {
 90:             PetscViewerASCIIPrintf(viewer,"%18.16e\n",PetscRealPart(values[i]));
 91:           }
 92: #else
 93:           PetscViewerASCIIPrintf(viewer,"%18.16e\n",values[i]);
 94: #endif
 95:         }
 96:       }
 97:       PetscViewerASCIIPrintf(viewer,"];\n");

 99:     } else if (format == PETSC_VIEWER_ASCII_SYMMODU) {
100:       for (i=0; i<xin->n; i++) {
101: #if defined(PETSC_USE_COMPLEX)
102:         PetscViewerASCIIPrintf(viewer,"%18.16e %18.16e\n",PetscRealPart(xarray[i]),PetscImaginaryPart(xarray[i]));
103: #else
104:         PetscViewerASCIIPrintf(viewer,"%18.16e\n",xarray[i]);
105: #endif
106:       }
107:       /* receive and print messages */
108:       for (j=1; j<size; j++) {
109:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,xin->comm,&status);
110:         MPI_Get_count(&status,MPIU_SCALAR,&n);
111:         for (i=0; i<n; i++) {
112: #if defined(PETSC_USE_COMPLEX)
113:           PetscViewerASCIIPrintf(viewer,"%18.16e %18.16e\n",PetscRealPart(values[i]),PetscImaginaryPart(values[i]));
114: #else
115:           PetscViewerASCIIPrintf(viewer,"%18.16e\n",values[i]);
116: #endif
117:         }
118:       }

120:     } else {
121:       if (format != PETSC_VIEWER_ASCII_COMMON) {PetscViewerASCIIPrintf(viewer,"Process [%d]\n",rank);}
122:       cnt = 0;
123:       for (i=0; i<xin->n; i++) {
124:         if (format == PETSC_VIEWER_ASCII_INDEX) {
125:           PetscViewerASCIIPrintf(viewer,"%D: ",cnt++);
126:         }
127: #if defined(PETSC_USE_COMPLEX)
128:         if (PetscImaginaryPart(xarray[i]) > 0.0) {
129:           PetscViewerASCIIPrintf(viewer,"%g + %g i\n",PetscRealPart(xarray[i]),PetscImaginaryPart(xarray[i]));
130:         } else if (PetscImaginaryPart(xarray[i]) < 0.0) {
131:           PetscViewerASCIIPrintf(viewer,"%g - %g i\n",PetscRealPart(xarray[i]),-PetscImaginaryPart(xarray[i]));
132:         } else {
133:           PetscViewerASCIIPrintf(viewer,"%g\n",PetscRealPart(xarray[i]));
134:         }
135: #else
136:         PetscViewerASCIIPrintf(viewer,"%g\n",xarray[i]);
137: #endif
138:       }
139:       /* receive and print messages */
140:       for (j=1; j<size; j++) {
141:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,xin->comm,&status);
142:         MPI_Get_count(&status,MPIU_SCALAR,&n);
143:         if (format != PETSC_VIEWER_ASCII_COMMON) {
144:           PetscViewerASCIIPrintf(viewer,"Process [%d]\n",j);
145:         }
146:         for (i=0; i<n; i++) {
147:           if (format == PETSC_VIEWER_ASCII_INDEX) {
148:             PetscViewerASCIIPrintf(viewer,"%D: ",cnt++);
149:           }
150: #if defined(PETSC_USE_COMPLEX)
151:           if (PetscImaginaryPart(values[i]) > 0.0) {
152:             PetscViewerASCIIPrintf(viewer,"%g + %g i\n",PetscRealPart(values[i]),PetscImaginaryPart(values[i]));
153:           } else if (PetscImaginaryPart(values[i]) < 0.0) {
154:             PetscViewerASCIIPrintf(viewer,"%g - %g i\n",PetscRealPart(values[i]),-PetscImaginaryPart(values[i]));
155:           } else {
156:             PetscViewerASCIIPrintf(viewer,"%g\n",PetscRealPart(values[i]));
157:           }
158: #else
159:           PetscViewerASCIIPrintf(viewer,"%g\n",values[i]);
160: #endif
161:         }
162:       }
163:     }
164:     PetscFree(values);
165:   } else {
166:     /* send values */
167:     MPI_Send(xarray,xin->n,MPIU_SCALAR,0,tag,xin->comm);
168:   }
169:   PetscViewerFlush(viewer);
170:   VecRestoreArray(xin,&xarray);
171:   return(0);
172: }
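
/*
   Usage sketch for the ASCII viewer above, for an already assembled MPI vector
   x (an assumed variable), with the calling sequences of this release and
   error checking omitted.  Process 0 gathers every process's values, so the
   output appears in global order:

     PetscViewer viewer = PETSC_VIEWER_STDOUT_WORLD;
     PetscObjectSetName((PetscObject)x,"x");
     PetscViewerSetFormat(viewer,PETSC_VIEWER_ASCII_MATLAB);
     VecView(x,viewer);        prints  x = [ ... ];  using the %18.16e format

   With the default format each process's block is preceded by a
   "Process [rank]" line; PETSC_VIEWER_ASCII_INDEX prefixes every entry with
   its global index.
*/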

176: PetscErrorCode VecView_MPI_Binary(Vec xin,PetscViewer viewer)
177: {
179:   PetscMPIInt    rank,size,tag = ((PetscObject)viewer)->tag,n;
180:   PetscInt       len,work = xin->n,j;
181:   int            fdes;
182:   MPI_Status     status;
183:   PetscScalar    *values,*xarray;
184:   FILE           *file;

187:   VecGetArray(xin,&xarray);
188:   PetscViewerBinaryGetDescriptor(viewer,&fdes);

190:   /* determine maximum message length to arrive */
191:   MPI_Comm_rank(xin->comm,&rank);
192:   MPI_Reduce(&work,&len,1,MPIU_INT,MPI_MAX,0,xin->comm);
193:   MPI_Comm_size(xin->comm,&size);

195:   if (!rank) {
196:     PetscInt cookie = VEC_FILE_COOKIE;
197:     PetscBinaryWrite(fdes,&cookie,1,PETSC_INT,PETSC_FALSE);
198:     PetscBinaryWrite(fdes,&xin->N,1,PETSC_INT,PETSC_FALSE);
199:     PetscBinaryWrite(fdes,xarray,xin->n,PETSC_SCALAR,PETSC_FALSE);

201:     PetscMalloc((len+1)*sizeof(PetscScalar),&values);
202:     /* receive and write messages */
203:     for (j=1; j<size; j++) {
204:       MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,xin->comm,&status);
205:       MPI_Get_count(&status,MPIU_SCALAR,&n);
206:       PetscBinaryWrite(fdes,values,n,PETSC_SCALAR,PETSC_FALSE);
207:     }
208:     PetscFree(values);
209:     PetscViewerBinaryGetInfoPointer(viewer,&file);
210:     if (file && xin->bs > 1) {
211:       if (xin->prefix) {
212:         PetscFPrintf(PETSC_COMM_SELF,file,"-%svecload_block_size %D\n",xin->prefix,xin->bs);
213:       } else {
214:         PetscFPrintf(PETSC_COMM_SELF,file,"-vecload_block_size %D\n",xin->bs);
215:       }
216:     }
217:   } else {
218:     /* send values */
219:     MPI_Send(xarray,xin->n,MPIU_SCALAR,0,tag,xin->comm);
220:   }
221:   VecRestoreArray(xin,&xarray);
222:   return(0);
223: }
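
/*
   Usage sketch for the binary viewer above (x is an assumed, assembled MPI
   vector; calling sequences as in this release, error checking omitted).
   Process 0 writes the VEC_FILE_COOKIE header, the global size, and then the
   values of every process in rank order:

     PetscViewer viewer;
     PetscViewerBinaryOpen(PETSC_COMM_WORLD,"vec.dat",FILE_MODE_WRITE,&viewer);
     VecView(x,viewer);
     PetscViewerDestroy(viewer);

   If the vector has a block size greater than one, the matching
   -vecload_block_size <bs> option is appended to the viewer's .info file so a
   later VecLoad() can reproduce the blocking; the exact VecLoad() calling
   sequence differs between releases, so consult its manual page.
*/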

227: PetscErrorCode VecView_MPI_Draw_LG(Vec xin,PetscViewer viewer)
228: {
229:   PetscDraw      draw;
230:   PetscTruth     isnull;

233: #if defined(PETSC_USE_64BIT_INT)
235:   PetscViewerDrawGetDraw(viewer,0,&draw);
236:   PetscDrawIsNull(draw,&isnull);
237:   if (isnull) return(0);
238:   SETERRQ(PETSC_ERR_SUP,"Not supported with 64 bit integers");
239: #else
240:   PetscMPIInt    size,rank;
241:   int            i,N = xin->N,*lens;
242:   PetscReal      *xx,*yy;
243:   PetscDrawLG    lg;
244:   PetscScalar    *xarray;

247:   PetscViewerDrawGetDraw(viewer,0,&draw);
248:   PetscDrawIsNull(draw,&isnull);
249:   if (isnull) return(0);

251:   VecGetArray(xin,&xarray);
252:   PetscViewerDrawGetDrawLG(viewer,0,&lg);
253:   PetscDrawCheckResizedWindow(draw);
254:   MPI_Comm_rank(xin->comm,&rank);
255:   MPI_Comm_size(xin->comm,&size);
256:   if (!rank) {
257:     PetscDrawLGReset(lg);
258:     PetscMalloc(2*(N+1)*sizeof(PetscReal),&xx);
259:     for (i=0; i<N; i++) {xx[i] = (PetscReal) i;}
260:     yy   = xx + N;
261:     PetscMalloc(size*sizeof(int),&lens);
262:     for (i=0; i<size; i++) {
263:       lens[i] = xin->map->range[i+1] - xin->map->range[i];
264:     }
265: #if !defined(PETSC_USE_COMPLEX)
266:     MPI_Gatherv(xarray,xin->n,MPIU_REAL,yy,lens,xin->map->range,MPIU_REAL,0,xin->comm);
267: #else
268:     {
269:       PetscReal *xr;
270:       PetscMalloc((xin->n+1)*sizeof(PetscReal),&xr);
271:       for (i=0; i<xin->n; i++) {
272:         xr[i] = PetscRealPart(xarray[i]);
273:       }
274:       MPI_Gatherv(xr,xin->n,MPIU_REAL,yy,lens,xin->map->range,MPIU_REAL,0,xin->comm);
275:       PetscFree(xr);
276:     }
277: #endif
278:     PetscFree(lens);
279:     PetscDrawLGAddPoints(lg,N,&xx,&yy);
280:     PetscFree(xx);
281:   } else {
282: #if !defined(PETSC_USE_COMPLEX)
283:     MPI_Gatherv(xarray,xin->n,MPIU_REAL,0,0,0,MPIU_REAL,0,xin->comm);
284: #else
285:     {
286:       PetscReal *xr;
287:       PetscMalloc((xin->n+1)*sizeof(PetscReal),&xr);
288:       for (i=0; i<xin->n; i++) {
289:         xr[i] = PetscRealPart(xarray[i]);
290:       }
291:       MPI_Gatherv(xr,xin->n,MPIU_REAL,0,0,0,MPIU_REAL,0,xin->comm);
292:       PetscFree(xr);
293:     }
294: #endif
295:   }
296:   PetscDrawLGDraw(lg);
297:   PetscDrawSynchronizedFlush(draw);
298:   VecRestoreArray(xin,&xarray);
299: #endif
300:   return(0);
301: }
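
/*
   Usage sketch: the line-graph path above is selected by giving a draw viewer
   the PETSC_VIEWER_DRAW_LG format (x is an assumed MPI vector; calling
   sequences as in this release, error checking omitted).  Process 0 gathers
   the entire vector, so this is meant for small problems:

     PetscViewerSetFormat(PETSC_VIEWER_DRAW_WORLD,PETSC_VIEWER_DRAW_LG);
     VecView(x,PETSC_VIEWER_DRAW_WORLD);
*/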

304: /* I am assuming this is extern "C" because it is dynamically loaded. If not, we can remove the DLLEXPORT tag. */
307: PetscErrorCode PETSCVEC_DLLEXPORT VecView_MPI_Draw(Vec xin,PetscViewer viewer)
308: {
310:   PetscMPIInt    rank,size,tag = ((PetscObject)viewer)->tag;
311:   PetscInt       i,start,end;
312:   MPI_Status     status;
313:   PetscReal      coors[4],ymin,ymax,xmin,xmax,tmp;
314:   PetscDraw      draw;
315:   PetscTruth     isnull;
316:   PetscDrawAxis  axis;
317:   PetscScalar    *xarray;

320:   PetscViewerDrawGetDraw(viewer,0,&draw);
321:   PetscDrawIsNull(draw,&isnull); if (isnull) return(0);

323:   VecGetArray(xin,&xarray);
324:   PetscDrawCheckResizedWindow(draw);
325:   xmin = 1.e20; xmax = -1.e20;
326:   for (i=0; i<xin->n; i++) {
327: #if defined(PETSC_USE_COMPLEX)
328:     if (PetscRealPart(xarray[i]) < xmin) xmin = PetscRealPart(xarray[i]);
329:     if (PetscRealPart(xarray[i]) > xmax) xmax = PetscRealPart(xarray[i]);
330: #else
331:     if (xarray[i] < xmin) xmin = xarray[i];
332:     if (xarray[i] > xmax) xmax = xarray[i];
333: #endif
334:   }
335:   if (xmin + 1.e-10 > xmax) {
336:     xmin -= 1.e-5;
337:     xmax += 1.e-5;
338:   }
339:   MPI_Reduce(&xmin,&ymin,1,MPIU_REAL,MPI_MIN,0,xin->comm);
340:   MPI_Reduce(&xmax,&ymax,1,MPIU_REAL,MPI_MAX,0,xin->comm);
341:   MPI_Comm_size(xin->comm,&size);
342:   MPI_Comm_rank(xin->comm,&rank);
343:   PetscDrawAxisCreate(draw,&axis);
344:   PetscLogObjectParent(draw,axis);
345:   if (!rank) {
346:     PetscDrawClear(draw);
347:     PetscDrawFlush(draw);
348:     PetscDrawAxisSetLimits(axis,0.0,(double)xin->N,ymin,ymax);
349:     PetscDrawAxisDraw(axis);
350:     PetscDrawGetCoordinates(draw,coors,coors+1,coors+2,coors+3);
351:   }
352:   PetscDrawAxisDestroy(axis);
353:   MPI_Bcast(coors,4,MPIU_REAL,0,xin->comm);
354:   if (rank) {PetscDrawSetCoordinates(draw,coors[0],coors[1],coors[2],coors[3]);}
355:   /* draw local part of vector */
356:   VecGetOwnershipRange(xin,&start,&end);
357:   if (rank < size-1) { /* send value to right */
358:     MPI_Send(&xarray[xin->n-1],1,MPIU_REAL,rank+1,tag,xin->comm);
359:   }
360:   for (i=1; i<xin->n; i++) {
361: #if !defined(PETSC_USE_COMPLEX)
362:     PetscDrawLine(draw,(PetscReal)(i-1+start),xarray[i-1],(PetscReal)(i+start),
363:                    xarray[i],PETSC_DRAW_RED);
364: #else
365:     PetscDrawLine(draw,(PetscReal)(i-1+start),PetscRealPart(xarray[i-1]),(PetscReal)(i+start),
366:                    PetscRealPart(xarray[i]),PETSC_DRAW_RED);
367: #endif
368:   }
369:   if (rank) { /* receive value from left */
370:     MPI_Recv(&tmp,1,MPIU_REAL,rank-1,tag,xin->comm,&status);
371: #if !defined(PETSC_USE_COMPLEX)
372:     PetscDrawLine(draw,(PetscReal)start-1,tmp,(PetscReal)start,xarray[0],PETSC_DRAW_RED);
373: #else
374:     PetscDrawLine(draw,(PetscReal)start-1,tmp,(PetscReal)start,PetscRealPart(xarray[0]),PETSC_DRAW_RED);
375: #endif
376:   }
377:   PetscDrawSynchronizedFlush(draw);
378:   PetscDrawPause(draw);
379:   VecRestoreArray(xin,&xarray);
380:   return(0);
381: }
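
/*
   Usage sketch for the default draw path above (x is an assumed MPI vector;
   calling sequences as in this release, error checking omitted).  Each process
   draws its own segment of the piecewise-linear graph; one boundary value per
   process is exchanged so the segments join up:

     PetscViewer viewer;
     PetscViewerDrawOpen(PETSC_COMM_WORLD,PETSC_NULL,"Vector",0,0,300,300,&viewer);
     VecView(x,viewer);
     PetscViewerDestroy(viewer);
*/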

384: #if defined(PETSC_USE_SOCKET_VIEWER)
387: PetscErrorCode VecView_MPI_Socket(Vec xin,PetscViewer viewer)
388: {
389: #if defined(PETSC_USE_64BIT_INT)
391:   SETERRQ(PETSC_ERR_SUP,"Not supported with 64 bit integers");
392: #else
394:   PetscMPIInt    rank,size;
395:   int            i,N = xin->N,*lens;
396:   PetscScalar    *xx,*xarray;

399:   VecGetArray(xin,&xarray);
400:   MPI_Comm_rank(xin->comm,&rank);
401:   MPI_Comm_size(xin->comm,&size);
402:   if (!rank) {
403:     PetscMalloc((N+1)*sizeof(PetscScalar),&xx);
404:     PetscMalloc(size*sizeof(int),&lens);
405:     for (i=0; i<size; i++) {
406:       lens[i] = xin->map->range[i+1] - xin->map->range[i];
407:     }
408:     MPI_Gatherv(xarray,xin->n,MPIU_SCALAR,xx,lens,xin->map->range,MPIU_SCALAR,0,xin->comm);
409:     PetscFree(lens);
410:     PetscViewerSocketPutScalar(viewer,N,1,xx);
411:     PetscFree(xx);
412:   } else {
413:     MPI_Gatherv(xarray,xin->n,MPIU_SCALAR,0,0,0,MPIU_SCALAR,0,xin->comm);
414:   }
415:   VecRestoreArray(xin,&xarray);
416: #endif
417:   return(0);
418: }
419: #endif

421: #if defined(PETSC_HAVE_MATLAB)
424: PetscErrorCode VecView_MPI_Matlab(Vec xin,PetscViewer viewer)
425: {
427:   PetscMPIInt    rank,size,*lens;
428:   PetscInt       i,N = xin->N;
429:   PetscScalar    *xx,*xarray;

432:   VecGetArray(xin,&xarray);
433:   MPI_Comm_rank(xin->comm,&rank);
434:   MPI_Comm_size(xin->comm,&size);
435:   if (!rank) {
436:     PetscMalloc((N+1)*sizeof(PetscScalar),&xx);
437:     PetscMalloc(size*sizeof(PetscMPIInt),&lens);
438:     for (i=0; i<size; i++) {
439:       lens[i] = xin->map->range[i+1] - xin->map->range[i];
440:     }
441:     MPI_Gatherv(xarray,xin->n,MPIU_SCALAR,xx,lens,xin->map->range,MPIU_SCALAR,0,xin->comm);
442:     PetscFree(lens);

444:     PetscObjectName((PetscObject)xin);
445:     PetscViewerMatlabPutArray(viewer,N,1,xx,xin->name);

447:     PetscFree(xx);
448:   } else {
449:     MPI_Gatherv(xarray,xin->n,MPIU_SCALAR,0,0,0,MPIU_SCALAR,0,xin->comm);
450:   }
451:   VecRestoreArray(xin,&xarray);
452:   return(0);
453: }
454: #endif

456: #if defined(PETSC_HAVE_PNETCDF)
459: PetscErrorCode VecView_MPI_Netcdf(Vec xin,PetscViewer v)
460: {
462:   int         n = xin->n,ncid,xdim,xdim_num=1,xin_id,xstart;
463:   MPI_Comm    comm = xin->comm;
464:   PetscScalar *xarray;

467:   VecGetArray(xin,&xarray);
468:   PetscViewerNetcdfGetID(v,&ncid);
469:   if (ncid < 0) SETERRQ(PETSC_ERR_ORDER,"First call PetscViewerNetcdfOpen to create NetCDF dataset");
470:   /* define dimensions */
471:   ncmpi_def_dim(ncid,"PETSc_Vector_Global_Size",xin->N,&xdim);
472:   /* define variables */
473:   ncmpi_def_var(ncid,"PETSc_Vector_MPI",NC_DOUBLE,xdim_num,&xdim,&xin_id);
474:   /* leave define mode */
475:   ncmpi_enddef(ncid);
476:   /* store the vector */
477:   VecGetOwnershipRange(xin,&xstart,PETSC_NULL);
478:   ncmpi_put_vara_double_all(ncid,xin_id,(const size_t*)&xstart,(const size_t*)&n,xarray);
479:   VecRestoreArray(xin,&xarray);
480:   return(0);
481: }
482: #endif

484: #if defined(PETSC_HAVE_HDF4)
487: PetscErrorCode VecView_MPI_HDF4_Ex(Vec X, PetscViewer viewer, int d, int *dims)
488: {
490:   PetscMPIInt    rank,size,tag = ((PetscObject)viewer)->tag;
491:   int            len, i, j, k, cur, bs, n, N;
492:   MPI_Status     status;
493:   PetscScalar    *x;
494:   float          *xlf, *xf;


498:   bs = X->bs > 0 ? X->bs : 1;
499:   N  = X->N / bs;
500:   n  = X->n / bs;

502:   // For now, always convert to float
503:   PetscMalloc(N * sizeof(float), &xf);
504:   PetscMalloc(n * sizeof(float), &xlf);

506:   MPI_Comm_rank(X->comm, &rank);
507:   MPI_Comm_size(X->comm, &size);

509:   VecGetArray(X, &x);

511:   for (k = 0; k < bs; k++) {
512:     for (i = 0; i < n; i++) {
513:       xlf[i] = (float) x[i*bs + k];
514:     }
515:     if (!rank) {
516:       cur = 0;
517:       PetscMemcpy(xf + cur, xlf, n * sizeof(float));
518:       cur += n;
519:       for (j = 1; j < size; j++) {
520:         MPI_Recv(xf + cur, N - cur, MPI_FLOAT, j, tag, X->comm,&status);
521:         MPI_Get_count(&status, MPI_FLOAT, &len);cur += len;
522:       }
523:       if (cur != N) {
524:         SETERRQ2(PETSC_ERR_PLIB, "Gathered %D entries but expected %D", cur, N);
525:       }
526:       PetscViewerHDF4WriteSDS(viewer, xf, 2, dims, bs);
527:     } else {
528:       MPI_Send(xlf, n, MPI_FLOAT, 0, tag, X->comm);
529:     }
530:   }
531:   VecRestoreArray(X, &x);
532:   PetscFree(xlf);
533:   PetscFree(xf);
534:   return(0);
535: }
536: #endif

538: #if defined(PETSC_HAVE_HDF4)
541: PetscErrorCode VecView_MPI_HDF4(Vec xin,PetscViewer viewer)
542: {
544:   int             bs, dims[1];

546:   bs = xin->bs > 0 ? xin->bs : 1;
547:   dims[0] = xin->N / bs;
548:   VecView_MPI_HDF4_Ex(xin, viewer, 1, dims);
549:   return(0);
550: }
551: #endif

555: PetscErrorCode VecView_MPI(Vec xin,PetscViewer viewer)
556: {
558:   PetscTruth     iascii,issocket,isbinary,isdraw;
559: #if defined(PETSC_HAVE_MATHEMATICA)
560:   PetscTruth     ismathematica;
561: #endif
562: #if defined(PETSC_HAVE_PNETCDF)
563:   PetscTruth     isnetcdf;
564: #endif
565: #if defined(PETSC_HAVE_HDF4)
566:   PetscTruth     ishdf4;
567: #endif
568: #if defined(PETSC_HAVE_MATLAB) && !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_SINGLE) && !defined(PETSC_USE_MAT_SINGLE)
569:   PetscTruth     ismatlab;
570: #endif

573:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);
574:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_SOCKET,&issocket);
575:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);
576:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_DRAW,&isdraw);
577: #if defined(PETSC_HAVE_MATHEMATICA)
578:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_MATHEMATICA,&ismathematica);
579: #endif
580: #if defined(PETSC_HAVE_PNETCDF)
581:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_NETCDF,&isnetcdf);
582: #endif
583: #if defined(PETSC_HAVE_HDF4)
584:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_HDF4,&ishdf4);
585: #endif
586: #if defined(PETSC_HAVE_MATLAB) && !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_SINGLE) && !defined(PETSC_USE_MAT_SINGLE)
587:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_MATLAB,&ismatlab);
588: #endif
589:   if (iascii){
590:     VecView_MPI_ASCII(xin,viewer);
591: #if defined(PETSC_USE_SOCKET_VIEWER)
592:   } else if (issocket) {
593:     VecView_MPI_Socket(xin,viewer);
594: #endif
595:   } else if (isbinary) {
596:     VecView_MPI_Binary(xin,viewer);
597:   } else if (isdraw) {
598:     PetscViewerFormat format;

600:     PetscViewerGetFormat(viewer,&format);
601:     if (format == PETSC_VIEWER_DRAW_LG) {
602:       VecView_MPI_Draw_LG(xin,viewer);
603:     } else {
604:       VecView_MPI_Draw(xin,viewer);
605:     }
606: #if defined(PETSC_HAVE_MATHEMATICA)
607:   } else if (ismathematica) {
608:     PetscViewerMathematicaPutVector(viewer,xin);
609: #endif
610: #if defined(PETSC_HAVE_PNETCDF)
611:   } else if (isnetcdf) {
612:     VecView_MPI_Netcdf(xin,viewer);
613: #endif
614: #if defined(PETSC_HAVE_HDF4)
615:   } else if (ishdf4) {
616:     VecView_MPI_HDF4(xin,viewer);
617: #endif
618: #if defined(PETSC_HAVE_MATLAB) && !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_SINGLE) && !defined(PETSC_USE_MAT_SINGLE)
619:   } else if (ismatlab) {
620:     VecView_MPI_Matlab(xin,viewer);
621: #endif
622:   } else {
623:     SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for this object",((PetscObject)viewer)->type_name);
624:   }
625:   return(0);
626: }
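
/*
   Sketch of the dispatch pattern used above: the viewer's type string is
   tested with PetscTypeCompare() and the matching implementation is called.
   The same test works in user code to special-case a particular viewer
   (calling sequences as in this release, error checking omitted):

     PetscTruth isbinary;
     PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);
     if (isbinary) {
       ... handle the binary case specially ...
     } else {
       VecView(x,viewer);
     }
*/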

630: PetscErrorCode VecGetSize_MPI(Vec xin,PetscInt *N)
631: {
633:   *N = xin->N;
634:   return(0);
635: }

639: PetscErrorCode VecGetValues_MPI(Vec xin,PetscInt ni,const PetscInt ix[],PetscScalar y[])
640: {
641:   Vec_MPI     *x = (Vec_MPI *)xin->data;
642:   PetscScalar *xx = x->array;
643:   PetscInt    i,tmp,start = xin->map->range[xin->stash.rank];

646:   for (i=0; i<ni; i++) {
647:     tmp = ix[i] - start;
648: #if defined(PETSC_USE_DEBUG)
649:     if (tmp < 0 || tmp >= xin->n) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Can only get local values, trying %D",ix[i]);
650: #endif
651:     y[i] = xx[tmp];
652:   }
653:   return(0);
654: }
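
/*
   Usage sketch: VecGetValues() on an MPI vector can only return entries owned
   by the calling process, so the indices are normally taken from
   VecGetOwnershipRange() (x is an assumed MPI vector; calling sequences as in
   this release, error checking omitted):

     PetscInt    start,end,i,*idx;
     PetscScalar *vals;
     VecGetOwnershipRange(x,&start,&end);
     PetscMalloc((end-start+1)*sizeof(PetscInt),&idx);
     PetscMalloc((end-start+1)*sizeof(PetscScalar),&vals);
     for (i=start; i<end; i++) idx[i-start] = i;
     VecGetValues(x,end-start,idx,vals);
     PetscFree(idx);
     PetscFree(vals);
*/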

658: PetscErrorCode VecSetValues_MPI(Vec xin,PetscInt ni,const PetscInt ix[],const PetscScalar y[],InsertMode addv)
659: {
661:   PetscMPIInt    rank = xin->stash.rank;
662:   PetscInt       *owners = xin->map->range,start = owners[rank];
663:   PetscInt       end = owners[rank+1],i,row;
664:   PetscScalar    *xx;

667: #if defined(PETSC_USE_DEBUG)
668:   if (xin->stash.insertmode == INSERT_VALUES && addv == ADD_VALUES) {
669:    SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"You have already inserted values; you cannot now add");
670:   } else if (xin->stash.insertmode == ADD_VALUES && addv == INSERT_VALUES) {
671:    SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"You have already added values; you cannot now insert");
672:   }
673: #endif
674:   VecGetArray(xin,&xx);
675:   xin->stash.insertmode = addv;

677:   if (addv == INSERT_VALUES) {
678:     for (i=0; i<ni; i++) {
679:       if ((row = ix[i]) >= start && row < end) {
680:         xx[row-start] = y[i];
681:       } else if (!xin->stash.donotstash) {
682:         if (ix[i] < 0) continue;
683: #if defined(PETSC_USE_DEBUG)
684:         if (ix[i] >= xin->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D maximum %D",ix[i],xin->N);
685: #endif
686:         VecStashValue_Private(&xin->stash,row,y[i]);
687:       }
688:     }
689:   } else {
690:     for (i=0; i<ni; i++) {
691:       if ((row = ix[i]) >= start && row < end) {
692:         xx[row-start] += y[i];
693:       } else if (!xin->stash.donotstash) {
694:         if (ix[i] < 0) continue;
695: #if defined(PETSC_USE_DEBUG)
696:         if (ix[i] >= xin->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D maximum %D",ix[i],xin->N);
697: #endif        
698:         VecStashValue_Private(&xin->stash,row,y[i]);
699:       }
700:     }
701:   }
702:   VecRestoreArray(xin,&xx);
703:   return(0);
704: }
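
/*
   Usage sketch: entries owned by other processes are placed in the stash above
   and communicated during assembly, so VecSetValues() must be followed by the
   collective VecAssemblyBegin()/VecAssemblyEnd() pair on every process, and
   INSERT_VALUES and ADD_VALUES may not be mixed within one assembly (x is an
   assumed MPI vector; calling sequences as in this release, error checking
   omitted):

     PetscInt    N,ix[2];
     PetscScalar y[2];
     VecGetSize(x,&N);
     ix[0] = 0;    y[0] = 1.0;
     ix[1] = N-1;  y[1] = 2.0;     usually owned by another process
     VecSetValues(x,2,ix,y,INSERT_VALUES);
     VecAssemblyBegin(x);
     VecAssemblyEnd(x);
*/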

708: PetscErrorCode VecSetValuesBlocked_MPI(Vec xin,PetscInt ni,const PetscInt ix[],const PetscScalar yin[],InsertMode addv)
709: {
710:   PetscMPIInt    rank = xin->stash.rank;
711:   PetscInt       *owners = xin->map->range,start = owners[rank];
713:   PetscInt       end = owners[rank+1],i,row,bs = xin->bs,j;
714:   PetscScalar    *xx,*y = (PetscScalar*)yin;

717:   VecGetArray(xin,&xx);
718: #if defined(PETSC_USE_DEBUG)
719:   if (xin->stash.insertmode == INSERT_VALUES && addv == ADD_VALUES) {
720:    SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"You have already inserted values; you cannot now add");
721:   }
722:   else if (xin->stash.insertmode == ADD_VALUES && addv == INSERT_VALUES) {
723:    SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"You have already added values; you cannot now insert");
724:   }
725: #endif
726:   xin->stash.insertmode = addv;

728:   if (addv == INSERT_VALUES) {
729:     for (i=0; i<ni; i++) {
730:       if ((row = bs*ix[i]) >= start && row < end) {
731:         for (j=0; j<bs; j++) {
732:           xx[row-start+j] = y[j];
733:         }
734:       } else if (!xin->stash.donotstash) {
735:         if (ix[i] < 0) continue;
736: #if defined(PETSC_USE_DEBUG)
737:         if (ix[i] >= xin->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D max %D",ix[i],xin->N);
738: #endif
739:         VecStashValuesBlocked_Private(&xin->bstash,ix[i],y);
740:       }
741:       y += bs;
742:     }
743:   } else {
744:     for (i=0; i<ni; i++) {
745:       if ((row = bs*ix[i]) >= start && row < end) {
746:         for (j=0; j<bs; j++) {
747:           xx[row-start+j] += y[j];
748:         }
749:       } else if (!xin->stash.donotstash) {
750:         if (ix[i] < 0) continue;
751: #if defined(PETSC_USE_DEBUG)
752:         if (ix[i] >= xin->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D max %D",ix[i],xin->N);
753: #endif
754:         VecStashValuesBlocked_Private(&xin->bstash,ix[i],y);
755:       }
756:       y += bs;
757:     }
758:   }
759:   VecRestoreArray(xin,&xx);
760:   return(0);
761: }
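
/*
   Usage sketch: VecSetValuesBlocked() addresses the vector by block, so with
   block size bs each index ix[i] refers to global entries bs*ix[i] through
   bs*ix[i]+bs-1 and y supplies bs values per index (calling sequences as in
   this release, error checking omitted):

     Vec         x;
     PetscInt    ix[1];
     PetscScalar y[2];
     VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,100,&x);
     VecSetBlockSize(x,2);
     ix[0] = 3;  y[0] = 1.0;  y[1] = 2.0;     fills global entries 6 and 7
     VecSetValuesBlocked(x,1,ix,y,INSERT_VALUES);
     VecAssemblyBegin(x);
     VecAssemblyEnd(x);
     VecDestroy(x);
*/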

763: /*
764:    Since nsends or nreceives may be zero we add 1 in certain mallocs
765:    to make sure we never malloc an empty one.
766: */
769: PetscErrorCode VecAssemblyBegin_MPI(Vec xin)
770: {
772:   PetscInt       *owners = xin->map->range,*bowners,i,bs,nstash,reallocs;
773:   PetscMPIInt    size;
774:   InsertMode     addv;
775:   MPI_Comm       comm = xin->comm;

778:   if (xin->stash.donotstash) {
779:     return(0);
780:   }

782:   MPI_Allreduce(&xin->stash.insertmode,&addv,1,MPI_INT,MPI_BOR,comm);
783:   if (addv == (ADD_VALUES|INSERT_VALUES)) {
784:     SETERRQ(PETSC_ERR_ARG_NOTSAMETYPE,"Some processors inserted values while others added");
785:   }
786:   xin->stash.insertmode = addv; /* in case this processor had no cache */
787: 
788:   bs = xin->bs;
789:   MPI_Comm_size(xin->comm,&size);
790:   if (!xin->bstash.bowners && xin->bs != -1) {
791:     PetscMalloc((size+1)*sizeof(PetscInt),&bowners);
792:     for (i=0; i<size+1; i++){ bowners[i] = owners[i]/bs;}
793:     xin->bstash.bowners = bowners;
794:   } else {
795:     bowners = xin->bstash.bowners;
796:   }
797:   VecStashScatterBegin_Private(&xin->stash,owners);
798:   VecStashScatterBegin_Private(&xin->bstash,bowners);
799:   VecStashGetInfo_Private(&xin->stash,&nstash,&reallocs);
800:   PetscLogInfo((0,"VecAssemblyBegin_MPI:Stash has %D entries, uses %D mallocs.\n",nstash,reallocs));
801:   VecStashGetInfo_Private(&xin->bstash,&nstash,&reallocs);
802:   PetscLogInfo((0,"VecAssemblyBegin_MPI:Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs));

804:   return(0);
805: }

809: PetscErrorCode VecAssemblyEnd_MPI(Vec vec)
810: {
812:   PetscInt       base,i,j,*row,flg,bs;
813:   PetscMPIInt    n;
814:   PetscScalar    *val,*vv,*array,*xarray;

817:   if (!vec->stash.donotstash) {
818:     VecGetArray(vec,&xarray);
819:     base = vec->map->range[vec->stash.rank];
820:     bs   = vec->bs;

822:     /* Process the stash */
823:     while (1) {
824:       VecStashScatterGetMesg_Private(&vec->stash,&n,&row,&val,&flg);
825:       if (!flg) break;
826:       if (vec->stash.insertmode == ADD_VALUES) {
827:         for (i=0; i<n; i++) { xarray[row[i] - base] += val[i]; }
828:       } else if (vec->stash.insertmode == INSERT_VALUES) {
829:         for (i=0; i<n; i++) { xarray[row[i] - base] = val[i]; }
830:       } else {
831:         SETERRQ(PETSC_ERR_ARG_CORRUPT,"Insert mode is not set correctly; corrupted vector");
832:       }
833:     }
834:     VecStashScatterEnd_Private(&vec->stash);

836:     /* now process the block-stash */
837:     while (1) {
838:       VecStashScatterGetMesg_Private(&vec->bstash,&n,&row,&val,&flg);
839:       if (!flg) break;
840:       for (i=0; i<n; i++) {
841:         array = xarray+row[i]*bs-base;
842:         vv    = val+i*bs;
843:         if (vec->stash.insertmode == ADD_VALUES) {
844:           for (j=0; j<bs; j++) { array[j] += vv[j];}
845:         } else if (vec->stash.insertmode == INSERT_VALUES) {
846:           for (j=0; j<bs; j++) { array[j] = vv[j]; }
847:         } else {
848:           SETERRQ(PETSC_ERR_ARG_CORRUPT,"Insert mode is not set correctly; corrupted vector");
849:         }
850:       }
851:     }
852:     VecStashScatterEnd_Private(&vec->bstash);
853:     VecRestoreArray(vec,&xarray);
854:   }
855:   vec->stash.insertmode = NOT_SET_VALUES;
856:   return(0);
857: }
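
/*
   Usage sketch of the full assembly cycle handled by the two routines above
   (x is an assumed MPI vector; calling sequences as in this release, error
   checking omitted).  Both calls are collective, so every process must make
   them even if it stashed nothing.  Here every process adds into the same
   global entry and the contributions are summed during assembly:

     PetscInt    ix  = 10;             same global entry on every process
     PetscScalar one = 1.0;
     VecSetValues(x,1,&ix,&one,ADD_VALUES);
     VecAssemblyBegin(x);              stashed values are scattered to their owners
     VecAssemblyEnd(x);                the owner adds the received contributions
*/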