Actual source code: vecio.c

#define PETSCVEC_DLL
/*
   This file contains simple binary input routines for vectors.  The
   analogous output routines are within each vector implementation's
   VecView (with viewer types PETSC_VIEWER_BINARY).
 */

#include "petsc.h"
#include "petscsys.h"
#include "petscvec.h"
#include "vecimpl.h"
#if defined(PETSC_HAVE_PNETCDF)
#include "pnetcdf.h"
#endif
EXTERN PetscErrorCode VecLoad_Binary(PetscViewer, VecType, Vec*);
EXTERN PetscErrorCode VecLoad_Netcdf(PetscViewer, Vec*);
EXTERN PetscErrorCode VecLoadIntoVector_Binary(PetscViewer, Vec);
EXTERN PetscErrorCode VecLoadIntoVector_Netcdf(PetscViewer, Vec);

/*@C
  VecLoad - Loads a vector that has been stored in binary format
  with VecView().

  Collective on PetscViewer

  Input Parameters:
+ viewer - binary file viewer, obtained from PetscViewerBinaryOpen(), or
           NetCDF file viewer, obtained from PetscViewerNetcdfOpen()
- outtype - the type of vector: VECSEQ, VECMPI, or PETSC_NULL (which selects
            VECSEQ if the communicator in the viewer has size 1 and VECMPI
            otherwise)

  Output Parameter:
. newvec - the newly loaded vector

   Level: intermediate

  Notes:
  The input file must contain the full global vector, as
  written by the routine VecView().

  Notes for advanced users:
  Most users should not need to know the details of the binary storage
  format, since VecLoad() and VecView() completely hide these details.
  But for anyone who is interested, the standard binary vector storage
  format is
.vb
     int    VEC_FILE_COOKIE
     int    number of rows
     PetscScalar *values of all entries
.ve
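
  For anyone reading such a file by hand, a minimal sketch (illustrative
  only; the descriptor fd is assumed to come from PetscBinaryOpen()) is
.vb
     PetscInt    cookie,rows;
     PetscScalar *values;
     PetscBinaryRead(fd,&cookie,1,PETSC_INT);
     PetscBinaryRead(fd,&rows,1,PETSC_INT);
     PetscMalloc(rows*sizeof(PetscScalar),&values);
     PetscBinaryRead(fd,values,rows,PETSC_SCALAR);
.ve
  where cookie should come back equal to VEC_FILE_COOKIE; byte swapping,
  when needed, is done inside PetscBinaryRead() itself.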

   Note for Cray users: the ints stored in the binary file are 32-bit
integers, not 64-bit as they are represented in memory; so if you
write your own routines to read/write these binary files from the Cray
you need to adjust the integer sizes that you read in; see
PetscBinaryRead() and PetscBinaryWrite() for how this may be done.

   In addition, PETSc automatically does the byte swapping for
machines that store the bytes reversed, e.g. DEC Alpha, FreeBSD,
Linux, Windows and the Paragon; thus if you write your own binary
read/write routines you have to swap the bytes yourself; see
PetscBinaryRead() and PetscBinaryWrite() for how this may be done.

  Concepts: vector^loading from file

.seealso: PetscViewerBinaryOpen(), VecView(), MatLoad(), VecLoadIntoVector()
@*/
PetscErrorCode PETSCVEC_DLLEXPORT VecLoad(PetscViewer viewer, VecType outtype,Vec *newvec)
{
  PetscTruth     isbinary,flg;
  char           vtype[256];
  const char    *prefix;
#if defined(PETSC_HAVE_PNETCDF)
  PetscTruth     isnetcdf;
#endif

  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);
#if defined(PETSC_HAVE_PNETCDF)
  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_NETCDF,&isnetcdf);
  if ((!isbinary) && (!isnetcdf)) SETERRQ(PETSC_ERR_ARG_WRONG,"Must be binary or NetCDF viewer");
#else
  if (!isbinary) SETERRQ(PETSC_ERR_ARG_WRONG,"Must be binary viewer");
#endif

#ifndef PETSC_USE_DYNAMIC_LIBRARIES
  VecInitializePackage(PETSC_NULL);
#endif
#if defined(PETSC_HAVE_PNETCDF)
  if (isnetcdf) {
    VecLoad_Netcdf(viewer,newvec);
  } else
#endif
  {
    Vec            factory;
    MPI_Comm       comm;
    PetscErrorCode (*r)(PetscViewer, VecType,Vec*);
    PetscMPIInt    size;

    /* allow the vector type to be overridden from the options database */
    PetscObjectGetOptionsPrefix((PetscObject)viewer,(const char**)&prefix);
    PetscOptionsGetString(prefix,"-vec_type",vtype,256,&flg);
    if (flg) {
      outtype = vtype;
    }
    PetscOptionsGetString(prefix,"-vecload_type",vtype,256,&flg);
    if (flg) {
      outtype = vtype;
    }
    PetscObjectGetComm((PetscObject)viewer,&comm);
    if (!outtype) {
      MPI_Comm_size(comm,&size);
      outtype = (size > 1) ? VECMPI : VECSEQ;
    }

    /* create a throwaway vector of the requested type just to look up
       that type's load function, then dispatch to it */
    VecCreate(comm,&factory);
    VecSetSizes(factory,1,PETSC_DETERMINE);
    VecSetType(factory,outtype);
    r = factory->ops->load;
    VecDestroy(factory);
    if (!r) SETERRQ1(PETSC_ERR_SUP,"VecLoad is not supported for type: %s",outtype);
    (*r)(viewer,outtype,newvec);
  }
  return(0);
}
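
/* Illustrative usage sketch (not part of the original vecio.c): loading a
   vector written earlier by VecView() on a binary viewer.  The file name
   "vec.dat" and the FILE_MODE_READ flag are assumptions. */
static PetscErrorCode VecLoadUsageSketch(void)
{
  PetscViewer viewer;
  Vec         x;

  PetscViewerBinaryOpen(PETSC_COMM_WORLD,"vec.dat",FILE_MODE_READ,&viewer);
  /* PETSC_NULL lets VecLoad() pick VECSEQ or VECMPI from the communicator size */
  VecLoad(viewer,PETSC_NULL,&x);
  PetscViewerDestroy(viewer);
  /* ... use x, then ... */
  VecDestroy(x);
  return(0);
}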

#if defined(PETSC_HAVE_PNETCDF)
PetscErrorCode VecLoad_Netcdf(PetscViewer viewer,Vec *newvec)
{
  PetscMPIInt    rank;
  PetscInt       i,N,n,bs;
  PetscInt       ncid,start;
  Vec            vec;
  PetscScalar    *avec;
  MPI_Comm       comm;
  MPI_Request    request;
  MPI_Status     status;
  PetscMap       map;
  PetscTruth     isnetcdf,flag;
  char           name[NC_MAX_NAME];

  PetscLogEventBegin(VEC_Load,viewer,0,0,0);
  PetscObjectGetComm((PetscObject)viewer,&comm);
  MPI_Comm_rank(comm,&rank);
  PetscViewerNetcdfGetID(viewer,&ncid);
  ncmpi_inq_dim(ncid,0,name,(size_t*)&N); /* N gets the global vector size */
  VecCreate(comm,&vec);
  VecSetSizes(vec,PETSC_DECIDE,N);
  if (!rank) {
    PetscOptionsGetInt(PETSC_NULL,"-vecload_block_size",&bs,&flag);
    if (flag) {
      VecSetBlockSize(vec,bs);
    }
  }
  VecSetFromOptions(vec);
  VecGetLocalSize(vec,&n);
  VecGetOwnershipRange(vec,&start,PETSC_NULL);
  VecGetArray(vec,&avec);
  ncmpi_get_vara_double_all(ncid,0,(const size_t*)&start,(const size_t*)&n,(double *)avec);
  VecRestoreArray(vec,&avec);
  *newvec = vec;
  VecAssemblyBegin(vec);
  VecAssemblyEnd(vec);
  PetscLogEventEnd(VEC_Load,viewer,0,0,0);
  return(0);
}
#endif
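
#if defined(PETSC_HAVE_PNETCDF)
/* Illustrative sketch (not part of the original file): the NetCDF path is
   selected simply by the viewer's type.  The file name "vec.nc" and the
   assumption that PetscViewerNetcdfOpen() takes the same
   (comm,name,mode,viewer) arguments as the binary opener are mine. */
static PetscErrorCode VecLoadNetcdfUsageSketch(void)
{
  PetscViewer viewer;
  Vec         x;

  PetscViewerNetcdfOpen(PETSC_COMM_WORLD,"vec.nc",FILE_MODE_READ,&viewer);
  VecLoad(viewer,PETSC_NULL,&x);
  PetscViewerDestroy(viewer);
  VecDestroy(x);
  return(0);
}
#endif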

PetscErrorCode VecLoad_Binary(PetscViewer viewer, VecType itype,Vec *newvec)
{
  PetscMPIInt    size,rank,tag;
  int            fd;
  PetscInt       i,rows,type,n,*range,bs;
  PetscErrorCode ierr,nierr;
  Vec            vec;
  PetscScalar    *avec;
  MPI_Comm       comm;
  MPI_Request    request;
  MPI_Status     status;
  PetscMap       map;
  PetscTruth     flag;

  PetscLogEventBegin(VEC_Load,viewer,0,0,0);
  PetscViewerBinaryGetDescriptor(viewer,&fd);
  PetscObjectGetComm((PetscObject)viewer,&comm);
  MPI_Comm_rank(comm,&rank);
  MPI_Comm_size(comm,&size);

  if (!rank) {
    /* Read vector header. */
    ierr = PetscBinaryRead(fd,&type,1,PETSC_INT);if (ierr) goto handleerror;
    if (type != VEC_FILE_COOKIE) {ierr = PETSC_ERR_ARG_WRONG; goto handleerror;}
    ierr = PetscBinaryRead(fd,&rows,1,PETSC_INT);if (ierr) goto handleerror;
    MPI_Bcast(&rows,1,MPIU_INT,0,comm);
    VecCreate(comm,&vec);
    VecSetSizes(vec,PETSC_DECIDE,rows);
    PetscOptionsGetInt(PETSC_NULL,"-vecload_block_size",&bs,&flag);
    if (flag) {
      VecSetBlockSize(vec,bs);
    }
    VecSetFromOptions(vec);
    VecGetLocalSize(vec,&n);
    VecGetArray(vec,&avec);
    PetscBinaryRead(fd,avec,n,PETSC_SCALAR);
    VecRestoreArray(vec,&avec);

    if (size > 1) {
      /* read in other chunks and send to other processors */
      /* determine maximum chunk owned by any other process */
      VecGetPetscMap(vec,&map);
      PetscMapGetGlobalRange(map,&range);
      n = 1;
      for (i=1; i<size; i++) {
        n = PetscMax(n,range[i+1] - range[i]);
      }
      PetscMalloc(n*sizeof(PetscScalar),&avec);
      PetscObjectGetNewTag((PetscObject)viewer,&tag);
      for (i=1; i<size; i++) {
        n    = range[i+1] - range[i];
        PetscBinaryRead(fd,avec,n,PETSC_SCALAR);
        MPI_Isend(avec,n,MPIU_SCALAR,i,tag,comm,&request);
        MPI_Wait(&request,&status);
      }
      PetscFree(avec);
    }
  } else {
    MPI_Bcast(&rows,1,MPIU_INT,0,comm);
    /* rows == -1 is a marker sent to indicate that the file does not have a vector at this location */
    if (rows == -1) {
      nierr = PetscLogEventEnd(VEC_Load,viewer,0,0,0);CHKERRQ(nierr);
      SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"Error loading vector");
    }
    VecCreate(comm,&vec);
    VecSetSizes(vec,PETSC_DECIDE,rows);
    VecSetFromOptions(vec);
    VecGetLocalSize(vec,&n);
    PetscObjectGetNewTag((PetscObject)viewer,&tag);
    VecGetArray(vec,&avec);
    MPI_Recv(avec,n,MPIU_SCALAR,0,tag,comm,&status);
    VecRestoreArray(vec,&avec);
  }
  *newvec = vec;
  VecAssemblyBegin(vec);
  VecAssemblyEnd(vec);
  PetscLogEventEnd(VEC_Load,viewer,0,0,0);
  return(0);
  /* tell the other processors we've had an error; only used on process 0 */
  handleerror:
    if (PetscExceptionValue(ierr)) {
      nierr = PetscLogEventEnd(VEC_Load,viewer,0,0,0);CHKERRQ(nierr);
      rows = -1; MPI_Bcast(&rows,1,MPIU_INT,0,comm);
    }

  return(ierr);
}

PetscErrorCode VecLoadIntoVector_Default(PetscViewer viewer,Vec vec)
{
  PetscTruth     isbinary;
#if defined(PETSC_HAVE_PNETCDF)
  PetscTruth     isnetcdf;
#endif

  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);
#if defined(PETSC_HAVE_PNETCDF)
  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_NETCDF,&isnetcdf);
  if ((!isbinary) && (!isnetcdf)) SETERRQ(PETSC_ERR_ARG_WRONG,"Must be binary or NetCDF viewer");
#else
  if (!isbinary) SETERRQ(PETSC_ERR_ARG_WRONG,"Must be binary viewer");
#endif

#if defined(PETSC_HAVE_PNETCDF)
  if (isnetcdf) {
    VecLoadIntoVector_Netcdf(viewer,vec);
  } else
#endif
  {
    VecLoadIntoVector_Binary(viewer,vec);
  }
  return(0);
}

#if defined(PETSC_HAVE_PNETCDF)
PetscErrorCode VecLoadIntoVector_Netcdf(PetscViewer viewer,Vec vec)
{
  PetscMPIInt    rank;
  PetscInt       i,N,rows,n,bs;
  PetscInt       ncid,start;
  PetscScalar    *avec;
  MPI_Comm       comm;
  MPI_Request    request;
  MPI_Status     status;
  PetscMap       map;
  PetscTruth     isnetcdf,flag;
  char           name[NC_MAX_NAME];

  PetscLogEventBegin(VEC_Load,viewer,vec,0,0);
  PetscObjectGetComm((PetscObject)viewer,&comm);
  MPI_Comm_rank(comm,&rank);
  PetscViewerNetcdfGetID(viewer,&ncid);
  ncmpi_inq_dim(ncid,0,name,(size_t*)&N); /* N gets the global vector size */
  if (!rank) {
    VecGetSize(vec,&rows);
    if (N != rows) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"Vector in file different length than input vector");
    PetscOptionsGetInt(PETSC_NULL,"-vecload_block_size",&bs,&flag);
    if (flag) {
      VecSetBlockSize(vec,bs);
    }
  }
  VecSetFromOptions(vec);
  VecGetLocalSize(vec,&n);
  VecGetOwnershipRange(vec,&start,PETSC_NULL);
  VecGetArray(vec,&avec);
  ncmpi_get_vara_double_all(ncid,0,(const size_t*)&start,(const size_t*)&n,(double *)avec);
  VecRestoreArray(vec,&avec);
  VecAssemblyBegin(vec);
  VecAssemblyEnd(vec);
  PetscLogEventEnd(VEC_Load,viewer,vec,0,0);
  return(0);
}
#endif

PetscErrorCode VecLoadIntoVector_Binary(PetscViewer viewer,Vec vec)
{
  PetscMPIInt    size,rank,tag;
  PetscInt       i,rows,type,n,*range,bs;
  int            fd;
  PetscScalar    *avec;
  MPI_Comm       comm;
  MPI_Request    request;
  MPI_Status     status;
  PetscMap       map;
  PetscTruth     flag;
  const char     *prefix;

  PetscLogEventBegin(VEC_Load,viewer,vec,0,0);

  PetscViewerBinaryGetDescriptor(viewer,&fd);
  PetscObjectGetComm((PetscObject)viewer,&comm);
  MPI_Comm_rank(comm,&rank);
  MPI_Comm_size(comm,&size);

  if (!rank) {
    /* Read vector header. */
    PetscBinaryRead(fd,&type,1,PETSC_INT);
    if (type != VEC_FILE_COOKIE) SETERRQ(PETSC_ERR_ARG_WRONG,"Non-vector object");
    PetscBinaryRead(fd,&rows,1,PETSC_INT);
    VecGetSize(vec,&n);
    if (n != rows) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"Vector in file different length than input vector");
    MPI_Bcast(&rows,1,MPIU_INT,0,comm);

    PetscObjectGetOptionsPrefix((PetscObject)vec,(const char**)&prefix);
    PetscOptionsGetInt(prefix,"-vecload_block_size",&bs,&flag);
    if (flag) {
      VecSetBlockSize(vec,bs);
    }
    VecSetFromOptions(vec);
    VecGetLocalSize(vec,&n);
    VecGetArray(vec,&avec);
    PetscBinaryRead(fd,avec,n,PETSC_SCALAR);
    VecRestoreArray(vec,&avec);

    if (size > 1) {
      /* read in other chunks and send to other processors */
      /* determine maximum chunk owned by any other process */
      VecGetPetscMap(vec,&map);
      PetscMapGetGlobalRange(map,&range);
      n = 1;
      for (i=1; i<size; i++) {
        n = PetscMax(n,range[i+1] - range[i]);
      }
      PetscMalloc(n*sizeof(PetscScalar),&avec);
      PetscObjectGetNewTag((PetscObject)viewer,&tag);
      for (i=1; i<size; i++) {
        n    = range[i+1] - range[i];
        PetscBinaryRead(fd,avec,n,PETSC_SCALAR);
        MPI_Isend(avec,n,MPIU_SCALAR,i,tag,comm,&request);
        MPI_Wait(&request,&status);
      }
      PetscFree(avec);
    }
  } else {
    MPI_Bcast(&rows,1,MPIU_INT,0,comm);
    VecSetFromOptions(vec);
    VecGetLocalSize(vec,&n);
    PetscObjectGetNewTag((PetscObject)viewer,&tag);
    VecGetArray(vec,&avec);
    MPI_Recv(avec,n,MPIU_SCALAR,0,tag,comm,&status);
    VecRestoreArray(vec,&avec);
  }
  VecAssemblyBegin(vec);
  VecAssemblyEnd(vec);
  PetscLogEventEnd(VEC_Load,viewer,vec,0,0);
  return(0);
}
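
/* Illustrative sketch (not part of the original file): VecLoadIntoVector()
   fills a caller-created vector, so the caller controls the parallel layout.
   The local size nlocal and the file name "vec.dat" are assumptions. */
static PetscErrorCode VecLoadIntoVectorUsageSketch(PetscInt nlocal)
{
  PetscViewer viewer;
  Vec         x;

  VecCreateMPI(PETSC_COMM_WORLD,nlocal,PETSC_DETERMINE,&x);
  PetscViewerBinaryOpen(PETSC_COMM_WORLD,"vec.dat",FILE_MODE_READ,&viewer);
  VecLoadIntoVector(viewer,x);
  PetscViewerDestroy(viewer);
  VecDestroy(x);
  return(0);
}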