Actual source code: vecio.c

/*$Id: vecio.c,v 1.74 2001/08/07 03:02:17 balay Exp $*/

/* 
   This file contains simple binary input routines for vectors.  The
   analogous output routines are within each vector implementation's 
   VecView (with viewer type PETSC_VIEWER_BINARY).
 */

 #include "petsc.h"
 #include "petscsys.h"
 #include "petscvec.h"

/*@C 
  VecLoad - Loads a vector that has been stored in binary format
  with VecView().

  Collective on PetscViewer 

  Input Parameter:
. viewer - binary file viewer, obtained from PetscViewerBinaryOpen()

  Output Parameter:
. newvec - the newly loaded vector

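  Options Database Key:
. -vecload_block_size <bs> - sets the block size of the loaded vector
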
   Level: intermediate

  Notes:
  The input file must contain the full global vector, as
  written by the routine VecView().

  Notes for advanced users:
  Most users should not need to know the details of the binary storage
  format, since VecLoad() and VecView() completely hide these details.
  But for anyone who's interested, the standard binary vector storage
  format is
.vb
     int    VEC_FILE_COOKIE
     int    number of rows
     PetscScalar *values of all entries
.ve
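
  For illustration, here is a minimal sketch of how a standalone,
  uniprocessor program could traverse this format with the low-level
  binary routines (error checking is omitted and the file name
  "vec.dat" is hypothetical):
.vb
     int         fd,cookie,rows;
     PetscScalar *values;

     PetscBinaryOpen("vec.dat",PETSC_BINARY_RDONLY,&fd);
     PetscBinaryRead(fd,&cookie,1,PETSC_INT);      /* must equal VEC_FILE_COOKIE */
     PetscBinaryRead(fd,&rows,1,PETSC_INT);        /* global length of the vector */
     PetscMalloc(rows*sizeof(PetscScalar),&values);
     PetscBinaryRead(fd,values,rows,PETSC_SCALAR); /* all entries, in order */
     PetscBinaryClose(fd);
     PetscFree(values);
.ve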

   Note for Cray users: the ints stored in the binary file are 32 bit
integers, not 64 bit as they are represented in memory, so if you
write your own routines to read/write these binary files from the Cray
you need to adjust the integer sizes that you read in; see
PetscBinaryRead() and PetscBinaryWrite() to see how this may be
done.

   In addition, PETSc automatically does the byte swapping for
machines that store the bytes reversed, e.g. DEC Alpha, FreeBSD,
Linux, Windows NT, and the Intel Paragon; thus if you write your own
binary read/write routines you must swap the bytes yourself; see
PetscBinaryRead() and PetscBinaryWrite() to see how this may be done.

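  Example usage (a minimal sketch, with error checking omitted; the
  file name "vec.dat" is hypothetical):
.vb
     PetscViewer viewer;
     Vec         u;

     PetscViewerBinaryOpen(PETSC_COMM_WORLD,"vec.dat",PETSC_BINARY_RDONLY,&viewer);
     VecLoad(viewer,&u);
     PetscViewerDestroy(viewer);
.ve
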
  Concepts: vector^loading from file

.seealso: PetscViewerBinaryOpen(), VecView(), MatLoad(), VecLoadIntoVector() 
@*/
int VecLoad(PetscViewer viewer,Vec *newvec)
{
  int         i,rows,type,fd,rank,size,n,*range,tag,bs;
  Vec         vec;
  PetscScalar *avec;
  MPI_Comm    comm;
  MPI_Request request;
  MPI_Status  status;
  PetscMap    map;
  PetscTruth  isbinary,flag;

  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);
  if (!isbinary) SETERRQ(PETSC_ERR_ARG_WRONG,"Must be binary viewer");
  PetscLogEventBegin(VEC_Load,viewer,0,0,0);
  PetscViewerBinaryGetDescriptor(viewer,&fd);
  PetscObjectGetComm((PetscObject)viewer,&comm);
  MPI_Comm_rank(comm,&rank);
  MPI_Comm_size(comm,&size);

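  /*
     Processor 0 reads the header and its own chunk of values directly
     from the file, broadcasts the global size so that every processor
     can create its part of the vector, and then reads each remaining
     chunk and sends it to the processor that owns it.
  */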
  if (!rank) {
    /* Read vector header. */
    PetscBinaryRead(fd,&type,1,PETSC_INT);
    if (type != VEC_FILE_COOKIE) SETERRQ(PETSC_ERR_ARG_WRONG,"Non-vector object");
    PetscBinaryRead(fd,&rows,1,PETSC_INT);
    MPI_Bcast(&rows,1,MPI_INT,0,comm);
    VecCreate(comm,&vec);
    VecSetSizes(vec,PETSC_DECIDE,rows);
    PetscOptionsGetInt(PETSC_NULL,"-vecload_block_size",&bs,&flag);
    if (flag) {
      VecSetBlockSize(vec,bs);
    }
    VecSetFromOptions(vec);
    VecGetLocalSize(vec,&n);
    VecGetArray(vec,&avec);
    PetscBinaryRead(fd,avec,n,PETSC_SCALAR);
    VecRestoreArray(vec,&avec);

    if (size > 1) {
      /* read in other chunks and send to other processors */
      /* determine the maximum chunk owned by any other processor */
      VecGetPetscMap(vec,&map);
      PetscMapGetGlobalRange(map,&range);
      n = 1;
      for (i=1; i<size; i++) {
        n = PetscMax(n,range[i+1] - range[i]);
      }
      PetscMalloc(n*sizeof(PetscScalar),&avec);
      PetscObjectGetNewTag((PetscObject)viewer,&tag);
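      /*
         A single buffer of the maximum chunk size is reused for every
         destination, so each send must complete before the next chunk
         is read; the Isend/Wait pair below is thus effectively a
         blocking send.
      */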
      for (i=1; i<size; i++) {
        n    = range[i+1] - range[i];
        PetscBinaryRead(fd,avec,n,PETSC_SCALAR);
        MPI_Isend(avec,n,MPIU_SCALAR,i,tag,comm,&request);
        MPI_Wait(&request,&status);
      }
      PetscFree(avec);
    }
  } else {
    MPI_Bcast(&rows,1,MPI_INT,0,comm);
    VecCreate(comm,&vec);
    VecSetSizes(vec,PETSC_DECIDE,rows);
    /* apply the same block size option as processor 0 so that all
       processors compute the same parallel layout */
    PetscOptionsGetInt(PETSC_NULL,"-vecload_block_size",&bs,&flag);
    if (flag) {
      VecSetBlockSize(vec,bs);
    }
    VecSetFromOptions(vec);
    VecGetLocalSize(vec,&n);
    PetscObjectGetNewTag((PetscObject)viewer,&tag);
    VecGetArray(vec,&avec);
    MPI_Recv(avec,n,MPIU_SCALAR,0,tag,comm,&status);
    VecRestoreArray(vec,&avec);
  }
  *newvec = vec;
  VecAssemblyBegin(vec);
  VecAssemblyEnd(vec);
  PetscLogEventEnd(VEC_Load,viewer,0,0,0);
  return(0);
}

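/*
   VecLoadIntoVector_Default - Reads a vector stored in binary format by
   VecView() into an already created vector, first checking that the
   global size recorded in the file matches the size of the given
   vector.  The communication pattern is the same as in VecLoad() above.
*/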
int VecLoadIntoVector_Default(PetscViewer viewer,Vec vec)
{
  int         i,rows,type,fd,rank,size,n,*range,tag,bs;
  PetscScalar *avec;
  MPI_Comm    comm;
  MPI_Request request;
  MPI_Status  status;
  PetscMap    map;
  PetscTruth  isbinary,flag;


  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);
  if (!isbinary) SETERRQ(PETSC_ERR_ARG_WRONG,"Must be binary viewer");
  PetscLogEventBegin(VEC_Load,viewer,vec,0,0);

  PetscViewerBinaryGetDescriptor(viewer,&fd);
  PetscObjectGetComm((PetscObject)viewer,&comm);
  MPI_Comm_rank(comm,&rank);
  MPI_Comm_size(comm,&size);

  if (!rank) {
    /* Read vector header. */
    PetscBinaryRead(fd,&type,1,PETSC_INT);
    if (type != VEC_FILE_COOKIE) SETERRQ(PETSC_ERR_ARG_WRONG,"Non-vector object");
    PetscBinaryRead(fd,&rows,1,PETSC_INT);
    VecGetSize(vec,&n);
    if (n != rows) SETERRQ(1,"Vector in file different length than input vector");
    MPI_Bcast(&rows,1,MPI_INT,0,comm);

    PetscOptionsGetInt(PETSC_NULL,"-vecload_block_size",&bs,&flag);
    if (flag) {
      VecSetBlockSize(vec,bs);
    }
    VecSetFromOptions(vec);
    VecGetLocalSize(vec,&n);
    VecGetArray(vec,&avec);
    PetscBinaryRead(fd,avec,n,PETSC_SCALAR);
    VecRestoreArray(vec,&avec);

    if (size > 1) {
      /* read in other chunks and send to other processors */
      /* determine the maximum chunk owned by any other processor */
      VecGetPetscMap(vec,&map);
      PetscMapGetGlobalRange(map,&range);
      n = 1;
      for (i=1; i<size; i++) {
        n = PetscMax(n,range[i+1] - range[i]);
      }
      PetscMalloc(n*sizeof(PetscScalar),&avec);
      PetscObjectGetNewTag((PetscObject)viewer,&tag);
      for (i=1; i<size; i++) {
        n    = range[i+1] - range[i];
        PetscBinaryRead(fd,avec,n,PETSC_SCALAR);
        MPI_Isend(avec,n,MPIU_SCALAR,i,tag,comm,&request);
        MPI_Wait(&request,&status);
      }
      PetscFree(avec);
    }
  } else {
    MPI_Bcast(&rows,1,MPI_INT,0,comm);
    /* apply the same block size option as processor 0 so that all
       processors make the same choices in VecSetFromOptions() */
    PetscOptionsGetInt(PETSC_NULL,"-vecload_block_size",&bs,&flag);
    if (flag) {
      VecSetBlockSize(vec,bs);
    }
    VecSetFromOptions(vec);
    VecGetLocalSize(vec,&n);
    PetscObjectGetNewTag((PetscObject)viewer,&tag);
    VecGetArray(vec,&avec);
    MPI_Recv(avec,n,MPIU_SCALAR,0,tag,comm,&status);
    VecRestoreArray(vec,&avec);
  }
  VecAssemblyBegin(vec);
  VecAssemblyEnd(vec);
  PetscLogEventEnd(VEC_Load,viewer,vec,0,0);
  return(0);
}