Actual source code: pmap.c

/*$Id: pmap.c,v 1.21 2001/07/20 21:18:16 bsmith Exp $*/

/*
   This file contains routines for the basic map object implementation.
*/

#include "src/vec/vecimpl.h"

int PetscMapDestroy_MPI(PetscMap m)
{
  return(0);
}

static struct _PetscMapOps DvOps = {
  PETSC_NULL,
  PetscMapDestroy_MPI,
};

EXTERN_C_BEGIN
int PetscMapCreate_MPI(PetscMap m)
{
  int rank,size;
  int p;

  PetscMemcpy(m->ops, &DvOps, sizeof(DvOps));

  MPI_Comm_size(m->comm, &size);
  MPI_Comm_rank(m->comm, &rank);
  PetscSplitOwnership(m->comm,&m->n,&m->N);
  PetscMalloc((size+1)*sizeof(int), &m->range);
  /* gather every process's local size into range[1..size] */
  MPI_Allgather(&m->n, 1, MPI_INT, m->range+1, 1, MPI_INT, m->comm);

  /* turn the local sizes into a running sum so that range[p] is the first global
     index owned by process p; range[1] already holds process 0's size */
  m->range[0] = 0;
  for(p = 2; p <= size; p++) {
    m->range[p] += m->range[p-1];
  }

  m->rstart = m->range[rank];
  m->rend   = m->range[rank+1];
  return(0);
}
EXTERN_C_END
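
/* Example of the ownership computation above: with three processes contributing
   local sizes 2, 3, and 4, MPI_Allgather() leaves range = {0, 2, 3, 4} and the
   running sum turns it into {0, 2, 5, 9}; process 1 then gets rstart = 2 and
   rend = 5, i.e. it owns global indices 2 through 4. */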

EXTERN_C_BEGIN
int PetscMapSerialize_MPI(MPI_Comm comm, PetscMap *map, PetscViewer viewer, PetscTruth store)
{
  PetscMap m;
  int      fd;
  int      n, N, checkN;
  int      numProcs;
  int      ierr;

  PetscViewerBinaryGetDescriptor(viewer, &fd);
  if (store) {
    /* write the local size, global size, ownership range, and range array */
    m    = *map;
    MPI_Comm_size(m->comm, &numProcs);
    PetscBinaryWrite(fd, &m->n,      1,          PETSC_INT, 0);
    PetscBinaryWrite(fd, &m->N,      1,          PETSC_INT, 0);
    PetscBinaryWrite(fd, &m->rstart, 1,          PETSC_INT, 0);
    PetscBinaryWrite(fd, &m->rend,   1,          PETSC_INT, 0);
    PetscBinaryWrite(fd,  m->range,  numProcs+1, PETSC_INT, 0);
  } else {
    /* read the sizes back, check that the local sizes still sum to the global
       size, and rebuild the map from the stored ranges */
    PetscBinaryRead(fd, &n,         1,          PETSC_INT);
    PetscBinaryRead(fd, &N,         1,          PETSC_INT);
    MPI_Allreduce(&n, &checkN, 1, MPI_INT, MPI_SUM, comm);
    if (checkN != N) SETERRQ(PETSC_ERR_ARG_CORRUPT, "Invalid partition");
    PetscMapCreate(comm, &m);
    PetscMapSetLocalSize(m, n);
    PetscMapSetSize(m, N);
    MPI_Comm_size(comm, &numProcs);
    PetscMalloc((numProcs+1) * sizeof(int), &m->range);
    PetscBinaryRead(fd, &m->rstart, 1,          PETSC_INT);
    PetscBinaryRead(fd, &m->rend,   1,          PETSC_INT);
    PetscBinaryRead(fd,  m->range,  numProcs+1, PETSC_INT);

    *map = m;
  }

  return(0);
}
EXTERN_C_END
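
/* Record layout handled by PetscMapSerialize_MPI(): the local size n, the global
   size N, rstart, and rend as single PETSC_INT values, followed by the range
   array of numProcs+1 PETSC_INT values. */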

/*@C
   PetscMapCreateMPI - Creates a map object.

   Collective on MPI_Comm

   Input Parameters:
+  comm - the MPI communicator to use
.  n - local vector length (or PETSC_DECIDE to have it calculated if N is given)
-  N - global vector length (or PETSC_DECIDE to have it calculated if n is given)

   Output Parameter:
.  m - the map object

   Suggested by:
   Robert Clay and Alan Williams, developers of ISIS++, Sandia National Laboratories.

   Level: developer

   Concepts: maps^creating

.seealso: PetscMapDestroy(), PetscMapGetLocalSize(), PetscMapGetSize(), PetscMapGetGlobalRange(),
          PetscMapGetLocalRange()

@*/
int PetscMapCreateMPI(MPI_Comm comm,int n,int N,PetscMap *m)
{
  PetscMapCreate(comm, m);
  PetscMapSetLocalSize(*m, n);
  PetscMapSetSize(*m, N);
  PetscMapSetType(*m, MAP_MPI);
  return(0);
}
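
A minimal usage sketch of the routine documented above (not part of pmap.c), assuming the PetscMapGetLocalRange() and PetscMapDestroy() calling sequences listed in the .seealso section; the helper routine name is illustrative only:

#include "petsc.h"

int PetscMapExampleUsage(void)
{
  PetscMap map;
  int      rstart, rend, ierr;

  /* split 100 global entries across PETSC_COMM_WORLD; PETSc picks the local sizes */
  ierr = PetscMapCreateMPI(PETSC_COMM_WORLD, PETSC_DECIDE, 100, &map);CHKERRQ(ierr);
  /* this process owns the contiguous block of global indices [rstart, rend) */
  ierr = PetscMapGetLocalRange(map, &rstart, &rend);CHKERRQ(ierr);
  ierr = PetscMapDestroy(map);CHKERRQ(ierr);
  return 0;
}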