Actual source code: dagtona.c

#define PETSCDM_DLL

/*
     Tools to help solve the coarse grid problem redundantly.
  Provides two scatter contexts: (1) one that maps from the usual global vector
  to a copy of the entire vector, in NATURAL numbering, on every processor and
  (2) one that, from the entire vector in natural numbering on each processor,
  extracts this processor's piece in GLOBAL numbering.
*/

#include "src/dm/da/daimpl.h"

#undef __FUNCT__
#define __FUNCT__ "DAGlobalToNaturalAllCreate"
/*@
   DAGlobalToNaturalAllCreate - Creates a scatter context that maps a copy of the
     entire global vector to each processor, in natural numbering

   Collective on DA

   Input Parameter:
.  da - the distributed array context

   Output Parameter:
.  scatter - the scatter context

   Level: advanced

.keywords: distributed array, global to local, begin, coarse problem

.seealso: DAGlobalToNaturalEnd(), DALocalToGlobal(), DACreate2d(),
          DAGlobalToLocalBegin(), DAGlobalToLocalEnd(), DACreateNaturalVector()
@*/
PetscErrorCode PETSCDM_DLLEXPORT DAGlobalToNaturalAllCreate(DA da,VecScatter *scatter)
{
  PetscErrorCode ierr;
  PetscInt       N;
  IS             from,to;
  Vec            tmplocal,global;
  AO             ao;

  PetscFunctionBegin;
  ierr = DAGetAO(da,&ao);CHKERRQ(ierr);

  /* create the scatter context */
  ierr = ISCreateStride(da->comm,da->Nlocal,0,1,&to);CHKERRQ(ierr);
  ierr = AOPetscToApplicationIS(ao,to);CHKERRQ(ierr);
  ierr = ISCreateStride(da->comm,da->Nlocal,0,1,&from);CHKERRQ(ierr);
  ierr = MPI_Allreduce(&da->Nlocal,&N,1,MPIU_INT,MPI_SUM,da->comm);CHKERRQ(ierr);
  /* tmplocal and global carry no arrays; they only describe the layouts for VecScatterCreate() */
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,N,0,&tmplocal);CHKERRQ(ierr);
  ierr = VecCreateMPIWithArray(da->comm,da->Nlocal,PETSC_DETERMINE,0,&global);CHKERRQ(ierr);
  ierr = VecSetBlockSize(tmplocal,da->w);CHKERRQ(ierr);
  ierr = VecSetBlockSize(global,da->w);CHKERRQ(ierr);
  ierr = VecScatterCreate(global,from,tmplocal,to,scatter);CHKERRQ(ierr);
  ierr = VecDestroy(tmplocal);CHKERRQ(ierr);
  ierr = VecDestroy(global);CHKERRQ(ierr);
  ierr = ISDestroy(from);CHKERRQ(ierr);
  ierr = ISDestroy(to);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
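
/*
   Usage sketch (not part of the original dagtona.c): one way DAGlobalToNaturalAllCreate()
   might be used to give every processor a sequential copy of the entire DA vector in
   natural numbering.  The helper name GatherNaturalCopy and the error handling are
   illustrative; the sketch assumes the PETSc 2.3-era calling sequence in which the
   VecScatter context is the final argument of VecScatterBegin()/VecScatterEnd().
*/
static PetscErrorCode GatherNaturalCopy(DA da,Vec global,Vec *natural_all)
{
  PetscErrorCode ierr;
  PetscInt       N;
  VecScatter     tonatural;

  PetscFunctionBegin;
  /* sequential vector on each processor large enough to hold the whole global vector */
  ierr = VecGetSize(global,&N);CHKERRQ(ierr);
  ierr = VecCreateSeq(PETSC_COMM_SELF,N,natural_all);CHKERRQ(ierr);

  ierr = DAGlobalToNaturalAllCreate(da,&tonatural);CHKERRQ(ierr);
  ierr = VecScatterBegin(global,*natural_all,INSERT_VALUES,SCATTER_FORWARD,tonatural);CHKERRQ(ierr);
  ierr = VecScatterEnd(global,*natural_all,INSERT_VALUES,SCATTER_FORWARD,tonatural);CHKERRQ(ierr);
  ierr = VecScatterDestroy(tonatural);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}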

#undef __FUNCT__
#define __FUNCT__ "DANaturalAllToGlobalCreate"
/*@
   DANaturalAllToGlobalCreate - Creates a scatter context that maps from a copy
     of the entire vector on each processor to its local part in the global vector.

   Collective on DA

   Input Parameter:
.  da - the distributed array context

   Output Parameter:
.  scatter - the scatter context

   Level: advanced

.keywords: distributed array, global to local, begin, coarse problem

.seealso: DAGlobalToNaturalEnd(), DALocalToGlobal(), DACreate2d(),
          DAGlobalToLocalBegin(), DAGlobalToLocalEnd(), DACreateNaturalVector()
@*/
PetscErrorCode PETSCDM_DLLEXPORT DANaturalAllToGlobalCreate(DA da,VecScatter *scatter)
{
  PetscErrorCode ierr;
  PetscInt       M,m = da->Nlocal,start;
  IS             from,to;
  Vec            tmplocal,global;
  AO             ao;

  PetscFunctionBegin;
  ierr = DAGetAO(da,&ao);CHKERRQ(ierr);

  /* create the scatter context */
  ierr = MPI_Allreduce(&m,&M,1,MPIU_INT,MPI_SUM,da->comm);CHKERRQ(ierr);
  ierr = VecCreateMPIWithArray(da->comm,m,PETSC_DETERMINE,0,&global);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(global,&start,PETSC_NULL);CHKERRQ(ierr);
  ierr = ISCreateStride(da->comm,m,start,1,&from);CHKERRQ(ierr);
  ierr = AOPetscToApplicationIS(ao,from);CHKERRQ(ierr);
  ierr = ISCreateStride(da->comm,m,start,1,&to);CHKERRQ(ierr);
  /* tmplocal and global carry no arrays; they only describe the layouts for VecScatterCreate() */
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,M,0,&tmplocal);CHKERRQ(ierr);
  ierr = VecSetBlockSize(tmplocal,da->w);CHKERRQ(ierr);
  ierr = VecSetBlockSize(global,da->w);CHKERRQ(ierr);
  ierr = VecScatterCreate(tmplocal,from,global,to,scatter);CHKERRQ(ierr);
  ierr = VecDestroy(tmplocal);CHKERRQ(ierr);
  ierr = VecDestroy(global);CHKERRQ(ierr);
  ierr = ISDestroy(from);CHKERRQ(ierr);
  ierr = ISDestroy(to);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
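
/*
   Usage sketch (not part of the original dagtona.c): one way DANaturalAllToGlobalCreate()
   might be used to push a processor's copy of the entire vector, stored in natural
   numbering, back into its own piece of a DA global vector.  The helper name
   ScatterNaturalCopyToGlobal and the error handling are illustrative; as above, the
   sketch assumes the PETSc 2.3-era VecScatterBegin()/VecScatterEnd() calling sequence.
*/
static PetscErrorCode ScatterNaturalCopyToGlobal(DA da,Vec natural_all,Vec global)
{
  PetscErrorCode ierr;
  VecScatter     fromnatural;

  PetscFunctionBegin;
  ierr = DANaturalAllToGlobalCreate(da,&fromnatural);CHKERRQ(ierr);
  /* only the entries this processor owns in the global vector are extracted and inserted */
  ierr = VecScatterBegin(natural_all,global,INSERT_VALUES,SCATTER_FORWARD,fromnatural);CHKERRQ(ierr);
  ierr = VecScatterEnd(natural_all,global,INSERT_VALUES,SCATTER_FORWARD,fromnatural);CHKERRQ(ierr);
  ierr = VecScatterDestroy(fromnatural);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}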