Actual source code: psplit.c

/*$Id: psplit.c,v 1.16 2001/03/23 23:20:45 balay Exp $*/

#include "petsc.h"

/*@C
    PetscSplitOwnershipBlock - Given a global (or local) length, determines a local
        (or global) length via a simple formula. Splits so that each processor's
        local size is divisible by the block size.

   Collective on MPI_Comm (if N is PETSC_DECIDE)

   Input Parameters:
+    comm - MPI communicator that shares the object being divided
.    bs - block size
.    n - local length (or PETSC_DECIDE to have it set)
-    N - global length (or PETSC_DECIDE)

   Level: developer

   Notes:
     n and N cannot both be PETSC_DECIDE

     If one processor calls this with N of PETSC_DECIDE then all processors
     must; otherwise the program will hang.

.seealso: PetscSplitOwnership()

@*/
int PetscSplitOwnershipBlock(MPI_Comm comm,int bs,int *n,int *N)
{
  int ierr,size,rank;

  if (*N == PETSC_DECIDE && *n == PETSC_DECIDE) SETERRQ(1,"Both n and N cannot be PETSC_DECIDE");

  if (*N == PETSC_DECIDE) {
    if (*n % bs != 0) SETERRQ2(1,"local size %d not divisible by block size %d",*n,bs);
    ierr = MPI_Allreduce(n,N,1,MPI_INT,MPI_SUM,comm);CHKERRQ(ierr);
  } else if (*n == PETSC_DECIDE) {
    int Nbs = *N/bs;
    ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    /* distribute the Nbs blocks nearly evenly: the first Nbs % size ranks get one extra block */
    *n = bs*(Nbs/size + ((Nbs % size) > rank));
  }
  return(0);
}
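
/*
   Illustrative usage sketch (not part of the original source): let the call
   choose the local size for a blocked layout of global length N = 100 with
   block size bs = 4, so every process ends up with a multiple of 4 entries.

     int ierr,n = PETSC_DECIDE,N = 100;
     ierr = PetscSplitOwnershipBlock(PETSC_COMM_WORLD,4,&n,&N);CHKERRQ(ierr);

   On 3 processes the 25 blocks split as 9/8/8, i.e. n = 36, 32, and 32.
*/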


/*@C
    PetscSplitOwnership - Given a global (or local) length, determines a local
        (or global) length via a simple formula

   Collective on MPI_Comm (if N is PETSC_DECIDE)

   Input Parameters:
+    comm - MPI communicator that shares the object being divided
.    n - local length (or PETSC_DECIDE to have it set)
-    N - global length (or PETSC_DECIDE)

   Level: developer

   Notes:
     n and N cannot both be PETSC_DECIDE

     If one processor calls this with N of PETSC_DECIDE then all processors
     must; otherwise the program will hang.

.seealso: PetscSplitOwnershipBlock()

@*/
int PetscSplitOwnership(MPI_Comm comm,int *n,int *N)
{
  int ierr,size,rank;

  if (*N == PETSC_DECIDE && *n == PETSC_DECIDE) SETERRQ(1,"Both n and N cannot be PETSC_DECIDE");

  if (*N == PETSC_DECIDE) {
    ierr = MPI_Allreduce(n,N,1,MPI_INT,MPI_SUM,comm);CHKERRQ(ierr);
  } else if (*n == PETSC_DECIDE) {
    ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    /* split nearly evenly: the first *N % size ranks get one extra entry */
    *n = *N/size + ((*N % size) > rank);
#if defined(PETSC_USE_BOPT_g)
  } else {
    /* in debugging builds verify that the sum of the local lengths matches the global length */
    int tmp;
    ierr = MPI_Allreduce(n,&tmp,1,MPI_INT,MPI_SUM,comm);CHKERRQ(ierr);
    if (tmp != *N) SETERRQ3(1,"Sum of local lengths %d does not equal global length %d, my local length %d",tmp,*N,*n);
#endif
  }

  return(0);
}
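
/*
   Illustrative usage sketch (not part of the original source): each process
   supplies its local length and the global length is derived by summation.

     int ierr,n = 10,N = PETSC_DECIDE;
     ierr = PetscSplitOwnership(PETSC_COMM_WORLD,&n,&N);CHKERRQ(ierr);

   On 4 processes N becomes 40. Conversely, passing n = PETSC_DECIDE with
   N = 10 on 3 processes yields n = 4, 3, and 3.
*/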