Actual source code: ex8.c

/*$Id: ex8.c,v 1.50 2001/08/07 03:04:00 balay Exp $*/

static char help[] = "Illustrates use of the preconditioner ASM.\n\
The Additive Schwarz Method for solving a linear system in parallel with SLES.  The\n\
code indicates the procedure for setting user-defined subdomains.  Input\n\
parameters include:\n\
  -user_set_subdomain_solvers:  User explicitly sets subdomain solvers\n\
  -user_set_subdomains:  Activate user-defined subdomains\n\n";

/*
   Note:  This example focuses on setting the subdomains for the ASM
   preconditioner for a problem on a 2D rectangular grid.  See ex1.c
   and ex2.c for more detailed comments on the basic usage of SLES
   (including working with matrices and vectors).

   The ASM preconditioner is fully parallel, but currently the routine
   PCASMCreateSubdomains2D(), which is used in this example to demonstrate
   user-defined subdomains (activated via -user_set_subdomains), is
   uniprocessor only.

   The matrix in this linear system arises from the discretized Laplacian,
   and thus is not very interesting in terms of experimenting with variants
   of the ASM preconditioner.
*/

/*T
   Concepts: SLES^Additive Schwarz Method (ASM) with user-defined subdomains
   Processors: n
T*/

/* 
  Include "petscsles.h" so that we can use SLES solvers.  Note that this file
  automatically includes:
     petsc.h       - base PETSc routines   petscvec.h - vectors
     petscsys.h    - system routines       petscmat.h - matrices
     petscis.h     - index sets            petscksp.h - Krylov subspace methods
     petscviewer.h - viewers               petscpc.h  - preconditioners
*/
#include "petscsles.h"

int main(int argc,char **args)
{
  Vec          x,b,u;                   /* approx solution, RHS, exact solution */
  Mat          A;                       /* linear system matrix */
  SLES         sles;                    /* linear solver context */
  PC           pc;                      /* PC context */
  IS           *is;                     /* array of index sets that define the subdomains */
  int          overlap = 1;             /* width of subdomain overlap */
  int          Nsub;                    /* number of subdomains */
  int          m = 15,n = 17;           /* mesh dimensions in x- and y-directions */
  int          M = 2,N = 1;             /* number of subdomains in x- and y-directions */
  int          i,j,its,I,J,ierr,Istart,Iend,size;
  PetscTruth   flg;
  PetscTruth   user_subdomains;         /* flag - 1 indicates user-defined subdomains */
  PetscScalar  v, one = 1.0;

  PetscInitialize(&argc,&args,(char *)0,help);
  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  PetscOptionsGetInt(PETSC_NULL,"-m",&m,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-n",&n,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-M",&M,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-N",&N,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-overlap",&overlap,PETSC_NULL);
  PetscOptionsHasName(PETSC_NULL,"-user_set_subdomains",&user_subdomains);

  /* -------------------------------------------------------------------
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     ------------------------------------------------------------------- */

  /* 
     Assemble the matrix for the five point stencil, YET AGAIN 
  */
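  /*
     Descriptive note (added for clarity): row I of the matrix corresponds to
     the grid point (i,j) with I = i*n + j.  The four off-diagonal entries of
     value -1 couple the point to its nearest grid neighbors, and the diagonal
     entry is 4, giving the standard five-point discrete Laplacian.
  */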
  MatCreate(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,&A);
  MatSetFromOptions(A);
  MatGetOwnershipRange(A,&Istart,&Iend);
  for (I=Istart; I<Iend; I++) {
    v = -1.0; i = I/n; j = I - i*n;
    if (i>0)   {J = I - n; MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);}
    if (i<m-1) {J = I + n; MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);}
    if (j>0)   {J = I - 1; MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);}
    if (j<n-1) {J = I + 1; MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);}
    v = 4.0; MatSetValues(A,1,&I,1,&I,&v,INSERT_VALUES);
  }
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);

  /* 
     Create and set vectors 
  */
  VecCreate(PETSC_COMM_WORLD,&b);
  VecSetSizes(b,PETSC_DECIDE,m*n);
  VecSetFromOptions(b);
  VecDuplicate(b,&u);
  VecDuplicate(b,&x);
  VecSet(&one,u);
  MatMult(A,u,b);

  /* 
     Create linear solver context 
  */
  SLESCreate(PETSC_COMM_WORLD,&sles);

  /* 
     Set operators. Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  SLESSetOperators(sles,A,A,DIFFERENT_NONZERO_PATTERN);

  /* 
     Set the default preconditioner for this program to be ASM
  */
  SLESGetPC(sles,&pc);
  PCSetType(pc,PCASM);

  /* -------------------------------------------------------------------
                  Define the problem decomposition
     ------------------------------------------------------------------- */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Basic method, should be sufficient for the needs of many users.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Set the overlap, using the default PETSc decomposition, via
         PCASMSetOverlap(pc,overlap);
     One could instead use the runtime option -pc_asm_overlap <ovl>.

     Set the total number of blocks via -pc_asm_blocks <blks>.
     Note:  The ASM default is to use 1 block per processor.  To
     experiment on a single processor with various overlaps, you
     must specify use of multiple blocks!  (A sample run line is
     sketched in the comment below.)
  */
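  /*
     Illustrative sketch (not part of the original example): a single-processor
     run exercising the basic method with several blocks might look like

         mpirun -np 1 ex8 -pc_asm_overlap 2 -pc_asm_blocks 4 -sub_pc_type ilu

     where the exact launcher syntax (mpirun, mpiexec, ...) depends on the
     local MPI installation.
  */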

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       More advanced method, setting user-defined subdomains
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     First, create index sets that define the subdomains.  The utility
     routine PCASMCreateSubdomains2D() is a simple example (that currently
     supports 1 processor only!).  More generally, the user should write
     a custom routine for a particular problem geometry (a sketch of what
     such a routine might do follows this comment).

     Then call either PCASMSetLocalSubdomains() or PCASMSetTotalSubdomains()
     to set the subdomains for the ASM preconditioner.
  */
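  /*
     Sketch (for illustration only; nidx and idx are hypothetical names, and
     this example does not run this code): a custom decomposition routine would
     typically gather the global node numbers of each subdomain k into an
     integer array and wrap it in an index set, e.g.

         ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,&is[k]);

     and then pass the resulting array of index sets to
     PCASMSetLocalSubdomains(), exactly as done below.
  */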

  if (!user_subdomains) { /* basic version */
    PCASMSetOverlap(pc,overlap);
  } else { /* advanced version */
    if (size != 1) SETERRQ(1,"PCASMCreateSubdomains2D() is currently a uniprocessor routine only!");
    PCASMCreateSubdomains2D(m,n,M,N,1,overlap,&Nsub,&is);
    PCASMSetLocalSubdomains(pc,Nsub,is);
  }
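  /*
     Note (added for clarity): in the advanced branch above,
     PCASMCreateSubdomains2D() returns Nsub = M*N index sets, one per subdomain
     of the m x n grid, each extended by the requested overlap; these index
     sets are destroyed near the end of the program.
  */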

  /* -------------------------------------------------------------------
                Set the linear solvers for the subblocks
     ------------------------------------------------------------------- */

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
       Basic method, should be sufficient for the needs of most users.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     By default, the ASM preconditioner uses the same solver on each
     block of the problem.  To set the same solver options on all blocks,
     use the prefix -sub before the usual PC and KSP options, e.g.,
          -sub_pc_type <pc> -sub_ksp_type <ksp> -sub_ksp_rtol 1.e-4

     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        Advanced method, setting different solvers for various blocks.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

     Note that each block's SLES context is completely independent of
     the others, and the full range of uniprocessor SLES options is
     available for each block.

     - Use PCASMGetSubSLES() to extract the array of SLES contexts for
       the local blocks.
     - See ex7.c for a simple example of setting different linear solvers
       for the individual blocks for the block Jacobi method (which is
       equivalent to the ASM method with zero overlap).
  */

  PetscOptionsHasName(PETSC_NULL,"-user_set_subdomain_solvers",&flg);
  if (flg) {
    SLES       *subsles;        /* array of SLES contexts for local subblocks */
    int        nlocal,first;    /* number of local subblocks, first local subblock */
    KSP        subksp;          /* KSP context for subblock */
    PC         subpc;           /* PC context for subblock */
    PetscTruth isasm;

    PetscPrintf(PETSC_COMM_WORLD,"User explicitly sets subdomain solvers.\n");

    /* 
       Set runtime options
    */
    SLESSetFromOptions(sles);

    /* 
       Flag an error if the PC type was changed by the runtime options
    */
    PetscTypeCompare((PetscObject)pc,PCASM,&isasm);
    if (!isasm) {
      SETERRQ(1,"Cannot change the PC type when manually changing the subdomain solver settings");
    }
    /* 
       Call SLESSetUp() to set up the ASM data structures (including
       creation of an internal SLES context for each block).

       Note: SLESSetUp() MUST be called before PCASMGetSubSLES().
    */
    SLESSetUp(sles,x,b);

    /*
       Extract the array of SLES contexts for the local blocks
    */
    PCASMGetSubSLES(pc,&nlocal,&first,&subsles);

    /*
       Loop over the local blocks, setting various SLES options
       for each block.
    */
    for (i=0; i<nlocal; i++) {
      SLESGetPC(subsles[i],&subpc);
      SLESGetKSP(subsles[i],&subksp);
      PCSetType(subpc,PCILU);
      KSPSetType(subksp,KSPGMRES);
      KSPSetTolerances(subksp,1.e-7,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
    }
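    /*
       Illustration only (this example gives every block the same settings):
       different solvers could be selected per block by branching on the block
       index i inside the loop above, in the spirit of ex7.c, e.g.

           if (i%2) {PCSetType(subpc,PCJACOBI);}
           else     {PCSetType(subpc,PCILU);}
    */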
  } else {
    /* 
       Set runtime options
    */
    SLESSetFromOptions(sles);
  }

  /* -------------------------------------------------------------------
                      Solve the linear system
     ------------------------------------------------------------------- */

  SLESSolve(sles,b,x,&its);
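  /*
     The number of iterations used is returned in its.  Runtime options such
     as -ksp_monitor (and, if available in this PETSc release, -sles_view) can
     be used to watch convergence and to inspect the solver configuration that
     was actually used.
  */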

  /* 
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */

  if (user_subdomains) {
    for (i=0; i<Nsub; i++) {
      ISDestroy(is[i]);
    }
    PetscFree(is);
  }
  SLESDestroy(sles);
  VecDestroy(u);
  VecDestroy(x);
  VecDestroy(b);
  MatDestroy(A);
  PetscFinalize();
  return 0;
}