Actual source code: ex7.c
1: /*$Id: ex7.c,v 1.58 2001/08/07 21:30:54 bsmith Exp $*/
3: static char help[] = "Block Jacobi preconditioner for solving a linear system in parallel with SLES.\n\
4: The code indicates the\n\
5: procedures for setting the particular block sizes and for using different\n\
6: linear solvers on the individual blocks.\n\n";
8: /*
9: Note: This example focuses on ways to customize the block Jacobi
10: preconditioner. See ex1.c and ex2.c for more detailed comments on
11: the basic usage of SLES (including working with matrices and vectors).
13: Recall: The block Jacobi method is equivalent to the ASM preconditioner
14: with zero overlap.
15: */
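/*
   For example, with the default decomposition of one block (or one
   subdomain) per processor, running this example with
       -pc_type asm -pc_asm_overlap 0
   should set up the same preconditioner as -pc_type bjacobi; the
   options above are shown only as an illustration of that equivalence.
*/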
17: /*T
18: Concepts: SLES^customizing the block Jacobi preconditioner
19: Processors: n
20: T*/
22: /*
23: Include "petscsles.h" so that we can use SLES solvers. Note that this file
24: automatically includes:
25: petsc.h       - base PETSc routines   petscvec.h - vectors
26: petscsys.h    - system routines       petscmat.h - matrices
27: petscis.h     - index sets            petscksp.h - Krylov subspace methods
28: petscviewer.h - viewers               petscpc.h  - preconditioners
29: */
30: #include "petscsles.h"
32: int main(int argc,char **args)
33: {
34: Vec x,b,u; /* approx solution, RHS, exact solution */
35: Mat A; /* linear system matrix */
36: SLES sles; /* SLES context */
37: SLES *subsles; /* array of local SLES contexts on this processor */
38: PC pc; /* PC context */
39: PC subpc; /* PC context for subdomain */
40: KSP subksp; /* KSP context for subdomain */
41: PetscReal norm; /* norm of solution error */
42: int i,j,I,J,ierr,*blks,m = 8,n;
43: int rank,size,its,nlocal,first,Istart,Iend;
44: PetscScalar v,one = 1.0,none = -1.0;
45: PetscTruth isbjacobi,flg;
47: PetscInitialize(&argc,&args,(char *)0,help);
48: PetscOptionsGetInt(PETSC_NULL,"-m",&m,PETSC_NULL);
49: MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
50: MPI_Comm_size(PETSC_COMM_WORLD,&size);
51: n = m+2;
53: /* -------------------------------------------------------------------
54: Compute the matrix and right-hand-side vector that define
55: the linear system, Ax = b.
56: ------------------------------------------------------------------- */
58: /*
59: Create and assemble parallel matrix
60: */
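/*
   The loop below fills the standard 5-point finite-difference stencil
   on a 2D grid with m rows and n columns: each row I of A gets the
   value 4 on the diagonal and -1 for each of the north/south/east/west
   neighbors that exist.  Each processor sets only the rows it owns,
   as returned by MatGetOwnershipRange().
*/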
61: MatCreate(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,&A);
62: MatSetFromOptions(A);
63: MatGetOwnershipRange(A,&Istart,&Iend);
64: for (I=Istart; I<Iend; I++) {
65: v = -1.0; i = I/n; j = I - i*n;
66: if (i>0) {J = I - n; MatSetValues(A,1,&I,1,&J,&v,ADD_VALUES);}
67: if (i<m-1) {J = I + n; MatSetValues(A,1,&I,1,&J,&v,ADD_VALUES);}
68: if (j>0) {J = I - 1; MatSetValues(A,1,&I,1,&J,&v,ADD_VALUES);}
69: if (j<n-1) {J = I + 1; MatSetValues(A,1,&I,1,&J,&v,ADD_VALUES);}
70: v = 4.0; MatSetValues(A,1,&I,1,&I,&v,ADD_VALUES);
71: }
72: MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
73: MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
75: /*
76: Create parallel vectors
77: */
78: VecCreate(PETSC_COMM_WORLD,&u);
79: VecSetSizes(u,PETSC_DECIDE,m*n);
80: VecSetFromOptions(u);
81: VecDuplicate(u,&b);
82: VecDuplicate(b,&x);
84: /*
85: Set exact solution; then compute right-hand-side vector.
86: */
87: VecSet(&one,u);
88: MatMult(A,u,b);
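/*
   Since b = A*u with u identically one, the exact solution of Ax = b
   is the vector of ones; this is used below to measure the error.
*/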
90: /*
91: Create linear solver context
92: */
93: SLESCreate(PETSC_COMM_WORLD,&sles);
95: /*
96: Set operators. Here the matrix that defines the linear system
97: also serves as the preconditioning matrix.
98: */
99: SLESSetOperators(sles,A,A,DIFFERENT_NONZERO_PATTERN);
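/*
   The final argument is a MatStructure flag; DIFFERENT_NONZERO_PATTERN
   indicates that the nonzero structure of the preconditioning matrix is
   not assumed to be the same as in a previous solve.  For a single
   solve, as in this example, the choice has little practical effect.
*/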
101: /*
102: Set default preconditioner for this program to be block Jacobi.
103: This choice can be overridden at runtime with the option
104: -pc_type <type>
105: */
106: SLESGetPC(sles,&pc);
107: PCSetType(pc,PCBJACOBI);
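/*
   For instance, assuming the executable is named ex7 (the name and the
   launcher below are illustrative only), a run such as
       mpirun -np 4 ./ex7 -pc_type asm
   would select the additive Schwarz method instead of the block Jacobi
   preconditioner set above.
*/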
110: /* -------------------------------------------------------------------
111: Define the problem decomposition
112: ------------------------------------------------------------------- */
114: /*
115: Call PCBJacobiSetTotalBlocks() to set individually the size of
116: each block in the preconditioner. This could also be done with
117: the runtime option
118: -pc_bjacobi_blocks <blocks>
119: Also, see the command PCBJacobiSetLocalBlocks() to set the
120: local blocks.
122: Note: The default decomposition is 1 block per processor.
123: */
124: PetscMalloc(m*sizeof(int),&blks);
125: for (i=0; i<m; i++) blks[i] = n;
126: PCBJacobiSetTotalBlocks(pc,m,blks);
127: PetscFree(blks);
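/*
   A per-processor alternative (a sketch only, not used in this example):
       PCBJacobiSetLocalBlocks(pc,nblocks,lens);
   where nblocks and lens are placeholders describing just the blocks
   owned by this processor, rather than the global decomposition above.
*/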
130: /* -------------------------------------------------------------------
131: Set the linear solvers for the subblocks
132: ------------------------------------------------------------------- */
134: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
135: Basic method, should be sufficient for the needs of most users.
136: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
138: By default, the block Jacobi method uses the same solver on each
139: block of the problem. To set the same solver options on all blocks,
140: use the prefix -sub before the usual PC and KSP options, e.g.,
141: -sub_pc_type <pc> -sub_ksp_type <ksp> -sub_ksp_rtol 1.e-4
142: */
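/*
   For example, assuming the executable is named ex7 (illustrative):
       mpirun -np 2 ./ex7 -sub_pc_type ilu -sub_ksp_type gmres -sub_ksp_rtol 1.e-4
   would request ILU-preconditioned GMRES with a relative tolerance of
   1.e-4 for the block solvers.
*/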
144: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
145: Advanced method, setting different solvers for various blocks.
146: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
148: Note that each block's SLES context is completely independent of
149: the others, and the full range of uniprocessor SLES options is
150: available for each block. The following section of code is intended
151: to be a simple illustration of setting different linear solvers for
152: the individual blocks. These choices are obviously not recommended
153: for solving this particular problem.
154: */
155: PetscTypeCompare((PetscObject)pc,PCBJACOBI,&isbjacobi);
156: if (isbjacobi) {
157: /*
158: Call SLESSetUp() to set the block Jacobi data structures (including
159: creation of an internal SLES context for each block).
161: Note: SLESSetUp() MUST be called before PCBJacobiGetSubSLES().
162: */
163: SLESSetUp(sles,x,b);
165: /*
166: Extract the array of SLES contexts for the local blocks
167: */
168: PCBJacobiGetSubSLES(pc,&nlocal,&first,&subsles);
170: /*
171: Loop over the local blocks, setting various SLES options
172: for each block.
173: */
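/*
   In this illustration: on processor 0, odd-numbered local blocks use
   ILU (with the default KSP), while even-numbered blocks use
   unpreconditioned BiCGStab with a relative tolerance of 1.e-6; on all
   other processors every block uses GMRES with point Jacobi and a
   relative tolerance of 1.e-7.
*/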
174: for (i=0; i<nlocal; i++) {
175: SLESGetPC(subsles[i],&subpc);
176: SLESGetKSP(subsles[i],&subksp);
177: if (!rank) {
178: if (i%2) {
179: PCSetType(subpc,PCILU);
180: } else {
181: PCSetType(subpc,PCNONE);
182: KSPSetType(subksp,KSPBCGS);
183: KSPSetTolerances(subksp,1.e-6,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
184: }
185: } else {
186: PCSetType(subpc,PCJACOBI);
187: KSPSetType(subksp,KSPGMRES);
188: KSPSetTolerances(subksp,1.e-7,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
189: }
190: }
191: }
193: /* -------------------------------------------------------------------
194: Solve the linear system
195: ------------------------------------------------------------------- */
197: /*
198: Set runtime options
199: */
200: SLESSetFromOptions(sles);
202: /*
203: Solve the linear system
204: */
205: SLESSolve(sles,b,x,&its);
207: /*
208: View info about the solver
209: */
210: PetscOptionsHasName(PETSC_NULL,"-noslesview",&flg);
211: if (!flg) {
212: SLESView(sles,PETSC_VIEWER_STDOUT_WORLD);
213: }
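/*
   SLESView() writes the solver configuration that was actually used
   (Krylov method, preconditioner, tolerances, and information about the
   block sub-solvers) to the given viewer, which is a convenient way to
   confirm the customizations made above.
*/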
215: /* -------------------------------------------------------------------
216: Check solution and clean up
217: ------------------------------------------------------------------- */
219: /*
220: Check the error
221: */
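/*
   With the calling sequence of this PETSc release, VecAXPY(&none,u,x)
   computes x <- x - u, so the norm computed below is the 2-norm of the
   error x - u.
*/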
222: VecAXPY(&none,u,x);
223: VecNorm(x,NORM_2,&norm);
224: PetscPrintf(PETSC_COMM_WORLD,"Norm of error %A iterations %d\n",norm,its);
226: /*
227: Free work space. All PETSc objects should be destroyed when they
228: are no longer needed.
229: */
230: SLESDestroy(sles);
231: VecDestroy(u); VecDestroy(x);
232: VecDestroy(b); MatDestroy(A);
233: PetscFinalize();
234: return 0;
235: }