Actual source code: ex2.c
/*$Id: ex2.c,v 1.94 2001/08/07 21:30:54 bsmith Exp $*/

/* Program usage:  mpirun -np <procs> ex2 [-help] [all PETSc options] */

static char help[] = "Solves a linear system in parallel with SLES.\n\
Input parameters include:\n\
  -random_exact_sol : use a random exact solution vector\n\
  -view_exact_sol   : write exact solution vector to stdout\n\
  -m <mesh_x>       : number of mesh points in x-direction\n\
  -n <mesh_y>       : number of mesh points in y-direction\n\n";
/*T
   Concepts: SLES^basic parallel example;
   Concepts: SLES^Laplacian, 2d
   Concepts: Laplacian, 2d
   Processors: n
T*/
/*
  Include "petscsles.h" so that we can use SLES solvers.  Note that this file
  automatically includes:
     petsc.h       - base PETSc routines   petscvec.h - vectors
     petscsys.h    - system routines       petscmat.h - matrices
     petscis.h     - index sets            petscksp.h - Krylov subspace methods
     petscviewer.h - viewers               petscpc.h  - preconditioners
*/
#include "petscsles.h"
int main(int argc,char **args)
{
  Vec         x,b,u;    /* approx solution, RHS, exact solution */
  Mat         A;        /* linear system matrix */
  SLES        sles;     /* linear solver context */
  PetscRandom rctx;     /* random number generator context */
  PetscReal   norm;     /* norm of solution error */
  int         i,j,I,J,Istart,Iend,ierr,m = 8,n = 7,its;
  PetscTruth  flg;
  PetscScalar v,one = 1.0,neg_one = -1.0;
  KSP         ksp;

  PetscInitialize(&argc,&args,(char *)0,help);
  PetscOptionsGetInt(PETSC_NULL,"-m",&m,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-n",&n,PETSC_NULL);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create parallel matrix, specifying only its global dimensions.
     When using MatCreate(), the matrix format can be specified at
     runtime.  Also, the parallel partitioning of the matrix is
     determined by PETSc at runtime.

     Performance tuning note:  For problems of substantial size,
     preallocation of matrix memory is crucial for attaining good
     performance.  Since preallocation is not possible via the generic
     matrix creation routine MatCreate(), for practical problems we
     recommend instead using the creation routine for a particular
     matrix format, e.g.,
         MatCreateMPIAIJ()  - parallel AIJ (compressed sparse row)
         MatCreateMPIBAIJ() - parallel block AIJ
     (a preallocation sketch is given after the MatCreate() call below).
     See the matrix chapter of the users manual for details.
  */
  MatCreate(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,&A);
  MatSetFromOptions(A);
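  /*
     Preallocation sketch (illustrative only, not executed in this example):
     for this five-point stencil the matrix could instead be created with
     the format-specific routine mentioned above, roughly as

        MatCreateMPIAIJ(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,
                        5,PETSC_NULL,5,PETSC_NULL,&A);

     which reserves about 5 nonzeros per row for both the diagonal and
     off-diagonal portions; check the MatCreateMPIAIJ() manual page for the
     exact calling sequence in your PETSc version.
  */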
  /*
     Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.  Determine which
     rows of the matrix are locally owned.
  */
  MatGetOwnershipRange(A,&Istart,&Iend);
  /*
     Set matrix elements for the 2-D, five-point stencil in parallel.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Always specify global rows and columns of matrix entries.

     Note: this uses the less common natural ordering that orders first
     all the unknowns for x = h, then for x = 2h, etc.; hence you see
     J = I +- n instead of J = I +- m as you might expect.  The more
     standard ordering would first do all variables for y = h, then
     y = 2h, etc.  A worked example for the default grid is given below.
  */
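  /*
     Worked example for the default m = 8, n = 7 grid: global row I = 10
     gives i = 10/7 = 1 and j = 10 - 1*7 = 3, so the loop below inserts
     -1.0 in columns J = 3 and J = 17 (x-neighbors, I -+ n), -1.0 in
     columns J = 9 and J = 11 (y-neighbors, I -+ 1), and 4.0 on the
     diagonal (column 10).
  */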
  for (I=Istart; I<Iend; I++) {
    v = -1.0; i = I/n; j = I - i*n;
    if (i>0)   {J = I - n; MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);}
    if (i<m-1) {J = I + n; MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);}
    if (j>0)   {J = I - 1; MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);}
    if (j<n-1) {J = I + 1; MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);}
    v = 4.0; MatSetValues(A,1,&I,1,&I,&v,INSERT_VALUES);
  }
  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transit
     by placing code between these two statements.
  */
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
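  /* (independent local computation, e.g. forming a right-hand-side vector,
      could be placed here to overlap with the communication started by
      MatAssemblyBegin()) */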
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
  /*
     Create parallel vectors.
      - We form 1 vector from scratch and then duplicate as needed.
      - When using VecCreate(), VecSetSizes() and VecSetFromOptions()
        in this example, we specify only the vector's global dimension;
        the parallel partitioning is determined at runtime.
      - When solving a linear system, the vectors and matrices MUST
        be partitioned accordingly.  PETSc automatically generates
        appropriately partitioned matrices and vectors when MatCreate()
        and VecCreate() are used with the same communicator.
      - The user can alternatively specify the local vector and matrix
        dimensions when more sophisticated partitioning is needed
        (replacing the PETSC_DECIDE argument in the VecSetSizes() statement
        below); a sketch of this is given after the duplications below.
  */
  VecCreate(PETSC_COMM_WORLD,&u);
  VecSetSizes(u,PETSC_DECIDE,m*n);
  VecSetFromOptions(u);
  VecDuplicate(u,&b);
  VecDuplicate(b,&x);
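  /*
     Sketch of the alternative mentioned above (illustrative only): to make
     the vector partitioning match the locally owned matrix rows explicitly,
     one could pass the local size instead of PETSC_DECIDE, roughly as

        VecSetSizes(u,Iend-Istart,m*n);

     using the Istart and Iend values returned by MatGetOwnershipRange() above.
  */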
  /*
     Set exact solution; then compute right-hand-side vector.
     By default we use an exact solution vector with all elements
     equal to 1.0; alternatively, the runtime option -random_exact_sol
     forms a solution vector with random components.
  */
  PetscOptionsHasName(PETSC_NULL,"-random_exact_sol",&flg);
  if (flg) {
    PetscRandomCreate(PETSC_COMM_WORLD,RANDOM_DEFAULT,&rctx);
    VecSetRandom(rctx,u);
    PetscRandomDestroy(rctx);
  } else {
    VecSet(&one,u);
  }
  MatMult(A,u,b);
  /*
     View the exact solution vector if desired
  */
  PetscOptionsHasName(PETSC_NULL,"-view_exact_sol",&flg);
  if (flg) {VecView(u,PETSC_VIEWER_STDOUT_WORLD);}
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                Create the linear solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create linear solver context
  */
  SLESCreate(PETSC_COMM_WORLD,&sles);
  /*
     Set operators.  Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  SLESSetOperators(sles,A,A,DIFFERENT_NONZERO_PATTERN);
  /*
     Set linear solver defaults for this problem (optional).
      - By extracting the KSP and PC contexts from the SLES context,
        we can then directly call any KSP and PC routines to set
        various options (a sketch for the PC context is given below).
      - The following two statements are optional; all of these
        parameters could alternatively be specified at runtime via
        SLESSetFromOptions().  All of these defaults can be
        overridden at runtime, as indicated below.
  */
  SLESGetKSP(sles,&ksp);
  KSPSetTolerances(ksp,1.e-2/((m+1)*(n+1)),1.e-50,PETSC_DEFAULT,PETSC_DEFAULT);
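  /*
     Sketch (illustrative only, not executed here): the preconditioner
     context can be extracted and configured in the same way, roughly as

        PC pc;
        SLESGetPC(sles,&pc);
        PCSetType(pc,PCJACOBI);

     see the SLESGetPC() and PCSetType() manual pages for details.
  */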
  /*
     Set runtime options, e.g.,
         -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
     These options will override those specified above as long as
     SLESSetFromOptions() is called _after_ any other customization
     routines.
  */
  SLESSetFromOptions(sles);
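  /*
     For example, an illustrative run overriding the defaults set above:

        mpirun -np 4 ex2 -ksp_type bcgs -pc_type jacobi -ksp_monitor -ksp_rtol 1.e-7
  */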
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Solve the linear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  SLESSolve(sles,b,x,&its);
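  /* its now contains the number of iterations used by the Krylov solver */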
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Check solution and clean up
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Check the error
  */
  VecAXPY(&neg_one,u,x);
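  /* VecAXPY() above computes x = x - u, so x now holds the error vector */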
  VecNorm(x,NORM_2,&norm);
  /* Scale the norm */
  /*  norm *= sqrt(1.0/((m+1)*(n+1))); */
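  /*
     The commented-out scaling would multiply the discrete 2-norm by
     sqrt(h_x*h_y), with mesh spacings h_x = 1/(m+1) and h_y = 1/(n+1),
     so that the result approximates the continuous L2 norm of the error.
  */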
  /*
     Print convergence information.  PetscPrintf() produces a single
     print statement from all processes that share a communicator.
     An alternative is PetscFPrintf(), which prints to a file.
  */
  PetscPrintf(PETSC_COMM_WORLD,"Norm of error %A iterations %d\n",norm,its);
  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  SLESDestroy(sles);
  VecDestroy(u);  VecDestroy(x);
  VecDestroy(b);  MatDestroy(A);
  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_summary).
  */
  PetscFinalize();
  return 0;
}