Actual source code: ex4.c
/*$Id: ex4.c,v 1.53 2001/08/07 03:04:00 balay Exp $*/

static char help[] = "Uses a different preconditioner matrix and linear system matrix in the SLES solvers.\n\
Note that different storage formats\n\
can be used for the different matrices.\n\n";

/*T
   Concepts: SLES^different matrices for linear system and preconditioner;
   Processors: n
T*/

/*
  Include "petscsles.h" so that we can use SLES solvers.  Note that this file
  automatically includes:
     petsc.h       - base PETSc routines   petscvec.h - vectors
     petscsys.h    - system routines       petscmat.h - matrices
     petscis.h     - index sets            petscksp.h - Krylov subspace methods
     petscviewer.h - viewers               petscpc.h  - preconditioners
*/
#include "petscsles.h"
int main(int argc,char **args)
{
  SLES        sles;        /* linear solver context */
  Mat         A,B;         /* linear system matrix, preconditioning matrix */
  PetscRandom rctx;        /* random number generator context */
  Vec         x,b,u;       /* approx solution, RHS, exact solution */
  Vec         tmp;         /* work vector */
  PetscScalar v,one = 1.0,scale = 0.0;
  int         i,j,m = 15,n = 17,its,I,J,ierr,Istart,Iend;

  PetscInitialize(&argc,&args,(char *)0,help);
  PetscOptionsGetInt(PETSC_NULL,"-m",&m,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-n",&n,PETSC_NULL);
  PetscOptionsGetScalar(PETSC_NULL,"-scale",&scale,PETSC_NULL);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
        Compute the matrix and right-hand-side vector that define
        the linear system, Ax = b.  Also, create a different
        preconditioner matrix.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create the linear system matrix (A).
      - Here we use a block diagonal matrix format (MATMPIBDIAG) and
        specify only the global size.  The parallel partitioning of
        the matrix will be determined at runtime by PETSc.
  */
  MatCreateMPIBDiag(PETSC_COMM_WORLD,PETSC_DECIDE,m*n,m*n,
                    0,1,PETSC_NULL,PETSC_NULL,&A);
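  /*
     In this generation of PETSc the arguments above are: the communicator;
     the local row count (PETSC_DECIDE lets PETSc choose); the global
     dimensions m*n by m*n; the number of diagonals (0 here, so storage is
     allocated as values are inserted); the block size (1); and two optional
     arrays (PETSC_NULL) that could pre-specify the diagonal numbers and
     their data.
  */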
  /*
     Create a different preconditioner matrix (B).  This is usually
     done to form a cheaper (or sparser) preconditioner matrix
     compared to the linear system matrix.
      - Here we use MatCreate(), so that the matrix format and
        parallel partitioning will be determined at runtime.
  */
  MatCreate(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,&B);
  MatSetFromOptions(B);
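  /*
     The storage format of B can therefore be selected at runtime with the
     standard -mat_type option, for example:

         mpirun -np 2 ex4 -mat_type mpiaij
  */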
  /*
     Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors.  Determine which
     rows of the matrix are locally owned.
  */
  MatGetOwnershipRange(A,&Istart,&Iend);
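  /*
     For example, with the default m=15, n=17 the matrices have 255 rows;
     on two processes PETSC_DECIDE gives rank 0 rows [0,128) and rank 1
     rows [128,255).
  */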
  /*
     Set entries within the two matrices
  */
  for (I=Istart; I<Iend; I++) {
    v = -1.0; i = I/n; j = I - i*n;
    if (i>0) {
      J = I-n;
      MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);
      MatSetValues(B,1,&I,1,&J,&v,INSERT_VALUES);
    }
    if (i<m-1) {
      J = I+n;
      MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);
      MatSetValues(B,1,&I,1,&J,&v,INSERT_VALUES);
    }
    if (j>0) {
      J = I-1;
      MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);
    }
    if (j<n-1) {
      J = I+1;
      MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);
    }
    v = 5.0; MatSetValues(A,1,&I,1,&I,&v,INSERT_VALUES);
    v = 3.0; MatSetValues(B,1,&I,1,&I,&v,INSERT_VALUES);
  }
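  /*
     A now holds the standard 5-point stencil (off-diagonal entries -1.0,
     diagonal 5.0), while B holds only the north/south neighbors with a
     diagonal of 3.0, i.e., a sparser approximation of A to precondition
     with.
  */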
  /*
     Assemble the preconditioner matrix (B), using the 2-step process
       MatAssemblyBegin(), MatAssemblyEnd()
     Note that computations can be done while messages are in
     transition by placing code between these two statements.
  */
  MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
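  /*
     While B's assembly messages are in transition we insert two more
     diagonals into A (entries -0.5 at column offsets -(n+1) and +(n+1)),
     overlapping communication with computation as noted above.
  */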
  for (I=Istart; I<Iend; I++) {
    v = -0.5; i = I/n;
    if (i>1) {
      J = I-(n+1); MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);
    }
    if (i<m-2) {
      J = I+n+1; MatSetValues(A,1,&I,1,&J,&v,INSERT_VALUES);
    }
  }
  MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);
  /*
     Assemble the linear system matrix (A)
  */
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
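  /*
     If desired, the assembled matrices could be examined at this point;
     a minimal sketch, assuming the standard viewer PETSC_VIEWER_STDOUT_WORLD:

         MatView(B,PETSC_VIEWER_STDOUT_WORLD);

     Running with a viewing option such as -mat_view has a similar effect.
  */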
  /*
     Create parallel vectors.
      - When using VecSetSizes(), we specify only the vector's global
        dimension; the parallel partitioning is determined at runtime.
      - Note: We form 1 vector from scratch and then duplicate as needed.
  */
  VecCreate(PETSC_COMM_WORLD,&b);
  VecSetSizes(b,PETSC_DECIDE,m*n);
  VecSetFromOptions(b);
  VecDuplicate(b,&u);
  VecDuplicate(b,&x);
  /*
     Set the exact solution vector to 1.0 plus a scaled random noise vector
  */
  VecSet(&one,u);
  VecDuplicate(u,&tmp);
  PetscRandomCreate(PETSC_COMM_WORLD,RANDOM_DEFAULT,&rctx);
  VecSetRandom(rctx,tmp);
  PetscRandomDestroy(rctx);
  VecAXPY(&scale,tmp,u);
  VecDestroy(tmp);
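  /*
     With the default -scale 0.0 the noise term vanishes and u is exactly
     1; run with, e.g., -scale 0.1 to perturb the exact solution.
  */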
  /*
     Compute right-hand-side vector
  */
  MatMult(A,u,b);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                Create the linear solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create linear solver context
  */
  SLESCreate(PETSC_COMM_WORLD,&sles);
  /*
     Set operators.  Note that we use different matrices to define the
     linear system and to precondition it.
  */
  SLESSetOperators(sles,A,B,DIFFERENT_NONZERO_PATTERN);
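  /*
     The Krylov method and preconditioner can also be configured directly
     rather than via the options database; a minimal sketch using the SLES
     accessors of this PETSc generation (tolerance and PC type chosen
     arbitrarily):

         KSP ksp;
         PC  pc;
         SLESGetKSP(sles,&ksp);
         SLESGetPC(sles,&pc);
         KSPSetTolerances(ksp,1.e-7,PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
         PCSetType(pc,PCJACOBI);
  */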
  /*
     Set runtime options (e.g., -ksp_type <type> -pc_type <type>)
  */
  SLESSetFromOptions(sles);
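  /*
     Typical invocations, for example:

         mpirun -np 4 ex4 -ksp_type gmres -pc_type bjacobi -ksp_monitor

     where -ksp_monitor prints the preconditioned residual norm at each
     iteration.
  */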
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Solve the linear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  SLESSolve(sles,b,x,&its);
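  /*
     Since the exact solution u is known, the error could be checked here;
     a minimal sketch, assuming extra declarations above
     (PetscReal norm; PetscScalar neg_one = -1.0;):

         VecAXPY(&neg_one,u,x);        computes x <- x - u
         VecNorm(x,NORM_2,&norm);
         PetscPrintf(PETSC_COMM_WORLD,"Norm of error %g, Iterations %d\n",
                     norm,its);
  */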
  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  SLESDestroy(sles);  VecDestroy(u);
  MatDestroy(B);      VecDestroy(x);
  MatDestroy(A);      VecDestroy(b);
  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_summary).
  */
  PetscFinalize();
  return 0;
}