Actual source code: ex14.c
/*$Id: ex14.c,v 1.33 2001/08/07 21:30:50 bsmith Exp $*/

/* Program usage:  mpirun -np <procs> ex14 [-help] [all PETSc options] */
static char help[] = "Solves a nonlinear system in parallel with a user-defined Newton method.\n\
Uses SLES to solve the linearized Newton systems.  This solver\n\
is a very simplistic inexact Newton method.  The intent of this code is to\n\
demonstrate the repeated solution of linear systems with the same nonzero pattern.\n\
\n\
This is NOT the recommended approach for solving nonlinear problems with PETSc!\n\
We urge users to employ the SNES component for solving nonlinear problems whenever\n\
possible, as it offers many advantages over coding nonlinear solvers independently.\n\
\n\
We solve the Bratu (SFI - solid fuel ignition) problem in a 2D rectangular\n\
domain, using distributed arrays (DAs) to partition the parallel grid.\n\
The command line options include:\n\
  -par <parameter>, where <parameter> indicates the problem's nonlinearity\n\
     problem SFI:  <parameter> = Bratu parameter (0 <= par <= 6.81)\n\
  -mx <xg>, where <xg> = number of grid points in the x-direction\n\
  -my <yg>, where <yg> = number of grid points in the y-direction\n\
  -Nx <npx>, where <npx> = number of processors in the x-direction\n\
  -Ny <npy>, where <npy> = number of processors in the y-direction\n\n";
/*T
   Concepts: SLES^writing a user-defined nonlinear solver (parallel Bratu example);
   Concepts: DA^using distributed arrays;
   Processors: n
T*/
/* ------------------------------------------------------------------------

    Solid Fuel Ignition (SFI) problem.  This problem is modeled by
    the partial differential equation

            -Laplacian u - lambda*exp(u) = 0,  0 < x,y < 1,

    with boundary conditions

             u = 0  for  x = 0, x = 1, y = 0, y = 1.

    A finite difference approximation with the usual 5-point stencil
    is used to discretize the boundary value problem to obtain a nonlinear
    system of equations.

    The SNES version of this problem is:  snes/examples/tutorials/ex5.c
    We urge users to employ the SNES component for solving nonlinear
    problems whenever possible, as it offers many advantages over coding
    nonlinear solvers independently.

  ------------------------------------------------------------------------- */
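
/*
   For reference (derived from ComputeFunction() below): with
   hx = 1/(mx-1) and hy = 1/(my-1), the 5-point stencil yields, at each
   interior grid point (i,j), the discrete equation

      F_ij(u) = (hy/hx)*(2*u_ij - u_i-1,j - u_i+1,j)
              + (hx/hy)*(2*u_ij - u_i,j-1 - u_i,j+1)
              - hx*hy*lambda*exp(u_ij) = 0,

   while at boundary points the equation is simply F_ij(u) = u_ij = 0.
*/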
/*
   Include "petscda.h" so that we can use distributed arrays (DAs).
   Include "petscsles.h" so that we can use SLES solvers.  Note that this
   file automatically includes:
     petsc.h       - base PETSc routines   petscvec.h - vectors
     petscsys.h    - system routines       petscmat.h - matrices
     petscis.h     - index sets            petscksp.h - Krylov subspace methods
     petscviewer.h - viewers               petscpc.h  - preconditioners
*/
#include "petscda.h"
#include "petscsles.h"
/*
   User-defined application context - contains data needed by the
   application-provided call-back routines, ComputeJacobian() and
   ComputeFunction().
*/
typedef struct {
   PetscReal   param;          /* test problem parameter */
   int         mx,my;          /* discretization in x,y directions */
   Vec         localX,localF;  /* ghosted local vectors */
   DA          da;             /* distributed array data structure */
   int         rank;           /* processor rank */
} AppCtx;
/*
   User-defined routines
*/
extern int ComputeFunction(AppCtx*,Vec,Vec),FormInitialGuess(AppCtx*,Vec);
extern int ComputeJacobian(AppCtx*,Vec,Mat,MatStructure*);
int main(int argc,char **argv)
{
  /* -------------- Data to define application problem ---------------- */
  MPI_Comm   comm;                /* communicator */
  SLES       sles;                /* linear solver */
  Vec        X,Y,F;               /* solution, update, residual vectors */
  Mat        J;                   /* Jacobian matrix */
  AppCtx     user;                /* user-defined work context */
  int        Nx,Ny;               /* number of processors in x- and y- directions */
  int        size;                /* number of processors */
  PetscReal  bratu_lambda_max = 6.81,bratu_lambda_min = 0.;
  int        m,N,ierr;

  /* --------------- Data to define nonlinear solver -------------- */
  PetscReal    rtol = 1.e-8;        /* relative convergence tolerance */
  PetscReal    xtol = 1.e-8;        /* step convergence tolerance */
  PetscReal    ttol;                /* convergence tolerance */
  PetscReal    fnorm,ynorm,xnorm;   /* various vector norms */
  int          max_nonlin_its = 10; /* maximum number of iterations for nonlinear solver */
  int          max_functions = 50;  /* maximum number of function evaluations */
  int          lin_its;             /* number of linear solver iterations for each step */
  int          i;                   /* nonlinear solve iteration number */
  MatStructure mat_flag;            /* flag indicating structure of preconditioner matrix */
  PetscTruth   no_output;           /* flag indicating whether to suppress output */
  PetscScalar  mone = -1.0;
  PetscInitialize(&argc,&argv,(char *)0,help);
  comm = PETSC_COMM_WORLD;
  MPI_Comm_rank(comm,&user.rank);
  PetscOptionsHasName(PETSC_NULL,"-no_output",&no_output);
  /*
     Initialize problem parameters
  */
  user.mx = 4; user.my = 4; user.param = 6.0;
  PetscOptionsGetInt(PETSC_NULL,"-mx",&user.mx,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-my",&user.my,PETSC_NULL);
  PetscOptionsGetReal(PETSC_NULL,"-par",&user.param,PETSC_NULL);
  if (user.param >= bratu_lambda_max || user.param <= bratu_lambda_min) {
    SETERRQ(1,"Lambda is out of range");
  }
  N = user.mx*user.my;
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create linear solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  SLESCreate(comm,&sles);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create distributed array (DA) to manage parallel grid and vectors
  */
  MPI_Comm_size(comm,&size);
  Nx = PETSC_DECIDE; Ny = PETSC_DECIDE;
  PetscOptionsGetInt(PETSC_NULL,"-Nx",&Nx,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-Ny",&Ny,PETSC_NULL);
  if (Nx*Ny != size && (Nx != PETSC_DECIDE || Ny != PETSC_DECIDE))
    SETERRQ(1,"Incompatible number of processors:  Nx * Ny != size");
  DACreate2d(comm,DA_NONPERIODIC,DA_STENCIL_STAR,user.mx,
             user.my,Nx,Ny,1,1,PETSC_NULL,PETSC_NULL,&user.da);

  /*
     Extract global and local vectors from DA; then duplicate for remaining
     vectors that are the same types
  */
  DACreateGlobalVector(user.da,&X);
  DACreateLocalVector(user.da,&user.localX);
  VecDuplicate(X,&F);
  VecDuplicate(X,&Y);
  VecDuplicate(user.localX,&user.localF);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create matrix data structure for Jacobian
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Note:  For the parallel case, vectors and matrices MUST be partitioned
     accordingly.  When using distributed arrays (DAs) to create vectors,
     the DAs determine the problem partitioning.  We must explicitly
     specify the local matrix dimensions upon its creation for compatibility
     with the vector distribution.  Thus, the generic MatCreate() routine
     is NOT sufficient when working with distributed arrays.

     Note: Here we only approximately preallocate storage space for the
     Jacobian.  See the users manual for a discussion of better techniques
     for preallocating matrix memory.
  */
  if (size == 1) {
    MatCreateSeqAIJ(comm,N,N,5,PETSC_NULL,&J);
  } else {
    VecGetLocalSize(X,&m);
    MatCreateMPIAIJ(comm,m,m,N,N,5,PETSC_NULL,3,PETSC_NULL,&J);
  }
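
  /*
     Preallocation note (matching the calls above): the 5-point stencil
     couples each row to at most 5 entries, so we preallocate roughly 5
     nonzeros per row in the uniprocessor case, and 5 per row in the
     diagonal block plus 3 in the off-diagonal block in the parallel
     case.  These are rough estimates, not exact counts.
  */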
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize linear solver; set runtime options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Set runtime options (e.g., -ksp_monitor -ksp_rtol <rtol> -ksp_type <type>)
  */
  SLESSetFromOptions(sles);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Evaluate initial guess
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  FormInitialGuess(&user,X);
  ComputeFunction(&user,X,F);        /* Compute F(X)    */
  VecNorm(F,NORM_2,&fnorm);          /* fnorm = || F || */
  ttol = fnorm*rtol;
  if (!no_output) PetscPrintf(comm,"Initial function norm = %g\n",fnorm);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve nonlinear system with a user-defined method
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     This solver is a very simplistic inexact Newton method, with no
     damping strategies or bells and whistles.  The intent of this code
     is merely to demonstrate the repeated solution with SLES of linear
     systems with the same nonzero structure.

     This is NOT the recommended approach for solving nonlinear problems
     with PETSc!  We urge users to employ the SNES component for solving
     nonlinear problems whenever possible with application codes, as it
     offers many advantages over coding nonlinear solvers independently.
  */
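
  /*
     In outline, the loop below performs

        for k = 0,1,... :
           solve   J(x_k) y = F(x_k)         (SLESSolve)
           update  x_k+1 = x_k - y           (VecAYPX, VecCopy)
           stop if ||F(x_k+1)|| <= rtol*||F(x_0)||,
                or ||y|| < xtol*||x_k+1||,
                or an iteration/function-evaluation limit is reached
  */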
  for (i=0; i<max_nonlin_its; i++) {

    /*
       Compute the Jacobian matrix.  See the comments in this routine for
       important information about setting the flag mat_flag.
    */
    ComputeJacobian(&user,X,J,&mat_flag);

    /*
       Solve J Y = F, where J is the Jacobian matrix.
         - First, set the SLES linear operators.  Here the matrix that
           defines the linear system also serves as the preconditioning
           matrix.
         - Then solve the Newton system.
    */
    SLESSetOperators(sles,J,J,mat_flag);
    SLESSolve(sles,F,Y,&lin_its);

    /*
       Compute updated iterate
    */
    VecNorm(Y,NORM_2,&ynorm);       /* ynorm = || Y || */
    VecAYPX(&mone,X,Y);             /* Y <- X - Y      */
    VecCopy(Y,X);                   /* X <- Y          */
    VecNorm(X,NORM_2,&xnorm);       /* xnorm = || X || */
    if (!no_output) {
      PetscPrintf(comm,"   linear solve iterations = %d, xnorm=%g, ynorm=%g\n",lin_its,xnorm,ynorm);
    }
    /*
       Evaluate new nonlinear function
    */
    ComputeFunction(&user,X,F);     /* Compute F(X)    */
    VecNorm(F,NORM_2,&fnorm);       /* fnorm = || F || */
    if (!no_output) {
      PetscPrintf(comm,"Iteration %d, function norm = %g\n",i+1,fnorm);
    }

    /*
       Test for convergence
    */
    if (fnorm <= ttol) {
      if (!no_output) {
        PetscPrintf(comm,"Converged due to function norm %g < %g (relative tolerance)\n",fnorm,ttol);
      }
      break;
    }
    if (ynorm < xtol*(xnorm)) {
      if (!no_output) {
        PetscPrintf(comm,"Converged due to small update length: %g < %g * %g\n",ynorm,xtol,xnorm);
      }
      break;
    }
    if (i > max_functions) {
      if (!no_output) {
        PetscPrintf(comm,"Exceeded maximum number of function evaluations: %d > %d\n",i,max_functions);
      }
      break;
    }
  }
  PetscPrintf(comm,"Number of Newton iterations = %d\n",i+1);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  MatDestroy(J);           VecDestroy(Y);
  VecDestroy(user.localX); VecDestroy(X);
  VecDestroy(user.localF); VecDestroy(F);
  SLESDestroy(sles);       DADestroy(user.da);
  PetscFinalize();

  return 0;
}
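
/*
   Example usage (a hypothetical invocation, combining the options listed
   in the help string above with standard PETSc runtime options such as
   -ksp_monitor):

      mpirun -np 4 ex14 -mx 10 -my 10 -Nx 2 -Ny 2 -par 6.0 -ksp_monitor
*/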
/* ------------------------------------------------------------------- */
/*
   FormInitialGuess - Forms initial approximation.

   Input Parameters:
   user - user-defined application context
   X - vector

   Output Parameter:
   X - vector
*/
int FormInitialGuess(AppCtx *user,Vec X)
{
  int         i,j,row,mx,my,ierr,xs,ys,xm,ym,gxm,gym,gxs,gys;
  PetscReal   one = 1.0,lambda,temp1,temp,hx,hy;
  PetscScalar *x;
  Vec         localX = user->localX;

  mx = user->mx; my = user->my; lambda = user->param;
  hx = one/(PetscReal)(mx-1);  hy = one/(PetscReal)(my-1);
  temp1 = lambda/(lambda + one);

  /*
     Get a pointer to vector data.
       - For default PETSc vectors, VecGetArray() returns a pointer to
         the data array.  Otherwise, the routine is implementation dependent.
       - You MUST call VecRestoreArray() when you no longer need access to
         the array.
  */
  VecGetArray(localX,&x);

  /*
     Get local grid boundaries (for 2-dimensional DA):
       xs, ys   - starting grid indices (no ghost points)
       xm, ym   - widths of local grid (no ghost points)
       gxs, gys - starting grid indices (including ghost points)
       gxm, gym - widths of local grid (including ghost points)
  */
  DAGetCorners(user->da,&xs,&ys,PETSC_NULL,&xm,&ym,PETSC_NULL);
  DAGetGhostCorners(user->da,&gxs,&gys,PETSC_NULL,&gxm,&gym,PETSC_NULL);
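
  /*
     Note: the ghosted local vector is stored in row-major order over the
     ghosted patch, so grid point (i,j) lives at local index
     (i - gxs) + (j - gys)*gxm; this is how "row" is computed below.
  */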
  /*
     Compute initial guess over the locally owned part of the grid
  */
  for (j=ys; j<ys+ym; j++) {
    temp = (PetscReal)(PetscMin(j,my-j-1))*hy;
    for (i=xs; i<xs+xm; i++) {
      row = i - gxs + (j - gys)*gxm;
      if (i == 0 || j == 0 || i == mx-1 || j == my-1) {
        x[row] = 0.0;
        continue;
      }
      x[row] = temp1*sqrt(PetscMin((PetscReal)(PetscMin(i,mx-i-1))*hx,temp));
    }
  }

  /*
     Restore vector
  */
  VecRestoreArray(localX,&x);

  /*
     Insert values into global vector
  */
  DALocalToGlobal(user->da,localX,INSERT_VALUES,X);
  return 0;
}
/* ------------------------------------------------------------------- */
/*
   ComputeFunction - Evaluates nonlinear function, F(x).

   Input Parameters:
.  X - input vector
.  user - user-defined application context

   Output Parameter:
.  F - function vector
*/
int ComputeFunction(AppCtx *user,Vec X,Vec F)
{
  int         ierr,i,j,row,mx,my,xs,ys,xm,ym,gxs,gys,gxm,gym;
  PetscReal   two = 2.0,one = 1.0,lambda,hx,hy,hxdhy,hydhx,sc;
  PetscScalar u,uxx,uyy,*x,*f;
  Vec         localX = user->localX,localF = user->localF;

  mx = user->mx; my = user->my; lambda = user->param;
  hx = one/(PetscReal)(mx-1);  hy = one/(PetscReal)(my-1);
  sc = hx*hy*lambda; hxdhy = hx/hy; hydhx = hy/hx;

  /*
     Scatter ghost points to local vector, using the 2-step process
        DAGlobalToLocalBegin(), DAGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  DAGlobalToLocalBegin(user->da,X,INSERT_VALUES,localX);
  DAGlobalToLocalEnd(user->da,X,INSERT_VALUES,localX);

  /*
     Get pointers to vector data
  */
  VecGetArray(localX,&x);
  VecGetArray(localF,&f);

  /*
     Get local grid boundaries
  */
  DAGetCorners(user->da,&xs,&ys,PETSC_NULL,&xm,&ym,PETSC_NULL);
  DAGetGhostCorners(user->da,&gxs,&gys,PETSC_NULL,&gxm,&gym,PETSC_NULL);

  /*
     Compute function over the locally owned part of the grid
  */
  for (j=ys; j<ys+ym; j++) {
    row = (j - gys)*gxm + xs - gxs - 1;
    for (i=xs; i<xs+xm; i++) {
      row++;
      if (i == 0 || j == 0 || i == mx-1 || j == my-1) {
        f[row] = x[row];
        continue;
      }
      u = x[row];
      uxx = (two*u - x[row-1] - x[row+1])*hydhx;
      uyy = (two*u - x[row-gxm] - x[row+gxm])*hxdhy;
      f[row] = uxx + uyy - sc*PetscExpScalar(u);
    }
  }

  /*
     Restore vectors
  */
  VecRestoreArray(localX,&x);
  VecRestoreArray(localF,&f);

  /*
     Insert values into global vector
  */
  DALocalToGlobal(user->da,localF,INSERT_VALUES,F);
  PetscLogFlops(11*ym*xm);
  return 0;
}
/* ------------------------------------------------------------------- */
/*
   ComputeJacobian - Evaluates Jacobian matrix.

   Input Parameters:
.  x - input vector
.  user - user-defined application context

   Output Parameters:
.  jac - Jacobian matrix
.  flag - flag indicating matrix structure

   Notes:
   Due to grid point reordering with DAs, we must always work
   with the local grid points, and then transform them to the new
   global numbering with the "ltog" mapping (via DAGetGlobalIndices()).
   We cannot work directly with the global numbers for the original
   uniprocessor grid!
*/
int ComputeJacobian(AppCtx *user,Vec X,Mat jac,MatStructure *flag)
{
  Vec         localX = user->localX;   /* local vector */
  int         *ltog;                   /* local-to-global mapping */
  int         ierr,i,j,row,mx,my,col[5];
  int         nloc,xs,ys,xm,ym,gxs,gys,gxm,gym,grow;
  PetscScalar two = 2.0,one = 1.0,lambda,v[5],hx,hy,hxdhy,hydhx,sc,*x;

  mx = user->mx; my = user->my; lambda = user->param;
  hx = one/(PetscReal)(mx-1);  hy = one/(PetscReal)(my-1);
  sc = hx*hy; hxdhy = hx/hy; hydhx = hy/hx;

  /*
     Scatter ghost points to local vector, using the 2-step process
        DAGlobalToLocalBegin(), DAGlobalToLocalEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  DAGlobalToLocalBegin(user->da,X,INSERT_VALUES,localX);
  DAGlobalToLocalEnd(user->da,X,INSERT_VALUES,localX);

  /*
     Get pointer to vector data
  */
  VecGetArray(localX,&x);

  /*
     Get local grid boundaries
  */
  DAGetCorners(user->da,&xs,&ys,PETSC_NULL,&xm,&ym,PETSC_NULL);
  DAGetGhostCorners(user->da,&gxs,&gys,PETSC_NULL,&gxm,&gym,PETSC_NULL);

  /*
     Get the global node numbers for all local nodes, including ghost points
  */
  DAGetGlobalIndices(user->da,&nloc,&ltog);
  /*
     Compute entries for the locally owned part of the Jacobian.
      - Currently, all PETSc parallel matrix formats are partitioned by
        contiguous chunks of rows across the processors.  The "grow"
        parameter computed below specifies the global row number
        corresponding to each local grid point.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Always specify global row and columns of matrix entries.
      - Here, we set all entries for a particular row at once.
  */
  for (j=ys; j<ys+ym; j++) {
    row = (j - gys)*gxm + xs - gxs - 1;
    for (i=xs; i<xs+xm; i++) {
      row++;
      grow = ltog[row];
      /* boundary points */
      if (i == 0 || j == 0 || i == mx-1 || j == my-1) {
        MatSetValues(jac,1,&grow,1,&grow,&one,INSERT_VALUES);
        continue;
      }
      /* interior grid points */
      v[0] = -hxdhy; col[0] = ltog[row - gxm];
      v[1] = -hydhx; col[1] = ltog[row - 1];
      v[2] = two*(hydhx + hxdhy) - sc*lambda*PetscExpScalar(x[row]); col[2] = grow;
      v[3] = -hydhx; col[3] = ltog[row + 1];
      v[4] = -hxdhy; col[4] = ltog[row + gxm];
      MatSetValues(jac,1,&grow,5,col,v,INSERT_VALUES);
    }
  }
  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd().
     By placing code between these two statements, computations can be
     done while messages are in transition.
  */
  MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY);
  VecRestoreArray(localX,&x);
  MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY);

  /*
     Set flag to indicate that the Jacobian matrix retains an identical
     nonzero structure throughout all nonlinear iterations (although the
     values of the entries change).  Thus, we can save some work in setting
     up the preconditioner (e.g., no need to redo symbolic factorization for
     ILU/ICC preconditioners).
      - If the nonzero structure of the matrix is different during
        successive linear solves, then the flag DIFFERENT_NONZERO_PATTERN
        must be used instead.  If you are unsure whether the matrix
        structure has changed or not, use the flag DIFFERENT_NONZERO_PATTERN.
      - Caution:  If you specify SAME_NONZERO_PATTERN, PETSc
        believes your assertion and does not check the structure
        of the matrix.  If you erroneously claim that the structure
        is the same when it actually is not, the new preconditioner
        will not function correctly.  Thus, use this optimization
        feature with caution!
  */
  *flag = SAME_NONZERO_PATTERN;
  return 0;
}