!  ex5f90.F
!
!  Description: Solves a nonlinear system in parallel with SNES.
!  We solve the Bratu (SFI - solid fuel ignition) problem in a 2D rectangular
!  domain, using distributed arrays (DAs) to partition the parallel grid.
!  The command line options include:
!    -par <parameter>, where <parameter> indicates the nonlinearity of the problem
!       problem SFI: <parameter> = Bratu parameter (0 <= par <= 6.81)
!
!/*T
!  Concepts: SNES^parallel Bratu example
!  Concepts: DA^using distributed arrays;
!  Processors: n
!T*/
!
! --------------------------------------------------------------------------
!
!  Solid Fuel Ignition (SFI) problem.  This problem is modeled by
!  the partial differential equation
!
!          -Laplacian u - lambda*exp(u) = 0,   0 < x,y < 1,
!
!  with boundary conditions
!
!           u = 0  for  x = 0, x = 1, y = 0, y = 1.
!
!  A finite difference approximation with the usual 5-point stencil
!  is used to discretize the boundary value problem to obtain a nonlinear
!  system of equations.
!
!  The uniprocessor version of this code is snes/examples/tutorials/ex4f.F
!
! --------------------------------------------------------------------------
!  The following define must be used before including any PETSc include files
!  into a module or interface.  This is because they cannot handle the
!  declarations contained in those headers.
!
38: module f90module
39: type userctx
40: #define PETSC_AVOID_DECLARATIONS
41: #include include/finclude/petsc.h
42: #include include/finclude/petscvec.h
43: #include include/finclude/petscda.h
44: #undef PETSC_AVOID_DECLARATIONS
45: DA da
46: integer xs,xe,xm,gxs,gxe,gxm
47: integer ys,ye,ym,gys,gye,gym
48: integer mx,my,rank
49: double precision lambda
50: end type userctx
51: contains
52: ! ---------------------------------------------------------------------
53: !
54: ! FormFunction - Evaluates nonlinear function, F(x).
55: !
56: ! Input Parameters:
57: ! snes - the SNES context
58: ! X - input vector
59: ! dummy - optional user-defined context, as set by SNESSetFunction()
60: ! (not used here)
61: !
62: ! Output Parameter:
63: ! F - function vector
64: !
65: ! Notes:
66: ! This routine serves as a wrapper for the lower-level routine
67: ! "FormFunctionLocal", where the actual computations are
68: ! done using the standard Fortran style of treating the local
69: ! vector data as a multidimensional array over the local mesh.
70: ! This routine merely handles ghost point scatters and accesses
71: ! the local vector data via VecGetArrayF90() and VecRestoreArrayF90().
72: !
73: subroutine FormFunction(snes,X,F,user,ierr)
74: implicit none
76: #include include/finclude/petsc.h
77: #include include/finclude/petscvec.h
78: #include include/finclude/petscda.h
79: #include include/finclude/petscis.h
80: #include include/finclude/petscmat.h
81: #include include/finclude/petscksp.h
82: #include include/finclude/petscpc.h
83: #include include/finclude/petscsnes.h
85: #include "include/finclude/petscvec.h90"
88: ! Input/output variables:
89: SNES snes
90: Vec X,F
91: integer ierr
92: type (userctx) user
94: ! Declarations for use with local arrays:
95: PetscScalar,pointer :: lx_v(:),lf_v(:)
96: Vec localX
98: ! Scatter ghost points to local vector, using the 2-step process
99: ! DAGlobalToLocalBegin(), DAGlobalToLocalEnd().
100: ! By placing code between these two statements, computations can
101: ! be done while messages are in transition.
103: call DAGetLocalVector(user%da,localX,ierr)
104: call DAGlobalToLocalBegin(user%da,X,INSERT_VALUES, &
105: & localX,ierr)
106: call DAGlobalToLocalEnd(user%da,X,INSERT_VALUES,localX,ierr)
108: ! Get a pointer to vector data.
109: ! - For default PETSc vectors, VecGetArray90() returns a pointer to
110: ! the data array. Otherwise, the routine is implementation dependent.
111: ! - You MUST call VecRestoreArrayF90() when you no longer need access to
112: ! the array.
113: ! - Note that the interface to VecGetArrayF90() differs from VecGetArray(),
114: ! and is useable from Fortran-90 Only.
116: call VecGetArrayF90(localX,lx_v,ierr)
117: call VecGetArrayF90(F,lf_v,ierr)
119: ! Compute function over the locally owned part of the grid
121: call FormFunctionLocal(lx_v,lf_v,user,ierr)
123: ! Restore vectors
125: call VecRestoreArrayF90(localX,lx_v,ierr)
126: call VecRestoreArrayF90(F,lf_v,ierr)
128: ! Insert values into global vector
130: call DARestoreLocalVector(user%da,localX,ierr)
131: call PetscLogFlops(11*user%ym*user%xm,ierr)
133: ! call VecView(X,PETSC_VIEWER_STDOUT_WORLD,ierr)
134: ! call VecView(F,PETSC_VIEWER_STDOUT_WORLD,ierr)
136: return
137: end subroutine formfunction
138: end module f90module
142: program main
143: use f90module
144: implicit none
145: !
146: !
147: #include include/finclude/petsc.h
148: #include include/finclude/petscvec.h
149: #include include/finclude/petscda.h
150: #include include/finclude/petscis.h
151: #include include/finclude/petscmat.h
152: #include include/finclude/petscksp.h
153: #include include/finclude/petscpc.h
154: #include include/finclude/petscsnes.h
155: #include "include/finclude/petscvec.h90"
156: #include "include/finclude/petscda.h90"
158: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
159: ! Variable declarations
160: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
161: !
162: ! Variables:
163: ! snes - nonlinear solver
164: ! x, r - solution, residual vectors
165: ! J - Jacobian matrix
166: ! its - iterations for convergence
167: ! Nx, Ny - number of preocessors in x- and y- directions
168: ! matrix_free - flag - 1 indicates matrix-free version
169: !
170: !
171: SNES snes
172: Vec x,r
173: Mat J
174: integer its,matrix_free,flg,ierr
175: double precision lambda_max,lambda_min
176: type (userctx) user
178: ! Note: Any user-defined Fortran routines (such as FormJacobian)
179: ! MUST be declared as external.
181: external FormInitialGuess,FormJacobian
183: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
184: ! Initialize program
185: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
187: call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
188: call MPI_Comm_rank(PETSC_COMM_WORLD,user%rank,ierr)
190: ! Initialize problem parameters
192: lambda_max = 6.81
193: lambda_min = 0.0
194: user%lambda = 6.0
195: call PetscOptionsGetReal(PETSC_NULL_CHARACTER,'-par', &
196: & user%lambda,flg,ierr)
197: if (user%lambda .ge. lambda_max .or. user%lambda .le. lambda_min) &
198: & then
199: if (user%rank .eq. 0) write(6,*) 'Lambda is out of range'
200: SETERRQ(1,' ',ierr)
201: endif
204: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
205: ! Create nonlinear solver context
206: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
208: call SNESCreate(PETSC_COMM_WORLD,snes,ierr)
210: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
211: ! Create vector data structures; set function evaluation routine
212: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
214: ! Create distributed array (DA) to manage parallel grid and vectors
216: ! This really needs only the star-type stencil, but we use the box
217: ! stencil temporarily.
218: call DACreate2d(PETSC_COMM_WORLD,DA_NONPERIODIC,DA_STENCIL_BOX, &
219: & -4,-4,PETSC_DECIDE,PETSC_DECIDE,1,1, &
220: & PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,user%da,ierr)
221: call DAGetInfo(user%da,PETSC_NULL_INTEGER,user%mx,user%my, &
222: & PETSC_NULL_INTEGER, &
223: & PETSC_NULL_INTEGER,PETSC_NULL_INTEGER, &
224: & PETSC_NULL_INTEGER,PETSC_NULL_INTEGER, &
225: & PETSC_NULL_INTEGER,PETSC_NULL_INTEGER, &
226: & PETSC_NULL_INTEGER,ierr)
227:
228: !
229: ! Visualize the distribution of the array across the processors
230: !
231: ! call DAView(user%da,PETSC_VIEWER_DRAW_WORLD,ierr)
233: ! Extract global and local vectors from DA; then duplicate for remaining
234: ! vectors that are the same types
236: call DACreateGlobalVector(user%da,x,ierr)
237: call VecDuplicate(x,r,ierr)
239: ! Get local grid boundaries (for 2-dimensional DA)
241: call DAGetCorners(user%da,user%xs,user%ys,PETSC_NULL_INTEGER, &
242: & user%xm,user%ym,PETSC_NULL_INTEGER,ierr)
243: call DAGetGhostCorners(user%da,user%gxs,user%gys, &
244: & PETSC_NULL_INTEGER,user%gxm,user%gym, &
245: & PETSC_NULL_INTEGER,ierr)
247: ! Here we shift the starting indices up by one so that we can easily
248: ! use the Fortran convention of 1-based indices (rather 0-based indices).
250: user%xs = user%xs+1
251: user%ys = user%ys+1
252: user%gxs = user%gxs+1
253: user%gys = user%gys+1
255: user%ye = user%ys+user%ym-1
256: user%xe = user%xs+user%xm-1
257: user%gye = user%gys+user%gym-1
258: user%gxe = user%gxs+user%gxm-1
260: ! Set function evaluation routine and vector
262: call SNESSetFunction(snes,r,FormFunction,user,ierr)
264: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
265: ! Create matrix data structure; set Jacobian evaluation routine
266: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
268: ! Set Jacobian matrix data structure and default Jacobian evaluation
269: ! routine. User can override with:
270: ! -snes_fd : default finite differencing approximation of Jacobian
271: ! -snes_mf : matrix-free Newton-Krylov method with no preconditioning
272: ! (unless user explicitly sets preconditioner)
273: ! -snes_mf_operator : form preconditioning matrix as set by the user,
274: ! but use matrix-free approx for Jacobian-vector
275: ! products within Newton-Krylov method
276: !
277: ! Note: For the parallel case, vectors and matrices MUST be partitioned
278: ! accordingly. When using distributed arrays (DAs) to create vectors,
279: ! the DAs determine the problem partitioning. We must explicitly
280: ! specify the local matrix dimensions upon its creation for compatibility
281: ! with the vector distribution. Thus, the generic MatCreate() routine
282: ! is NOT sufficient when working with distributed arrays.
283: !
284: ! Note: Here we only approximately preallocate storage space for the
285: ! Jacobian. See the users manual for a discussion of better techniques
286: ! for preallocating matrix memory.
288: call PetscOptionsHasName(PETSC_NULL_CHARACTER,'-snes_mf', &
289: & matrix_free,ierr)
290: if (matrix_free .eq. 0) then
291: call DAGetMatrix(user%da,MATAIJ,J,ierr)
292: call SNESSetJacobian(snes,J,J,FormJacobian,user,ierr)
293: endif
295: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
296: ! Customize nonlinear solver; set runtime options
297: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
299: ! Set runtime options (e.g., -snes_monitor -snes_rtol <rtol> -ksp_type <type>)
301: call SNESSetFromOptions(snes,ierr)
303: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
304: ! Evaluate initial guess; then solve nonlinear system.
305: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
307: ! Note: The user should initialize the vector, x, with the initial guess
308: ! for the nonlinear solver prior to calling SNESSolve(). In particular,
309: ! to employ an initial guess of zero, the user should explicitly set
310: ! this vector to zero by calling VecSet().
312: call FormInitialGuess(user,x,ierr)
313: call SNESSolve(snes,PETSC_NULL_OBJECT,x,ierr)
314: call SNESGetIterationNumber(snes,its,ierr);
315: if (user%rank .eq. 0) then
316: write(6,100) its
317: endif
318: 100 format('Number of Newton iterations = ',i5)
320: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
321: ! Free work space. All PETSc objects should be destroyed when they
322: ! are no longer needed.
323: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
325: if (matrix_free .eq. 0) call MatDestroy(J,ierr)
326: call VecDestroy(x,ierr)
327: call VecDestroy(r,ierr)
328: call SNESDestroy(snes,ierr)
329: call DADestroy(user%da,ierr)
330: call PetscFinalize(ierr)
331: end
! ---------------------------------------------------------------------
!
!  FormInitialGuess - Forms the initial approximation.
!
!  Input Parameters:
!  user - user-defined application context
!  X    - vector
!
!  Output Parameters:
!  X    - vector filled with the initial guess
!  ierr - error code
!
!  Notes:
!  This routine serves as a wrapper for the lower-level routine
!  "InitialGuessLocal", where the actual computations are
!  done using the standard Fortran style of treating the local
!  vector data as a multidimensional array over the local mesh.
!  This routine merely handles ghost point scatters and accesses
!  the local vector data via VecGetArrayF90() and VecRestoreArrayF90().
!
      subroutine FormInitialGuess(user,X,ierr)
      use f90module
      implicit none

#include "include/finclude/petscvec.h90"
#include "include/finclude/petsc.h"
#include "include/finclude/petscvec.h"
#include "include/finclude/petscda.h"
#include "include/finclude/petscis.h"
#include "include/finclude/petscmat.h"
#include "include/finclude/petscksp.h"
#include "include/finclude/petscpc.h"
#include "include/finclude/petscsnes.h"

!  Input/output variables:
      type (userctx) user
      Vec X
      integer ierr

!  Declarations for use with local arrays:
      PetscScalar,pointer :: lx_v(:)
      Vec localX

      ierr = 0

!  Get a pointer to vector data.
!    - For default PETSc vectors, VecGetArrayF90() returns a pointer to
!      the data array.  Otherwise, the routine is implementation dependent.
!    - You MUST call VecRestoreArrayF90() when you no longer need access to
!      the array.
!    - Note that the interface to VecGetArrayF90() differs from VecGetArray(),
!      and is usable from Fortran 90 only.
      call DAGetLocalVector(user%da,localX,ierr)
      call VecGetArrayF90(localX,lx_v,ierr)

!  Compute initial guess over the locally owned part of the grid
      call InitialGuessLocal(user,lx_v,ierr)

!  Restore vector
      call VecRestoreArrayF90(localX,lx_v,ierr)

!  Insert values into global vector
      call DALocalToGlobal(user%da,localX,INSERT_VALUES,X,ierr)
      call DARestoreLocalVector(user%da,localX,ierr)

      return
      end
! ---------------------------------------------------------------------
!
!  InitialGuessLocal - Computes the initial approximation, called by
!  the higher level routine FormInitialGuess().
!
!  Input Parameter:
!  user - user-defined application context
!  x    - local vector data
!
!  Output Parameters:
!  x    - local vector data
!  ierr - error code
!
!  Notes:
!  This routine uses standard Fortran-style computations over a 2-dim array.
!
      subroutine InitialGuessLocal(user,x,ierr)
      use f90module
      implicit none

#include "include/finclude/petsc.h"
#include "include/finclude/petscvec.h"
#include "include/finclude/petscda.h"
#include "include/finclude/petscis.h"
#include "include/finclude/petscmat.h"
#include "include/finclude/petscksp.h"
#include "include/finclude/petscpc.h"
#include "include/finclude/petscsnes.h"

!  Input/output variables:
      type (userctx) user
!     x is dimensioned over the ghosted local grid (1-based indices)
      PetscScalar x(user%gxs:user%gxe,user%gys:user%gye)
      integer ierr

!  Local variables:
      integer i,j
      PetscScalar temp1,temp,hx,hy,sc,one

!  Set parameters
      ierr  = 0
      one   = 1.0
      hx    = one/(dble(user%mx-1))
      hy    = one/(dble(user%my-1))
      sc    = hx*hy*user%lambda
      temp1 = user%lambda/(user%lambda + one)

!  Zero on the boundary; elsewhere, an estimate that scales with the
!  distance to the nearest boundary.
      do j=user%ys,user%ye
         temp = dble(min(j-1,user%my-j))*hy
         do i=user%xs,user%xe
            if (i .eq. 1 .or. j .eq. 1 &
     &             .or. i .eq. user%mx .or. j .eq. user%my) then
              x(i,j) = 0.0
            else
              x(i,j) = temp1 * &
     &          sqrt(min(dble(min(i-1,user%mx-i)*hx),dble(temp)))
            endif
         enddo
      enddo

      return
      end
! ---------------------------------------------------------------------
!
!  FormFunctionLocal - Computes the nonlinear function, called by
!  the higher level routine FormFunction().
!
!  Input Parameters:
!  x    - local vector data (ghosted)
!  user - user-defined application context
!
!  Output Parameters:
!  f    - local vector data, f(x), over the owned grid points only
!  ierr - error code
!
!  Notes:
!  This routine uses standard Fortran-style computations over a 2-dim array.
!
      subroutine FormFunctionLocal(x,f,user,ierr)
      use f90module

      implicit none

!  Input/output variables:
      type (userctx) user
!     x covers the ghosted local grid; f covers only the owned points
      PetscScalar x(user%gxs:user%gxe,user%gys:user%gye)
      PetscScalar f(user%xs:user%xe,user%ys:user%ye)
      integer ierr

!  Local variables:
      PetscScalar two,one,hx,hy,hxdhy,hydhx,sc
      PetscScalar u,uxx,uyy
      integer i,j

      ierr  = 0
      one   = 1.0
      two   = 2.0
      hx    = one/dble(user%mx-1)
      hy    = one/dble(user%my-1)
      sc    = hx*hy*user%lambda
      hxdhy = hx/hy
      hydhx = hy/hx

!  Compute the function over the locally owned part of the grid:
!  5-point finite-difference Laplacian minus the Bratu source term.
      do j=user%ys,user%ye
         do i=user%xs,user%xe
            if (i .eq. 1 .or. j .eq. 1 &
     &             .or. i .eq. user%mx .or. j .eq. user%my) then
!              Dirichlet boundary condition: residual enforces u = 0
               f(i,j) = x(i,j)
            else
               u   = x(i,j)
               uxx = hydhx * (two*u - x(i-1,j) - x(i+1,j))
               uyy = hxdhy * (two*u - x(i,j-1) - x(i,j+1))
               f(i,j) = uxx + uyy - sc*exp(u)
            endif
         enddo
      enddo

      return
      end
! ---------------------------------------------------------------------
!
!  FormJacobian - Evaluates the Jacobian matrix.
!
!  Input Parameters:
!  snes - the SNES context
!  X    - input vector
!  user - user-defined application context, as set by SNESSetJacobian()
!
!  Output Parameters:
!  jac      - Jacobian matrix
!  jac_prec - optionally different preconditioning matrix (not used here)
!  flag     - flag indicating matrix structure
!  ierr     - error code
!
!  Notes:
!  This routine serves as a wrapper for the lower-level routine
!  "FormJacobianLocal", where the actual computations are
!  done using the standard Fortran style of treating the local
!  vector data as a multidimensional array over the local mesh.
!  This routine merely accesses the local vector data via
!  VecGetArrayF90() and VecRestoreArrayF90().
!
!  Notes:
!  Due to grid point reordering with DAs, we must always work
!  with the local grid points, and then transform them to the new
!  global numbering with the "ltog" mapping (via DAGetGlobalIndicesF90()).
!  We cannot work directly with the global numbers for the original
!  uniprocessor grid!
!
!  Two methods are available for imposing this transformation
!  when setting matrix entries:
!    (A) MatSetValuesLocal(), using the local ordering (including
!        ghost points!)
!        - Use DAGetGlobalIndicesF90() to extract the local-to-global map
!        - Associate this map with the matrix by calling
!          MatSetLocalToGlobalMapping() once
!        - Set matrix entries using the local ordering
!          by calling MatSetValuesLocal()
!    (B) MatSetValues(), using the global ordering
!        - Use DAGetGlobalIndicesF90() to extract the local-to-global map
!        - Then apply this map explicitly yourself
!        - Set matrix entries using the global ordering by calling
!          MatSetValues()
!  Option (A) seems cleaner/easier in many cases, and is the procedure
!  used in this example.
!
      subroutine FormJacobian(snes,X,jac,jac_prec,flag,user,ierr)
      use f90module
      implicit none

#include "include/finclude/petsc.h"
#include "include/finclude/petscvec.h"
#include "include/finclude/petscda.h"
#include "include/finclude/petscis.h"
#include "include/finclude/petscmat.h"
#include "include/finclude/petscksp.h"
#include "include/finclude/petscpc.h"
#include "include/finclude/petscsnes.h"
#include "include/finclude/petscvec.h90"

!  Input/output variables:
      SNES snes
      Vec X
      Mat jac,jac_prec
      MatStructure flag
      type(userctx) user
      integer ierr

!  Declarations for use with local arrays:
      PetscScalar,pointer :: lx_v(:)
      Vec localX

!  Scatter ghost points to the local vector, using the 2-step process
!     DAGlobalToLocalBegin(), DAGlobalToLocalEnd()
!  Computations can be done while messages are in transition,
!  by placing code between these two statements.
      call DAGetLocalVector(user%da,localX,ierr)
      call DAGlobalToLocalBegin(user%da,X,INSERT_VALUES,localX,ierr)
      call DAGlobalToLocalEnd(user%da,X,INSERT_VALUES,localX,ierr)

!  Get a pointer to vector data
      call VecGetArrayF90(localX,lx_v,ierr)

!  Compute entries for the locally owned part of the Jacobian.
      call FormJacobianLocal(lx_v,jac,jac_prec,user,ierr)

!  Assemble matrix, using the 2-step process:
!     MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transition,
!  by placing code between these two statements.
      call MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY,ierr)
      call VecRestoreArrayF90(localX,lx_v,ierr)
      call DARestoreLocalVector(user%da,localX,ierr)
      call MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY,ierr)

!  Set flag to indicate that the Jacobian matrix retains an identical
!  nonzero structure throughout all nonlinear iterations (although the
!  values of the entries change).  Thus, we can save some work in setting
!  up the preconditioner (e.g., no need to redo symbolic factorization for
!  ILU/ICC preconditioners).
!   - If the nonzero structure of the matrix is different during
!     successive linear solves, then the flag DIFFERENT_NONZERO_PATTERN
!     must be used instead.  If you are unsure whether the matrix
!     structure has changed or not, use the flag DIFFERENT_NONZERO_PATTERN.
!   - Caution: If you specify SAME_NONZERO_PATTERN, PETSc
!     believes your assertion and does not check the structure
!     of the matrix.  If you erroneously claim that the structure
!     is the same when it actually is not, the new preconditioner
!     will not function correctly.  Thus, use this optimization
!     feature with caution!
      flag = SAME_NONZERO_PATTERN

!  Tell the matrix we will never add a new nonzero location to the
!  matrix.  If we do, it will generate an error.
      call MatSetOption(jac,MAT_NEW_NONZERO_LOCATION_ERR,ierr)

      return
      end
! ---------------------------------------------------------------------
!
!  FormJacobianLocal - Computes the Jacobian matrix, called by
!  the higher level routine FormJacobian().
!
!  Input Parameters:
!  x    - local vector data (ghosted)
!  user - user-defined application context
!
!  Output Parameters:
!  jac      - Jacobian matrix
!  jac_prec - optionally different preconditioning matrix (not used here)
!  ierr     - error code
!
!  Notes:
!  This routine uses standard Fortran-style computations over a 2-dim array.
!
!  Notes:
!  Due to grid point reordering with DAs, we must always work
!  with the local grid points, and then transform them to the new
!  global numbering with the "ltog" mapping (via DAGetGlobalIndicesF90()).
!  We cannot work directly with the global numbers for the original
!  uniprocessor grid!
!
!  Two methods are available for imposing this transformation
!  when setting matrix entries:
!    (A) MatSetValuesLocal(), using the local ordering (including
!        ghost points!)
!        - Use DAGetGlobalIndicesF90() to extract the local-to-global map
!        - Associate this map with the matrix by calling
!          MatSetLocalToGlobalMapping() once
!        - Set matrix entries using the local ordering
!          by calling MatSetValuesLocal()
!    (B) MatSetValues(), using the global ordering
!        - Use DAGetGlobalIndicesF90() to extract the local-to-global map
!        - Then apply this map explicitly yourself
!        - Set matrix entries using the global ordering by calling
!          MatSetValues()
!  Option (A) seems cleaner/easier in many cases, and is the procedure
!  used in this example.
!
      subroutine FormJacobianLocal(x,jac,jac_prec,user,ierr)
      use f90module
      implicit none

#include "include/finclude/petsc.h"
#include "include/finclude/petscvec.h"
#include "include/finclude/petscda.h"
#include "include/finclude/petscis.h"
#include "include/finclude/petscmat.h"
#include "include/finclude/petscksp.h"
#include "include/finclude/petscpc.h"
#include "include/finclude/petscsnes.h"

!  Input/output variables:
      type (userctx) user
      PetscScalar x(user%gxs:user%gxe,user%gys:user%gye)
      Mat jac,jac_prec
      integer ierr

!  Local variables:
      integer row,col(5),i,j
      PetscScalar two,one,hx,hy,hxdhy,hydhx,sc,v(5)

!  Set parameters
      ierr  = 0
      one   = 1.0
      two   = 2.0
      hx    = one/dble(user%mx-1)
      hy    = one/dble(user%my-1)
      sc    = hx*hy
      hxdhy = hx/hy
      hydhx = hy/hx

!  Compute entries for the locally owned part of the Jacobian.
!   - Currently, all PETSc parallel matrix formats are partitioned by
!     contiguous chunks of rows across the processors.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Here, we set all entries for a particular row at once.
!   - We can set matrix entries either using either
!     MatSetValuesLocal() or MatSetValues(), as discussed above.
!   - Note that MatSetValues() uses 0-based row and column numbers
!     in Fortran as well as in C.
      do j=user%ys,user%ye
!        0-based local (ghosted) row index of the point just before
!        (user%xs, j); incremented at the top of the i loop
         row = (j - user%gys)*user%gxm + user%xs - user%gxs - 1
         do i=user%xs,user%xe
            row = row + 1
!           boundary points: identity row to enforce u = 0
            if (i .eq. 1 .or. j .eq. 1 &
     &             .or. i .eq. user%mx .or. j .eq. user%my) then
               col(1) = row
               v(1)   = one
               call MatSetValuesLocal(jac,1,row,1,col,v, &
     &                                INSERT_VALUES,ierr)
!           interior grid points: 5-point stencil
            else
               v(1) = -hxdhy
               v(2) = -hydhx
               v(3) = two*(hydhx + hxdhy) &
     &                - sc*user%lambda*exp(x(i,j))
               v(4) = -hydhx
               v(5) = -hxdhy
               col(1) = row - user%gxm
               col(2) = row - 1
               col(3) = row
               col(4) = row + 1
               col(5) = row + user%gxm
               call MatSetValuesLocal(jac,1,row,5,col,v, &
     &                                INSERT_VALUES,ierr)
            endif
         enddo
      enddo

      return
      end