Actual source code: ex14f.F
1: !
2: !
3: ! Solves a nonlinear system in parallel with a user-defined
4: !   Newton method that uses KSP to solve the linearized Newton systems.  This solver
5: ! is a very simplistic inexact Newton method. The intent of this code is to
6: !   demonstrate the repeated solution of linear systems with the same nonzero pattern.
7: !
8: ! This is NOT the recommended approach for solving nonlinear problems with PETSc!
9: ! We urge users to employ the SNES component for solving nonlinear problems whenever
10: ! possible, as it offers many advantages over coding nonlinear solvers independently.
11: !
12: ! We solve the Bratu (SFI - solid fuel ignition) problem in a 2D rectangular
13: ! domain, using distributed arrays (DAs) to partition the parallel grid.
14: !
15: ! The command line options include:
16: ! -par <parameter>, where <parameter> indicates the problem's nonlinearity
17: !      problem SFI:  <parameter> = Bratu parameter (0 <= par <= 6.81); note that this file hard-codes lambda = 6.0 and does not actually read -par
18: ! -mx <xg>, where <xg> = number of grid points in the x-direction
19: ! -my <yg>, where <yg> = number of grid points in the y-direction
20: ! -Nx <npx>, where <npx> = number of processors in the x-direction
21: ! -Ny <npy>, where <npy> = number of processors in the y-direction
22: ! -mf use matrix free for matrix vector product
23: !
24: !/*T
25: ! Concepts: KSP^writing a user-defined nonlinear solver
26: ! Concepts: DA^using distributed arrays
27: ! Processors: n
28: !T*/
29: ! ------------------------------------------------------------------------
30: !
31: ! Solid Fuel Ignition (SFI) problem. This problem is modeled by
32: ! the partial differential equation
33: !
34: ! -Laplacian u - lambda*exp(u) = 0, 0 < x,y < 1,
35: !
36: ! with boundary conditions
37: !
38: ! u = 0 for x = 0, x = 1, y = 0, y = 1.
39: !
40: ! A finite difference approximation with the usual 5-point stencil
41: ! is used to discretize the boundary value problem to obtain a nonlinear
42: ! system of equations.
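!
!  For reference (this is what ComputeFunction below evaluates), with
!  hx = 1/(mx-1) and hy = 1/(my-1) the discrete equation at an interior
!  grid point (i,j) is
!
!     (2*u(i,j) - u(i-1,j) - u(i+1,j))*hy/hx
!   + (2*u(i,j) - u(i,j-1) - u(i,j+1))*hx/hy - hx*hy*lambda*exp(u(i,j)) = 0
!
!  and u(i,j) = 0 is imposed at boundary grid points.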
43: !
44: ! The SNES version of this problem is: snes/examples/tutorials/ex5f.F
45: !
46: ! -------------------------------------------------------------------------
48: program main
49: implicit none
51: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
52: ! Include files
53: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
54: !
55: ! petsc.h - base PETSc routines petscvec.h - vectors
56: ! petscsys.h - system routines petscmat.h - matrices
57: ! petscis.h - index sets petscksp.h - Krylov subspace methods
58: ! petscviewer.h - viewers petscpc.h - preconditioners
60: #include "include/finclude/petsc.h"
61: #include "include/finclude/petscis.h"
62: #include "include/finclude/petscvec.h"
63: #include "include/finclude/petscmat.h"
64: #include "include/finclude/petscpc.h"
65: #include "include/finclude/petscksp.h"
66: #include "include/finclude/petscda.h"
68: MPI_Comm comm
69: Vec X,Y,F,localX,localF
70: Mat J,B
71: DA da
72: KSP ksp
74: PetscInt Nx,Ny,N,mx,my,ifive,ithree
75: PetscTruth flg,nooutput,usemf
76: common /mycommon/ mx,my,B,localX,localF,da
77: !
78: !
79: ! This is the routine to use for matrix-free approach
80: !
81: external mymult
83: ! --------------- Data to define nonlinear solver --------------
84: double precision rtol,xtol,ttol
85: double precision fnorm,ynorm,xnorm
86: PetscInt max_nonlin_its,one
87: PetscInt lin_its
88: PetscInt i,m
89: PetscScalar mone
90: PetscErrorCode ierr
92: mone = -1.d0
93: rtol = 1.d-8
94: xtol = 1.d-8
95: max_nonlin_its = 10
96: one = 1
97: ifive = 5
98: ithree = 3
100: call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
101: comm = PETSC_COMM_WORLD
103: ! Initialize problem parameters
105: !
106: mx = 4
107: my = 4
108: call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-mx',mx,flg,ierr)
109: call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-my',my,flg,ierr)
110: N = mx*my
112: nooutput = 0
113: call PetscOptionsHasName(PETSC_NULL_CHARACTER,'-no_output', &
114: & nooutput,ierr)
116: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
117: ! Create linear solver context
118: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
120: call KSPCreate(comm,ksp,ierr)
122: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
123: ! Create vector data structures
124: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
126: !
127: ! Create distributed array (DA) to manage parallel grid and vectors
128: !
129: Nx = PETSC_DECIDE
130: Ny = PETSC_DECIDE
131: call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-Nx',Nx,flg,ierr)
132: call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-Ny',Ny,flg,ierr)
133: call DACreate2d(comm,DA_NONPERIODIC,DA_STENCIL_STAR,mx, &
134: & my,Nx,Ny,one,one,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER, &
135: & da,ierr)
137: !
138: ! Extract global and local vectors from DA then duplicate for remaining
139: ! vectors that are the same types
140: !
141: call DACreateGlobalVector(da,X,ierr)
142: call DACreateLocalVector(da,localX,ierr)
143: call VecDuplicate(X,F,ierr)
144: call VecDuplicate(X,Y,ierr)
145: call VecDuplicate(localX,localF,ierr)
148: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
149: ! Create matrix data structure for Jacobian
150: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
151: !
152: ! Note: For the parallel case, vectors and matrices MUST be partitioned
153: ! accordingly. When using distributed arrays (DAs) to create vectors,
154: ! the DAs determine the problem partitioning. We must explicitly
155: ! specify the local matrix dimensions upon its creation for compatibility
156: ! with the vector distribution. Thus, the generic MatCreate() routine
157: ! is NOT sufficient when working with distributed arrays.
158: !
159: ! Note: Here we only approximately preallocate storage space for the
160: ! Jacobian. See the users manual for a discussion of better techniques
161: ! for preallocating matrix memory.
162: !
163: call VecGetLocalSize(X,m,ierr)
164: call MatCreateMPIAIJ(comm,m,m,N,N,ifive,PETSC_NULL_INTEGER,ithree, &
165: & PETSC_NULL_INTEGER,B,ierr)
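!  (In the call above, ifive = 5 preallocates for the diagonal block of each
!  local row, since the 5-point stencil yields at most 5 nonzeros per row,
!  and ithree = 3 is a rough bound on the entries that couple to rows owned
!  by other processes; see the users manual for tighter preallocation.)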
167: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
168: !  If usemf is on, the matrix-vector product is done via a matrix-free
169: !  (shell matrix) approach.  Note this is just an example and is not
170: !  realistic, because we still use the explicitly formed matrix B; in
171: !  reality one would provide a subroutine that applies the operator
172: !  directly instead of calling MatMult() (see the illustrative sketch
173: !  MyMultStencil appended at the end of this file).
174: !  Note: we put B into a common block so it will be visible to mymult()
175: usemf = 0
176: call PetscOptionsHasName(PETSC_NULL_CHARACTER,'-mf',usemf,ierr)
177: if (usemf .eq. 1) then
178: call MatCreateShell(comm,m,m,N,N,PETSC_NULL_INTEGER,J,ierr)
179: call MatShellSetOperation(J,MATOP_MULT,mymult,ierr)
180: else
181: !       If not doing matrix-free, then the operator matrix, J, and the
182: !       matrix used to construct the preconditioner, B, are the same
183: J = B
184: endif
186: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
187: !  Customize linear solver; set runtime options
188: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
189: !
190: ! Set runtime options (e.g., -ksp_monitor -ksp_rtol <rtol> -ksp_type <type>)
191: !
192: call KSPSetFromOptions(ksp,ierr)
194: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
195: ! Evaluate initial guess
196: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
198: call FormInitialGuess(X,ierr)
199: call ComputeFunction(X,F,ierr)
200: call VecNorm(F,NORM_2,fnorm,ierr)
201: ttol = fnorm*rtol
202: if (nooutput .eq. 0) then
203: print*, 'Initial function norm ',fnorm
204: endif
206: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
207: ! Solve nonlinear system with a user-defined method
208: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
210: ! This solver is a very simplistic inexact Newton method, with no
211: !  damping strategies or bells and whistles. The intent of this code
212: ! is merely to demonstrate the repeated solution with KSP of linear
213: !  systems with the same nonzero structure.
214: !
215: ! This is NOT the recommended approach for solving nonlinear problems
216: ! with PETSc! We urge users to employ the SNES component for solving
217: ! nonlinear problems whenever possible with application codes, as it
218: ! offers many advantages over coding nonlinear solvers independently.
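!
!  The loop below implements:  starting from the initial guess x_0,
!     solve   J(x_k) y_k = F(x_k)   approximately with KSP,
!     update  x_{k+1} = x_k - y_k,
!  and stop when ||F(x_{k+1})||_2 <= ttol = rtol*||F(x_0)||_2, or after
!  max_nonlin_its steps.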
220: do 10 i=0,max_nonlin_its
222: !  Compute the Jacobian matrix.  Its nonzero structure does not change
223: !  between Newton steps; only its numerical values are recomputed.
225: call ComputeJacobian(X,B,ierr)
227: ! Solve J Y = F, where J is the Jacobian matrix.
228: ! - First, set the KSP linear operators. Here the matrix that
229: ! defines the linear system also serves as the preconditioning
230: ! matrix.
231: ! - Then solve the Newton system.
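!       (The SAME_NONZERO_PATTERN flag passed to KSPSetOperators() below
!       tells the preconditioner that the sparsity pattern of B is the same
!       at every Newton step, so symbolic work such as the symbolic ILU/LU
!       factorization can be reused; only the numerical values change.)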
233: call KSPSetOperators(ksp,J,B,SAME_NONZERO_PATTERN,ierr)
234: call KSPSolve(ksp,F,Y,ierr)
236: ! Compute updated iterate
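!       (VecAYPX() with alpha = -1 computes Y <- X - Y, so after the
!       VecCopy() below X holds the Newton update x_{k+1} = x_k - y_k)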
238: call VecNorm(Y,NORM_2,ynorm,ierr)
239: call VecAYPX(Y,mone,X,ierr)
240: call VecCopy(Y,X,ierr)
241: call VecNorm(X,NORM_2,xnorm,ierr)
242: call KSPGetIterationNumber(ksp,lin_its,ierr)
243: if (nooutput .eq. 0) then
244: print*,'linear solve iterations = ',lin_its,' xnorm = ', &
245: & xnorm,' ynorm = ',ynorm
246: endif
248: ! Evaluate nonlinear function at new location
250: call ComputeFunction(X,F,ierr)
251: call VecNorm(F,NORM_2,fnorm,ierr)
252: if (nooutput .eq. 0) then
253: print*, 'Iteration ',i+1,' function norm',fnorm
254: endif
256: ! Test for convergence
258: if (fnorm .le. ttol) then
259: if (nooutput .eq. 0) then
260: print*,'Converged: function norm ',fnorm,' tolerance ',ttol
261: endif
262: goto 20
263: endif
264: 10 continue
265: 20 continue
267: write(6,100) i+1
268: 100 format('Number of Newton iterations =',I2)
270: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
271: ! Free work space. All PETSc objects should be destroyed when they
272: ! are no longer needed.
273: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
275: call MatDestroy(B,ierr)
276: if (usemf .ne. 0) then
277: call MatDestroy(J,ierr)
278: endif
279: call VecDestroy(localX,ierr)
280: call VecDestroy(X,ierr)
281: call VecDestroy(Y,ierr)
282: call VecDestroy(localF,ierr)
283: call VecDestroy(F,ierr)
284: call KSPDestroy(ksp,ierr)
285: call DADestroy(da,ierr)
286: call PetscFinalize(ierr)
287: end
289: ! -------------------------------------------------------------------
290: !
291: ! FormInitialGuess - Forms initial approximation.
292: !
293: ! Input Parameters:
294: ! X - vector
295: !
296: ! Output Parameter:
297: ! X - vector
298: !
299: subroutine FormInitialGuess(X,ierr)
300: implicit none
302: ! petsc.h - base PETSc routines petscvec.h - vectors
303: ! petscsys.h - system routines petscmat.h - matrices
304: ! petscis.h - index sets petscksp.h - Krylov subspace methods
305: ! petscviewer.h - viewers petscpc.h - preconditioners
307: #include "include/finclude/petsc.h"
308: #include "include/finclude/petscis.h"
309: #include "include/finclude/petscvec.h"
310: #include "include/finclude/petscmat.h"
311: #include "include/finclude/petscpc.h"
312: #include "include/finclude/petscksp.h"
313: #include "include/finclude/petscda.h"
314: PetscErrorCode ierr
315: PetscOffset idx
316: Vec X,localX,localF
317: PetscInt i,j,row,mx,my, xs,ys,xm
318: PetscInt ym,gxm,gym,gxs,gys
319: double precision one,lambda,temp1,temp,hx,hy
320: double precision hxdhy,hydhx,sc
321: PetscScalar xx(1)
322: DA da
323: Mat B
324: common /mycommon/ mx,my,B,localX,localF,da
325:
326: one = 1.d0
327: lambda = 6.d0
328: hx = one/(mx-1)
329: hy = one/(my-1)
330: sc = hx*hy*lambda
331: hxdhy = hx/hy
332: hydhx = hy/hx
333: temp1 = lambda/(lambda + one)
335: ! Get a pointer to vector data.
336: ! - VecGetArray() returns a pointer to the data array.
337: ! - You MUST call VecRestoreArray() when you no longer need access to
338: ! the array.
339: call VecGetArray(localX,xx,idx,ierr)
341: ! Get local grid boundaries (for 2-dimensional DA):
342: ! xs, ys - starting grid indices (no ghost points)
343: ! xm, ym - widths of local grid (no ghost points)
344: ! gxs, gys - starting grid indices (including ghost points)
345: ! gxm, gym - widths of local grid (including ghost points)
347: call DAGetCorners(da,xs,ys,PETSC_NULL_INTEGER,xm,ym, &
348: & PETSC_NULL_INTEGER,ierr)
349: call DAGetGhostCorners(da,gxs,gys,PETSC_NULL_INTEGER,gxm,gym, &
350: & PETSC_NULL_INTEGER,ierr)
352: ! Compute initial guess over the locally owned part of the grid
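!  (The guess set below is temp1*sqrt(d), where d is the distance from the
!  grid point to the nearest edge of the unit square; it vanishes on the
!  boundary, consistent with the boundary condition u = 0.)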
354: do 30 j=ys,ys+ym-1
355: temp = (min(j,my-j-1))*hy
356: do 40 i=xs,xs+xm-1
357: row = i - gxs + (j - gys)*gxm + 1
358: if (i .eq. 0 .or. j .eq. 0 .or. i .eq. mx-1 .or. &
359: & j .eq. my-1) then
360: xx(idx+row) = 0.d0
361: goto 40
362: endif
363: xx(idx+row) = temp1*sqrt(min((min(i,mx-i-1))*hx,temp))
364: 40 continue
365: 30 continue
367: ! Restore vector
369: call VecRestoreArray(localX,xx,idx,ierr)
371: ! Insert values into global vector
373: call DALocalToGlobal(da,localX,INSERT_VALUES,X,ierr)
374: return
375: end
377: ! -------------------------------------------------------------------
378: !
379: ! ComputeFunction - Evaluates nonlinear function, F(x).
380: !
381: ! Input Parameters:
382: !. X - input vector
383: !
384: ! Output Parameter:
385: !. F - function vector
386: !
387: subroutine ComputeFunction(X,F,ierr)
388: implicit none
390: ! petsc.h - base PETSc routines petscvec.h - vectors
391: ! petscsys.h - system routines petscmat.h - matrices
392: ! petscis.h - index sets petscksp.h - Krylov subspace methods
393: ! petscviewer.h - viewers petscpc.h - preconditioners
395: #include "include/finclude/petsc.h"
396: #include "include/finclude/petscis.h"
397: #include "include/finclude/petscvec.h"
398: #include "include/finclude/petscmat.h"
399: #include "include/finclude/petscpc.h"
400: #include "include/finclude/petscksp.h"
401: #include "include/finclude/petscda.h"
403: Vec X,F,localX,localF
404: PetscInt gys,gxm,gym
405: PetscOffset idx,idf
406: PetscErrorCode ierr
407: PetscInt i,j,row,mx,my,xs,ys,xm,ym,gxs
408: double precision two,one,lambda,hx
409: double precision hy,hxdhy,hydhx,sc
410: PetscScalar u,uxx,uyy,xx(1),ff(1)
411: DA da
412: Mat B
413: common /mycommon/ mx,my,B,localX,localF,da
415: two = 2.d0
416: one = 1.d0
417: lambda = 6.d0
419: hx = one/(mx-1)
420: hy = one/(my-1)
421: sc = hx*hy*lambda
422: hxdhy = hx/hy
423: hydhx = hy/hx
425: ! Scatter ghost points to local vector, using the 2-step process
426: ! DAGlobalToLocalBegin(), DAGlobalToLocalEnd().
427: ! By placing code between these two statements, computations can be
428: ! done while messages are in transition.
429: !
430: call DAGlobalToLocalBegin(da,X,INSERT_VALUES,localX,ierr)
431: call DAGlobalToLocalEnd(da,X,INSERT_VALUES,localX,ierr)
433: ! Get pointers to vector data
435: call VecGetArray(localX,xx,idx,ierr)
436: call VecGetArray(localF,ff,idf,ierr)
438: ! Get local grid boundaries
440: call DAGetCorners(da,xs,ys,PETSC_NULL_INTEGER,xm,ym, &
441: & PETSC_NULL_INTEGER,ierr)
442: call DAGetGhostCorners(da,gxs,gys,PETSC_NULL_INTEGER,gxm,gym, &
443: & PETSC_NULL_INTEGER,ierr)
445: ! Compute function over the locally owned part of the grid
447: do 50 j=ys,ys+ym-1
449: row = (j - gys)*gxm + xs - gxs
450: do 60 i=xs,xs+xm-1
451: row = row + 1
453: if (i .eq. 0 .or. j .eq. 0 .or. i .eq. mx-1 .or. &
454: & j .eq. my-1) then
455: ff(idf+row) = xx(idx+row)
456: goto 60
457: endif
458: u = xx(idx+row)
459: uxx = (two*u - xx(idx+row-1) - xx(idx+row+1))*hydhx
460: uyy = (two*u - xx(idx+row-gxm) - xx(idx+row+gxm))*hxdhy
461: ff(idf+row) = uxx + uyy - sc*exp(u)
462: 60 continue
463: 50 continue
465: ! Restore vectors
467: call VecRestoreArray(localX,xx,idx,ierr)
468: call VecRestoreArray(localF,ff,idf,ierr)
470: ! Insert values into global vector
472: call DALocalToGlobal(da,localF,INSERT_VALUES,F,ierr)
473: return
474: end
476: ! -------------------------------------------------------------------
477: !
478: ! ComputeJacobian - Evaluates Jacobian matrix.
479: !
480: ! Input Parameters:
481: ! x - input vector
482: !
483: !  Output Parameter:
484: !  jac - Jacobian matrix
486: !
487: ! Notes:
488: ! Due to grid point reordering with DAs, we must always work
489: ! with the local grid points, and then transform them to the new
490: ! global numbering with the 'ltog' mapping (via DAGetGlobalIndices()).
491: ! We cannot work directly with the global numbers for the original
492: ! uniprocessor grid!
493: !
494: subroutine ComputeJacobian(X,jac,ierr)
495: implicit none
497: ! petsc.h - base PETSc routines petscvec.h - vectors
498: ! petscsys.h - system routines petscmat.h - matrices
499: ! petscis.h - index sets petscksp.h - Krylov subspace methods
500: ! petscviewer.h - viewers petscpc.h - preconditioners
502: #include "include/finclude/petsc.h"
503: #include "include/finclude/petscis.h"
504: #include "include/finclude/petscvec.h"
505: #include "include/finclude/petscmat.h"
506: #include "include/finclude/petscpc.h"
507: #include "include/finclude/petscksp.h"
508: #include "include/finclude/petscda.h"
510: Vec X
511: Mat jac
512: Vec localX,localF
513: DA da
514: PetscInt ltog(1)
515: PetscOffset idltog,idx
516: PetscErrorCode ierr
517: PetscInt nloc,xs,ys,xm,ym,gxs,gys,gxm,gym,grow,i,j,row,mx,my
518: PetscInt ione,col(5),ifive
519: PetscScalar two,one,lambda,v(5),hx,hy,hxdhy
520: PetscScalar hydhx,sc,xx(1)
521: Mat B
522: common /mycommon/ mx,my,B,localX,localF,da
524: ione = 1
525: ifive = 5
526: one = 1.d0
527: two = 2.d0
528: hx = one/(mx-1)
529: hy = one/(my-1)
530: sc = hx*hy
531: hxdhy = hx/hy
532: hydhx = hy/hx
533: lambda = 6.d0
535: ! Scatter ghost points to local vector, using the 2-step process
536: ! DAGlobalToLocalBegin(), DAGlobalToLocalEnd().
537: ! By placing code between these two statements, computations can be
538: ! done while messages are in transition.
540: call DAGlobalToLocalBegin(da,X,INSERT_VALUES,localX,ierr)
541: call DAGlobalToLocalEnd(da,X,INSERT_VALUES,localX,ierr)
543: ! Get pointer to vector data
545: call VecGetArray(localX,xx,idx,ierr)
547: ! Get local grid boundaries
549: call DAGetCorners(da,xs,ys,PETSC_NULL_INTEGER,xm,ym, &
550: & PETSC_NULL_INTEGER,ierr)
551: call DAGetGhostCorners(da,gxs,gys,PETSC_NULL_INTEGER,gxm,gym, &
552: & PETSC_NULL_INTEGER,ierr)
554: ! Get the global node numbers for all local nodes, including ghost points
556: call DAGetGlobalIndices(da,nloc,ltog,idltog,ierr)
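!  ltog(idltog+k) now holds the global PETSc index of the k-th point of the
!  local ghosted patch; the row index grow and the column indices col()
!  below are obtained through this mapping.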
558: ! Compute entries for the locally owned part of the Jacobian.
559: ! - Currently, all PETSc parallel matrix formats are partitioned by
560: ! contiguous chunks of rows across the processors. The 'grow'
561: ! parameter computed below specifies the global row number
562: ! corresponding to each local grid point.
563: ! - Each processor needs to insert only elements that it owns
564: ! locally (but any non-local elements will be sent to the
565: ! appropriate processor during matrix assembly).
566: ! - Always specify global row and columns of matrix entries.
567: ! - Here, we set all entries for a particular row at once.
569: do 10 j=ys,ys+ym-1
570: row = (j - gys)*gxm + xs - gxs
571: do 20 i=xs,xs+xm-1
572: row = row + 1
573: grow = ltog(idltog+row)
574: if (i .eq. 0 .or. j .eq. 0 .or. i .eq. (mx-1) .or. &
575: & j .eq. (my-1)) then
576: call MatSetValues(jac,ione,grow,ione,grow,one, &
577: & INSERT_VALUES,ierr)
578: go to 20
579: endif
580: v(1) = -hxdhy
581: col(1) = ltog(idltog+row - gxm)
582: v(2) = -hydhx
583: col(2) = ltog(idltog+row - 1)
584: v(3) = two*(hydhx + hxdhy) - sc*lambda*exp(xx(idx+row))
585: col(3) = grow
586: v(4) = -hydhx
587: col(4) = ltog(idltog+row + 1)
588: v(5) = -hxdhy
589: col(5) = ltog(idltog+row + gxm)
590: call MatSetValues(jac,ione,grow,ifive,col,v,INSERT_VALUES, &
591: & ierr)
592: 20 continue
593: 10 continue
595: ! Assemble matrix, using the 2-step process:
596: ! MatAssemblyBegin(), MatAssemblyEnd().
597: ! By placing code between these two statements, computations can be
598: ! done while messages are in transition.
600: call MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY,ierr)
601: call VecRestoreArray(localX,xx,idx,ierr)
602: call MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY,ierr)
603: return
604: end
607: ! -------------------------------------------------------------------
608: !
609: ! MyMult - user provided matrix multiply
610: !
611: ! Input Parameters:
612: !. X - input vector
613: !
614: ! Output Parameter:
615: !. F - function vector
616: !
617: subroutine MyMult(J,X,F,ierr)
618: implicit none
619: Mat J,B
620: Vec X,F
621: PetscErrorCode ierr
622: PetscInt mx,my
623: DA da
624: Vec localX,localF
626: common /mycommon/ mx,my,B,localX,localF,da
627: !
628: !      Here we use the explicitly formed matrix B; users would instead
629: !      write their own matrix-vector product routine (an illustrative
630: !      sketch, MyMultStencil, follows this subroutine)
631: call MatMult(B,X,F,ierr)
632: return
633: end
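! -------------------------------------------------------------------
!
!  MyMultStencil - ILLUSTRATIVE SKETCH ONLY; this routine is not part of
!  the original example and is never called by it.  It outlines what a
!  genuinely matrix-free product could look like for this problem: the
!  5-point stencil is applied to X directly, without using the assembled
!  matrix B at all.  For brevity the sketch applies only the constant
!  coefficient part of the Jacobian (the -sc*lambda*exp(u) diagonal term,
!  which would require access to the current Newton iterate, is omitted),
!  so it is NOT numerically equivalent to MyMult() above.  The name
!  MyMultStencil and the reuse of localX/localF as work vectors are
!  assumptions made for this sketch.  It could be registered in place of
!  mymult via MatShellSetOperation(J,MATOP_MULT,MyMultStencil,ierr).
!
      subroutine MyMultStencil(J,X,F,ierr)
      implicit none
#include "include/finclude/petsc.h"
#include "include/finclude/petscis.h"
#include "include/finclude/petscvec.h"
#include "include/finclude/petscmat.h"
#include "include/finclude/petscpc.h"
#include "include/finclude/petscksp.h"
#include "include/finclude/petscda.h"
      Mat              J,B
      Vec              X,F,localX,localF
      DA               da
      PetscErrorCode   ierr
      PetscOffset      idx,idf
      PetscInt         i,j,row,mx,my,xs,ys,xm,ym
      PetscInt         gxs,gys,gxm,gym
      double precision one,two,hx,hy,hxdhy,hydhx
      PetscScalar      xx(1),ff(1)
      common /mycommon/ mx,my,B,localX,localF,da

      one   = 1.d0
      two   = 2.d0
      hx    = one/(mx-1)
      hy    = one/(my-1)
      hxdhy = hx/hy
      hydhx = hy/hx

!  Scatter ghost values of X into the local work vector, exactly as in
!  ComputeFunction(), so neighboring values are available on this process
      call DAGlobalToLocalBegin(da,X,INSERT_VALUES,localX,ierr)
      call DAGlobalToLocalEnd(da,X,INSERT_VALUES,localX,ierr)
      call VecGetArray(localX,xx,idx,ierr)
      call VecGetArray(localF,ff,idf,ierr)
      call DAGetCorners(da,xs,ys,PETSC_NULL_INTEGER,xm,ym, &
     &     PETSC_NULL_INTEGER,ierr)
      call DAGetGhostCorners(da,gxs,gys,PETSC_NULL_INTEGER,gxm,gym, &
     &     PETSC_NULL_INTEGER,ierr)

!  Apply the stencil over the locally owned grid points; boundary rows of
!  the Jacobian are identity rows (see ComputeJacobian), so F = X there
      do 70 j=ys,ys+ym-1
        row = (j - gys)*gxm + xs - gxs
        do 80 i=xs,xs+xm-1
          row = row + 1
          if (i .eq. 0 .or. j .eq. 0 .or. i .eq. mx-1 .or. &
     &        j .eq. my-1) then
            ff(idf+row) = xx(idx+row)
            goto 80
          endif
          ff(idf+row) = two*(hydhx + hxdhy)*xx(idx+row)
          ff(idf+row) = ff(idf+row) - hydhx*(xx(idx+row-1)+xx(idx+row+1))
          ff(idf+row) = ff(idf+row) - hxdhy*(xx(idx+row-gxm)+xx(idx+row+gxm))
 80     continue
 70   continue

!  Restore arrays and move the local result into the global vector F
      call VecRestoreArray(localX,xx,idx,ierr)
      call VecRestoreArray(localF,ff,idf,ierr)
      call DALocalToGlobal(da,localF,INSERT_VALUES,F,ierr)
      return
      end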