Actual source code: ex4.c
1: /*$Id: ex4.c,v 1.29 2001/08/10 03:34:17 bsmith Exp $*/
3: /* Program usage: mpirun -np <procs> ex4 [-help] [all PETSc options] */
5: static char help[] ="Solves a simple time-dependent linear PDE (the heat equation).\n\
6: Input parameters include:\n\
7: -m <points>, where <points> = number of grid points\n\
8: -time_dependent_rhs : Treat the problem as having a time-dependent right-hand side\n\
9: -debug : Activate debugging printouts\n\
10: -nox : Deactivate x-window graphics\n\n";
12: /*
13: Concepts: TS^time-dependent linear problems
14: Concepts: TS^heat equation
15: Concepts: TS^diffusion equation
16: Processors: n
17: */
19: /* ------------------------------------------------------------------------
21: This program solves the one-dimensional heat equation (also called the
22: diffusion equation),
23: u_t = u_xx,
24: on the domain 0 <= x <= 1, with the boundary conditions
25: u(t,0) = 0, u(t,1) = 0,
26: and the initial condition
27: u(0,x) = sin(6*pi*x) + 3*sin(2*pi*x).
28: This is a linear, second-order, parabolic equation.
30: We discretize the right-hand side using finite differences with
31: uniform grid spacing h:
32: u_xx = (u_{i+1} - 2u_{i} + u_{i-1})/(h^2)
33: We then demonstrate time evolution using the various TS methods by
34: running the program via
35: mpirun -np <procs> ex4 -ts_type <timestepping solver>
37: We compare the approximate solution with the exact solution, given by
38: u_exact(x,t) = exp(-36*pi*pi*t) * sin(6*pi*x) +
39: 3*exp(-4*pi*pi*t) * sin(2*pi*x)
41: Notes:
42: This code demonstrates the TS solver interface to two variants of
43: linear problems, u_t = f(u,t), namely
44: - time-dependent f: f(u,t) is a function of t
45: - time-independent f: f(u,t) is simply f(u)
47: The uniprocessor version of this code is ts/examples/tutorials/ex3.c
49: ------------------------------------------------------------------------- */
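  /*
     After this spatial discretization the problem is a linear system of ODEs,
         u_t = A u,
     where A is the matrix assembled in RHSMatrixHeat() below: interior rows
     carry the three-point stencil (1/h^2, -2/h^2, 1/h^2) and the two boundary
     rows are set separately to enforce the Dirichlet conditions.
  */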
51: /*
52: Include "petscda.h" so that we can use distributed arrays (DAs) to manage
53: the parallel grid. Include "petscts.h" so that we can use TS solvers.
54: Note that this file automatically includes:
55: petsc.h       - base PETSc routines    petscvec.h  - vectors
56: petscsys.h    - system routines        petscmat.h  - matrices
57: petscis.h     - index sets             petscksp.h  - Krylov subspace methods
58: petscviewer.h - viewers                petscpc.h   - preconditioners
59: petscsles.h   - linear solvers         petscsnes.h - nonlinear solvers
60: */
62: #include "petscda.h"
63: #include "petscts.h"
65: /*
66: User-defined application context - contains data needed by the
67: application-provided call-back routines.
68: */
69: typedef struct {
70: MPI_Comm comm; /* communicator */
71: DA da; /* distributed array data structure */
72: Vec localwork; /* local ghosted work vector */
73: Vec u_local; /* local ghosted approximate solution vector */
74: Vec solution; /* global exact solution vector */
75: int m; /* total number of grid points */
76: PetscReal h; /* mesh width h = 1/(m-1) */
77: PetscTruth debug; /* flag (1 indicates activation of debugging printouts) */
78: PetscViewer viewer1,viewer2; /* viewers for the solution and error */
79: PetscReal norm_2,norm_max; /* error norms */
80: } AppCtx;
82: /*
83: User-defined routines
84: */
85: extern int InitialConditions(Vec,AppCtx*);
86: extern int RHSMatrixHeat(TS,PetscReal,Mat*,Mat*,MatStructure*,void*);
87: extern int Monitor(TS,int,PetscReal,Vec,void*);
88: extern int ExactSolution(PetscReal,Vec,AppCtx*);
90: int main(int argc,char **argv)
91: {
92: AppCtx appctx; /* user-defined application context */
93: TS ts; /* timestepping context */
94: Mat A; /* matrix data structure */
95: Vec u; /* approximate solution vector */
96: PetscReal time_total_max = 100.0; /* default max total time */
97: int time_steps_max = 100; /* default max timesteps */
98: PetscDraw draw; /* drawing context */
99: int ierr,steps,size,m;
100: PetscReal dt,ftime;
101: PetscTruth flg;
103: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
104: Initialize program and set problem parameters
105: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
106:
107: PetscInitialize(&argc,&argv,(char*)0,help);
108: appctx.comm = PETSC_COMM_WORLD;
110: m = 60;
111: PetscOptionsGetInt(PETSC_NULL,"-m",&m,PETSC_NULL);
112: PetscOptionsHasName(PETSC_NULL,"-debug",&appctx.debug);
113: appctx.m = m;
114: appctx.h = 1.0/(m-1.0);
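  /* the m grid points include both endpoints x=0 and x=1, hence the spacing 1/(m-1) */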
115: appctx.norm_2 = 0.0;
116: appctx.norm_max = 0.0;
117: MPI_Comm_size(PETSC_COMM_WORLD,&size);
118: PetscPrintf(PETSC_COMM_WORLD,"Solving a linear TS problem, number of processors = %d\n",size);
120: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
121: Create vector data structures
122: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
123: /*
124: Create distributed array (DA) to manage parallel grid and vectors
125: and to set up the ghost point communication pattern. There are M
126: total grid values spread equally among all the processors.
127: */
129: DACreate1d(PETSC_COMM_WORLD,DA_NONPERIODIC,m,1,1,PETSC_NULL,&appctx.da);
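  /*
     The arguments above request a non-periodic one-dimensional grid of m
     points with one degree of freedom per node and a stencil width of 1,
     which is all the three-point second-difference stencil requires.
  */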
131: /*
132: Extract global and local vectors from DA; we use these to store the
133: approximate solution. Then duplicate these for remaining vectors that
134: have the same types.
135: */
136: DACreateGlobalVector(appctx.da,&u);
137: DACreateLocalVector(appctx.da,&appctx.u_local);
139: /*
140: Create local work vector for use in evaluating right-hand-side function;
141: create global work vector for storing exact solution.
142: */
143: VecDuplicate(appctx.u_local,&appctx.localwork);
144: VecDuplicate(u,&appctx.solution);
146: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
147: Set up displays to show graphs of the solution and error
148: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
150: PetscViewerDrawOpen(PETSC_COMM_WORLD,0,"",80,380,400,160,&appctx.viewer1);
151: PetscViewerDrawGetDraw(appctx.viewer1,0,&draw);
152: PetscDrawSetDoubleBuffer(draw);
153: PetscViewerDrawOpen(PETSC_COMM_WORLD,0,"",80,0,400,160,&appctx.viewer2);
154: PetscViewerDrawGetDraw(appctx.viewer2,0,&draw);
155: PetscDrawSetDoubleBuffer(draw);
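  /*
     In Monitor() below, viewer2 displays the computed solution and viewer1
     displays the error (the exact-solution work vector is overwritten with
     the error before it is viewed).
  */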
157: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
158: Create timestepping solver context
159: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
161: TSCreate(PETSC_COMM_WORLD,&ts);
162: TSSetProblemType(ts,TS_LINEAR);
164: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
165: Set optional user-defined monitoring routine
166: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
168: TSSetMonitor(ts,Monitor,&appctx,PETSC_NULL);
170: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
172: Create matrix data structure; set matrix evaluation routine.
173: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
175: MatCreate(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,m,m,&A);
176: MatSetFromOptions(A);
178: PetscOptionsHasName(PETSC_NULL,"-time_dependent_rhs",&flg);
179: if (flg) {
180: /*
181: For linear problems with a time-dependent f(u,t) in the equation
182: u_t = f(u,t), the user provides the discretized right-hand-side
183: as a time-dependent matrix.
184: */
185: TSSetRHSMatrix(ts,A,A,RHSMatrixHeat,&appctx);
186: } else {
187: /*
188: For linear problems with a time-independent f(u) in the equation
189: u_t = f(u), the user provides the discretized right-hand-side
190: as a matrix only once, and then sets a null matrix evaluation
191: routine.
192: */
193: MatStructure A_structure;
194: RHSMatrixHeat(ts,0.0,&A,&A,&A_structure,&appctx);
195: TSSetRHSMatrix(ts,A,A,PETSC_NULL,&appctx);
196: }
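  /*
     For the heat equation the right-hand-side matrix does not actually depend
     on time (RHSMatrixHeat() ignores its time argument), so both branches
     build the same matrix; the -time_dependent_rhs path simply exercises the
     time-dependent interface of TS.
  */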
198: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
199: Set solution vector and initial timestep
200: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
202: dt = appctx.h*appctx.h/2.0;
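  /*
     A timestep of h^2/2 matches the classical stability limit of explicit
     Euler for u_t = u_xx; implicit methods selected at runtime are not
     restricted to steps this small.
  */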
203: TSSetInitialTimeStep(ts,0.0,dt);
204: TSSetSolution(ts,u);
206: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
207: Customize timestepping solver:
208: - Set the solution method to be the Backward Euler method.
209: - Set timestepping duration info
210: Then set runtime options, which can override these defaults.
211: For example,
212: -ts_max_steps <maxsteps> -ts_max_time <maxtime>
213: to override the defaults set by TSSetDuration().
214: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
216: TSSetDuration(ts,time_steps_max,time_total_max);
217: TSSetFromOptions(ts);
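  /*
     A typical run (the option values here are only illustrative) is
         mpirun -np 2 ex4 -ts_type beuler -ts_max_steps 10 -ts_max_time 1.0
     TSSetFromOptions() is what lets such runtime options override the
     defaults set above.
  */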
219: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
220: Solve the problem
221: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
223: /*
224: Evaluate initial conditions
225: */
226: InitialConditions(u,&appctx);
228: /*
229: Run the timestepping solver
230: */
231: TSStep(ts,&steps,&ftime);
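  /*
     With this calling sequence TSStep() drives the entire time integration,
     returning the number of steps actually taken in steps and the final time
     in ftime; steps is used below to report average errors.
  */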
233: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
234: View timestepping solver info
235: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
237: PetscPrintf(PETSC_COMM_WORLD,"avg. error (2 norm) = %g, avg. error (max norm) = %g\n",
238: appctx.norm_2/steps,appctx.norm_max/steps);
239: TSView(ts,PETSC_VIEWER_STDOUT_WORLD);
241: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
242: Free work space. All PETSc objects should be destroyed when they
243: are no longer needed.
244: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
246: TSDestroy(ts);
247: MatDestroy(A);
248: VecDestroy(u);
249: PetscViewerDestroy(appctx.viewer1);
250: PetscViewerDestroy(appctx.viewer2);
251: VecDestroy(appctx.localwork);
252: VecDestroy(appctx.solution);
253: VecDestroy(appctx.u_local);
254: DADestroy(appctx.da);
256: /*
257: Always call PetscFinalize() before exiting a program. This routine
258: - finalizes the PETSc libraries as well as MPI
259: - provides summary and diagnostic information if certain runtime
260: options are chosen (e.g., -log_summary).
261: */
262: PetscFinalize();
263: return 0;
264: }
265: /* --------------------------------------------------------------------- */
266: /*
267: InitialConditions - Computes the solution at the initial time.
269: Input Parameter:
270: u - uninitialized solution vector (global)
271: appctx - user-defined application context
273: Output Parameter:
274: u - vector with solution at initial time (global)
275: */
276: int InitialConditions(Vec u,AppCtx *appctx)
277: {
278: PetscScalar *u_localptr,h = appctx->h;
279: int i,mybase,myend,ierr;
281: /*
282: Determine starting point of each processor's range of
283: grid values.
284: */
285: VecGetOwnershipRange(u,&mybase,&myend);
287: /*
288: Get a pointer to vector data.
289: - For default PETSc vectors, VecGetArray() returns a pointer to
290: the data array. Otherwise, the routine is implementation dependent.
291: - You MUST call VecRestoreArray() when you no longer need access to
292: the array.
293: - Note that the Fortran interface to VecGetArray() differs from the
294: C version. See the users manual for details.
295: */
296: VecGetArray(u,&u_localptr);
298: /*
299: We initialize the solution array by simply writing the solution
300: directly into the array locations. Alternatively, we could use
301: VecSetValues() or VecSetValuesLocal().
302: */
303: for (i=mybase; i<myend; i++) {
304: u_localptr[i-mybase] = PetscSinScalar(PETSC_PI*i*6.*h) + 3.*PetscSinScalar(PETSC_PI*i*2.*h);
305: }
307: /*
308: Restore vector
309: */
310: VecRestoreArray(u,&u_localptr);
312: /*
313: Print debugging information if desired
314: */
315: if (appctx->debug) {
316: PetscPrintf(appctx->comm,"initial guess vector\n");
317: VecView(u,PETSC_VIEWER_STDOUT_WORLD);
318: }
320: return 0;
321: }
322: /* --------------------------------------------------------------------- */
323: /*
324: ExactSolution - Computes the exact solution at a given time.
326: Input Parameters:
327: t - current time
328: solution - vector in which exact solution will be computed
329: appctx - user-defined application context
331: Output Parameter:
332: solution - vector with the newly computed exact solution
333: */
334: int ExactSolution(PetscReal t,Vec solution,AppCtx *appctx)
335: {
336: PetscScalar *s_localptr,h = appctx->h,ex1,ex2,sc1,sc2;
337: int i,mybase,myend,ierr;
339: /*
340: Determine starting and ending points of each processor's
341: range of grid values
342: */
343: VecGetOwnershipRange(solution,&mybase,&myend);
345: /*
346: Get a pointer to vector data.
347: */
348: VecGetArray(solution,&s_localptr);
350: /*
351: Simply write the solution directly into the array locations.
352: Alternatively, we could use VecSetValues() or VecSetValuesLocal().
353: */
354: ex1 = exp(-36.*PETSC_PI*PETSC_PI*t); ex2 = exp(-4.*PETSC_PI*PETSC_PI*t);
355: sc1 = PETSC_PI*6.*h; sc2 = PETSC_PI*2.*h;
356: for (i=mybase; i<myend; i++) {
357: s_localptr[i-mybase] = PetscSinScalar(sc1*(PetscReal)i)*ex1 + 3.*PetscSinScalar(sc2*(PetscReal)i)*ex2;
358: }
360: /*
361: Restore vector
362: */
363: VecRestoreArray(solution,&s_localptr);
364: return 0;
365: }
366: /* --------------------------------------------------------------------- */
367: /*
368: Monitor - User-provided routine to monitor the solution computed at
369: each timestep. This example plots the solution and computes the
370: error in two different norms.
372: Input Parameters:
373: ts - the timestep context
374: step - the count of the current step (with 0 meaning the
375: initial condition)
376: time - the current time
377: u - the solution at this timestep
378: ctx - the user-provided context for this monitoring routine.
379: In this case we use the application context which contains
380: information about the problem size, workspace and the exact
381: solution.
382: */
383: int Monitor(TS ts,int step,PetscReal time,Vec u,void *ctx)
384: {
385: AppCtx *appctx = (AppCtx*) ctx; /* user-defined application context */
386: int ierr;
387: PetscReal norm_2,norm_max;
388: PetscScalar mone = -1.0;
390: /*
391: View a graph of the current iterate
392: */
393: VecView(u,appctx->viewer2);
395: /*
396: Compute the exact solution
397: */
398: ExactSolution(time,appctx->solution,appctx);
400: /*
401: Print debugging information if desired
402: */
403: if (appctx->debug) {
404: PetscPrintf(appctx->comm,"Computed solution vector\n");
405: VecView(u,PETSC_VIEWER_STDOUT_WORLD);
406: PetscPrintf(appctx->comm,"Exact solution vector\n");
407: VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD);
408: }
410: /*
411: Compute the 2-norm and max-norm of the error
412: */
413: VecAXPY(&mone,u,appctx->solution);
414: VecNorm(appctx->solution,NORM_2,&norm_2);
415: norm_2 = sqrt(appctx->h)*norm_2;
416: VecNorm(appctx->solution,NORM_MAX,&norm_max);
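  /*
     The old-style VecAXPY() call above computes solution <- solution - u, so
     appctx->solution now holds the error rather than the exact solution;
     scaling its discrete 2-norm by sqrt(h) approximates the continuous L2
     norm of the error.
  */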
418: /*
419: PetscPrintf() causes only the first processor in this
420: communicator to print the timestep information.
421: */
422: PetscPrintf(appctx->comm,"Timestep %d: time = %g, 2-norm error = %g, max norm error = %g\n",
423: step,time,norm_2,norm_max);
424: appctx->norm_2 += norm_2;
425: appctx->norm_max += norm_max;
427: /*
428: View a graph of the error
429: */
430: VecView(appctx->solution,appctx->viewer1);
432: /*
433: Print debugging information if desired
434: */
435: if (appctx->debug) {
436: PetscPrintf(appctx->comm,"Error vector\n");
437: VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD);
438: }
440: return 0;
441: }
442: /* --------------------------------------------------------------------- */
443: /*
444: RHSMatrixHeat - User-provided routine to compute the right-hand-side
445: matrix for the heat equation.
447: Input Parameters:
448: ts - the TS context
449: t - current time
450: ctx - optional user-defined context, as set by TSSetRHSMatrix()
453: Output Parameters:
454: AA - Jacobian matrix
455: BB - optionally different preconditioning matrix
456: str - flag indicating matrix structure
458: Notes:
459: RHSMatrixHeat computes entries for the locally owned part of the system.
460: - Currently, all PETSc parallel matrix formats are partitioned by
461: contiguous chunks of rows across the processors.
462: - Each processor needs to insert only elements that it owns
463: locally (but any non-local elements will be sent to the
464: appropriate processor during matrix assembly).
465: - Always specify global rows and columns of matrix entries when
466: using MatSetValues(); we could alternatively use MatSetValuesLocal().
467: - Here, we set all entries for a particular row at once.
468: - Note that MatSetValues() uses 0-based row and column numbers
469: in Fortran as well as in C.
470: */
471: int RHSMatrixHeat(TS ts,PetscReal t,Mat *AA,Mat *BB,MatStructure *str,void *ctx)
472: {
473: Mat A = *AA; /* Jacobian matrix */
474: AppCtx *appctx = (AppCtx*)ctx; /* user-defined application context */
475: int ierr,i,mstart,mend,idx[3];
476: PetscScalar v[3],stwo = -2./(appctx->h*appctx->h),sone = -.5*stwo;
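  /*
     stwo = -2/h^2 is the diagonal entry and sone = 1/h^2 the off-diagonal
     entry of the standard three-point approximation to u_xx assembled below.
  */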
478: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
479: Compute entries for the locally owned part of the matrix
480: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
482: MatGetOwnershipRange(A,&mstart,&mend);
484: /*
485: Set matrix rows corresponding to boundary data
486: */
488: if (mstart == 0) { /* first processor only */
489: v[0] = 1.0;
490: MatSetValues(A,1,&mstart,1,&mstart,v,INSERT_VALUES);
491: mstart++;
492: }
494: if (mend == appctx->m) { /* last processor only */
495: mend--;
496: v[0] = 1.0;
497: MatSetValues(A,1,&mend,1,&mend,v,INSERT_VALUES);
498: }
500: /*
501: Set matrix rows corresponding to interior data. We construct the
502: matrix one row at a time.
503: */
504: v[0] = sone; v[1] = stwo; v[2] = sone;
505: for (i=mstart; i<mend; i++) {
506: idx[0] = i-1; idx[1] = i; idx[2] = i+1;
507: MatSetValues(A,1,&i,3,idx,v,INSERT_VALUES);
508: }
510: /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
511: Complete the matrix assembly process and set some options
512: - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
513: /*
514: Assemble matrix, using the 2-step process:
515: MatAssemblyBegin(), MatAssemblyEnd()
516: Computations can be done while messages are in transition
517: by placing code between these two statements.
518: */
519: MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
520: MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
522: /*
523: Set flag to indicate that the Jacobian matrix retains an identical
524: nonzero structure throughout all timestepping iterations (although the
525: values of the entries change). Thus, we can save some work in setting
526: up the preconditioner (e.g., no need to redo symbolic factorization for
527: ILU/ICC preconditioners).
528: - If the nonzero structure of the matrix is different during
529: successive linear solves, then the flag DIFFERENT_NONZERO_PATTERN
530: must be used instead. If you are unsure whether the matrix
531: structure has changed or not, use the flag DIFFERENT_NONZERO_PATTERN.
532: - Caution: If you specify SAME_NONZERO_PATTERN, PETSc
533: believes your assertion and does not check the structure
534: of the matrix. If you erroneously claim that the structure
535: is the same when it actually is not, the new preconditioner
536: will not function correctly. Thus, use this optimization
537: feature with caution!
538: */
539: *str = SAME_NONZERO_PATTERN;
541: /*
542: Set an option to indicate that we will never add a new nonzero location
543: to the matrix. If we do, it will generate an error.
544: */
545: MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR);
547: return 0;
548: }