Actual source code: ex4.c
/* Program usage: mpiexec -n <procs> ex4 [-help] [all PETSc options] */

static char help[] = "Solves a simple time-dependent linear PDE (the heat equation).\n\
Input parameters include:\n\
  -m <points>, where <points> = number of grid points\n\
  -time_dependent_rhs : Treat the problem as having a time-dependent right-hand side\n\
  -debug              : Activate debugging printouts\n\
  -nox                : Deactivate x-window graphics\n\n";

/*
   Concepts: TS^time-dependent linear problems
   Concepts: TS^heat equation
   Concepts: TS^diffusion equation
   Processors: n
*/
/* ------------------------------------------------------------------------

   This program solves the one-dimensional heat equation (also called the
   diffusion equation),
       u_t = u_xx,
   on the domain 0 <= x <= 1, with the boundary conditions
       u(t,0) = 0, u(t,1) = 0,
   and the initial condition
       u(0,x) = sin(6*pi*x) + 3*sin(2*pi*x).
   This is a linear, second-order, parabolic equation.

   We discretize the right-hand side using finite differences with
   uniform grid spacing h:
       u_xx = (u_{i+1} - 2*u_{i} + u_{i-1})/(h^2)
   We then demonstrate time evolution using the various TS methods by
   running the program via
       mpiexec -n <procs> ex4 -ts_type <timestepping solver>
   We compare the approximate solution with the exact solution, given by
       u_exact(x,t) = exp(-36*pi*pi*t) * sin(6*pi*x) +
                      3*exp(-4*pi*pi*t) * sin(2*pi*x)

   Notes:
   This code demonstrates the TS solver interface to two variants of
   linear problems, u_t = f(u,t), namely
     - time-dependent f:   f(u,t) is a function of t
     - time-independent f: f(u,t) is simply f(u)

   The uniprocessor version of this code is ts/examples/tutorials/ex3.c

  ------------------------------------------------------------------------- */
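
/*
   Illustrative example runs (these assume the executable has been built as
   ./ex4 and use only options described in this file or handled by
   TSSetFromOptions()):

       mpiexec -n 1 ./ex4 -m 60 -ts_type beuler -ts_max_steps 100
       mpiexec -n 2 ./ex4 -m 120 -time_dependent_rhs -ts_max_time 1.0
       mpiexec -n 4 ./ex4 -ts_type euler -debug -nox

   Here -ts_max_steps and -ts_max_time override the duration defaults set by
   TSSetDuration() in main().
*/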
/*
   Include "petscda.h" so that we can use distributed arrays (DAs) to manage
   the parallel grid.  Include "petscts.h" so that we can use TS solvers.
   Note that this file automatically includes:
     petscsys.h    - base PETSc routines     petscvec.h  - vectors
     petscmat.h    - matrices
     petscis.h     - index sets              petscksp.h  - Krylov subspace methods
     petscviewer.h - viewers                 petscpc.h   - preconditioners
     petscksp.h    - linear solvers          petscsnes.h - nonlinear solvers
*/
#include "petscda.h"
#include "petscts.h"
/*
   User-defined application context - contains data needed by the
   application-provided call-back routines.
*/
typedef struct {
  MPI_Comm    comm;              /* communicator */
  DA          da;                /* distributed array data structure */
  Vec         localwork;         /* local ghosted work vector */
  Vec         u_local;           /* local ghosted approximate solution vector */
  Vec         solution;          /* global exact solution vector */
  PetscInt    m;                 /* total number of grid points */
  PetscReal   h;                 /* mesh width h = 1/(m-1) */
  PetscTruth  debug;             /* flag (1 indicates activation of debugging printouts) */
  PetscViewer viewer1,viewer2;   /* viewers for the solution and error */
  PetscReal   norm_2,norm_max;   /* error norms */
} AppCtx;
/*
   User-defined routines, defined after main() and declared here so that
   they can be passed to the TS solver
*/
extern PetscErrorCode InitialConditions(Vec,AppCtx*);
extern PetscErrorCode RHSMatrixHeat(TS,PetscReal,Mat*,Mat*,MatStructure*,void*);
extern PetscErrorCode RHSFunctionHeat(TS,PetscReal,Vec,Vec,void*);
extern PetscErrorCode Monitor(TS,PetscInt,PetscReal,Vec,void*);
extern PetscErrorCode ExactSolution(PetscReal,Vec,AppCtx*);
int main(int argc,char **argv)
{
  AppCtx        appctx;                 /* user-defined application context */
  TS            ts;                     /* timestepping context */
  Mat           A;                      /* matrix data structure */
  Vec           u;                      /* approximate solution vector */
  PetscReal     time_total_max = 100.0; /* default max total time */
  PetscInt      time_steps_max = 100;   /* default max timesteps */
  PetscDraw     draw;                   /* drawing context */
  PetscInt      steps,m;
  PetscMPIInt   size;
  PetscReal     dt,ftime;
  PetscTruth    flg;
  TSProblemType tsproblem = TS_LINEAR;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Initialize program and set problem parameters
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscInitialize(&argc,&argv,(char*)0,help);
  appctx.comm = PETSC_COMM_WORLD;

  m    = 60;
  PetscOptionsGetInt(PETSC_NULL,"-m",&m,PETSC_NULL);
  PetscOptionsHasName(PETSC_NULL,"-debug",&appctx.debug);
  appctx.m        = m;
  appctx.h        = 1.0/(m-1.0);
  appctx.norm_2   = 0.0;
  appctx.norm_max = 0.0;
  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  PetscPrintf(PETSC_COMM_WORLD,"Solving a linear TS problem, number of processors = %d\n",size);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create vector data structures
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create distributed array (DA) to manage parallel grid and vectors
     and to set up the ghost point communication pattern.  There are M
     total grid values spread equally among all the processors.
  */
  DACreate1d(PETSC_COMM_WORLD,DA_NONPERIODIC,m,1,1,PETSC_NULL,&appctx.da);

  /*
     Extract global and local vectors from DA; we use these to store the
     approximate solution.  Then duplicate these for remaining vectors that
     have the same types.
  */
  DACreateGlobalVector(appctx.da,&u);
  DACreateLocalVector(appctx.da,&appctx.u_local);

  /*
     Create local work vector for use in evaluating right-hand-side function;
     create global work vector for storing exact solution.
  */
  VecDuplicate(appctx.u_local,&appctx.localwork);
  VecDuplicate(u,&appctx.solution);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set up displays to show graphs of the solution and error
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscViewerDrawOpen(PETSC_COMM_WORLD,0,"",80,380,400,160,&appctx.viewer1);
  PetscViewerDrawGetDraw(appctx.viewer1,0,&draw);
  PetscDrawSetDoubleBuffer(draw);
  PetscViewerDrawOpen(PETSC_COMM_WORLD,0,"",80,0,400,160,&appctx.viewer2);
  PetscViewerDrawGetDraw(appctx.viewer2,0,&draw);
  PetscDrawSetDoubleBuffer(draw);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create timestepping solver context
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  TSCreate(PETSC_COMM_WORLD,&ts);

  PetscOptionsHasName(PETSC_NULL,"-nonlinear",&flg);
  if (flg) tsproblem = TS_NONLINEAR;
#if defined(PETSC_HAVE_SUNDIALS)
  tsproblem = TS_NONLINEAR;
#endif
  TSSetProblemType(ts,tsproblem);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set optional user-defined monitoring routine
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  TSMonitorSet(ts,Monitor,&appctx,PETSC_NULL);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Create matrix data structure; set matrix evaluation routine.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m,m);
  MatSetFromOptions(A);

  PetscOptionsHasName(PETSC_NULL,"-time_dependent_rhs",&flg);
  if (flg) {
    /*
       For linear problems with a time-dependent f(u,t) in the equation
       u_t = f(u,t), the user provides the discretized right-hand side
       as a time-dependent matrix.
    */
    TSSetMatrices(ts,A,RHSMatrixHeat,PETSC_NULL,PETSC_NULL,DIFFERENT_NONZERO_PATTERN,&appctx);
  } else {
    /*
       For linear problems with a time-independent f(u) in the equation
       u_t = f(u), the user provides the discretized right-hand side
       as a matrix only once, and then sets a null matrix evaluation
       routine.
    */
    MatStructure A_structure;
    RHSMatrixHeat(ts,0.0,&A,&A,&A_structure,&appctx);
    TSSetMatrices(ts,A,PETSC_NULL,PETSC_NULL,PETSC_NULL,DIFFERENT_NONZERO_PATTERN,&appctx);
  }

  if (tsproblem == TS_NONLINEAR) {
    TSSetRHSFunction(ts,RHSFunctionHeat,&appctx);
    TSSetRHSJacobian(ts,A,A,TSDefaultComputeJacobian,&appctx);
  }
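
  /*
     Note: on the nonlinear path, TSDefaultComputeJacobian approximates the
     Jacobian of RHSFunctionHeat by finite differencing, which for this
     linear right-hand side reproduces (approximately) the same constant
     heat-equation matrix.
  */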
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Set solution vector and initial timestep
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  dt = appctx.h*appctx.h/2.0;
  TSSetInitialTimeStep(ts,0.0,dt);
  TSSetSolution(ts,u);
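
  /*
     The choice dt = h^2/2 is the classical stability limit for the forward
     Euler scheme applied to u_t = u_xx on a grid of spacing h; implicit
     methods such as backward Euler remain stable for larger timesteps.
  */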
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Customize timestepping solver:
       - Set the solution method to be the Backward Euler method.
       - Set timestepping duration info
     Then set runtime options, which can override these defaults.
     For example,
          -ts_max_steps <maxsteps> -ts_max_time <maxtime>
     to override the defaults set by TSSetDuration().
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  TSSetDuration(ts,time_steps_max,time_total_max);
  TSSetFromOptions(ts);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Solve the problem
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Evaluate initial conditions
  */
  InitialConditions(u,&appctx);

  /*
     Run the timestepping solver
  */
  TSStep(ts,&steps,&ftime);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     View timestepping solver info
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  PetscPrintf(PETSC_COMM_WORLD,"avg. error (2 norm) = %G, avg. error (max norm) = %G\n",
              appctx.norm_2/steps,appctx.norm_max/steps);
  TSView(ts,PETSC_VIEWER_STDOUT_WORLD);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  TSDestroy(ts);
  MatDestroy(A);
  VecDestroy(u);
  PetscViewerDestroy(appctx.viewer1);
  PetscViewerDestroy(appctx.viewer2);
  VecDestroy(appctx.localwork);
  VecDestroy(appctx.solution);
  VecDestroy(appctx.u_local);
  DADestroy(appctx.da);

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_summary).
  */
  PetscFinalize();
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   InitialConditions - Computes the solution at the initial time.

   Input Parameters:
   u - uninitialized solution vector (global)
   appctx - user-defined application context

   Output Parameter:
   u - vector with solution at initial time (global)
*/
PetscErrorCode InitialConditions(Vec u,AppCtx *appctx)
{
  PetscScalar *u_localptr,h = appctx->h;
  PetscInt    i,mybase,myend;

  /*
     Determine starting point of each processor's range of
     grid values.
  */
  VecGetOwnershipRange(u,&mybase,&myend);

  /*
     Get a pointer to vector data.
     - For default PETSc vectors, VecGetArray() returns a pointer to
       the data array.  Otherwise, the routine is implementation dependent.
     - You MUST call VecRestoreArray() when you no longer need access to
       the array.
     - Note that the Fortran interface to VecGetArray() differs from the
       C version.  See the users manual for details.
  */
  VecGetArray(u,&u_localptr);

  /*
     We initialize the solution array by simply writing the solution
     directly into the array locations.  Alternatively, we could use
     VecSetValues() or VecSetValuesLocal().
  */
  for (i=mybase; i<myend; i++) {
    u_localptr[i-mybase] = PetscSinScalar(PETSC_PI*i*6.*h) + 3.*PetscSinScalar(PETSC_PI*i*2.*h);
  }
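
  /*
     The global index i is mapped to the local array offset i-mybase.  For
     example, with the default m = 60 on two processes and an equal split,
     rank 0 owns global indices 0..29 (local offsets 0..29) and rank 1 owns
     global indices 30..59 (again local offsets 0..29).
  */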
  /*
     Restore vector
  */
  VecRestoreArray(u,&u_localptr);

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscPrintf(appctx->comm,"initial guess vector\n");
    VecView(u,PETSC_VIEWER_STDOUT_WORLD);
  }

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   ExactSolution - Computes the exact solution at a given time.

   Input Parameters:
   t - current time
   solution - vector in which exact solution will be computed
   appctx - user-defined application context

   Output Parameter:
   solution - vector with the newly computed exact solution
*/
PetscErrorCode ExactSolution(PetscReal t,Vec solution,AppCtx *appctx)
{
  PetscScalar *s_localptr,h = appctx->h,ex1,ex2,sc1,sc2;
  PetscInt    i,mybase,myend;

  /*
     Determine starting and ending points of each processor's
     range of grid values
  */
  VecGetOwnershipRange(solution,&mybase,&myend);

  /*
     Get a pointer to vector data.
  */
  VecGetArray(solution,&s_localptr);

  /*
     Simply write the solution directly into the array locations.
     Alternatively, we could use VecSetValues() or VecSetValuesLocal().
  */
  ex1 = exp(-36.*PETSC_PI*PETSC_PI*t);   ex2 = exp(-4.*PETSC_PI*PETSC_PI*t);
  sc1 = PETSC_PI*6.*h;                   sc2 = PETSC_PI*2.*h;
  for (i=mybase; i<myend; i++) {
    s_localptr[i-mybase] = PetscSinScalar(sc1*(PetscReal)i)*ex1 + 3.*PetscSinScalar(sc2*(PetscReal)i)*ex2;
  }

  /*
     Restore vector
  */
  VecRestoreArray(solution,&s_localptr);
  return 0;
}
/* --------------------------------------------------------------------- */
/*
   Monitor - User-provided routine to monitor the solution computed at
   each timestep.  This example plots the solution and computes the
   error in two different norms.

   Input Parameters:
   ts     - the timestep context
   step   - the count of the current step (with 0 meaning the
            initial condition)
   time   - the current time
   u      - the solution at this timestep
   ctx    - the user-provided context for this monitoring routine.
            In this case we use the application context which contains
            information about the problem size, workspace and the exact
            solution.
*/
PetscErrorCode Monitor(TS ts,PetscInt step,PetscReal time,Vec u,void *ctx)
{
  AppCtx    *appctx = (AppCtx*) ctx;   /* user-defined application context */
  PetscReal norm_2,norm_max;

  /*
     View a graph of the current iterate
  */
  VecView(u,appctx->viewer2);

  /*
     Compute the exact solution
  */
  ExactSolution(time,appctx->solution,appctx);

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscPrintf(appctx->comm,"Computed solution vector\n");
    VecView(u,PETSC_VIEWER_STDOUT_WORLD);
    PetscPrintf(appctx->comm,"Exact solution vector\n");
    VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD);
  }

  /*
     Compute the 2-norm and max-norm of the error
  */
  VecAXPY(appctx->solution,-1.0,u);
  VecNorm(appctx->solution,NORM_2,&norm_2);
  norm_2 = sqrt(appctx->h)*norm_2;
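
  /*
     Scaling the discrete 2-norm by sqrt(h) makes it an approximation of the
     continuous L2 norm of the error: sum_i h*e_i^2 ~ integral e(x)^2 dx,
     so sqrt(h)*||e||_2 ~ ( integral e(x)^2 dx )^(1/2).
  */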
  VecNorm(appctx->solution,NORM_MAX,&norm_max);

  /*
     PetscPrintf() causes only the first processor in this
     communicator to print the timestep information.
  */
  PetscPrintf(appctx->comm,"Timestep %D: time = %G, 2-norm error = %G, max norm error = %G\n",
              step,time,norm_2,norm_max);
  appctx->norm_2   += norm_2;
  appctx->norm_max += norm_max;

  /*
     View a graph of the error
  */
  VecView(appctx->solution,appctx->viewer1);

  /*
     Print debugging information if desired
  */
  if (appctx->debug) {
    PetscPrintf(appctx->comm,"Error vector\n");
    VecView(appctx->solution,PETSC_VIEWER_STDOUT_WORLD);
  }

  return 0;
}
/* --------------------------------------------------------------------- */
/*
   RHSMatrixHeat - User-provided routine to compute the right-hand-side
   matrix for the heat equation.

   Input Parameters:
   ts - the TS context
   t - current time
   global_in - global input vector
   dummy - optional user-defined context, as set by TSSetRHSJacobian()

   Output Parameters:
   AA - Jacobian matrix
   BB - optionally different preconditioning matrix
   str - flag indicating matrix structure

   Notes:
   RHSMatrixHeat computes entries for the locally owned part of the system.
    - Currently, all PETSc parallel matrix formats are partitioned by
      contiguous chunks of rows across the processors.
    - Each processor needs to insert only elements that it owns
      locally (but any non-local elements will be sent to the
      appropriate processor during matrix assembly).
    - Always specify global row and column numbers of matrix entries when
      using MatSetValues(); we could alternatively use MatSetValuesLocal().
    - Here, we set all entries for a particular row at once.
    - Note that MatSetValues() uses 0-based row and column numbers
      in Fortran as well as in C.
*/
PetscErrorCode RHSMatrixHeat(TS ts,PetscReal t,Mat *AA,Mat *BB,MatStructure *str,void *ctx)
{
  Mat         A = *AA;                /* Jacobian matrix */
  AppCtx      *appctx = (AppCtx*)ctx; /* user-defined application context */
  PetscInt    i,mstart,mend,idx[3];
  PetscScalar v[3],stwo = -2./(appctx->h*appctx->h),sone = -.5*stwo;

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Compute entries for the locally owned part of the matrix
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  MatGetOwnershipRange(A,&mstart,&mend);

  /*
     Set matrix rows corresponding to boundary data
  */

  if (mstart == 0) {  /* first processor only */
    v[0] = 1.0;
    MatSetValues(A,1,&mstart,1,&mstart,v,INSERT_VALUES);
    mstart++;
  }

  if (mend == appctx->m) { /* last processor only */
    mend--;
    v[0] = 1.0;
    MatSetValues(A,1,&mend,1,&mend,v,INSERT_VALUES);
  }

  /*
     Set matrix rows corresponding to interior data.  We construct the
     matrix one row at a time.
  */
  v[0] = sone; v[1] = stwo; v[2] = sone;
  for (i=mstart; i<mend; i++) {
    idx[0] = i-1; idx[1] = i; idx[2] = i+1;
    MatSetValues(A,1,&i,3,idx,v,INSERT_VALUES);
  }
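
  /*
     For illustration, with m = 5 (so h = 1/4 and 1/h^2 = 16) the assembled
     matrix is

         [  1    0    0    0    0 ]
         [ 16  -32   16    0    0 ]
         [  0   16  -32   16    0 ]
         [  0    0   16  -32   16 ]
         [  0    0    0    0    1 ]

     i.e. identity rows for the two Dirichlet boundary points and the scaled
     second-difference stencil (1,-2,1)/h^2 in the interior rows.
  */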
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
     Complete the matrix assembly process and set some options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);

  /*
     Set flag to indicate that the Jacobian matrix retains an identical
     nonzero structure throughout all timestepping iterations (although the
     values of the entries change).  Thus, we can save some work in setting
     up the preconditioner (e.g., no need to redo symbolic factorization for
     ILU/ICC preconditioners).
      - If the nonzero structure of the matrix is different during
        successive linear solves, then the flag DIFFERENT_NONZERO_PATTERN
        must be used instead.  If you are unsure whether the matrix
        structure has changed or not, use the flag DIFFERENT_NONZERO_PATTERN.
      - Caution:  If you specify SAME_NONZERO_PATTERN, PETSc
        believes your assertion and does not check the structure
        of the matrix.  If you erroneously claim that the structure
        is the same when it actually is not, the new preconditioner
        will not function correctly.  Thus, use this optimization
        feature with caution!
  */
  *str = SAME_NONZERO_PATTERN;
  /*
     Set an option to indicate that we will never add a new nonzero location
     to the matrix.  If we do, it will generate an error.
  */
  MatSetOption(A,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE);

  return 0;
}
/* --------------------------------------------------------------------- */
PetscErrorCode RHSFunctionHeat(TS ts,PetscReal t,Vec globalin,Vec globalout,void *ctx)
{
  Mat          A;
  MatStructure A_structure;

  TSGetMatrices(ts,&A,PETSC_NULL,&ctx);
  RHSMatrixHeat(ts,t,&A,PETSC_NULL,&A_structure,ctx);
  /* MatView(A,PETSC_VIEWER_STDOUT_WORLD); */
  MatMult(A,globalin,globalout);
  return 0;
}
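
/*
   RHSFunctionHeat is only used when the problem has been registered as
   TS_NONLINEAR, i.e. when the program is run with -nonlinear (or when PETSc
   was configured with SUNDIALS).  An illustrative run, assuming the
   executable is ./ex4:

       mpiexec -n 2 ./ex4 -nonlinear -ts_type euler

   At each evaluation it rebuilds the constant heat-equation matrix via
   RHSMatrixHeat() and applies it to the current state, so the right-hand
   side matches the one used on the linear path.
*/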