Actual source code: ex5f90.F

  1: !
  2: !  Description: Solves a nonlinear system in parallel with SNES.
  3: !  We solve the  Bratu (SFI - solid fuel ignition) problem in a 2D rectangular
  4: !  domain, using distributed arrays (DAs) to partition the parallel grid.
  5: !  The command line options include:
  6: !    -par <parameter>, where <parameter> indicates the nonlinearity of the problem
  7: !       problem SFI:  <parameter> = Bratu parameter (0 <= par <= 6.81)
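!
!  An example invocation (for illustration; the executable name and MPI
!  launcher are assumed and may differ on your system):
!     mpiexec -n 4 ./ex5f90 -par 5.0 -snes_monitor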
  8: !
  9: !/*T
 10: !  Concepts: SNES^parallel Bratu example
 11: !  Concepts: DA^using distributed arrays;
 12: !  Processors: n
 13: !T*/
 14: !
 15: !  --------------------------------------------------------------------------
 16: !
 17: !  Solid Fuel Ignition (SFI) problem.  This problem is modeled by
 18: !  the partial differential equation
 19: !
 20: !          -Laplacian u - lambda*exp(u) = 0,  0 < x,y < 1,
 21: !
 22: !  with boundary conditions
 23: !
 24: !           u = 0  for  x = 0, x = 1, y = 0, y = 1.
 25: !
 26: !  A finite difference approximation with the usual 5-point stencil
 27: !  is used to discretize the boundary value problem to obtain a nonlinear
 28: !  system of equations.
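!
!  As a sketch, for an interior grid point (i,j) with mesh spacings
!  hx = 1/(mx-1) and hy = 1/(my-1), the discrete equation assembled in
!  FormFunctionLocal() below is
!
!     (hy/hx)*(2*u(i,j) - u(i-1,j) - u(i+1,j))
!   + (hx/hy)*(2*u(i,j) - u(i,j-1) - u(i,j+1))
!   - hx*hy*lambda*exp(u(i,j)) = 0
!
!  while boundary points simply satisfy u(i,j) = 0.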
 29: !
 30: !  The uniprocessor version of this code is snes/examples/tutorials/ex4f.F
 31: !
 32: !  --------------------------------------------------------------------------
 33: !  The following define must be used before including any PETSc include files
 34: !  into a module or interface, because modules and interfaces cannot
 35: !  contain the declarations those files would otherwise introduce.
 36: !

 38:       module f90module
 39:       type userctx
 40: #include "finclude/petscsysdef.h"
 41: #include "finclude/petscvecdef.h"
 42: #include "finclude/petscdadef.h"
 43:         DA      da
 44:         PetscInt xs,xe,xm,gxs,gxe,gxm
 45:         PetscInt ys,ye,ym,gys,gye,gym
 46:         PetscInt mx,my
 47:         PetscMPIInt rank
 48:         double precision lambda
 49:       end type userctx

 51:       contains
 52: ! ---------------------------------------------------------------------
 53: !
 54: !  FormFunction - Evaluates nonlinear function, F(x).
 55: !
 56: !  Input Parameters:
 57: !  snes - the SNES context
 58: !  X - input vector
 59: !  user - user-defined application context, as set by
 60: !         SNESSetFunction()
 61: !
 62: !  Output Parameter:
 63: !  F - function vector
 64: !
 65: !  Notes:
 66: !  This routine serves as a wrapper for the lower-level routine
 67: !  "FormFunctionLocal", where the actual computations are
 68: !  done using the standard Fortran style of treating the local
 69: !  vector data as a multidimensional array over the local mesh.
 70: !  This routine merely handles ghost point scatters and accesses
 71: !  the local vector data via VecGetArrayF90() and VecRestoreArrayF90().
 72: !
 73:       subroutine FormFunction(snes,X,F,user,ierr)
 74:       implicit none

 76: #include "finclude/petscsys.h"
 77: #include "finclude/petscvec.h"
 78: #include "finclude/petscda.h"
 79: #include "finclude/petscis.h"
 80: #include "finclude/petscmat.h"
 81: #include "finclude/petscksp.h"
 82: #include "finclude/petscpc.h"
 83: #include "finclude/petscsnes.h"
 84: #include "finclude/petscvec.h90"

 86: !  Input/output variables:
 87:       SNES           snes
 88:       Vec            X,F
 89:       PetscErrorCode ierr
 90:       type (userctx) user

 92: !  Declarations for use with local arrays:
 93:       PetscScalar,pointer :: lx_v(:),lf_v(:)
 94:       Vec            localX

 96: !  Scatter ghost points to local vector, using the 2-step process
 97: !     DAGlobalToLocalBegin(), DAGlobalToLocalEnd().
 98: !  By placing code between these two statements, computations can
 99: !  be done while messages are in transition.
100:       call DAGetLocalVector(user%da,localX,ierr)
101:       call DAGlobalToLocalBegin(user%da,X,INSERT_VALUES,                &
102:      &     localX,ierr)
103:       call DAGlobalToLocalEnd(user%da,X,INSERT_VALUES,localX,ierr)

105: !  Get a pointer to vector data.
106: !    - For default PETSc vectors, VecGetArrayF90() returns a pointer to
107: !      the data array. Otherwise, the routine is implementation dependent.
108: !    - You MUST call VecRestoreArrayF90() when you no longer need access to
109: !      the array.
110: !    - Note that the interface to VecGetArrayF90() differs from VecGetArray(),
111: !      and is usable only from Fortran 90.

113:       call VecGetArrayF90(localX,lx_v,ierr)
114:       call VecGetArrayF90(F,lf_v,ierr)

116: !  Compute function over the locally owned part of the grid
117:       call FormFunctionLocal(lx_v,lf_v,user,ierr)

119: !  Restore vectors
120:       call VecRestoreArrayF90(localX,lx_v,ierr)
121:       call VecRestoreArrayF90(F,lf_v,ierr)

123: !  Insert values into global vector

125:       call DARestoreLocalVector(user%da,localX,ierr)
126:       call PetscLogFlops(11.0d0*user%ym*user%xm,ierr)

128: !      call VecView(X,PETSC_VIEWER_STDOUT_WORLD,ierr)
129: !      call VecView(F,PETSC_VIEWER_STDOUT_WORLD,ierr)
130:       return
131:       end subroutine formfunction
132:       end module f90module

134:       module f90moduleinterfaces
135:         use f90module
136: 
137:       Interface SNESSetApplicationContext
138:         Subroutine SNESSetApplicationContext(snes,ctx,ierr)
139:         use f90module
140:           SNES snes
141:           type(userctx) ctx
142:           PetscErrorCode ierr
143:         End Subroutine
144:       End Interface SNESSetApplicationContext

146:       Interface SNESGetApplicationContext
147:         Subroutine SNESGetApplicationContext(snes,ctx,ierr)
148:         use f90module
149:           SNES snes
150:           type(userctx), pointer :: ctx
151:           PetscErrorCode ierr
152:         End Subroutine
153:       End Interface SNESGetApplicationContext
154:       end module f90moduleinterfaces

156:       program main
157:       use f90module
158:       use f90moduleinterfaces
159:       implicit none
160: !
161: #include "finclude/petscsys.h"
162: #include "finclude/petscvec.h"
163: #include "finclude/petscda.h"
164: #include "finclude/petscis.h"
165: #include "finclude/petscmat.h"
166: #include "finclude/petscksp.h"
167: #include "finclude/petscpc.h"
168: #include "finclude/petscsnes.h"
169: #include "finclude/petscvec.h90"
170: #include "finclude/petscda.h90"

172: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
173: !                   Variable declarations
174: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
175: !
176: !  Variables:
177: !     snes        - nonlinear solver
178: !     x, r        - solution, residual vectors
179: !     J           - Jacobian matrix
180: !     its         - iterations for convergence
181: !     Nx, Ny      - number of processors in x- and y- directions
182: !     matrix_free - flag indicating use of the matrix-free version
183: !
184:       SNES             snes
185:       Vec              x,r
186:       Mat              J
187:       PetscErrorCode   ierr
188:       PetscInt         its
189:       PetscTruth       flg,matrix_free
190:       PetscInt         ione,nfour
191:       double precision lambda_max,lambda_min
192:       type (userctx)   user
193:       type(userctx), pointer:: puser

195: !  Note: Any user-defined Fortran routines (such as FormJacobian)
196: !  MUST be declared as external.
197:       external FormInitialGuess,FormJacobian

199: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
200: !  Initialize program
201: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
202:       call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
203:       call MPI_Comm_rank(PETSC_COMM_WORLD,user%rank,ierr)

205: !  Initialize problem parameters
206:       lambda_max  = 6.81
207:       lambda_min  = 0.0
208:       user%lambda = 6.0
209:       ione = 1
210:       nfour = -4
211:       call PetscOptionsGetReal(PETSC_NULL_CHARACTER,'-par',             &
212:      &     user%lambda,flg,ierr)
213:       if (user%lambda .ge. lambda_max .or. user%lambda .le. lambda_min) &
214:      &     then
215:          if (user%rank .eq. 0) write(6,*) 'Lambda is out of range'
216:          SETERRQ(1,' ',ierr)
217:       endif

219: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
220: !  Create nonlinear solver context
221: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
222:       call SNESCreate(PETSC_COMM_WORLD,snes,ierr)

224: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
225: !  Create vector data structures; set function evaluation routine
226: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

228: !  Create distributed array (DA) to manage parallel grid and vectors

230: ! This really needs only the star-type stencil, but we use the box
231: ! stencil temporarily.
232:       call DACreate2d(PETSC_COMM_WORLD,DA_NONPERIODIC,DA_STENCIL_BOX,   &
233:      &     nfour,nfour,PETSC_DECIDE,PETSC_DECIDE,ione,ione,             &
234:      &     PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,user%da,ierr)
235:       call DAGetInfo(user%da,PETSC_NULL_INTEGER,user%mx,user%my,        &
236:      &               PETSC_NULL_INTEGER,                                &
237:      &               PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,             &
238:      &               PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,             &
239:      &               PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,             &
240:      &               PETSC_NULL_INTEGER,ierr)
241: 
242: !
243: !   Visualize the distribution of the array across the processors
244: !
245: !     call DAView(user%da,PETSC_VIEWER_DRAW_WORLD,ierr)

247: !  Extract global and local vectors from DA; then duplicate for remaining
248: !  vectors that are the same type
249:       call DACreateGlobalVector(user%da,x,ierr)
250:       call VecDuplicate(x,r,ierr)

252: !  Get local grid boundaries (for 2-dimensional DA)
253:       call DAGetCorners(user%da,user%xs,user%ys,PETSC_NULL_INTEGER,     &
254:      &     user%xm,user%ym,PETSC_NULL_INTEGER,ierr)
255:       call DAGetGhostCorners(user%da,user%gxs,user%gys,                 &
256:      &     PETSC_NULL_INTEGER,user%gxm,user%gym,                        &
257:      &     PETSC_NULL_INTEGER,ierr)

259: !  Here we shift the starting indices up by one so that we can easily
260: !  use the Fortran convention of 1-based indices (rather than 0-based indices).
261:       user%xs  = user%xs+1
262:       user%ys  = user%ys+1
263:       user%gxs = user%gxs+1
264:       user%gys = user%gys+1

266:       user%ye  = user%ys+user%ym-1
267:       user%xe  = user%xs+user%xm-1
268:       user%gye = user%gys+user%gym-1
269:       user%gxe = user%gxs+user%gxm-1
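
!  For example (illustrative values): if DAGetCorners() returned xs = 0
!  and xm = 4 on this process, the shift above gives xs = 1 and
!  xe = xs+xm-1 = 4, so local loops can run i = xs,xe using Fortran
!  1-based indexing.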

271:       call SNESSetApplicationContext(snes,user,ierr)

273: !  Set function evaluation routine and vector
274:       call SNESSetFunction(snes,r,FormFunction,user,ierr)

276: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
277: !  Create matrix data structure; set Jacobian evaluation routine
278: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

280: !  Set Jacobian matrix data structure and default Jacobian evaluation
281: !  routine. User can override with:
282: !     -snes_fd : default finite differencing approximation of Jacobian
283: !     -snes_mf : matrix-free Newton-Krylov method with no preconditioning
284: !                (unless user explicitly sets preconditioner)
285: !     -snes_mf_operator : form preconditioning matrix as set by the user,
286: !                         but use matrix-free approx for Jacobian-vector
287: !                         products within Newton-Krylov method
288: !
289: !  Note:  For the parallel case, vectors and matrices MUST be partitioned
290: !     accordingly.  When using distributed arrays (DAs) to create vectors,
291: !     the DAs determine the problem partitioning.  We must explicitly
292: !     specify the local matrix dimensions upon its creation for compatibility
293: !     with the vector distribution.  Thus, the generic MatCreate() routine
294: !     is NOT sufficient when working with distributed arrays.
295: !
296: !     Note: Here we only approximately preallocate storage space for the
297: !     Jacobian.  See the users manual for a discussion of better techniques
298: !     for preallocating matrix memory.
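!
!     An example run exercising these options (illustrative; executable
!     name and MPI launcher are assumed):
!        mpiexec -n 2 ./ex5f90 -snes_mf_operator -pc_type jacobi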
299: 
300:       call PetscOptionsHasName(PETSC_NULL_CHARACTER,'-snes_mf',         &
301:      &     matrix_free,ierr)
302:       if (.not. matrix_free) then
303:         call DAGetMatrix(user%da,MATAIJ,J,ierr)
304:         call SNESSetJacobian(snes,J,J,FormJacobian,user,ierr)
305:       endif

307: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
308: !  Customize nonlinear solver; set runtime options
309: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
310: !  Set runtime options (e.g., -snes_monitor -snes_rtol <rtol> -ksp_type <type>)
311:       call SNESSetFromOptions(snes,ierr)

313: !     Test Fortran90 wrapper for SNESSet/Get ApplicationContext()
314:       call PetscOptionsGetTruth(PETSC_NULL_CHARACTER,'-test_appctx',             &
315:      &     flg,PETSC_NULL_CHARACTER,ierr)
316:       if (flg) then
317:         call SNESGetApplicationContext(snes,puser,ierr)
318:         if (user%da .ne. puser%da) then
319:           write(*,*) "Error: user != puser"
320:           write(*,*) "user: ", user
321:           write(*,*) "puser: ", puser
322:         endif
323:       endif

325: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
326: !  Evaluate initial guess; then solve nonlinear system.
327: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
328: !  Note: The user should initialize the vector, x, with the initial guess
329: !  for the nonlinear solver prior to calling SNESSolve().  In particular,
330: !  to employ an initial guess of zero, the user should explicitly set
331: !  this vector to zero by calling VecSet().
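!
!  A minimal sketch of that zero initialization (not used in this example,
!  which calls FormInitialGuess() instead):
!     PetscScalar zero
!     zero = 0.0
!     call VecSet(x,zero,ierr)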

333:       call FormInitialGuess(snes,x,ierr)
334:       call SNESSolve(snes,PETSC_NULL_OBJECT,x,ierr)
335:       call SNESGetIterationNumber(snes,its,ierr)
336:       if (user%rank .eq. 0) then
337:          write(6,100) its
338:       endif
339:   100 format('Number of Newton iterations = ',i5)

341: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
342: !  Free work space.  All PETSc objects should be destroyed when they
343: !  are no longer needed.
344: ! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
345:       if (.not. matrix_free) call MatDestroy(J,ierr)
346:       call VecDestroy(x,ierr)
347:       call VecDestroy(r,ierr)
348:       call SNESDestroy(snes,ierr)
349:       call DADestroy(user%da,ierr)

351:       call PetscFinalize(ierr)
352:       end

354: ! ---------------------------------------------------------------------
355: !
356: !  FormInitialGuess - Forms initial approximation.
357: !
358: !  Input Parameters:
359: !  X - vector
360: !
361: !  Output Parameter:
362: !  X - vector
363: !
364: !  Notes:
365: !  This routine serves as a wrapper for the lower-level routine
366: !  "InitialGuessLocal", where the actual computations are
367: !  done using the standard Fortran style of treating the local
368: !  vector data as a multidimensional array over the local mesh.
369: !  This routine merely handles ghost point scatters and accesses
370: !  the local vector data via VecGetArrayF90() and VecRestoreArrayF90().
371: !
372:       subroutine FormInitialGuess(snes,X,ierr)
373:       use f90module
374:       use f90moduleinterfaces
375:       implicit none

377: #include "finclude/petscvec.h90"
378: #include "finclude/petscsys.h"
379: #include "finclude/petscvec.h"
380: #include "finclude/petscda.h"
381: #include "finclude/petscis.h"
382: #include "finclude/petscmat.h"
383: #include "finclude/petscksp.h"
384: #include "finclude/petscpc.h"
385: #include "finclude/petscsnes.h"

387: !  Input/output variables:
388:       SNES           snes
389:       type(userctx), pointer:: puser
390:       Vec            X
391:       PetscErrorCode ierr
392: 
393: !  Declarations for use with local arrays:
394:       PetscScalar,pointer :: lx_v(:)
395:       Vec               localX

397:       ierr = 0
398:       call SNESGetApplicationContext(snes,puser,ierr)
399: !  Get a pointer to vector data.
400: !    - For default PETSc vectors, VecGetArrayF90() returns a pointer to
401: !      the data array. Otherwise, the routine is implementation dependent.
402: !    - You MUST call VecRestoreArrayF90() when you no longer need access to
403: !      the array.
404: !    - Note that the interface to VecGetArrayF90() differs from VecGetArray(),
405: !      and is usable only from Fortran 90.

407:       call DAGetLocalVector(puser%da,localX,ierr)
408:       call VecGetArrayF90(localX,lx_v,ierr)

410: !  Compute initial guess over the locally owned part of the grid
411:       call InitialGuessLocal(puser,lx_v,ierr)

413: !  Restore vector
414:       call VecRestoreArrayF90(localX,lx_v,ierr)

416: !  Insert values into global vector
417:       call DALocalToGlobal(puser%da,localX,INSERT_VALUES,X,ierr)
418:       call DARestoreLocalVector(puser%da,localX,ierr)

420:       return
421:       end

423: ! ---------------------------------------------------------------------
424: !
425: !  InitialGuessLocal - Computes initial approximation, called by
426: !  the higher level routine FormInitialGuess().
427: !
428: !  Input Parameter:
429: !  x - local vector data
430: !
431: !  Output Parameters:
432: !  x - local vector data
433: !  ierr - error code
434: !
435: !  Notes:
436: !  This routine uses standard Fortran-style computations over a 2-dim array.
437: !
438:       subroutine InitialGuessLocal(user,x,ierr)
439:       use f90module
440:       implicit none

442: #include "finclude/petscsys.h"
443: #include "finclude/petscvec.h"
444: #include "finclude/petscda.h"
445: #include "finclude/petscis.h"
446: #include "finclude/petscmat.h"
447: #include "finclude/petscksp.h"
448: #include "finclude/petscpc.h"
449: #include "finclude/petscsnes.h"

451: !  Input/output variables:
452:       type (userctx)         user
453:       PetscScalar  x(user%gxs:user%gxe,                                 &
454:      &              user%gys:user%gye)
455:       PetscErrorCode ierr

457: !  Local variables:
458:       PetscInt  i,j
459:       PetscScalar   temp1,temp,hx,hy
460:       PetscScalar   one

462: !  Set parameters

464:       ierr   = 0
465:       one    = 1.0
466:       hx     = one/(dble(user%mx-1))
467:       hy     = one/(dble(user%my-1))
468:       temp1  = user%lambda/(user%lambda + one)

470:       do 20 j=user%ys,user%ye
471:          temp = dble(min(j-1,user%my-j))*hy
472:          do 10 i=user%xs,user%xe
473:             if (i .eq. 1 .or. j .eq. 1                                  &
474:      &             .or. i .eq. user%mx .or. j .eq. user%my) then
475:               x(i,j) = 0.0
476:             else
477:               x(i,j) = temp1 *                                          &
478:      &          sqrt(min(dble(min(i-1,user%mx-i)*hx),dble(temp)))
479:             endif
480:  10      continue
481:  20   continue

483:       return
484:       end

486: ! ---------------------------------------------------------------------
487: !
488: !  FormFunctionLocal - Computes nonlinear function, called by
489: !  the higher level routine FormFunction().
490: !
491: !  Input Parameter:
492: !  x - local vector data
493: !
494: !  Output Parameters:
495: !  f - local vector data, f(x)
496: !  ierr - error code
497: !
498: !  Notes:
499: !  This routine uses standard Fortran-style computations over a 2-dim array.
500: !
501:       subroutine FormFunctionLocal(x,f,user,ierr)
502:       use f90module

504:       implicit none

506: !  Input/output variables:
507:       type (userctx) user
508:       PetscScalar  x(user%gxs:user%gxe,                                         &
509:      &              user%gys:user%gye)
510:       PetscScalar  f(user%xs:user%xe,                                           &
511:      &              user%ys:user%ye)
512:       PetscErrorCode ierr

514: !  Local variables:
515:       PetscScalar two,one,hx,hy,hxdhy,hydhx,sc
516:       PetscScalar u,uxx,uyy
517:       PetscInt  i,j

519:       one    = 1.0
520:       two    = 2.0
521:       hx     = one/dble(user%mx-1)
522:       hy     = one/dble(user%my-1)
523:       sc     = hx*hy*user%lambda
524:       hxdhy  = hx/hy
525:       hydhx  = hy/hx

527: !  Compute function over the locally owned part of the grid

529:       do 20 j=user%ys,user%ye
530:          do 10 i=user%xs,user%xe
531:             if (i .eq. 1 .or. j .eq. 1                                  &
532:      &             .or. i .eq. user%mx .or. j .eq. user%my) then
533:                f(i,j) = x(i,j)
534:             else
535:                u = x(i,j)
536:                uxx = hydhx * (two*u                                     &
537:      &                - x(i-1,j) - x(i+1,j))
538:                uyy = hxdhy * (two*u - x(i,j-1) - x(i,j+1))
539:                f(i,j) = uxx + uyy - sc*exp(u)
540:             endif
541:  10      continue
542:  20   continue

544:       return
545:       end

547: ! ---------------------------------------------------------------------
548: !
549: !  FormJacobian - Evaluates Jacobian matrix.
550: !
551: !  Input Parameters:
552: !  snes     - the SNES context
553: !  x        - input vector
554: !  user     - user-defined application context, as set by
555: !             SNESSetJacobian()
556: !
557: !  Output Parameters:
558: !  jac      - Jacobian matrix
559: !  jac_prec - optionally different preconditioning matrix (not used here)
560: !  flag     - flag indicating matrix structure
561: !
562: !  Notes:
563: !  This routine serves as a wrapper for the lower-level routine
564: !  "FormJacobianLocal", where the actual computations are
565: !  done using the standard Fortran style of treating the local
566: !  vector data as a multidimensional array over the local mesh.
567: !  This routine merely accesses the local vector data via
568: !  VecGetArrayF90() and VecRestoreArrayF90().
569: !
570: !  Notes:
571: !  Due to grid point reordering with DAs, we must always work
572: !  with the local grid points, and then transform them to the new
573: !  global numbering with the "ltog" mapping (via DAGetGlobalIndicesF90()).
574: !  We cannot work directly with the global numbers for the original
575: !  uniprocessor grid!
576: !
577: !  Two methods are available for imposing this transformation
578: !  when setting matrix entries:
579: !    (A) MatSetValuesLocal(), using the local ordering (including
580: !        ghost points!)
581: !        - Use DAGetGlobalIndicesF90() to extract the local-to-global map
582: !        - Associate this map with the matrix by calling
583: !          MatSetLocalToGlobalMapping() once
584: !        - Set matrix entries using the local ordering
585: !          by calling MatSetValuesLocal()
586: !    (B) MatSetValues(), using the global ordering
587: !        - Use DAGetGlobalIndicesF90() to extract the local-to-global map
588: !        - Then apply this map explicitly yourself
589: !        - Set matrix entries using the global ordering by calling
590: !          MatSetValues()
591: !  Option (A) seems cleaner/easier in many cases, and is the procedure
592: !  used in this example.
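!
!  A sketch of option (B), for illustration only: given a 0-based local
!  index "row" and the local-to-global index array ltog(:) obtained from
!  DAGetGlobalIndicesF90(), the corresponding global index is ltog(row+1)
!  (Fortran arrays being 1-based), which could then be passed to
!  MatSetValues().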
593: !
594:       subroutine FormJacobian(snes,X,jac,jac_prec,flag,user,ierr)
595:       use f90module
596:       implicit none

598: #include "finclude/petscsys.h"
599: #include "finclude/petscvec.h"
600: #include "finclude/petscda.h"
601: #include "finclude/petscis.h"
602: #include "finclude/petscmat.h"
603: #include "finclude/petscksp.h"
604: #include "finclude/petscpc.h"
605: #include "finclude/petscsnes.h"

607: #include "finclude/petscvec.h90"

609: !  Input/output variables:
610:       SNES         snes
611:       Vec          X
612:       Mat          jac,jac_prec
613:       MatStructure flag
614:       type(userctx)  user
615:       PetscErrorCode ierr

617: !  Declarations for use with local arrays:
618:       PetscScalar,pointer :: lx_v(:)
619:       Vec            localX

621: !  Scatter ghost points to local vector, using the 2-step process
622: !     DAGlobalToLocalBegin(), DAGlobalToLocalEnd()
623: !  Computations can be done while messages are in transition,
624: !  by placing code between these two statements.

626:       call DAGetLocalVector(user%da,localX,ierr)
627:       call DAGlobalToLocalBegin(user%da,X,INSERT_VALUES,localX,            &
628:      &     ierr)
629:       call DAGlobalToLocalEnd(user%da,X,INSERT_VALUES,localX,ierr)

631: !  Get a pointer to vector data
632:       call VecGetArrayF90(localX,lx_v,ierr)

634: !  Compute entries for the locally owned part of the Jacobian preconditioner.
635:       call FormJacobianLocal(lx_v,jac_prec,user,ierr)

637: !  Assemble matrix, using the 2-step process:
638: !     MatAssemblyBegin(), MatAssemblyEnd()
639: !  Computations can be done while messages are in transition,
640: !  by placing code between these two statements.

642:       call MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY,ierr)
643:       if (jac .ne. jac_prec) then
644:          call MatAssemblyBegin(jac_prec,MAT_FINAL_ASSEMBLY,ierr)
645:       endif
646:       call VecRestoreArrayF90(localX,lx_v,ierr)
647:       call DARestoreLocalVector(user%da,localX,ierr)
648:       call MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY,ierr)
649:       if (jac .ne. jac_prec) then
650:         call MatAssemblyEnd(jac_prec,MAT_FINAL_ASSEMBLY,ierr)
651:       endif
652: 
653: !  Set flag to indicate that the Jacobian matrix retains an identical
654: !  nonzero structure throughout all nonlinear iterations (although the
655: !  values of the entries change). Thus, we can save some work in setting
656: !  up the preconditioner (e.g., no need to redo symbolic factorization for
657: !  ILU/ICC preconditioners).
658: !   - If the nonzero structure of the matrix is different during
659: !     successive linear solves, then the flag DIFFERENT_NONZERO_PATTERN
660: !     must be used instead.  If you are unsure whether the matrix
661: !     structure has changed or not, use the flag DIFFERENT_NONZERO_PATTERN.
662: !   - Caution:  If you specify SAME_NONZERO_PATTERN, PETSc
663: !     believes your assertion and does not check the structure
664: !     of the matrix.  If you erroneously claim that the structure
665: !     is the same when it actually is not, the new preconditioner
666: !     will not function correctly.  Thus, use this optimization
667: !     feature with caution!
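!
!  For example, if new nonzero locations could appear between successive
!  solves, one would instead set:
!     flag = DIFFERENT_NONZERO_PATTERN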

669:       flag = SAME_NONZERO_PATTERN

671: !  Tell the matrix that we will never add a new nonzero location; if we
672: !  do, an error will be generated.

674:       call MatSetOption(jac,MAT_NEW_NONZERO_LOCATION_ERR,PETSC_TRUE,      &
675:      &                  ierr)

677:       return
678:       end

680: ! ---------------------------------------------------------------------
681: !
682: !  FormJacobianLocal - Computes Jacobian preconditioner matrix,
683: !  called by the higher level routine FormJacobian().
684: !
685: !  Input Parameters:
686: !  x        - local vector data
687: !
688: !  Output Parameters:
689: !  jac_prec - Jacobian preconditioner matrix
690: !  ierr     - error code
691: !
692: !  Notes:
693: !  This routine uses standard Fortran-style computations over a 2-dim array.
694: !
695: !  Notes:
696: !  Due to grid point reordering with DAs, we must always work
697: !  with the local grid points, and then transform them to the new
698: !  global numbering with the "ltog" mapping (via DAGetGlobalIndicesF90()).
699: !  We cannot work directly with the global numbers for the original
700: !  uniprocessor grid!
701: !
702: !  Two methods are available for imposing this transformation
703: !  when setting matrix entries:
704: !    (A) MatSetValuesLocal(), using the local ordering (including
705: !        ghost points!)
706: !        - Use DAGetGlobalIndicesF90() to extract the local-to-global map
707: !        - Associate this map with the matrix by calling
708: !          MatSetLocalToGlobalMapping() once
709: !        - Set matrix entries using the local ordering
710: !          by calling MatSetValuesLocal()
711: !    (B) MatSetValues(), using the global ordering
712: !        - Use DAGetGlobalIndicesF90() to extract the local-to-global map
713: !        - Then apply this map explicitly yourself
714: !        - Set matrix entries using the global ordering by calling
715: !          MatSetValues()
716: !  Option (A) seems cleaner/easier in many cases, and is the procedure
717: !  used in this example.
718: !
719:       subroutine FormJacobianLocal(x,jac_prec,user,ierr)
720:       use f90module
721:       implicit none

723: #include "finclude/petscsys.h"
724: #include "finclude/petscvec.h"
725: #include "finclude/petscda.h"
726: #include "finclude/petscis.h"
727: #include "finclude/petscmat.h"
728: #include "finclude/petscksp.h"
729: #include "finclude/petscpc.h"
730: #include "finclude/petscsnes.h"

732: !  Input/output variables:
733:       type (userctx) user
734:       PetscScalar    x(user%gxs:user%gxe,                                      &
735:      &               user%gys:user%gye)
736:       Mat            jac_prec
737:       PetscErrorCode ierr

739: !  Local variables:
740:       PetscInt    row,col(5),i,j
741:       PetscInt    ione,ifive
742:       PetscScalar two,one,hx,hy,hxdhy
743:       PetscScalar hydhx,sc,v(5)

745: !  Set parameters
746:       ione   = 1
747:       ifive  = 5
748:       one    = 1.0
749:       two    = 2.0
750:       hx     = one/dble(user%mx-1)
751:       hy     = one/dble(user%my-1)
752:       sc     = hx*hy
753:       hxdhy  = hx/hy
754:       hydhx  = hy/hx

756: !  Compute entries for the locally owned part of the Jacobian.
757: !   - Currently, all PETSc parallel matrix formats are partitioned by
758: !     contiguous chunks of rows across the processors.
759: !   - Each processor needs to insert only elements that it owns
760: !     locally (but any non-local elements will be sent to the
761: !     appropriate processor during matrix assembly).
762: !   - Here, we set all entries for a particular row at once.
763: !   - We can set matrix entries using either
764: !     MatSetValuesLocal() or MatSetValues(), as discussed above.
765: !   - Note that MatSetValues() uses 0-based row and column numbers
766: !     in Fortran as well as in C.
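!
!  For example: with the shifted 1-based indices used in this code, the
!  0-based local (ghosted) index of grid point (i,j) is
!     row = (j - user%gys)*user%gxm + (i - user%gxs)
!  The loop below builds this incrementally: "row" starts one short of the
!  first owned point of each grid row and is incremented once per column.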

768:       do 20 j=user%ys,user%ye
769:          row = (j - user%gys)*user%gxm + user%xs - user%gxs - 1
770:          do 10 i=user%xs,user%xe
771:             row = row + 1
772: !           boundary points
773:             if (i .eq. 1 .or. j .eq. 1                                  &
774:      &             .or. i .eq. user%mx .or. j .eq. user%my) then
775:                col(1) = row
776:                v(1)   = one
777:                call MatSetValuesLocal(jac_prec,ione,row,ione,col,v,          &
778:      &                           INSERT_VALUES,ierr)
779: !           interior grid points
780:             else
781:                v(1) = -hxdhy
782:                v(2) = -hydhx
783:                v(3) = two*(hydhx + hxdhy)                               &
784:      &                  - sc*user%lambda*exp(x(i,j))
785:                v(4) = -hydhx
786:                v(5) = -hxdhy
787:                col(1) = row - user%gxm
788:                col(2) = row - 1
789:                col(3) = row
790:                col(4) = row + 1
791:                col(5) = row + user%gxm
792:                call MatSetValuesLocal(jac_prec,ione,row,ifive,col,v,         &
793:      &                                INSERT_VALUES,ierr)
794:             endif
795:  10      continue
796:  20   continue

798:       return
799:       end