Actual source code: ex8f.F

petsc-3.3-p6 2013-02-11
!
!   Tests PCMGSetResidual
!
! -----------------------------------------------------------------------

      program main
      implicit none

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                    Include files
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!
#include <finclude/petscsys.h>
#include <finclude/petscvec.h>
#include <finclude/petscmat.h>
#include <finclude/petscpc.h>
#include <finclude/petscksp.h>
#include <finclude/petscpcmg.h>
!
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                   Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     ksp      - linear solver context
!     pc       - preconditioner (multigrid) context
!     x, b, u  - approx solution, right-hand-side, exact solution vectors
!     A        - matrix that defines linear system
!

      Mat              A
      Vec              x,b,u
      PC               pc
      KSP              ksp
      PetscInt         n,dim,istart,iend
      PetscInt         i,j,jj,ii,one,zero
      PetscErrorCode   ierr
      PetscScalar      v,pfive
      external         MyResidual

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                 Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      pfive  = .5d0
      n      = 6
      dim    = n*n
      one    = 1
      zero   = 0

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!      Compute the matrix and right-hand-side vector that define
!      the linear system, Ax = b.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime.  Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.

      call MatCreate(PETSC_COMM_WORLD,A,ierr)
      call MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,dim,dim,ierr)
      call MatSetFromOptions(A,ierr)
      call MatSetUp(A,ierr)

!  Currently, all PETSc parallel matrix formats are partitioned by
!  contiguous chunks of rows across the processors.  Determine which
!  rows of the matrix are locally owned.

      call MatGetOwnershipRange(A,Istart,Iend,ierr)

!  Set matrix elements in parallel.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.

      do 10 II=Istart,Iend-1
        v = -1.0
        i = II/n
        j = II - i*n
        if (i.gt.0) then
          JJ = II - n
          call MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)
        endif
        if (i.lt.n-1) then
          JJ = II + n
          call MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.gt.0) then
          JJ = II - 1
          call MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.lt.n-1) then
          JJ = II + 1
          call MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)
        endif
        v = 4.0
        call MatSetValues(A,one,II,one,II,v,ADD_VALUES,ierr)
 10   continue

!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transition
!  by placing code between these two statements.

      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)

!  Create parallel vectors.
!   - Here, the parallel partitioning of the vector is determined by
!     PETSc at runtime.  We could also specify the local dimensions
!     if desired.
!   - Note: We form 1 vector from scratch and then duplicate as needed.

      call VecCreate(PETSC_COMM_WORLD,u,ierr)
      call VecSetSizes(u,PETSC_DECIDE,dim,ierr)
      call VecSetFromOptions(u,ierr)
      call VecDuplicate(u,b,ierr)
      call VecDuplicate(b,x,ierr)

!  Set exact solution; then compute right-hand-side vector.

      call VecSet(u,pfive,ierr)
      call MatMult(A,u,b,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!         Create the linear solver and set various options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create linear solver context, use multigrid as the preconditioner,
!  and give it a single level (level 0).

      call KSPCreate(PETSC_COMM_WORLD,ksp,ierr)
      call KSPGetPC(ksp,pc,ierr)
      call PCSetType(pc,PCMG,ierr)
      call PCMGSetLevels(pc,one,PETSC_NULL_OBJECT,ierr)

!  First install the library-provided residual routine on level 0,
!  then replace it with the user-defined routine MyResidual.

      call PCMGSetResidual(pc,zero,PCMGDefaultResidual,A,ierr)

      call PCMGSetResidual(pc,zero,MyResidual,A,ierr)
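
!  Note: a routine passed to PCMGSetResidual() is expected to compute
!  the level residual r = b - A*x; MyResidual below only provides the
!  required calling sequence.  See the illustrative sketch after the
!  end of this program for one way such a routine could be written.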

!  Set operators.  Here the matrix that defines the linear system
!  also serves as the preconditioning matrix.

      call KSPSetOperators(ksp,A,A,DIFFERENT_NONZERO_PATTERN,ierr)
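
!  This test only exercises the PCMGSetResidual() calling sequence; no
!  solve is performed.  In a typical application KSPSetFromOptions()
!  and KSPSolve() would be called here before destroying the objects.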


!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.

      call KSPDestroy(ksp,ierr)
      call VecDestroy(u,ierr)
      call VecDestroy(x,ierr)
      call VecDestroy(b,ierr)
      call MatDestroy(A,ierr)

      call PetscFinalize(ierr)
      end

!  User-defined residual routine; for this test it only needs to have
!  the calling sequence expected by PCMGSetResidual(), so the body is
!  intentionally empty.

      subroutine MyResidual(A,b,x,r,ierr)
      implicit none
      Mat A
      Vec b,x,r
      PetscErrorCode ierr
      return
      end
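
! ----------------------------------------------------------------------
!  Illustrative sketch (not part of the original test): a residual
!  routine installed with PCMGSetResidual() would typically compute
!  r = b - A*x, for example as below.  The name MyResidualSketch and
!  the use of MatMult()/VecAYPX() are just one possible realization.
! ----------------------------------------------------------------------
      subroutine MyResidualSketch(A,b,x,r,ierr)
      implicit none
      Mat A
      Vec b,x,r
      PetscErrorCode ierr
      PetscScalar mone

      mone = -1.0d0
!     r = A*x
      call MatMult(A,x,r,ierr)
!     r = -r + b = b - A*x
      call VecAYPX(r,mone,b,ierr)
      return
      end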