Actual source code: ex8f.F

!
!   Tests PCMGSetResidual
!
! -----------------------------------------------------------------------

      program main
      implicit none

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                    Include files
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!
#include "finclude/petscsys.h"
#include "finclude/petscvec.h"
#include "finclude/petscmat.h"
#include "finclude/petscpc.h"
#include "finclude/petscksp.h"
#include "finclude/petscmg.h"
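!  These headers supply the Fortran definitions of the PETSc types and
!  interfaces used below (Mat, Vec, KSP, PC, PetscInt, PetscScalar, ...).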
!
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                   Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     ksp      - linear solver context
!     pc       - preconditioner context
!     x, b, u  - approx solution, right-hand-side, exact solution vectors
!     A        - matrix that defines linear system
!

      Mat              A
      Vec              x,b,u
      PC               pc
      PetscInt         n,dim,istart,iend
      PetscInt         i,j,jj,ii,one,zero
      PetscErrorCode   ierr
      PetscScalar      v
      external         MyResidual
      PetscScalar      pfive
      KSP              ksp

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                 Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      pfive  = .5d0
      n      = 6
      dim    = n*n
      one    = 1
      zero   = 0
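!  n is the number of grid points in each coordinate direction, so the
!  global linear system has dim = n*n unknowns.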

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!      Compute the matrix and right-hand-side vector that define
!      the linear system, Ax = b.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime.  Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.

      call MatCreate(PETSC_COMM_WORLD,A,ierr)
      call MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,dim,dim,ierr)
      call MatSetFromOptions(A,ierr)

!  Currently, all PETSc parallel matrix formats are partitioned by
!  contiguous chunks of rows across the processors.  Determine which
!  rows of the matrix are locally owned.

      call MatGetOwnershipRange(A,Istart,Iend,ierr)

!  Set matrix elements in parallel.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
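!   - The loop below fills in the standard 5-point finite-difference
!     stencil on an n x n grid: each row is coupled to its north,
!     south, east, and west neighbours with -1 and to itself with 4.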

      do 10, II=Istart,Iend-1
        v = -1.0
        i = II/n
        j = II - i*n
        if (i.gt.0) then
          JJ = II - n
          call MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)
        endif
        if (i.lt.n-1) then
          JJ = II + n
          call MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.gt.0) then
          JJ = II - 1
          call MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.lt.n-1) then
          JJ = II + 1
          call MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)
        endif
        v = 4.0
        call MatSetValues(A,one,II,one,II,v,ADD_VALUES,ierr)
 10   continue

!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transition
!  by placing code between these two statements.

      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)
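!  If desired, the assembled matrix can be inspected at runtime, e.g.
!     call MatView(A,PETSC_VIEWER_STDOUT_WORLD,ierr)
!  (not done here; shown only as an illustration).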

!  Create parallel vectors.
!   - Here, the parallel partitioning of the vector is determined by
!     PETSc at runtime.  We could also specify the local dimensions
!     if desired.
!   - Note: We form 1 vector from scratch and then duplicate as needed.

      call VecCreate(PETSC_COMM_WORLD,u,ierr)
      call VecSetSizes(u,PETSC_DECIDE,dim,ierr)
      call VecSetFromOptions(u,ierr)
      call VecDuplicate(u,b,ierr)
      call VecDuplicate(b,x,ierr)

!  Set exact solution; then compute right-hand-side vector.

      call VecSet(u,pfive,ierr)
      call MatMult(A,u,b,ierr)
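!  Because b = A*u by construction, u serves as the known exact
!  solution; a full example would use it to check the error in the
!  computed solution.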

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!         Create the linear solver and set various options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create linear solver context, extract its preconditioner, and
!  configure the preconditioner as a one-level multigrid (PCMG).

      call KSPCreate(PETSC_COMM_WORLD,ksp,ierr)
      call KSPGetPC(ksp,pc,ierr)
      call PCSetType(pc,PCMG,ierr)
      call PCMGSetLevels(pc,one,PETSC_NULL_OBJECT,ierr)
      call PCMGSetResidual(pc,zero,PCMGDefaultResidual,A,ierr)

      call PCMGSetResidual(pc,zero,MyResidual,A,ierr)
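!  The first PCMGSetResidual() call above installs the library-provided
!  default residual routine on level zero; the second replaces it with
!  the user-defined MyResidual (see the end of this file).  A user
!  residual routine has the calling sequence
!     residual(Mat A,Vec b,Vec x,Vec r,ierr)
!  and is expected to compute r = b - A*x.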

!  Set operators.  Here the matrix that defines the linear system
!  also serves as the preconditioning matrix.

      call KSPSetOperators(ksp,A,A,DIFFERENT_NONZERO_PATTERN,ierr)
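!  This test only exercises PCMGSetResidual() and does not solve the
!  system.  A complete solver would typically continue along these
!  lines (illustrative sketch only, not part of this test):
!     call KSPSetFromOptions(ksp,ierr)
!     call KSPSolve(ksp,b,x,ierr)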


      call KSPDestroy(ksp,ierr)
      call VecDestroy(u,ierr)
      call VecDestroy(x,ierr)
      call VecDestroy(b,ierr)
      call MatDestroy(A,ierr)

      call PetscFinalize(ierr)
      end

      subroutine MyResidual(A,b,x,r,ierr)
      implicit none
#include "finclude/petscsys.h"
#include "finclude/petscvec.h"
#include "finclude/petscmat.h"
      Mat A
      Vec b,x,r
      PetscErrorCode ierr
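!  This routine is left empty for this test.  A real residual routine
!  would normally compute r = b - A*x, for example (illustrative
!  sketch only, assuming a real PetscScalar build):
!     call MatMult(A,x,r,ierr)
!     call VecAYPX(r,-1.0d0,b,ierr)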
      return
      end