Actual source code: ex6f.F

!
!  Description: This example demonstrates repeated linear solves as
!  well as the use of different preconditioner and linear system
!  matrices.  This example also illustrates how to save PETSc objects
!  in common blocks.
!
!/*T
!  Concepts: KSP^repeatedly solving linear systems;
!  Concepts: KSP^different matrices for linear system and preconditioner;
!  Processors: n
!T*/
!
!  The following include statements are required for KSP Fortran programs:
!     petscsys.h    - base PETSc routines
!     petscvec.h    - vectors
!     petscmat.h    - matrices
!     petscpc.h     - preconditioners
!     petscksp.h    - Krylov subspace methods
!  Other include statements may be needed if using additional PETSc
!  routines in a Fortran program, e.g.,
!     petscviewer.h - viewers
!     petscis.h     - index sets
!
      program main
#include "finclude/petscsys.h"
#include "finclude/petscvec.h"
#include "finclude/petscmat.h"
#include "finclude/petscpc.h"
#include "finclude/petscksp.h"

!  Variables:
!
!  A       - matrix that defines linear system
!  ksp     - KSP context
!  x, b, u - approx solution, RHS, exact solution vectors
!
      Vec     x,u,b
      Mat     A
      KSP     ksp
      PetscInt i,j,II,JJ,m,n
      PetscInt Istart,Iend
      PetscInt nsteps,one
      PetscErrorCode ierr
      PetscTruth flg
      PetscScalar  v

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      m      = 3
      n      = 3
      nsteps = 2
      one    = 1
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-m',m,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-n',n,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_CHARACTER,'-nsteps',nsteps,    &
     &     flg,ierr)
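
!  A typical run (illustrative command; the executable name and the
!  process count are only examples) might look like:
!     mpiexec -n 2 ./ex6f -m 4 -n 4 -nsteps 3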

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime. Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.

      call MatCreate(PETSC_COMM_WORLD,A,ierr)
      call MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,ierr)
      call MatSetFromOptions(A,ierr)

!  The matrix is partitioned by contiguous chunks of rows across the
!  processors.  Determine which rows of the matrix are locally owned.

      call MatGetOwnershipRange(A,Istart,Iend,ierr)

!  Set matrix elements.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.

      do 10, II=Istart,Iend-1
        v = -1.0
        i = II/n
        j = II - i*n
        if (i.gt.0) then
          JJ = II - n
          call MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)
        endif
        if (i.lt.m-1) then
          JJ = II + n
          call MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.gt.0) then
          JJ = II - 1
          call MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.lt.n-1) then
          JJ = II + 1
          call MatSetValues(A,one,II,one,JJ,v,ADD_VALUES,ierr)
        endif
        v = 4.0
        call MatSetValues(A,one,II,one,II,v,ADD_VALUES,ierr)
 10   continue
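
!  The loop above builds the standard 5-point finite-difference
!  Laplacian on an m x n grid: each row II gets 4 on the diagonal
!  and -1 in columns II-n, II+n, II-1, and II+1 whenever those
!  neighbors fall inside the grid.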

!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transition
!  by placing code between these two statements.

      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)

!  Create parallel vectors.
!   - When using VecCreate(), the parallel partitioning of the vector
!     is determined by PETSc at runtime.
!   - Note: We form 1 vector from scratch and then duplicate as needed.

      call VecCreate(PETSC_COMM_WORLD,u,ierr)
      call VecSetSizes(u,PETSC_DECIDE,m*n,ierr)
      call VecSetFromOptions(u,ierr)
      call VecDuplicate(u,b,ierr)
      call VecDuplicate(b,x,ierr)

!  Create linear solver context

      call KSPCreate(PETSC_COMM_WORLD,ksp,ierr)

!  Set runtime options (e.g., -ksp_type <type> -pc_type <type>)

      call KSPSetFromOptions(ksp,ierr)
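
!  For example, standard PETSc options such as
!     -ksp_type gmres -pc_type jacobi -ksp_monitor
!  can be given on the command line to select the Krylov method,
!  the preconditioner, and residual monitoring at runtime.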

!  Solve several linear systems in succession

      do 100 i=1,nsteps
         call solve1(ksp,A,x,b,u,i,nsteps,ierr)
 100  continue

!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.

      call VecDestroy(u,ierr)
      call VecDestroy(x,ierr)
      call VecDestroy(b,ierr)
      call MatDestroy(A,ierr)
      call KSPDestroy(ksp,ierr)

      call PetscFinalize(ierr)
      end

! -----------------------------------------------------------------------
!
      subroutine solve1(ksp,A,x,b,u,count,nsteps,ierr)

#include "finclude/petscsys.h"
#include "finclude/petscvec.h"
#include "finclude/petscmat.h"
#include "finclude/petscpc.h"
#include "finclude/petscksp.h"

!
!   solve1 - This routine is used for repeated linear system solves.
!   We update the linear system matrix each time, but retain the same
!   preconditioning matrix for all linear solves.
!
!      A  - linear system matrix
!      A2 - preconditioning matrix
!
      PetscScalar  v,val
      PetscInt II,Istart,Iend
      PetscInt count,nsteps,one
      PetscErrorCode ierr
      Mat     A
      KSP     ksp
      Vec     x,b,u

! Use common block to retain matrix between successive subroutine calls
      Mat              A2
      PetscMPIInt      rank
      PetscTruth       pflag
      common /my_data/ A2,pflag,rank

      one = 1
! First time through: Create new matrix to define the linear system
      if (count .eq. 1) then
        call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)
        pflag = .false.
        call PetscOptionsHasName(PETSC_NULL_CHARACTER,'-mat_view',       &
     &       pflag,ierr)
        if (pflag) then
          if (rank .eq. 0) write(6,100)
        endif
        call MatConvert(A,MATSAME,MAT_INITIAL_MATRIX,A2,ierr)
! All other times: Set previous solution as initial guess for next solve.
      else
        call KSPSetInitialGuessNonzero(ksp,PETSC_TRUE,ierr)
      endif

! Alter the matrix A a bit
      call MatGetOwnershipRange(A,Istart,Iend,ierr)
      do 20, II=Istart,Iend-1
        v = 2.0
        call MatSetValues(A,one,II,one,II,v,ADD_VALUES,ierr)
 20   continue
      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      if (pflag) then
        if (rank .eq. 0) write(6,110)
      endif
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)
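
! Each pass through solve1 adds 2 to every diagonal entry, so the
! system matrix A drifts steadily away from the frozen preconditioner
! matrix A2 that was copied on the first call.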

! Set the exact solution; compute the right-hand-side vector
      val = 1.0*count
      call VecSet(u,val,ierr)
      call MatMult(A,u,b,ierr)

! Set operators, keeping the identical preconditioner matrix for
! all linear solves.  This approach is often effective when the
! linear systems do not change very much between successive steps.
      call KSPSetOperators(ksp,A,A2,SAME_PRECONDITIONER,ierr)
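
! Note: SAME_PRECONDITIONER reuses the preconditioner built from A2
! for every solve.  Passing SAME_NONZERO_PATTERN (or
! DIFFERENT_NONZERO_PATTERN) instead would rebuild the preconditioner
! each time the operators are reset.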

! Solve linear system
      call KSPSolve(ksp,b,x,ierr)
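
! One could verify the solve here by comparing x against the exact
! solution u; a minimal sketch (assuming declarations
! "PetscReal norm" and "PetscScalar none" with none = -1.0 were
! added above):
!      call VecAXPY(x,none,u,ierr)
!      call VecNorm(x,NORM_2,norm,ierr)
!      if (rank .eq. 0) write(6,*) 'error norm = ',norm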

! Destroy the preconditioner matrix on the last time through
      if (count .eq. nsteps) call MatDestroy(A2,ierr)

 100  format('previous matrix: preconditioning')
 110  format('next matrix: defines linear system')

      end