/* Actual source code: ex26.c */

  1: static char help[] ="Solvers Laplacian with multigrid, bad way.\n\
  2:   -mx <xg>, where <xg> = number of grid points in the x-direction\n\
  3:   -my <yg>, where <yg> = number of grid points in the y-direction\n\
  4:   -Nx <npx>, where <npx> = number of processors in the x-direction\n\
  5:   -Ny <npy>, where <npy> = number of processors in the y-direction\n\n";

  7: /*  Modified from ~src/ksp/examples/tests/ex19.c. Used for testing ML 6.2 interface.

  9:     This problem is modeled by
 10:     the partial differential equation
 11:   
 12:             -Laplacian u  = g,  0 < x,y < 1,
 13:   
 14:     with boundary conditions
 15:    
 16:              u = 0  for  x = 0, x = 1, y = 0, y = 1.
 17:   
    A finite difference approximation with the usual 5-point stencil
    is used to discretize the boundary value problem to obtain a linear
    system of equations.

 22:     Usage: ./ex26 -ksp_monitor_short -pc_type ml
 23:            -mg_coarse_ksp_max_it 10  
 24:            -mg_levels_1_ksp_max_it 10 -mg_levels_2_ksp_max_it 10 
 25:            -mg_fine_ksp_max_it 10
 26: */

#include "petscksp.h"
#include "petscda.h"

/* User-defined application context: holds everything tied to one grid level. */
typedef struct {
  PetscInt   mx,my;            /* number grid points in x and y direction */
  Vec        localX,localF;    /* local vectors with ghost region */
  DA         da;               /* distributed array describing the 2-D grid */
  Vec        x,b,r;            /* global vectors (solution, rhs, residual) */
  Mat        J;                /* Jacobian on grid */
  Mat        A,P,R;            /* NOTE(review): A/P/R, J, r, ksp are unset in the
                                  visible code of this example -- presumably kept
                                  for multi-level use; confirm before relying on them */
  KSP        ksp;              /* solver associated with this level */
} GridCtx;

 45: int main(int argc,char **argv)
 46: {
 48:   PetscInt       its,n,Nx=PETSC_DECIDE,Ny=PETSC_DECIDE,nlocal;
 49:   PetscMPIInt    size;
 50:   PetscScalar    one = 1.0;
 51:   PetscInt       mx,my;
 52:   Mat            A;
 53:   GridCtx        fine_ctx;
 54:   KSP            ksp;
 55:   PetscTruth     flg;

 57:   PetscInitialize(&argc,&argv,(char *)0,help);
 58:   /* set up discretization matrix for fine grid */
 59:   fine_ctx.mx = 9; fine_ctx.my = 9;
 60:   PetscOptionsGetInt(PETSC_NULL,"-mx",&mx,&flg);
 61:   if (flg) fine_ctx.mx = mx;
 62:   PetscOptionsGetInt(PETSC_NULL,"-my",&my,&flg);
 63:   if (flg) fine_ctx.my = my;
 64:   PetscPrintf(PETSC_COMM_WORLD,"Fine grid size %D by %D\n",fine_ctx.mx,fine_ctx.my);
 65:   n = fine_ctx.mx*fine_ctx.my;

 67:   MPI_Comm_size(PETSC_COMM_WORLD,&size);
 68:   PetscOptionsGetInt(PETSC_NULL,"-Nx",&Nx,PETSC_NULL);
 69:   PetscOptionsGetInt(PETSC_NULL,"-Ny",&Ny,PETSC_NULL);

 71:   /* Set up distributed array for fine grid */
 72:   DACreate2d(PETSC_COMM_WORLD,DA_NONPERIODIC,DA_STENCIL_STAR,fine_ctx.mx,
 73:                     fine_ctx.my,Nx,Ny,1,1,PETSC_NULL,PETSC_NULL,&fine_ctx.da);
 74:   DACreateGlobalVector(fine_ctx.da,&fine_ctx.x);
 75:   VecDuplicate(fine_ctx.x,&fine_ctx.b);
 76:   VecGetLocalSize(fine_ctx.x,&nlocal);
 77:   DACreateLocalVector(fine_ctx.da,&fine_ctx.localX);
 78:   VecDuplicate(fine_ctx.localX,&fine_ctx.localF);
 79:   MatCreateMPIAIJ(PETSC_COMM_WORLD,nlocal,nlocal,n,n,5,PETSC_NULL,3,PETSC_NULL,&A);
 80:   FormJacobian_Grid(&fine_ctx,&A);

 82:   /* create linear solver */
 83:   KSPCreate(PETSC_COMM_WORLD,&ksp);

 85:   /* set values for rhs vector */
 86:   VecSet(fine_ctx.b,one);
 87:   {
 88:     PetscRandom rdm;
 89:     PetscRandomCreate(PETSC_COMM_WORLD,&rdm);
 90:     PetscRandomSetFromOptions(rdm);
 91:     VecSetRandom(fine_ctx.b,rdm);
 92:     PetscRandomDestroy(rdm);
 93:   }

 95:   /* set options, then solve system */
 96:   KSPSetFromOptions(ksp); /* calls PCSetFromOptions_ML if 'pc_type=ml' */
 97:   KSPSetOperators(ksp,A,A,DIFFERENT_NONZERO_PATTERN);
 98:   KSPSolve(ksp,fine_ctx.b,fine_ctx.x);
 99:   KSPGetIterationNumber(ksp,&its);
100:   PetscPrintf(PETSC_COMM_WORLD,"Number of iterations = %D\n",its);

102:   /* free data structures */
103:   VecDestroy(fine_ctx.x);
104:   VecDestroy(fine_ctx.b);
105:   DADestroy(fine_ctx.da);
106:   VecDestroy(fine_ctx.localX);
107:   VecDestroy(fine_ctx.localF);
108:   MatDestroy(A);
109:   KSPDestroy(ksp);

111:   PetscFinalize();
112:   return 0;
113: }

117: int FormJacobian_Grid(GridCtx *grid,Mat *J)
118: {
119:   Mat            jac = *J;
121:   PetscInt       i,j,row,mx,my,xs,ys,xm,ym,Xs,Ys,Xm,Ym,col[5];
122:   PetscInt       nloc,*ltog,grow;
123:   PetscScalar    two = 2.0,one = 1.0,v[5],hx,hy,hxdhy,hydhx,value;

125:   mx = grid->mx;            my = grid->my;
126:   hx = one/(PetscReal)(mx-1);  hy = one/(PetscReal)(my-1);
127:   hxdhy = hx/hy;            hydhx = hy/hx;

129:   /* Get ghost points */
130:   DAGetCorners(grid->da,&xs,&ys,0,&xm,&ym,0);
131:   DAGetGhostCorners(grid->da,&Xs,&Ys,0,&Xm,&Ym,0);
132:   DAGetGlobalIndices(grid->da,&nloc,&ltog);

134:   /* Evaluate Jacobian of function */
135:   for (j=ys; j<ys+ym; j++) {
136:     row = (j - Ys)*Xm + xs - Xs - 1;
137:     for (i=xs; i<xs+xm; i++) {
138:       row++;
139:       grow = ltog[row];
140:       if (i > 0 && i < mx-1 && j > 0 && j < my-1) {
141:         v[0] = -hxdhy; col[0] = ltog[row - Xm];
142:         v[1] = -hydhx; col[1] = ltog[row - 1];
143:         v[2] = two*(hydhx + hxdhy); col[2] = grow;
144:         v[3] = -hydhx; col[3] = ltog[row + 1];
145:         v[4] = -hxdhy; col[4] = ltog[row + Xm];
146:         MatSetValues(jac,1,&grow,5,col,v,INSERT_VALUES);
147:       } else if ((i > 0 && i < mx-1) || (j > 0 && j < my-1)){
148:         value = .5*two*(hydhx + hxdhy);
149:         MatSetValues(jac,1,&grow,1,&grow,&value,INSERT_VALUES);
150:       } else {
151:         value = .25*two*(hydhx + hxdhy);
152:         MatSetValues(jac,1,&grow,1,&grow,&value,INSERT_VALUES);
153:       }
154:     }
155:   }
156:   MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY);
157:   MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY);
158:   return 0;
159: }