Actual source code: ex6.c

petsc-3.3-p6 2013-02-11
static char help[] = "Tests various 3-dimensional DMDA routines.\n\n";

#include <petscdmda.h>
#include <petscao.h>
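
/* Example invocations (assuming the usual PETSc example build, e.g. "make ex6");
   all option names below are the ones this file registers:
     mpiexec -n 4 ./ex6 -testorder
     mpiexec -n 4 ./ex6 -NX 8 -NY 6 -NZ 4 -star -xperiodic
     mpiexec -n 8 ./ex6 -NX 8 -NY 8 -NZ 8 -m 2 -n 2 -p 2 -distribute */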

int main(int argc,char **argv)
{
  PetscMPIInt      rank;
  PetscInt         M = 3,N = 5,P = 3,s = 1,w = 2,nloc,l,i,j,k,kk,m = PETSC_DECIDE,n = PETSC_DECIDE,p = PETSC_DECIDE;
  PetscInt         Xs,Xm,Ys,Ym,Zs,Zm,iloc,*ltog,*iglobal;
  PetscInt         *lx = PETSC_NULL,*ly = PETSC_NULL,*lz = PETSC_NULL;
  PetscBool        test_order = PETSC_FALSE;
  DM               da;
  PetscViewer      viewer;
  Vec              local,global;
  PetscScalar      value;
  DMDABoundaryType bx = DMDA_BOUNDARY_NONE,by = DMDA_BOUNDARY_NONE,bz = DMDA_BOUNDARY_NONE;
  DMDAStencilType  stencil_type = DMDA_STENCIL_BOX;
  AO               ao;
  PetscBool        flg = PETSC_FALSE;

  PetscInitialize(&argc,&argv,(char*)0,help);
  PetscViewerDrawOpen(PETSC_COMM_WORLD,0,"",300,0,400,300,&viewer);

  /* Read options */
  PetscOptionsGetInt(PETSC_NULL,"-NX",&M,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-NY",&N,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-NZ",&P,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-m",&m,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-n",&n,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-p",&p,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-s",&s,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-w",&w,PETSC_NULL);
  flg = PETSC_FALSE;
  PetscOptionsGetBool(PETSC_NULL,"-star",&flg,PETSC_NULL);
  if (flg) stencil_type = DMDA_STENCIL_STAR;
  flg = PETSC_FALSE;
  PetscOptionsGetBool(PETSC_NULL,"-box",&flg,PETSC_NULL);
  if (flg) stencil_type = DMDA_STENCIL_BOX;

  flg = PETSC_FALSE;
  PetscOptionsGetBool(PETSC_NULL,"-xperiodic",&flg,PETSC_NULL);
  if (flg) bx = DMDA_BOUNDARY_PERIODIC;
  flg = PETSC_FALSE;
  PetscOptionsGetBool(PETSC_NULL,"-xghosted",&flg,PETSC_NULL);
  if (flg) bx = DMDA_BOUNDARY_GHOSTED;
  flg = PETSC_FALSE;
  PetscOptionsGetBool(PETSC_NULL,"-xnonghosted",&flg,PETSC_NULL);

  flg = PETSC_FALSE;
  PetscOptionsGetBool(PETSC_NULL,"-yperiodic",&flg,PETSC_NULL);
  if (flg) by = DMDA_BOUNDARY_PERIODIC;
  flg = PETSC_FALSE;
  PetscOptionsGetBool(PETSC_NULL,"-yghosted",&flg,PETSC_NULL);
  if (flg) by = DMDA_BOUNDARY_GHOSTED;
  flg = PETSC_FALSE;
  PetscOptionsGetBool(PETSC_NULL,"-ynonghosted",&flg,PETSC_NULL);

  flg = PETSC_FALSE;
  PetscOptionsGetBool(PETSC_NULL,"-zperiodic",&flg,PETSC_NULL);
  if (flg) bz = DMDA_BOUNDARY_PERIODIC;
  flg = PETSC_FALSE;
  PetscOptionsGetBool(PETSC_NULL,"-zghosted",&flg,PETSC_NULL);
  if (flg) bz = DMDA_BOUNDARY_GHOSTED;
  flg = PETSC_FALSE;
  PetscOptionsGetBool(PETSC_NULL,"-znonghosted",&flg,PETSC_NULL);
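  /* The -xnonghosted/-ynonghosted/-znonghosted options are read but have no
     effect: DMDA_BOUNDARY_NONE is already the default for bx, by, and bz. */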

  PetscOptionsGetBool(PETSC_NULL,"-testorder",&test_order,PETSC_NULL);

  flg = PETSC_FALSE;
  PetscOptionsGetBool(PETSC_NULL,"-distribute",&flg,PETSC_NULL);
  if (flg) {
    if (m == PETSC_DECIDE) SETERRQ(PETSC_COMM_WORLD,1,"Must set -m option with -distribute option");
    PetscMalloc(m*sizeof(PetscInt),&lx);
    for (i=0; i<m-1; i++) { lx[i] = 4; }
    lx[m-1] = M - 4*(m-1);
    if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_WORLD,1,"Must set -n option with -distribute option");
    PetscMalloc(n*sizeof(PetscInt),&ly);
    for (i=0; i<n-1; i++) { ly[i] = 2; }
    ly[n-1] = N - 2*(n-1);
    if (p == PETSC_DECIDE) SETERRQ(PETSC_COMM_WORLD,1,"Must set -p option with -distribute option");
    PetscMalloc(p*sizeof(PetscInt),&lz);
    for (i=0; i<p-1; i++) { lz[i] = 2; }
    lz[p-1] = P - 2*(p-1);
  }
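  /* Note: when lx/ly/lz are passed to DMDACreate3d() below, each array must
     have m (resp. n, p) entries summing exactly to M (resp. N, P). The fixed
     per-process widths of 4 and 2 above assume the grid is large enough that
     the last entry stays positive. */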

  /* Create distributed array and get vectors */
  DMDACreate3d(PETSC_COMM_WORLD,bx,by,bz,stencil_type,M,N,P,m,n,p,w,s,lx,ly,lz,&da);
  PetscFree(lx);
  PetscFree(ly);
  PetscFree(lz);
  DMView(da,viewer);
  DMCreateGlobalVector(da,&global);
  DMCreateLocalVector(da,&local);
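  /* The global vector holds only each process's owned portion of the grid;
     the local vector additionally includes the ghost region of width s
     surrounding the owned subdomain. */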

  /* Set global vector; send ghost points to local vectors */
  value = 1;
  VecSet(global,value);
  DMGlobalToLocalBegin(da,global,INSERT_VALUES,local);
  DMGlobalToLocalEnd(da,global,INSERT_VALUES,local);

  /* Scale local vectors according to processor rank; pass to global vector */
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  value = rank;
  VecScale(local,value);
  DMLocalToGlobalBegin(da,local,INSERT_VALUES,global);
  DMLocalToGlobalEnd(da,local,INSERT_VALUES,global);
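  /* With INSERT_VALUES, the local-to-global scatter copies only owned
     (non-ghost) locations into the global vector; ghost entries of the local
     vector are ignored (ADD_VALUES would instead sum them into their owners). */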

  if (!test_order) { /* turn off printing when testing ordering mappings */
    if (M*N*P < 40) {
      PetscPrintf(PETSC_COMM_WORLD,"\nGlobal Vector:\n");
      VecView(global,PETSC_VIEWER_STDOUT_WORLD);
      PetscPrintf(PETSC_COMM_WORLD,"\n");
    }
  }

  /* Send ghost points to local vectors */
  DMGlobalToLocalBegin(da,global,INSERT_VALUES,local);
  DMGlobalToLocalEnd(da,global,INSERT_VALUES,local);

  flg = PETSC_FALSE;
  PetscOptionsGetBool(PETSC_NULL,"-local_print",&flg,PETSC_NULL);
  if (flg) {
    PetscViewer sviewer;
    PetscSynchronizedPrintf(PETSC_COMM_WORLD,"\nLocal Vector: processor %d\n",rank);
    PetscViewerGetSingleton(PETSC_VIEWER_STDOUT_WORLD,&sviewer);
    VecView(local,sviewer);
    PetscViewerRestoreSingleton(PETSC_VIEWER_STDOUT_WORLD,&sviewer);
    PetscSynchronizedFlush(PETSC_COMM_WORLD);
  }
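  /* Above, PetscViewerGetSingleton() pairs with PetscViewerRestoreSingleton()
     to give each rank a sequential view onto the shared stdout viewer, so the
     per-process local vectors print without interleaving. */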

  /* Tests mappings between application/PETSc orderings */
  if (test_order) {
    DMDAGetGhostCorners(da,&Xs,&Ys,&Zs,&Xm,&Ym,&Zm);
    DMDAGetGlobalIndices(da,&nloc,&ltog);
    DMDAGetAO(da,&ao);
    /* AOView(ao,PETSC_VIEWER_STDOUT_WORLD); */
    PetscMalloc(nloc*sizeof(PetscInt),&iglobal);

    /* Set iglobal to be global indices for each processor's local and ghost nodes,
       using the DMDA ordering of grid points */
    kk = 0;
    for (k=Zs; k<Zs+Zm; k++) {
      for (j=Ys; j<Ys+Ym; j++) {
        for (i=Xs; i<Xs+Xm; i++) {
          iloc = w*((k-Zs)*Xm*Ym + (j-Ys)*Xm + (i-Xs));
          for (l=0; l<w; l++) {
            iglobal[kk++] = ltog[iloc+l];
          }
        }
      }
    }
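    /* Index arithmetic above: (i,j,k) ranges over the ghosted local box
       [Xs,Xs+Xm) x [Ys,Ys+Ym) x [Zs,Zs+Zm), and iloc is its offset into ltog,
       counting x fastest, then y, then z, with w dof per grid point; e.g. with
       w=2 the dof at one point sit at ltog[iloc] and ltog[iloc+1]. */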

    /* Map this to the application ordering (which for DMDAs is just the natural ordering
       that would be used for 1 processor, numbering most rapidly by x, then y, then z) */
    AOPetscToApplication(ao,nloc,iglobal);

    /* Then map the application ordering back to the PETSc DMDA ordering */
    AOApplicationToPetsc(ao,nloc,iglobal);
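    /* The round trip PETSc -> application -> PETSc must be the identity. For
       instance, on a 2x2x2 grid with w=1 the application (natural) ordering
       numbers points x-fastest: (0,0,0)->0, (1,0,0)->1, (0,1,0)->2,
       (1,1,0)->3, (0,0,1)->4, ... The loop below checks that every original
       index was restored. */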

    /* Verify the mappings */
    kk = 0;
    for (k=Zs; k<Zs+Zm; k++) {
      for (j=Ys; j<Ys+Ym; j++) {
        for (i=Xs; i<Xs+Xm; i++) {
          iloc = w*((k-Zs)*Xm*Ym + (j-Ys)*Xm + (i-Xs));
          for (l=0; l<w; l++) {
            if (iglobal[kk] != ltog[iloc+l]) {
              /* Print on PETSC_COMM_SELF so every rank reports its own
                 mismatch; on a parallel communicator only rank 0 would print.
                 rank is a PetscMPIInt, so it takes %d rather than %D. */
              PetscPrintf(PETSC_COMM_SELF,"[%d] Problem with mapping: k=%D, j=%D, i=%D, l=%D, petsc1=%D, petsc2=%D\n",
                          rank,k,j,i,l,ltog[iloc+l],iglobal[kk]);
            }
            kk++;
          }
        }
      }
    }
    PetscFree(iglobal);
  }

  /* Free memory */
  PetscViewerDestroy(&viewer);
  VecDestroy(&local);
  VecDestroy(&global);
  DMDestroy(&da);
  PetscFinalize();
  return 0;
}