Actual source code: ex6.c

  1: 
  2: static char help[] = "Tests various 3-dimensional DA routines.\n\n";

 4:  #include "petscda.h"
 5:  #include "petscao.h"

  9: int main(int argc,char **argv)
 10: {
 11:   PetscMPIInt    rank;
 12:   PetscInt       M = 3,N = 5,P=3,s=1,w=2,nloc,l,i,j,k,kk,m = PETSC_DECIDE,n = PETSC_DECIDE,p = PETSC_DECIDE;
 14:   PetscInt       Xs,Xm,Ys,Ym,Zs,Zm,iloc,*ltog,*iglobal;
 15:   PetscInt       *lx = PETSC_NULL,*ly = PETSC_NULL,*lz = PETSC_NULL;
 16:   PetscTruth     test_order = PETSC_FALSE;
 17:   DA             da;
 18:   PetscViewer    viewer;
 19:   Vec            local,global;
 20:   PetscScalar    value;
 21:   DAPeriodicType wrap = DA_XYPERIODIC;
 22:   DAStencilType  stencil_type = DA_STENCIL_BOX;
 23:   AO             ao;
 24:   PetscTruth     flg = PETSC_FALSE;

 26:   PetscInitialize(&argc,&argv,(char*)0,help);
 27:   PetscViewerDrawOpen(PETSC_COMM_WORLD,0,"",300,0,400,300,&viewer);

 29:   /* Read options */
 30:   PetscOptionsGetInt(PETSC_NULL,"-M",&M,PETSC_NULL);
 31:   PetscOptionsGetInt(PETSC_NULL,"-N",&N,PETSC_NULL);
 32:   PetscOptionsGetInt(PETSC_NULL,"-P",&P,PETSC_NULL);
 33:   PetscOptionsGetInt(PETSC_NULL,"-m",&m,PETSC_NULL);
 34:   PetscOptionsGetInt(PETSC_NULL,"-n",&n,PETSC_NULL);
 35:   PetscOptionsGetInt(PETSC_NULL,"-p",&p,PETSC_NULL);
 36:   PetscOptionsGetInt(PETSC_NULL,"-s",&s,PETSC_NULL);
 37:   PetscOptionsGetInt(PETSC_NULL,"-w",&w,PETSC_NULL);
 38:   PetscOptionsGetTruth(PETSC_NULL,"-star",&flg,PETSC_NULL);
 39:   if (flg) stencil_type =  DA_STENCIL_STAR;
 40:   PetscOptionsGetTruth(PETSC_NULL,"-test_order",&test_order,PETSC_NULL);

 42:   flg  = PETSC_FALSE;
 43:   PetscOptionsGetTruth(PETSC_NULL,"-distribute",&flg,PETSC_NULL);
 44:   if (flg) {
 45:     if (m == PETSC_DECIDE) SETERRQ(1,"Must set -m option with -distribute option");
 46:     PetscMalloc(m*sizeof(PetscInt),&lx);
 47:     for (i=0; i<m-1; i++) { lx[i] = 4;}
 48:     lx[m-1] = M - 4*(m-1);
 49:     if (n == PETSC_DECIDE) SETERRQ(1,"Must set -n option with -distribute option");
 50:     PetscMalloc(n*sizeof(PetscInt),&ly);
 51:     for (i=0; i<n-1; i++) { ly[i] = 2;}
 52:     ly[n-1] = N - 2*(n-1);
 53:     if (p == PETSC_DECIDE) SETERRQ(1,"Must set -p option with -distribute option");
 54:     PetscMalloc(p*sizeof(PetscInt),&lz);
 55:     for (i=0; i<p-1; i++) { lz[i] = 2;}
 56:     lz[p-1] = P - 2*(p-1);
 57:   }

 59:   /* Create distributed array and get vectors */
 60:   DACreate3d(PETSC_COMM_WORLD,wrap,stencil_type,M,N,P,m,n,p,w,s,
 61:                     lx,ly,lz,&da);
 62:   if (lx) {
 63:     PetscFree(lx);
 64:     PetscFree(ly);
 65:     PetscFree(lz);
 66:   }
 67:   DAView(da,viewer);
 68:   DACreateGlobalVector(da,&global);
 69:   DACreateLocalVector(da,&local);

 71:   /* Set global vector; send ghost points to local vectors */
 72:   value = 1;
 73:   VecSet(global,value);
 74:   DAGlobalToLocalBegin(da,global,INSERT_VALUES,local);
 75:   DAGlobalToLocalEnd(da,global,INSERT_VALUES,local);

 77:   /* Scale local vectors according to processor rank; pass to global vector */
 78:   MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
 79:   value = rank;
 80:   VecScale(local,value);
 81:   DALocalToGlobal(da,local,INSERT_VALUES,global);

 83:   if (!test_order) { /* turn off printing when testing ordering mappings */
 84:     if (M*N*P<40) {
 85:       PetscPrintf(PETSC_COMM_WORLD,"\nGlobal Vector:\n");
 86:       VecView(global,PETSC_VIEWER_STDOUT_WORLD);
 87:       PetscPrintf(PETSC_COMM_WORLD,"\n");
 88:     }
 89:   }

 91:   /* Send ghost points to local vectors */
 92:   DAGlobalToLocalBegin(da,global,INSERT_VALUES,local);
 93:   DAGlobalToLocalEnd(da,global,INSERT_VALUES,local);

 95:   flg  = PETSC_FALSE;
 96:   PetscOptionsGetTruth(PETSC_NULL,"-local_print",&flg,PETSC_NULL);
 97:   if (flg) {
 98:     PetscViewer sviewer;
 99:     PetscSynchronizedPrintf(PETSC_COMM_WORLD,"\nLocal Vector: processor %d\n",rank);
100:     PetscViewerGetSingleton(PETSC_VIEWER_STDOUT_WORLD,&sviewer);
101:     VecView(local,sviewer);
102:     PetscViewerRestoreSingleton(PETSC_VIEWER_STDOUT_WORLD,&sviewer);
103:     PetscSynchronizedFlush(PETSC_COMM_WORLD);
104:   }

106:   /* Tests mappings betweeen application/PETSc orderings */
107:   if (test_order) {
108:     DAGetGhostCorners(da,&Xs,&Ys,&Zs,&Xm,&Ym,&Zm);
109:     DAGetGlobalIndices(da,&nloc,&ltog);
110:     DAGetAO(da,&ao);
111:     /* AOView(ao,PETSC_VIEWER_STDOUT_WORLD); */
112:     PetscMalloc(nloc*sizeof(PetscInt),&iglobal);

114:     /* Set iglobal to be global indices for each processor's local and ghost nodes,
115:        using the DA ordering of grid points */
116:     kk = 0;
117:     for (k=Zs; k<Zs+Zm; k++) {
118:       for (j=Ys; j<Ys+Ym; j++) {
119:         for (i=Xs; i<Xs+Xm; i++) {
120:           iloc = w*((k-Zs)*Xm*Ym + (j-Ys)*Xm + i-Xs);
121:           for (l=0; l<w; l++) {
122:             iglobal[kk++] = ltog[iloc+l];
123:           }
124:         }
125:       }
126:     }

128:     /* Map this to the application ordering (which for DAs is just the natural ordering
129:        that would be used for 1 processor, numbering most rapidly by x, then y, then z) */
130:     AOPetscToApplication(ao,nloc,iglobal);

132:     /* Then map the application ordering back to the PETSc DA ordering */
133:     AOApplicationToPetsc(ao,nloc,iglobal);

135:     /* Verify the mappings */
136:     kk=0;
137:     for (k=Zs; k<Zs+Zm; k++) {
138:       for (j=Ys; j<Ys+Ym; j++) {
139:         for (i=Xs; i<Xs+Xm; i++) {
140:           iloc = w*((k-Zs)*Xm*Ym + (j-Ys)*Xm + i-Xs);
141:           for (l=0; l<w; l++) {
142:             if (iglobal[kk] != ltog[iloc+l]) {
143:               PetscPrintf(MPI_COMM_WORLD,"[%D] Problem with mapping: z=%D, j=%D, i=%D, l=%D, petsc1=%D, petsc2=%D\n",
144:                       rank,k,j,i,l,ltog[iloc+l],iglobal[kk]);
145:             }
146:             kk++;
147:           }
148:         }
149:       }
150:     }
151:     PetscFree(iglobal);
152:   }

154:   /* Free memory */
155:   PetscViewerDestroy(viewer);
156:   VecDestroy(local);
157:   VecDestroy(global);
158:   DADestroy(da);
159:   PetscFinalize();
160:   return 0;
161: }
162: