Actual source code: ex4.c
petsc-3.3-p6 2013-02-11
static char help[] = "Tests various 2-dimensional DMDA routines.\n\n";

#include <petscdmda.h>
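/*
   Example invocations (note: the executable name ./ex4 and the mpiexec launcher are
   assumptions for illustration, not part of this source file):
     mpiexec -n 4 ./ex4 -NX 20 -NY 16 -star -s 1
     mpiexec -n 4 ./ex4 -m 2 -n 2 -distribute -testorder
   All of the options shown are read below with PetscOptionsGetInt()/PetscOptionsGetBool().
*/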
int main(int argc,char **argv)
{
  PetscMPIInt      rank;
  PetscInt         M = 10,N = 8,m = PETSC_DECIDE;
  PetscInt         s = 2,w = 2,n = PETSC_DECIDE,nloc,l,i,j,kk;
  PetscInt         Xs,Xm,Ys,Ym,iloc,*iglobal,*ltog;
  PetscInt         *lx = PETSC_NULL,*ly = PETSC_NULL;
  PetscBool        testorder = PETSC_FALSE,flg;
  DMDABoundaryType bx = DMDA_BOUNDARY_NONE,by = DMDA_BOUNDARY_NONE;
  DM               da;
  PetscViewer      viewer;
  Vec              local,global;
  PetscScalar      value;
  DMDAStencilType  st = DMDA_STENCIL_BOX;
  AO               ao;

  PetscInitialize(&argc,&argv,(char*)0,help);
  PetscViewerDrawOpen(PETSC_COMM_WORLD,0,"",300,0,400,400,&viewer);
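  /* The draw viewer opened above (a 400x400 window at position (300,0)) is used by
     DMView() below to display the DMDA's parallel decomposition graphically. */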

  /* Read options */
  PetscOptionsGetInt(PETSC_NULL,"-NX",&M,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-NY",&N,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-m",&m,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-n",&n,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-s",&s,PETSC_NULL);
  PetscOptionsGetInt(PETSC_NULL,"-w",&w,PETSC_NULL);

  flg = PETSC_FALSE;
  PetscOptionsGetBool(PETSC_NULL,"-xperiodic",&flg,PETSC_NULL); if (flg) bx = DMDA_BOUNDARY_PERIODIC;
  flg = PETSC_FALSE;
  PetscOptionsGetBool(PETSC_NULL,"-yperiodic",&flg,PETSC_NULL); if (flg) by = DMDA_BOUNDARY_PERIODIC;
  flg = PETSC_FALSE;
  PetscOptionsGetBool(PETSC_NULL,"-xghosted",&flg,PETSC_NULL); if (flg) bx = DMDA_BOUNDARY_GHOSTED;
  flg = PETSC_FALSE;
  PetscOptionsGetBool(PETSC_NULL,"-yghosted",&flg,PETSC_NULL); if (flg) by = DMDA_BOUNDARY_GHOSTED;
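  /* DMDA_BOUNDARY_PERIODIC wraps the ghost points around the domain in that direction;
     DMDA_BOUNDARY_GHOSTED adds extra ghost locations on the physical boundary that are
     not filled by DMGlobalToLocal() and can be set by the application. */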
  flg = PETSC_FALSE;
  PetscOptionsGetBool(PETSC_NULL,"-star",&flg,PETSC_NULL); if (flg) st = DMDA_STENCIL_STAR;
  flg = PETSC_FALSE;
  PetscOptionsGetBool(PETSC_NULL,"-box",&flg,PETSC_NULL); if (flg) st = DMDA_STENCIL_BOX;
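  /* DMDA_STENCIL_BOX ghosts all neighbors within stencil width s (including corners);
     DMDA_STENCIL_STAR ghosts only the axis-aligned neighbors. */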
  flg = PETSC_FALSE;
  PetscOptionsGetBool(PETSC_NULL,"-testorder",&testorder,PETSC_NULL);
  /*
     Test a user-specified distribution: put four nodes in x and two nodes in y on each
     processor, except the last processor in each direction, which gets the rest.
  */
  flg = PETSC_FALSE;
  PetscOptionsGetBool(PETSC_NULL,"-distribute",&flg,PETSC_NULL);
  if (flg) {
    if (m == PETSC_DECIDE) SETERRQ(PETSC_COMM_WORLD,1,"Must set -m option with -distribute option");
    PetscMalloc(m*sizeof(PetscInt),&lx);
    for (i=0; i<m-1; i++) { lx[i] = 4; }
    lx[m-1] = M - 4*(m-1);
    if (n == PETSC_DECIDE) SETERRQ(PETSC_COMM_WORLD,1,"Must set -n option with -distribute option");
    PetscMalloc(n*sizeof(PetscInt),&ly);
    for (i=0; i<n-1; i++) { ly[i] = 2; }
    ly[n-1] = N - 2*(n-1);
  }
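  /* When lx/ly are passed to DMDACreate2d() they give the number of grid points owned by
     each processor column/row, so their entries must sum to M and N respectively (which
     the construction above guarantees). */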
  /* Create distributed array and get vectors */
  DMDACreate2d(PETSC_COMM_WORLD,bx,by,st,M,N,m,n,w,s,lx,ly,&da);
  PetscFree(lx);
  PetscFree(ly);

  DMView(da,viewer);
  DMCreateGlobalVector(da,&global);
  DMCreateLocalVector(da,&local);
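  /* The global vector holds only the grid points owned by this process; the local vector
     additionally holds the ghost points of width s surrounding the owned subdomain. */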
  /* Set global vector; send ghost points to local vectors */
  value = 1;
  VecSet(global,value);
  DMGlobalToLocalBegin(da,global,INSERT_VALUES,local);
  DMGlobalToLocalEnd(da,global,INSERT_VALUES,local);

  /* Scale local vectors according to processor rank; pass to global vector */
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  value = rank;
  VecScale(local,value);
  DMLocalToGlobalBegin(da,local,INSERT_VALUES,global);
  DMLocalToGlobalEnd(da,local,INSERT_VALUES,global);
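  /* With INSERT_VALUES, DMLocalToGlobal() copies only the owned portion of each local
     vector back into the global vector; the ghost values are not accumulated. */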
  if (!testorder) { /* turn off printing when testing ordering mappings */
    PetscPrintf(PETSC_COMM_WORLD,"\nGlobal Vectors:\n");
    VecView(global,PETSC_VIEWER_STDOUT_WORLD);
    PetscPrintf(PETSC_COMM_WORLD,"\n\n");
  }
  /* Send ghost points to local vectors */
  DMGlobalToLocalBegin(da,global,INSERT_VALUES,local);
  DMGlobalToLocalEnd(da,global,INSERT_VALUES,local);

  flg = PETSC_FALSE;
  PetscOptionsGetBool(PETSC_NULL,"-local_print",&flg,PETSC_NULL);
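  /* With -local_print, each process prints its own local (ghosted) vector.
     PetscViewerGetSingleton() returns a sequential viewer that lets a single process
     write through the parallel PETSC_VIEWER_STDOUT_WORLD viewer. */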
  if (flg) {
    PetscViewer sviewer;
    PetscSynchronizedPrintf(PETSC_COMM_WORLD,"\nLocal Vector: processor %d\n",rank);
    PetscViewerGetSingleton(PETSC_VIEWER_STDOUT_WORLD,&sviewer);
    VecView(local,sviewer);
    PetscViewerRestoreSingleton(PETSC_VIEWER_STDOUT_WORLD,&sviewer);
    PetscSynchronizedFlush(PETSC_COMM_WORLD); /* flush the synchronized header lines from all ranks */
  }
  /* Tests mappings between application/PETSc orderings */
  if (testorder) {
    DMDAGetGhostCorners(da,&Xs,&Ys,PETSC_NULL,&Xm,&Ym,PETSC_NULL);
    DMDAGetGlobalIndices(da,&nloc,&ltog);
    DMDAGetAO(da,&ao);
    PetscMalloc(nloc*sizeof(PetscInt),&iglobal);
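    /* iglobal is allocated here and freed at the end of this block; ltog is returned by
       the DMDA and is not freed by this example. */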
    /* Set iglobal to be global indices for each processor's local and ghost nodes,
       using the DMDA ordering of grid points */
    kk = 0;
    for (j=Ys; j<Ys+Ym; j++) {
      for (i=Xs; i<Xs+Xm; i++) {
        iloc = w*((j-Ys)*Xm + i-Xs);
        for (l=0; l<w; l++) {
          iglobal[kk++] = ltog[iloc+l];
        }
      }
    }
    /* Map this to the application ordering (which for DMDAs is just the natural ordering
       that would be used for 1 processor, numbering most rapidly by x, then y) */
    AOPetscToApplication(ao,nloc,iglobal);

    /* Then map the application ordering back to the PETSc DMDA ordering */
    AOApplicationToPetsc(ao,nloc,iglobal);
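    /* The round trip PetscToApplication followed by ApplicationToPetsc should leave the
       indices unchanged, so iglobal must again match ltog; the loop below verifies this. */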
    /* Verify the mappings */
    kk = 0;
    for (j=Ys; j<Ys+Ym; j++) {
      for (i=Xs; i<Xs+Xm; i++) {
        iloc = w*((j-Ys)*Xm + i-Xs);
        for (l=0; l<w; l++) {
          if (iglobal[kk] != ltog[iloc+l]) {
            PetscFPrintf(PETSC_COMM_SELF,stdout,"[%d] Problem with mapping: j=%D, i=%D, l=%D, petsc1=%D, petsc2=%D\n",
                         rank,j,i,l,ltog[iloc+l],iglobal[kk]);
          }
          kk++;
        }
      }
    }
    PetscFree(iglobal);
  }
  /* Free memory */
  PetscViewerDestroy(&viewer);
  VecDestroy(&local);
  VecDestroy(&global);
  DMDestroy(&da);

  PetscFinalize();
  return 0;
}