Actual source code: ex19.c
/* Runtime help text printed by -help.
   Fixes vs. previous version: option names now match what the code actually
   parses (PetscOptionsGetInt reads "-Mx"/"-My", not "-mx"/"-my"), the typo
   "Solvers" is corrected, and the -ratio option is documented. */
static char help[] = "Solves the Laplacian with multigrid, bad way.\n\
  -Mx <xg>, where <xg> = number of coarse grid points in the x-direction\n\
  -My <yg>, where <yg> = number of coarse grid points in the y-direction\n\
  -Nx <npx>, where <npx> = number of processors in the x-direction\n\
  -Ny <npy>, where <npy> = number of processors in the y-direction\n\
  -ratio <r>, where <r> = ratio of fine to coarse grid lines\n\n";
8: /*
9: This problem is modeled by
10: the partial differential equation
12: -Laplacian u = g, 0 < x,y < 1,
14: with boundary conditions
16: u = 0 for x = 0, x = 1, y = 0, y = 1.
    A finite difference approximation with the usual 5-point stencil
    is used to discretize the boundary value problem to obtain a linear
    system of equations, which is solved with KSP.
*/
23: #include <petscksp.h>
24: #include <petscdm.h>
25: #include <petscdmda.h>
27: /* User-defined application contexts */
/* Per-level grid context: everything needed to assemble and apply the
   Laplacian operator on one level of the multigrid hierarchy. */
typedef struct {
  PetscInt mx,my;            /* number of grid points in x and y direction */
  Vec      localX,localF;    /* local vectors with ghost region */
  DM       da;               /* distributed array managing the structured grid */
  Vec      x,b,r;            /* global vectors: solution, right-hand side, residual */
  Mat      J;                /* Jacobian (discrete Laplacian) on this grid */
} GridCtx;
/* Application context tying the two-level hierarchy together. */
typedef struct {
  GridCtx  fine;        /* fine-level grid data */
  GridCtx  coarse;      /* coarse-level grid data */
  KSP      ksp_coarse;  /* direct/iterative solver used on the coarse level */
  PetscInt ratio;       /* refinement ratio between fine and coarse grid lines */
  Mat      Ii;          /* interpolation from coarse to fine (also used as restriction) */
} AppCtx;
/* Level indices within the 2-level PCMG hierarchy (0 = coarsest). */
#define COARSE_LEVEL 0
#define FINE_LEVEL 1

/* Assembles the 5-point-stencil Laplacian for one grid level into *J. */
extern PetscErrorCode FormJacobian_Grid(AppCtx*,GridCtx*,Mat*);
/*
   -ratio : ratio of grid lines between the fine and coarse grids.
*/
/*
   Builds a two-level additive multigrid preconditioner "by hand" (the
   "bad way" -- modern codes let PCMG/DM do this automatically) and solves
   the discrete Laplacian on the fine grid with a constant right-hand side.

   NOTE(review): every PETSc call's error code is ignored here; current
   PETSc style wraps each call in PetscCall()/CHKERRQ -- confirm against
   the PETSc version this example targets.
*/
int main(int argc,char **argv)
{
  AppCtx      user;
  PetscInt    its,N,n,Nx = PETSC_DECIDE,Ny = PETSC_DECIDE,nlocal,Nlocal;
  PetscMPIInt size;                 /* queried below but otherwise unused */
  KSP         ksp,ksp_fine;
  PC          pc;
  PetscScalar one = 1.0;

  PetscInitialize(&argc,&argv,NULL,help);
  user.ratio = 2;
  user.coarse.mx = 5; user.coarse.my = 5;

  /* Coarse-grid size and refinement ratio from the command line.
     NOTE(review): options parsed are -Mx/-My (capitalized). */
  PetscOptionsGetInt(NULL,NULL,"-Mx",&user.coarse.mx,NULL);
  PetscOptionsGetInt(NULL,NULL,"-My",&user.coarse.my,NULL);
  PetscOptionsGetInt(NULL,NULL,"-ratio",&user.ratio,NULL);

  /* Fine grid shares the coarse grid's boundary lines: r*(m-1)+1 points. */
  user.fine.mx = user.ratio*(user.coarse.mx-1)+1; user.fine.my = user.ratio*(user.coarse.my-1)+1;

  PetscPrintf(PETSC_COMM_WORLD,"Coarse grid size %D by %D\n",user.coarse.mx,user.coarse.my);
  PetscPrintf(PETSC_COMM_WORLD,"Fine grid size %D by %D\n",user.fine.mx,user.fine.my);

  n = user.fine.mx*user.fine.my; N = user.coarse.mx*user.coarse.my;

  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  PetscOptionsGetInt(NULL,NULL,"-Nx",&Nx,NULL);
  PetscOptionsGetInt(NULL,NULL,"-Ny",&Ny,NULL);

  /* Set up distributed array for fine grid */
  DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,user.fine.mx,user.fine.my,Nx,Ny,1,1,NULL,NULL,&user.fine.da);
  DMSetFromOptions(user.fine.da);
  DMSetUp(user.fine.da);
  DMCreateGlobalVector(user.fine.da,&user.fine.x);
  VecDuplicate(user.fine.x,&user.fine.r);
  VecDuplicate(user.fine.x,&user.fine.b);
  VecGetLocalSize(user.fine.x,&nlocal);
  DMCreateLocalVector(user.fine.da,&user.fine.localX);
  VecDuplicate(user.fine.localX,&user.fine.localF);
  /* 5-point stencil: <= 5 diagonal-block and ~3 off-process nonzeros per row */
  MatCreateAIJ(PETSC_COMM_WORLD,nlocal,nlocal,n,n,5,NULL,3,NULL,&user.fine.J);

  /* Set up distributed array for coarse grid (same processor layout Nx x Ny) */
  DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,user.coarse.mx,user.coarse.my,Nx,Ny,1,1,NULL,NULL,&user.coarse.da);
  DMSetFromOptions(user.coarse.da);
  DMSetUp(user.coarse.da);
  DMCreateGlobalVector(user.coarse.da,&user.coarse.x);
  VecDuplicate(user.coarse.x,&user.coarse.b);
  VecGetLocalSize(user.coarse.x,&Nlocal);
  DMCreateLocalVector(user.coarse.da,&user.coarse.localX);
  VecDuplicate(user.coarse.localX,&user.coarse.localF);
  MatCreateAIJ(PETSC_COMM_WORLD,Nlocal,Nlocal,N,N,5,NULL,3,NULL,&user.coarse.J);

  /* Create linear solver */
  KSPCreate(PETSC_COMM_WORLD,&ksp);

  /* set two level additive Schwarz preconditioner */
  KSPGetPC(ksp,&pc);
  PCSetType(pc,PCMG);
  PCMGSetLevels(pc,2,NULL);
  PCMGSetType(pc,PC_MG_ADDITIVE);

  /* Assemble the Laplacian on both levels before wiring them into PCMG */
  FormJacobian_Grid(&user,&user.coarse,&user.coarse.J);
  FormJacobian_Grid(&user,&user.fine,&user.fine.J);

  /* Create coarse level: its own KSP with the "coarse_" option prefix */
  PCMGGetCoarseSolve(pc,&user.ksp_coarse);
  KSPSetOptionsPrefix(user.ksp_coarse,"coarse_");
  KSPSetFromOptions(user.ksp_coarse);
  KSPSetOperators(user.ksp_coarse,user.coarse.J,user.coarse.J);
  PCMGSetX(pc,COARSE_LEVEL,user.coarse.x);
  PCMGSetRhs(pc,COARSE_LEVEL,user.coarse.b);

  /* Create fine level: the smoother, with the "fine_" option prefix */
  PCMGGetSmoother(pc,FINE_LEVEL,&ksp_fine);
  KSPSetOptionsPrefix(ksp_fine,"fine_");
  KSPSetFromOptions(ksp_fine);
  KSPSetOperators(ksp_fine,user.fine.J,user.fine.J);
  PCMGSetR(pc,FINE_LEVEL,user.fine.r);

  /* Create interpolation between the levels; the same operator is
     registered as the restriction (its transpose is applied by PCMG) */
  DMCreateInterpolation(user.coarse.da,user.fine.da,&user.Ii,NULL);
  PCMGSetInterpolation(pc,FINE_LEVEL,user.Ii);
  PCMGSetRestriction(pc,FINE_LEVEL,user.Ii);

  KSPSetOperators(ksp,user.fine.J,user.fine.J);

  VecSet(user.fine.b,one);          /* constant right-hand side g = 1 */

  /* Set options, then solve the linear system */
  KSPSetFromOptions(ksp);

  KSPSolve(ksp,user.fine.b,user.fine.x);
  KSPGetIterationNumber(ksp,&its);
  PetscPrintf(PETSC_COMM_WORLD,"Number of iterations = %D\n",its);

  /* Free data structures */
  MatDestroy(&user.fine.J);
  VecDestroy(&user.fine.x);
  VecDestroy(&user.fine.r);
  VecDestroy(&user.fine.b);
  DMDestroy(&user.fine.da);
  VecDestroy(&user.fine.localX);
  VecDestroy(&user.fine.localF);

  MatDestroy(&user.coarse.J);
  VecDestroy(&user.coarse.x);
  VecDestroy(&user.coarse.b);
  DMDestroy(&user.coarse.da);
  VecDestroy(&user.coarse.localX);
  VecDestroy(&user.coarse.localF);

  KSPDestroy(&ksp);
  MatDestroy(&user.Ii);
  PetscFinalize();
  return 0;
}
/*
   FormJacobian_Grid - Assembles the standard 5-point finite-difference
   Laplacian for one grid level into *J (already created/preallocated).

   Interior rows get the full stencil; edge rows get a scaled diagonal
   (0.5x) and corner rows a further-scaled diagonal (0.25x), so boundary
   unknowns stay decoupled from the interior.

   Input:  user - application context (unused here; kept for a uniform
                  callback signature)
           grid - level whose DMDA describes the local/ghosted layout
   In/out: J    - matrix to fill; assembled with MAT_FINAL_ASSEMBLY
*/
PetscErrorCode FormJacobian_Grid(AppCtx *user,GridCtx *grid,Mat *J)
{
  Mat                    jac = *J;
  PetscInt               i,j,row,mx,my,xs,ys,xm,ym,Xs,Ys,Xm,Ym,col[5];
  PetscInt               grow;              /* global row number of current point */
  const PetscInt         *ltog;             /* local(ghosted)-to-global index map */
  PetscScalar            two = 2.0,one = 1.0,v[5],hx,hy,hxdhy,hydhx,value;
  ISLocalToGlobalMapping ltogm;

  mx = grid->mx; my = grid->my;
  hx = one/(PetscReal)(mx-1); hy = one/(PetscReal)(my-1);
  hxdhy = hx/hy; hydhx = hy/hx;             /* mesh-ratio weights of the stencil */

  /* Get ghost points */
  DMDAGetCorners(grid->da,&xs,&ys,0,&xm,&ym,0);
  DMDAGetGhostCorners(grid->da,&Xs,&Ys,0,&Xm,&Ym,0);
  DMGetLocalToGlobalMapping(grid->da,&ltogm);
  ISLocalToGlobalMappingGetIndices(ltogm,&ltog);

  /* Evaluate Jacobian of function */
  for (j=ys; j<ys+ym; j++) {
    /* local (ghosted) index of the point just before (xs,j); the loop
       pre-increments, so the first iteration lands exactly on (xs,j) */
    row = (j - Ys)*Xm + xs - Xs - 1;
    for (i=xs; i<xs+xm; i++) {
      row++;
      grow = ltog[row];
      if (i > 0 && i < mx-1 && j > 0 && j < my-1) {
        /* interior point: full 5-point stencil (south, west, center, east, north) */
        v[0] = -hxdhy; col[0] = ltog[row - Xm];
        v[1] = -hydhx; col[1] = ltog[row - 1];
        v[2] = two*(hydhx + hxdhy); col[2] = grow;
        v[3] = -hydhx; col[3] = ltog[row + 1];
        v[4] = -hxdhy; col[4] = ltog[row + Xm];
        MatSetValues(jac,1,&grow,5,col,v,INSERT_VALUES);
      } else if ((i > 0 && i < mx-1) || (j > 0 && j < my-1)) {
        /* boundary edge point: half-weight diagonal only */
        value = .5*two*(hydhx + hxdhy);
        MatSetValues(jac,1,&grow,1,&grow,&value,INSERT_VALUES);
      } else {
        /* boundary corner point: quarter-weight diagonal only */
        value = .25*two*(hydhx + hxdhy);
        MatSetValues(jac,1,&grow,1,&grow,&value,INSERT_VALUES);
      }
    }
  }
  ISLocalToGlobalMappingRestoreIndices(ltogm,&ltog);
  MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY);

  return 0;
}
217: /*TEST
219: test:
220: args: -ksp_gmres_cgs_refinement_type refine_always -pc_type jacobi -ksp_monitor_short -ksp_type gmres
222: test:
223: suffix: 2
224: nsize: 3
225: args: -ksp_monitor_short
227: TEST*/