/* ex26.c — from PETSc 3.7.3 (src/ksp/ksp/examples/tests), listing retrieved 2016-07-24 */
/* Help text printed by PetscInitialize() when the program is run with -help. */
static char help[] = "Solves Laplacian with multigrid, bad way.\n\
  -mx <xg>, where <xg> = number of grid points in the x-direction\n\
  -my <yg>, where <yg> = number of grid points in the y-direction\n\
  -Nx <npx>, where <npx> = number of processors in the x-direction\n\
  -Ny <npy>, where <npy> = number of processors in the y-direction\n\n";
/* Modified from ~src/ksp/examples/tests/ex19.c. Used for testing the ML 6.2 interface.

   This problem is modeled by the partial differential equation

           -Laplacian u = g,  0 < x,y < 1,

   with boundary conditions

           u = 0  for  x = 0, x = 1, y = 0, y = 1.

   A finite difference approximation with the usual 5-point stencil
   is used to discretize the boundary value problem to obtain a
   linear system of equations.

   Usage: ./ex26 -ksp_monitor_short -pc_type ml
          -mg_coarse_ksp_max_it 10
          -mg_levels_1_ksp_max_it 10 -mg_levels_2_ksp_max_it 10
          -mg_fine_ksp_max_it 10
*/
28: #include <petscksp.h>
29: #include <petscdm.h>
30: #include <petscdmda.h>
32: /* User-defined application contexts */
33: typedef struct {
34: PetscInt mx,my; /* number grid points in x and y direction */
35: Vec localX,localF; /* local vectors with ghost region */
36: DM da;
37: Vec x,b,r; /* global vectors */
38: Mat J; /* Jacobian on grid */
39: Mat A,P,R;
40: KSP ksp;
41: } GridCtx;
42: extern int FormJacobian_Grid(GridCtx*,Mat*);
46: int main(int argc,char **argv)
47: {
49: PetscInt its,n,Nx=PETSC_DECIDE,Ny=PETSC_DECIDE,nlocal;
50: PetscMPIInt size;
51: PetscScalar one = 1.0;
52: PetscInt mx,my;
53: Mat A;
54: GridCtx fine_ctx;
55: KSP ksp;
56: PetscBool flg;
58: PetscInitialize(&argc,&argv,(char*)0,help);
59: /* set up discretization matrix for fine grid */
60: fine_ctx.mx = 9; fine_ctx.my = 9;
61: PetscOptionsGetInt(NULL,NULL,"-mx",&mx,&flg);
62: if (flg) fine_ctx.mx = mx;
63: PetscOptionsGetInt(NULL,NULL,"-my",&my,&flg);
64: if (flg) fine_ctx.my = my;
65: PetscPrintf(PETSC_COMM_WORLD,"Fine grid size %D by %D\n",fine_ctx.mx,fine_ctx.my);
66: n = fine_ctx.mx*fine_ctx.my;
68: MPI_Comm_size(PETSC_COMM_WORLD,&size);
69: PetscOptionsGetInt(NULL,NULL,"-Nx",&Nx,NULL);
70: PetscOptionsGetInt(NULL,NULL,"-Ny",&Ny,NULL);
72: /* Set up distributed array for fine grid */
73: DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,fine_ctx.mx,
74: fine_ctx.my,Nx,Ny,1,1,NULL,NULL,&fine_ctx.da);
75: DMCreateGlobalVector(fine_ctx.da,&fine_ctx.x);
76: VecDuplicate(fine_ctx.x,&fine_ctx.b);
77: VecGetLocalSize(fine_ctx.x,&nlocal);
78: DMCreateLocalVector(fine_ctx.da,&fine_ctx.localX);
79: VecDuplicate(fine_ctx.localX,&fine_ctx.localF);
80: MatCreateAIJ(PETSC_COMM_WORLD,nlocal,nlocal,n,n,5,NULL,3,NULL,&A);
81: FormJacobian_Grid(&fine_ctx,&A);
83: /* create linear solver */
84: KSPCreate(PETSC_COMM_WORLD,&ksp);
86: /* set values for rhs vector */
87: VecSet(fine_ctx.b,one);
89: /* set options, then solve system */
90: KSPSetFromOptions(ksp); /* calls PCSetFromOptions_ML if 'pc_type=ml' */
91: KSPSetOperators(ksp,A,A);
92: KSPSolve(ksp,fine_ctx.b,fine_ctx.x);
93: KSPGetIterationNumber(ksp,&its);
94: PetscPrintf(PETSC_COMM_WORLD,"Number of iterations = %D\n",its);
96: /* free data structures */
97: VecDestroy(&fine_ctx.x);
98: VecDestroy(&fine_ctx.b);
99: DMDestroy(&fine_ctx.da);
100: VecDestroy(&fine_ctx.localX);
101: VecDestroy(&fine_ctx.localF);
102: MatDestroy(&A);
103: KSPDestroy(&ksp);
105: PetscFinalize();
106: return 0;
107: }
111: int FormJacobian_Grid(GridCtx *grid,Mat *J)
112: {
113: Mat jac = *J;
114: PetscErrorCode ierr;
115: PetscInt i,j,row,mx,my,xs,ys,xm,ym,Xs,Ys,Xm,Ym,col[5];
116: PetscInt grow;
117: const PetscInt *ltog;
118: PetscScalar two = 2.0,one = 1.0,v[5],hx,hy,hxdhy,hydhx,value;
119: ISLocalToGlobalMapping ltogm;
121: mx = grid->mx; my = grid->my;
122: hx = one/(PetscReal)(mx-1); hy = one/(PetscReal)(my-1);
123: hxdhy = hx/hy; hydhx = hy/hx;
125: /* Get ghost points */
126: DMDAGetCorners(grid->da,&xs,&ys,0,&xm,&ym,0);
127: DMDAGetGhostCorners(grid->da,&Xs,&Ys,0,&Xm,&Ym,0);
128: DMGetLocalToGlobalMapping(grid->da,<ogm);
129: ISLocalToGlobalMappingGetIndices(ltogm,<og);
131: /* Evaluate Jacobian of function */
132: for (j=ys; j<ys+ym; j++) {
133: row = (j - Ys)*Xm + xs - Xs - 1;
134: for (i=xs; i<xs+xm; i++) {
135: row++;
136: grow = ltog[row];
137: if (i > 0 && i < mx-1 && j > 0 && j < my-1) {
138: v[0] = -hxdhy; col[0] = ltog[row - Xm];
139: v[1] = -hydhx; col[1] = ltog[row - 1];
140: v[2] = two*(hydhx + hxdhy); col[2] = grow;
141: v[3] = -hydhx; col[3] = ltog[row + 1];
142: v[4] = -hxdhy; col[4] = ltog[row + Xm];
143: MatSetValues(jac,1,&grow,5,col,v,INSERT_VALUES);
144: } else if ((i > 0 && i < mx-1) || (j > 0 && j < my-1)) {
145: value = .5*two*(hydhx + hxdhy);
146: MatSetValues(jac,1,&grow,1,&grow,&value,INSERT_VALUES);
147: } else {
148: value = .25*two*(hydhx + hxdhy);
149: MatSetValues(jac,1,&grow,1,&grow,&value,INSERT_VALUES);
150: }
151: }
152: }
153: ISLocalToGlobalMappingRestoreIndices(ltogm,<og);
154: MatAssemblyBegin(jac,MAT_FINAL_ASSEMBLY);
155: MatAssemblyEnd(jac,MAT_FINAL_ASSEMBLY);
156: return 0;
157: }