ex38.c (petsc-3.7.3, 2016-07-24)
/*
   mpiexec -n 8 ./ex38 -ksp_type fbcgs -ksp_rtol 1.e-6 -sub_ksp_type bcgs -sub_ksp_rtol 1.e-3 -pc_type bjacobi -ksp_converged_reason -ksp_monitor -n1 64 -n2 64

   Contributed by Jie Chen for testing the flexible BiCGStab algorithm
*/
static char help[] = "Solves the PDE (in 2D) -laplacian(u) + gamma x dot grad(u) + beta u = 1\n\
with zero Dirichlet condition. The discretization is standard centered\n\
difference. Input parameters include:\n\
  -n1    : number of mesh points in 1st dimension (default 64)\n\
  -n2    : number of mesh points in 2nd dimension (default 64)\n\
  -h     : spacing between mesh points (default 1/n1)\n\
  -gamma : gamma (default 4/h)\n\
  -beta  : beta (default 0.01/h^2)\n\n";
/*T
   Concepts: KSP^basic parallel example;
   Concepts: KSP^Laplacian, 2d
   Concepts: Laplacian, 2d
   Processors: n
T*/
/*
  Include "petscksp.h" so that we can use KSP solvers. Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscmat.h    - matrices
     petscis.h     - index sets
     petscksp.h    - Krylov subspace methods
     petscviewer.h - viewers
     petscpc.h     - preconditioners
*/
#include <petscksp.h>
int main(int argc,char **args)
{
  Vec            x,b,u;          /* approx solution, RHS, working vector */
  Mat            A;              /* linear system matrix */
  KSP            ksp;            /* linear solver context */
  PetscInt       n1, n2;         /* parameters */
  PetscReal      h, gamma, beta; /* parameters */
  PetscInt       i,j,Ii,J,Istart,Iend;
  PetscScalar    v, co1, co2;
#if defined(PETSC_USE_LOG)
  PetscLogStage  stage;
#endif
  PetscInitialize(&argc,&args,(char*)0,help);

  n1 = 64;
  n2 = 64;
  PetscOptionsGetInt(NULL,NULL,"-n1",&n1,NULL);
  PetscOptionsGetInt(NULL,NULL,"-n2",&n2,NULL);

  h     = 1.0/n1;
  gamma = 4.0;
  beta  = 0.01;
  PetscOptionsGetReal(NULL,NULL,"-h",&h,NULL);
  PetscOptionsGetReal(NULL,NULL,"-gamma",&gamma,NULL);
  PetscOptionsGetReal(NULL,NULL,"-beta",&beta,NULL);
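  /*
     Scale the user-supplied (or default) coefficients by the mesh spacing,
     giving the defaults quoted in the help string: gamma = 4/h and
     beta = 0.01/h^2.
  */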
  gamma = gamma/h;
  beta  = beta/(h*h);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
         Compute the matrix and set right-hand-side vector.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create parallel matrix, specifying only its global dimensions.
     When using MatCreate(), the matrix format can be specified at
     runtime. Also, the parallel partitioning of the matrix is
     determined by PETSc at runtime.

     Performance tuning note: For problems of substantial size,
     preallocation of matrix memory is crucial for attaining good
     performance. See the matrix chapter of the users manual for details.
  */
  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,n1*n2,n1*n2);
  MatSetFromOptions(A);
  MatMPIAIJSetPreallocation(A,5,NULL,5,NULL);
  MatSeqAIJSetPreallocation(A,5,NULL);
  MatSetUp(A);
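  /*
     The five-point stencil produces at most 5 nonzeros per row, hence the
     per-row estimate of 5 in both preallocation calls above. Only the call
     matching the actual matrix type takes effect; the other is ignored.
  */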
  /*
     Currently, all PETSc parallel matrix formats are partitioned by
     contiguous chunks of rows across the processors. Determine which
     rows of the matrix are locally owned.
  */
  MatGetOwnershipRange(A,&Istart,&Iend);
  /*
     Set matrix elements for the 2-D, five-point stencil in parallel.
      - Each processor needs to insert only elements that it owns
        locally (but any non-local elements will be sent to the
        appropriate processor during matrix assembly).
      - Always specify global rows and columns of matrix entries.
  */
  PetscLogStageRegister("Assembly", &stage);
  PetscLogStagePush(stage);
  co1  = gamma * h * h / 2.0;
  co2  = beta * h * h;
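  /*
     Multiplying the PDE by h^2, the centered-difference stencil at grid
     point (i,j) (with x = (i*h, j*h)) has diagonal 4 + beta*h^2 and
     neighbor coefficients -1 -/+ (gamma*h^2/2)*i in the first dimension
     and -1 -/+ (gamma*h^2/2)*j in the second; co1 and co2 are those
     scaled convection and reaction weights. Rows are numbered with the
     second index varying fastest, so i = Ii/n2 and j = Ii%n2.
  */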
  for (Ii=Istart; Ii<Iend; Ii++) {
    i = Ii/n2; j = Ii - i*n2;   /* recover grid indices from the global row */
    if (i>0) {                  /* neighbor (i-1,j): -1 - co1*i */
      J = Ii - n2; v = -1.0 - co1*(PetscScalar)i;
      MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);
    }
    if (i<n1-1) {               /* neighbor (i+1,j): -1 + co1*i */
      J = Ii + n2; v = -1.0 + co1*(PetscScalar)i;
      MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);
    }
    if (j>0) {                  /* neighbor (i,j-1): -1 - co1*j */
      J = Ii - 1; v = -1.0 - co1*(PetscScalar)j;
      MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);
    }
    if (j<n2-1) {               /* neighbor (i,j+1): -1 + co1*j */
      J = Ii + 1; v = -1.0 + co1*(PetscScalar)j;
      MatSetValues(A,1,&Ii,1,&J,&v,INSERT_VALUES);
    }
    v = 4.0 + co2;              /* diagonal: 4 + beta*h^2 */
    MatSetValues(A,1,&Ii,1,&Ii,&v,INSERT_VALUES);
  }
  /*
     Assemble matrix, using the 2-step process:
       MatAssemblyBegin(), MatAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
  PetscLogStagePop();
  /*
     Create parallel vectors.
      - We form one vector from scratch and then duplicate as needed.
      - When using VecCreate(), VecSetSizes() and VecSetFromOptions()
        in this example, we specify only the vector's global dimension;
        the parallel partitioning is determined at runtime.
      - When solving a linear system, the vectors and matrices MUST
        be partitioned accordingly. PETSc automatically generates
        appropriately partitioned matrices and vectors when MatCreate()
        and VecCreate() are used with the same communicator.
      - The user can alternatively specify the local vector and matrix
        dimensions when more sophisticated partitioning is needed
        (replacing the PETSC_DECIDE argument in the VecSetSizes() statement
        below).
  */
  VecCreate(PETSC_COMM_WORLD,&b);
  VecSetSizes(b,PETSC_DECIDE,n1*n2);
  VecSetFromOptions(b);
  VecDuplicate(b,&x);
  VecDuplicate(b,&u);
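  /*
     Note: u is created as a working vector but is not otherwise used in
     this example; only x and b enter the solve below.
  */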
  /*
     Set right-hand side.
  */
  VecSet(b,1.0);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                Create the linear solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Create linear solver context
  */
  KSPCreate(PETSC_COMM_WORLD,&ksp);
  /*
     Set operators. Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  KSPSetOperators(ksp,A,A);
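  /*
     Passing A twice means the preconditioner (block Jacobi in the run
     line above) is built from the system matrix itself; a different
     matrix could be supplied as the second Mat argument to precondition
     with an approximation of A.
  */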
  /*
     Set linear solver defaults for this problem (optional).
      - By extracting the KSP and PC contexts from the KSP context,
        we can then directly call any KSP and PC routines to set
        various options.
  */
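  /*
     For example, a minimal sketch (not executed here) of fetching the PC
     context and selecting block Jacobi programmatically, equivalent to
     the -pc_type bjacobi runtime option:

       PC pc;
       KSPGetPC(ksp,&pc);
       PCSetType(pc,PCBJACOBI);
  */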
  KSPSetTolerances(ksp,1.e-6,1.e-50,PETSC_DEFAULT,200); /* rtol, abstol, dtol, maxits */
  /*
     Set runtime options, e.g.,
         -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
     These options will override those specified above as long as
     KSPSetFromOptions() is called _after_ any other customization
     routines.
  */
  KSPSetFromOptions(ksp);
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Solve the linear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  KSPSolve(ksp,b,x);
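  /*
     One could inspect the result here; a minimal sketch (not executed),
     complementing the -ksp_converged_reason and -ksp_monitor options from
     the run line:

       PetscInt its;
       KSPGetIterationNumber(ksp,&its);
       PetscPrintf(PETSC_COMM_WORLD,"Iterations: %D\n",its);
  */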
  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                            Clean up
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */
  /*
     Free work space. All PETSc objects should be destroyed when they
     are no longer needed.
  */
  KSPDestroy(&ksp);
  VecDestroy(&u); VecDestroy(&x);
  VecDestroy(&b); MatDestroy(&A);
  /*
     Always call PetscFinalize() before exiting a program. This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_summary).
  */
  PetscFinalize();
  return 0;
}