ex40.c
static char help[] = "Solves a linear system in parallel with KSP.\n\
Input parameters include:\n\
  -random_exact_sol : use a random exact solution vector\n\
  -view_exact_sol   : write exact solution vector to stdout\n\
  -m <mesh_x>       : number of mesh points in x-direction\n\
  -n <mesh_y>       : number of mesh points in y-direction\n\n";
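
/*
   Example run (illustrative; assumes the executable built from this file is
   named ex40, matching the tests at the bottom):

       mpiexec -n 6 ./ex40 -m 16 -n 16 -random_exact_sol -ksp_monitor

   The -m/-n and -random_exact_sol options are read via PetscOptionsGet*()
   below; -ksp_monitor is picked up by KSPSetFromOptions().
*/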

/*T
   Concepts: KSP^basic parallel example;
   Concepts: KSP^Laplacian, 2d
   Concepts: Laplacian, 2d
   Processors: n
T*/

/*
  Include "petscksp.h" so that we can use KSP solvers.  Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines     petscvec.h - vectors
     petscmat.h    - matrices
     petscis.h     - index sets              petscksp.h - Krylov subspace methods
     petscviewer.h - viewers                 petscpc.h  - preconditioners
*/
#include <petscksp.h>

int main(int argc,char **args)
{
  Vec            x,b,u;    /* approx solution, RHS, exact solution */
  Mat            A;        /* linear system matrix */
  KSP            ksp;      /* linear solver context */
  PetscRandom    rctx;     /* random number generator context */
  PetscReal      norm;     /* norm of solution error */
  PetscInt       i,j,Ii,J,m = 8,n = 7,its;
  PetscErrorCode ierr;     /* error code returned by PETSc routines */
  PetscBool      flg = PETSC_FALSE;
  PetscScalar    v;
  PetscMPIInt    rank;

  ierr = PetscInitialize(&argc,&args,(char*)0,help);if (ierr) return ierr;
  PetscOptionsGetInt(NULL,NULL,"-m",&m,NULL);
  PetscOptionsGetInt(NULL,NULL,"-n",&n,NULL);
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
         Compute the matrix and right-hand-side vector that define
         the linear system, Ax = b.
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create parallel matrix, specifying only its global dimensions.
     When using MatCreate(), the matrix format can be specified at
     runtime.  Also, the parallel partitioning of the matrix is
     determined by PETSc at runtime.

     Performance tuning note: For problems of substantial size,
     preallocation of matrix memory is crucial for attaining good
     performance.  See the matrix chapter of the users manual for details.
  */
  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n);
  MatSetType(A,MATELEMENTAL);
  MatSetFromOptions(A);
  MatSetUp(A);
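
  /*
     Preallocation sketch (illustrative only; it is not needed here because
     MATELEMENTAL is a dense format): with the default sparse AIJ formats,
     the 5-point stencil below yields at most 5 nonzeros per row, so one
     could preallocate at this point with

        MatSeqAIJSetPreallocation(A,5,NULL);
        MatMPIAIJSetPreallocation(A,5,NULL,5,NULL);

     which avoids repeated mallocs during MatSetValues().

     Note that only the first process generates matrix entries below;
     MatSetValues() caches values destined for other processes, and the
     assembly phase communicates them to their owners.
  */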
  if (rank == 0) {
    PetscInt M,N;
    MatGetSize(A,&M,&N);
    for (Ii=0; Ii<M; Ii++) {
      v = -1.0; i = Ii/n; j = Ii - i*n;
      if (i>0)   {J = Ii - n; MatSetValues(A,1,&Ii,1,&J,&v,ADD_VALUES);}
      if (i<m-1) {J = Ii + n; MatSetValues(A,1,&Ii,1,&J,&v,ADD_VALUES);}
      if (j>0)   {J = Ii - 1; MatSetValues(A,1,&Ii,1,&J,&v,ADD_VALUES);}
      if (j<n-1) {J = Ii + 1; MatSetValues(A,1,&Ii,1,&J,&v,ADD_VALUES);}
      v = 4.0; MatSetValues(A,1,&Ii,1,&Ii,&v,ADD_VALUES);
    }
  }
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);

  /* MatSetOption(A,MAT_SYMMETRIC,PETSC_TRUE); */

  /*
     Create parallel vectors.
      - We form 1 vector from scratch and then duplicate as needed.
      - When using VecCreate(), VecSetSizes() and VecSetFromOptions() in
        this example, we specify only the vector's global dimension; the
        parallel partitioning is determined at runtime.
      - When solving a linear system, the vectors and matrices MUST
        be partitioned accordingly.  PETSc automatically generates
        appropriately partitioned matrices and vectors when MatCreate()
        and VecCreate() are used with the same communicator.
      - The user can alternatively specify the local vector and matrix
        dimensions when more sophisticated partitioning is needed
        (replacing the PETSC_DECIDE argument in the VecSetSizes() statement
        below).
  */
  VecCreate(PETSC_COMM_WORLD,&u);
  VecSetSizes(u,PETSC_DECIDE,m*n);
  VecSetFromOptions(u);
  VecDuplicate(u,&b);
  VecDuplicate(b,&x);

  /*
     Set exact solution; then compute right-hand-side vector.
     By default we use an exact solution with all elements equal to 1.0;
     alternatively, the runtime option -random_exact_sol forms a solution
     vector with random components.
  */
  PetscOptionsGetBool(NULL,NULL,"-random_exact_sol",&flg,NULL);
  if (flg) {
    PetscRandomCreate(PETSC_COMM_WORLD,&rctx);
    PetscRandomSetFromOptions(rctx);
    VecSetRandom(u,rctx);
    PetscRandomDestroy(&rctx);
  } else {
    VecSet(u,1.0);
  }
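  /* Compute b = A*u, so that u is by construction the exact solution of Ax = b */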
  MatMult(A,u,b);

  /*
     View the exact solution vector if desired
  */
  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-view_exact_sol",&flg,NULL);
  if (flg) {VecView(u,PETSC_VIEWER_STDOUT_WORLD);}

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
               Create the linear solver and set various options
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Create linear solver context
  */
  KSPCreate(PETSC_COMM_WORLD,&ksp);

  /*
     Set operators.  Here the matrix that defines the linear system
     also serves as the preconditioning matrix.
  */
  KSPSetOperators(ksp,A,A);

  /*
     Set linear solver defaults for this problem (optional).
     - By extracting the KSP and PC contexts from the KSP context,
       we can then directly call any KSP and PC routines to set
       various options.
     - The following KSPSetTolerances() call is optional; all of these
       parameters could alternatively be specified at runtime via
       KSPSetFromOptions().  All of these defaults can be
       overridden at runtime, as indicated below.
  */
  KSPSetTolerances(ksp,1.e-2/((m+1)*(n+1)),PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT);
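
  /*
     Sketch of the direct-call customization mentioned above (illustrative
     only, not active in this example): one could extract the PC context
     and set the solver and preconditioner types explicitly, e.g.

        PC pc;
        KSPGetPC(ksp,&pc);
        PCSetType(pc,PCJACOBI);
        KSPSetType(ksp,KSPGMRES);

     Such defaults would still be overridden by KSPSetFromOptions() below.
  */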

  /*
     Set runtime options, e.g.,
        -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
     These options will override those specified above as long as
     KSPSetFromOptions() is called _after_ any other customization
     routines.
  */
  KSPSetFromOptions(ksp);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Solve the linear system
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  KSPSolve(ksp,b,x);

  /* - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
                      Check solution and clean up
     - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */

  /*
     Check the error: overwrite x with the error vector x - u and
     take its 2-norm
  */
  VecAXPY(x,-1.0,u);
  VecNorm(x,NORM_2,&norm);
  KSPGetIterationNumber(ksp,&its);

  /*
     Print convergence information.  PetscPrintf() produces a single
     print statement from all processes that share a communicator.
     An alternative is PetscFPrintf(), which prints to a file.
  */
  PetscPrintf(PETSC_COMM_WORLD,"Norm of error %g iterations %D\n",(double)norm,its);

  /*
     Free work space.  All PETSc objects should be destroyed when they
     are no longer needed.
  */
  KSPDestroy(&ksp);
  VecDestroy(&u); VecDestroy(&x);
  VecDestroy(&b); MatDestroy(&A);

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_view).
  */
  ierr = PetscFinalize();
  return ierr;
}

/*TEST

   test:
      nsize: 6
      args: -pc_type none
      requires: elemental

   test:
      suffix: 2
      nsize: 6
      args: -pc_type lu -pc_factor_mat_solver_type elemental
      requires: elemental

TEST*/