Actual source code: ex15f.F90
!
!   Solves a linear system in parallel with KSP.  Also indicates
!   use of a user-provided preconditioner.  Input parameters include:
!      -user_defined_pc : Activate a user-defined preconditioner
!
!
!/*T
!   Concepts: KSP^basic parallel example
!   Concepts: PC^setting a user-defined shell preconditioner
!   Processors: n
!T*/
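
!  Example usage (illustrative; the MPI launcher and binary name depend
!  on your installation):
!      mpiexec -n 2 ./ex15f -m 16 -n 17 -user_defined_pc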

!
! -------------------------------------------------------------------------
!
!    Module contains diag needed by shell preconditioner
!
      module mymoduleex15f
#include <petsc/finclude/petscksp.h>
      use petscksp
      Vec diag
      end module

      program main
      use mymoduleex15f
      implicit none

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                    Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     ksp     - Krylov subspace method (linear solver) context
!     pc      - preconditioner context
!     x, b, u - approx solution, right-hand side, exact solution vectors
!     A       - matrix that defines linear system
!     its     - iterations for convergence
!     norm    - norm of solution error

      Vec              x,b,u
      Mat              A
      PC               pc
      KSP              ksp
      PetscScalar      v,one,neg_one
      PetscReal        norm,tol
      PetscErrorCode   ierr
      PetscInt         i,j,II,JJ,Istart
      PetscInt         Iend,m,n,i1,its,five
      PetscMPIInt      rank
      PetscBool        user_defined_pc,flg

!  Note: Any user-defined Fortran routines MUST be declared as external.

      external SampleShellPCSetUp, SampleShellPCApply
      external SampleShellPCDestroy

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                 Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      if (ierr .ne. 0) then
        print*,'Unable to initialize PETSc'
        stop
      endif
      one     = 1.0
      neg_one = -1.0
      i1      = 1
      m       = 8
      n       = 7
      five    = 5
      call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-m',m,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-n',n,flg,ierr)
      call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!      Compute the matrix and right-hand-side vector that define
!      the linear system, Ax = b.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime.  Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.

      call MatCreate(PETSC_COMM_WORLD,A,ierr)
      call MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,ierr)
      call MatSetType(A,MATAIJ,ierr)
      call MatSetFromOptions(A,ierr)
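!  Preallocate storage: the five-point stencil assembled below couples each
!  grid point to at most four neighbors plus itself, so five nonzeros per
!  row is a safe upper bound for both the diagonal and off-diagonal blocks.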
      call MatMPIAIJSetPreallocation(A,five,PETSC_NULL_INTEGER,five,PETSC_NULL_INTEGER,ierr)
      call MatSeqAIJSetPreallocation(A,five,PETSC_NULL_INTEGER,ierr)

!  Currently, all PETSc parallel matrix formats are partitioned by
!  contiguous chunks of rows across the processors.  Determine which
!  rows of the matrix are locally owned.

      call MatGetOwnershipRange(A,Istart,Iend,ierr)

!  Set matrix elements for the 2-D, five-point stencil in parallel.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
!   - Note that MatSetValues() uses 0-based row and column numbers
!     in Fortran as well as in C.
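!   - Global row II maps to grid point (i,j) via II = i*n + j; the loop
!     below recovers i = II/n (integer division) and j = II - i*n.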

      do 10, II=Istart,Iend-1
        v = -1.0
        i = II/n
        j = II - i*n
        if (i.gt.0) then
          JJ = II - n
          call MatSetValues(A,i1,II,i1,JJ,v,ADD_VALUES,ierr)
        endif
        if (i.lt.m-1) then
          JJ = II + n
          call MatSetValues(A,i1,II,i1,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.gt.0) then
          JJ = II - 1
          call MatSetValues(A,i1,II,i1,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.lt.n-1) then
          JJ = II + 1
          call MatSetValues(A,i1,II,i1,JJ,v,ADD_VALUES,ierr)
        endif
        v = 4.0
        call MatSetValues(A,i1,II,i1,II,v,ADD_VALUES,ierr)
 10   continue

!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transition,
!  by placing code between these two statements.

      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)
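
!  MAT_FINAL_ASSEMBLY is appropriate here because all entries have been
!  set; MAT_FLUSH_ASSEMBLY would be used instead when switching between
!  ADD_VALUES and INSERT_VALUES in the middle of assembly.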

!  Create parallel vectors.
!   - Here, the parallel partitioning of the vector is determined by
!     PETSc at runtime.  We could also specify the local dimensions
!     if desired -- or use the more general routine VecCreate().
!   - When solving a linear system, the vectors and matrices MUST
!     be partitioned accordingly.  PETSc automatically generates
!     appropriately partitioned matrices and vectors when MatCreate()
!     and VecCreate() are used with the same communicator.
!   - Note: We form 1 vector from scratch and then duplicate as needed.

      call VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,m*n,u,ierr)
      call VecDuplicate(u,b,ierr)
      call VecDuplicate(b,x,ierr)
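
!  VecDuplicate() gives b and x the same type and parallel layout as u,
!  so all three vectors conform to the row distribution of A.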

!  Set exact solution; then compute right-hand-side vector.

      call VecSet(u,one,ierr)
      call MatMult(A,u,b,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!          Create the linear solver and set various options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create linear solver context

      call KSPCreate(PETSC_COMM_WORLD,ksp,ierr)

!  Set operators.  Here the matrix that defines the linear system
!  also serves as the preconditioning matrix.

      call KSPSetOperators(ksp,A,A,ierr)

!  Set linear solver defaults for this problem (optional).
!   - By extracting the KSP and PC contexts from the KSP context,
!     we can then directly call any KSP and PC routines
!     to set various options.

      call KSPGetPC(ksp,pc,ierr)
      tol = 1.e-7
      call KSPSetTolerances(ksp,tol,PETSC_DEFAULT_REAL,PETSC_DEFAULT_REAL,PETSC_DEFAULT_INTEGER,ierr)
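
!  Only the relative tolerance is set above (equivalent to -ksp_rtol 1.e-7);
!  the absolute tolerance, divergence tolerance, and maximum iteration
!  count keep their PETSc defaults.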

!
!  Set a user-defined shell preconditioner if desired
!
      call PetscOptionsHasName(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-user_defined_pc',user_defined_pc,ierr)

      if (user_defined_pc) then

!  (Required) Indicate to PETSc that we are using a shell preconditioner
        call PCSetType(pc,PCSHELL,ierr)

!  (Required) Set the user-defined routine for applying the preconditioner
        call PCShellSetApply(pc,SampleShellPCApply,ierr)

!  (Optional) Do any setup required for the preconditioner
        call PCShellSetSetUp(pc,SampleShellPCSetUp,ierr)

!  (Optional) Free any objects we created for the preconditioner
        call PCShellSetDestroy(pc,SampleShellPCDestroy,ierr)

      else
        call PCSetType(pc,PCJACOBI,ierr)
      endif

!  Set runtime options, e.g.,
!      -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
!  These options will override those specified above as long as
!  KSPSetFromOptions() is called _after_ any other customization
!  routines.

      call KSPSetFromOptions(ksp,ierr)
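
!  For example, running with
!      -ksp_type bcgs -ksp_monitor
!  replaces the default GMRES method with BiCGStab and prints the
!  residual norm at each iteration.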

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                      Solve the linear system
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call KSPSolve(ksp,b,x,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                     Check solution and clean up
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Check the error

      call VecAXPY(x,neg_one,u,ierr)
      call VecNorm(x,NORM_2,norm,ierr)
      call KSPGetIterationNumber(ksp,its,ierr)

      if (rank .eq. 0) then
        if (norm .gt. 1.e-12) then
          write(6,100) norm,its
        else
          write(6,110) its
        endif
      endif
 100  format('Norm of error ',1pe11.4,' iterations ',i5)
 110  format('Norm of error < 1.e-12, iterations ',i5)

!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.

      call KSPDestroy(ksp,ierr)
      call VecDestroy(u,ierr)
      call VecDestroy(x,ierr)
      call VecDestroy(b,ierr)
      call MatDestroy(A,ierr)

!  Always call PetscFinalize() before exiting a program.

      call PetscFinalize(ierr)
      end

!/***********************************************************************/
!/*          Routines for a user-defined shell preconditioner           */
!/***********************************************************************/

!
!   SampleShellPCSetUp - This routine sets up a user-defined
!   preconditioner context.
!
!   Input Parameters:
!   pc - preconditioner object
!
!   Output Parameter:
!   ierr - error code (nonzero if error has been detected)
!
!   Notes:
!   In this example, we define the shell preconditioner to be the Jacobi
!   method.  Thus, here we create a work vector for storing the reciprocal
!   of the diagonal of the preconditioner matrix; this vector is then
!   used within the routine SampleShellPCApply().
!
      subroutine SampleShellPCSetUp(pc,ierr)
      use mymoduleex15f
      use petscksp
      implicit none

      PC             pc
      Mat            pmat
      PetscErrorCode ierr

!  Extract the preconditioning matrix (PETSC_NULL_MAT skips the system
!  matrix, which is not needed here), then store the reciprocal of its
!  diagonal in the module-level work vector diag.
      call PCGetOperators(pc,PETSC_NULL_MAT,pmat,ierr)
      call MatCreateVecs(pmat,diag,PETSC_NULL_VEC,ierr)
      call MatGetDiagonal(pmat,diag,ierr)
      call VecReciprocal(diag,ierr)

      end

! -------------------------------------------------------------------
!
!   SampleShellPCApply - This routine demonstrates the use of a
!   user-provided preconditioner.
!
!   Input Parameters:
!   pc - preconditioner object
!   x - input vector
!
!   Output Parameters:
!   y - preconditioned vector
!   ierr - error code (nonzero if error has been detected)
!
!   Notes:
!   This code implements the Jacobi preconditioner, merely as an
!   example of working with a PCSHELL.  Note that the Jacobi method
!   is already provided within PETSc.
!
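!   Mathematically, the pointwise multiply below computes y = D^{-1} x,
!   i.e. y_i = x_i / A_ii, using the reciprocal diagonal prepared in
!   SampleShellPCSetUp().
!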
      subroutine SampleShellPCApply(pc,x,y,ierr)
      use mymoduleex15f
      implicit none

      PC             pc
      Vec            x,y
      PetscErrorCode ierr

      call VecPointwiseMult(y,x,diag,ierr)

      end

!/***********************************************************************/
!/*          Routines for a user-defined shell preconditioner           */
!/***********************************************************************/

!
!   SampleShellPCDestroy - This routine destroys (frees the memory of) any
!   objects we made for the preconditioner
!
!   Input Parameters:
!   pc - for this example we use the actual PC as our shell context
!
!   Output Parameter:
!   ierr - error code (nonzero if error has been detected)
!
      subroutine SampleShellPCDestroy(pc,ierr)
      use mymoduleex15f
      implicit none

      PC             pc
      PetscErrorCode ierr

!  Normally we would recommend storing all the work data (like diag) in
!  the context set with PCShellSetContext()
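!  A rough sketch of that approach (myctx is a hypothetical user-defined
!  context; the exact Fortran context mechanism depends on your PETSc
!  version):
!      ! in main, after PCSetType(pc,PCSHELL,ierr):
!      !    call PCShellSetContext(pc,myctx,ierr)
!      ! in the apply/destroy routines:
!      !    call PCShellGetContext(pc,myctx,ierr)
!  Here we simply keep diag in module mymoduleex15f instead.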

      call VecDestroy(diag,ierr)

      end

!
!/*TEST
!
!   test:
!      nsize: 2
!      args: -ksp_view -user_defined_pc -ksp_gmres_cgs_refinement_type refine_always
!
!TEST*/