Actual source code: ex21f.F90

!
!   Solves a linear system in parallel with KSP.  Also indicates
!   use of a user-provided preconditioner.  Input parameters include
!   the grid dimensions -m and -n.
!
!!/*T
!   Concepts: KSP^basic parallel example
!   Concepts: PC^setting a user-defined shell preconditioner
!   Processors: n
!T*/

!
!  -------------------------------------------------------------------------
      module mymoduleex21f
#include <petsc/finclude/petscksp.h>
      use petscksp
!  Inner preconditioner contexts and work vector shared between the
!  main program and the user-defined shell preconditioner routines
      PC jacobi,sor
      Vec work
      end module

      program main
      use mymoduleex21f
      implicit none

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                   Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     ksp      - Krylov subspace method (linear solver) context
!     pc       - preconditioner context
!     x, b, u  - approx solution, right-hand-side, exact solution vectors
!     A        - matrix that defines linear system
!     its      - iterations for convergence
!     norm     - norm of solution error

      Vec              x,b,u
      Mat              A
      PC               pc
      KSP              ksp
      PetscScalar      v,one,neg_one
      PetscReal norm,tol
      PetscInt i,j,II,JJ,Istart
      PetscInt Iend,m,n,its,ione
      PetscMPIInt rank
      PetscBool  flg
      PetscErrorCode ierr

!  Note: Any user-defined Fortran routines MUST be declared as external.

      external SampleShellPCSetUp,SampleShellPCApply

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                 Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      if (ierr .ne. 0) then
        print*,'Unable to initialize PETSc'
        stop
      endif
      one     = 1.0
      neg_one = -1.0
      m       = 8
      n       = 7
      ione    = 1
      call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-m',m,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-n',n,flg,ierr)
      call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!      Compute the matrix and right-hand-side vector that define
!      the linear system, Ax = b.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime. Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.

      call MatCreate(PETSC_COMM_WORLD,A,ierr)
      call MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,ierr)
      call MatSetFromOptions(A,ierr)
      call MatSetUp(A,ierr)
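
!  For example, the storage format could be selected at runtime with an
!  option such as (sketch):
!      -mat_type aij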

!  Currently, all PETSc parallel matrix formats are partitioned by
!  contiguous chunks of rows across the processors.  Determine which
!  rows of the matrix are locally owned.

      call MatGetOwnershipRange(A,Istart,Iend,ierr)
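
!  For example, with the default m=8, n=7 (56 global rows) on two
!  processes, rank 0 would typically own rows 0..27 and rank 1
!  rows 28..55.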

!  Set matrix elements for the 2-D, five-point stencil in parallel.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
!   - Note that MatSetValues() uses 0-based row and column numbers
!     in Fortran as well as in C.
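
!  For an interior grid point (i,j) the global row is II = i*n + j, and
!  the off-diagonal couplings below are to rows II-n, II+n, II-1, II+1.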

      do II=Istart,Iend-1
        v = -1.0
        i = II/n
        j = II - i*n
        if (i.gt.0) then
          JJ = II - n
          call MatSetValues(A,ione,II,ione,JJ,v,ADD_VALUES,ierr)
        endif
        if (i.lt.m-1) then
          JJ = II + n
          call MatSetValues(A,ione,II,ione,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.gt.0) then
          JJ = II - 1
          call MatSetValues(A,ione,II,ione,JJ,v,ADD_VALUES,ierr)
        endif
        if (j.lt.n-1) then
          JJ = II + 1
          call MatSetValues(A,ione,II,ione,JJ,v,ADD_VALUES,ierr)
        endif
        v = 4.0
        call MatSetValues(A,ione,II,ione,II,v,ADD_VALUES,ierr)
      enddo

!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transit,
!  by placing code between these two statements.
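
!  For example (sketch), any local work that does not modify A, such as
!  forming right-hand-side entries, could be placed between the two
!  calls below to overlap computation with communication.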

      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)

!  Create parallel vectors.
!   - Here, the parallel partitioning of the vector is determined by
!     PETSc at runtime.  We could also specify the local dimensions
!     if desired -- or use the more general routine VecCreate().
!   - When solving a linear system, the vectors and matrices MUST
!     be partitioned accordingly.  PETSc automatically generates
!     appropriately partitioned matrices and vectors when MatCreate()
!     and VecCreate() are used with the same communicator.
!   - Note: We form 1 vector from scratch and then duplicate as needed.

      call VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,m*n,u,ierr)
      call VecDuplicate(u,b,ierr)
      call VecDuplicate(b,x,ierr)
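
!  The more general VecCreate() sequence mentioned above would be the
!  equivalent (sketch):
!      call VecCreate(PETSC_COMM_WORLD,u,ierr)
!      call VecSetSizes(u,PETSC_DECIDE,m*n,ierr)
!      call VecSetFromOptions(u,ierr)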

!  Set exact solution; then compute right-hand-side vector.

      call VecSet(u,one,ierr)
      call MatMult(A,u,b,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!         Create the linear solver and set various options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create linear solver context

      call KSPCreate(PETSC_COMM_WORLD,ksp,ierr)

!  Set operators. Here the matrix that defines the linear system
!  also serves as the preconditioning matrix.

      call KSPSetOperators(ksp,A,A,ierr)
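
!  A different preconditioning matrix could be supplied as the third
!  argument instead, e.g. (sketch, with a hypothetical matrix P):
!      call KSPSetOperators(ksp,A,P,ierr)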

!  Set linear solver defaults for this problem (optional).
!   - By extracting the KSP and PC contexts from the KSP context,
!     we can then directly call any KSP and PC routines
!     to set various options.

      call KSPGetPC(ksp,pc,ierr)
      tol = 1.e-7
      call KSPSetTolerances(ksp,tol,PETSC_DEFAULT_REAL,PETSC_DEFAULT_REAL,PETSC_DEFAULT_INTEGER,ierr)
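
!  For example (optional sketch), the Krylov method could also be set
!  directly here instead of from the command line:
!      call KSPSetType(ksp,KSPGMRES,ierr)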

!
!  Set a user-defined shell preconditioner
!

!  (Required) Indicate to PETSc that we are using a shell preconditioner
      call PCSetType(pc,PCSHELL,ierr)

!  (Required) Set the user-defined routine for applying the preconditioner
      call PCShellSetApply(pc,SampleShellPCApply,ierr)

!  (Optional) Do any setup required for the preconditioner
!     Note: if you use PCShellSetSetUp(), this will be done for you
!     automatically
      call SampleShellPCSetUp(pc,x,ierr)
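
!  A sketch of the PCShellSetSetUp() alternative; note that the setup
!  routine it expects takes only (pc,ierr), so the vector needed by
!  SampleShellPCSetUp would have to be passed another way (e.g. via the
!  module).  MyShellPCSetUp here is hypothetical:
!      call PCShellSetSetUp(pc,MyShellPCSetUp,ierr)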

!  Set runtime options, e.g.,
!      -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
!  These options will override those specified above as long as
!  KSPSetFromOptions() is called _after_ any other customization
!  routines.
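
!  For example (sketch):
!      mpiexec -n 2 ./ex21f -ksp_type bcgs -ksp_rtol 1.e-8 -ksp_monitor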

      call KSPSetFromOptions(ksp,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                      Solve the linear system
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call KSPSolve(ksp,b,x,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                     Check solution and clean up
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Check the error

      call VecAXPY(x,neg_one,u,ierr)
      call VecNorm(x,NORM_2,norm,ierr)
      call KSPGetIterationNumber(ksp,its,ierr)

      if (rank .eq. 0) then
        if (norm .gt. 1.e-12) then
           write(6,100) norm,its
        else
           write(6,110) its
        endif
      endif
  100 format('Norm of error ',1pe11.4,' iterations ',i5)
  110 format('Norm of error < 1.e-12, iterations ',i5)

!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.

      call KSPDestroy(ksp,ierr)
      call VecDestroy(u,ierr)
      call VecDestroy(x,ierr)
      call VecDestroy(b,ierr)
      call MatDestroy(A,ierr)

!  Free up PCShell data
      call PCDestroy(sor,ierr)
      call PCDestroy(jacobi,ierr)
      call VecDestroy(work,ierr)

!  Always call PetscFinalize() before exiting a program.

      call PetscFinalize(ierr)
      end

!/***********************************************************************/
!/*          Routines for a user-defined shell preconditioner           */
!/***********************************************************************/

!
!   SampleShellPCSetUp - This routine sets up a user-defined
!   preconditioner context.
!
!   Input Parameters:
!   pc    - preconditioner object
!   x     - vector
!
!   Output Parameter:
!   ierr  - error code (nonzero if error has been detected)
!
!   Notes:
!   In this example, the shell preconditioner combines the Jacobi and
!   SOR methods.  Here we create the two inner PC contexts and a work
!   vector; these are then used within the routine SampleShellPCApply().
!
      subroutine SampleShellPCSetUp(pc,x,ierr)
      use mymoduleex21f
      implicit none

      PC      pc
      Vec     x
      Mat     pmat
      PetscErrorCode ierr

!  Extract the preconditioner matrix and set up an inner Jacobi PC
      call PCGetOperators(pc,PETSC_NULL_MAT,pmat,ierr)
      call PCCreate(PETSC_COMM_WORLD,jacobi,ierr)
      call PCSetType(jacobi,PCJACOBI,ierr)
      call PCSetOperators(jacobi,pmat,pmat,ierr)
      call PCSetUp(jacobi,ierr)

!  Set up an inner SOR PC on the same matrix
      call PCCreate(PETSC_COMM_WORLD,sor,ierr)
      call PCSetType(sor,PCSOR,ierr)
      call PCSetOperators(sor,pmat,pmat,ierr)
!      call PCSORSetSymmetric(sor,SOR_LOCAL_SYMMETRIC_SWEEP,ierr)
      call PCSetUp(sor,ierr)

!  Work vector used by SampleShellPCApply() to hold the SOR result
      call VecDuplicate(x,work,ierr)

      end

! -------------------------------------------------------------------
!
!   SampleShellPCApply - This routine demonstrates the use of a
!   user-provided preconditioner.
!
!   Input Parameters:
!   pc    - preconditioner object
!   x     - input vector
!
!   Output Parameters:
!   y     - preconditioned vector
!   ierr  - error code (nonzero if error has been detected)
!
!   Notes:
!   This code applies the Jacobi preconditioner plus the SOR
!   preconditioner additively.
!
!   The exact same effect can be obtained with the PCCOMPOSITE
!   preconditioner using
!   mpiexec -n 1 ex21f -ksp_monitor -pc_type composite -pc_composite_pcs jacobi,sor -pc_composite_type additive
!
      subroutine SampleShellPCApply(pc,x,y,ierr)
      use mymoduleex21f
      implicit none

      PC      pc
      Vec     x,y
      PetscErrorCode ierr
      PetscScalar  one

!  Apply the two inner preconditioners additively:
!  y = Jacobi(x) + SOR(x)
      one = 1.0
      call PCApply(jacobi,x,y,ierr)
      call PCApply(sor,x,work,ierr)
      call VecAXPY(y,one,work,ierr)

      end

!/*TEST
!
!   test:
!     requires: !single
!
!TEST*/