Actual source code: ex61f.F90
petsc-3.12.5 2020-03-29
!
!   Demonstrates having each OpenMP thread manage its own PETSc objects and linear solves
!      - each thread is ONLY allowed to access objects that IT created,
!        that is, threads cannot share objects
!
!   Run with "export OMP_NUM_THREADS=16; ./ex61f"
!   to use 16 independent threads
!
!   Requires a PETSc build configured with:
!   ./configure --with-threadsafety --with-log=0 --with-openmp
!
module omp_module
  implicit none
contains
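  ! split_indices partitions 1..total into num_pieces contiguous chunks,
  ! giving the first mod(total,num_pieces) chunks one extra element so the
  ! chunk sizes differ by at most one; e.g. total=10, num_pieces=3 yields
  ! [1:4], [5:7], [8:10].  A chunk past the end gets iend = ibeg-1 (empty).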
  subroutine split_indices(total,num_pieces,ibeg,iend)
    implicit none

    integer :: total
    integer :: num_pieces
    integer :: ibeg(num_pieces), iend(num_pieces)
    integer :: itmp1, itmp2, ioffset, i

    itmp1 = total/num_pieces
    itmp2 = mod(total,num_pieces)
    ioffset = 0
    do i=1,itmp2
      ibeg(i) = ioffset + 1
      iend(i) = ioffset + (itmp1+1)
      ioffset = iend(i)
    enddo
    do i=itmp2+1,num_pieces
      ibeg(i) = ioffset + 1
      if (ibeg(i) > total) then
        iend(i) = ibeg(i) - 1
      else
        iend(i) = ioffset + itmp1
        ioffset = iend(i)
      endif
    enddo

  end subroutine split_indices
end module omp_module

module assert_mod
  implicit none
contains
  subroutine assert(lcond,msg,icase)
    logical, intent(in) :: lcond
    character(len=*), intent(in) :: msg
    integer, intent(in) :: icase

    if (.not.lcond) then
      write(*,*) msg, icase
      stop 'assertion error'
    endif
    return
  end subroutine assert
end module assert_mod
program tpetsc

#include <petsc/finclude/petsc.h>
  use assert_mod
  use omp_module
  use petsc
  implicit none

  ! ----------------------------
  ! test concurrent petsc solver
  ! ----------------------------

  integer, parameter :: NCASES = 100
  integer, parameter :: MAXTHREADS = 64
  real(8), parameter :: tol = 1.0d-6

  integer, dimension(MAXTHREADS) :: ibeg, iend

!$ integer, external :: omp_get_num_threads
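  ! Per-thread PETSc objects: thread ith works exclusively with entry ith
  ! of each of these arrays, so no object is ever shared between threads.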
  Mat, target :: Mcol_f_mat(MAXTHREADS)
  Vec, target :: Mcol_f_vecb(MAXTHREADS)
  Vec, target :: Mcol_f_vecx(MAXTHREADS)
  KSP, target :: Mcol_f_ksp(MAXTHREADS)
  PC, target :: MPC(MAXTHREADS)

  Mat, pointer :: col_f_mat
  Vec, pointer :: col_f_vecb
  Vec, pointer :: col_f_vecx
  KSP, pointer :: col_f_ksp
  PC, pointer :: pc

  PetscInt :: ith, nthreads
  PetscErrorCode ierr

  integer, parameter :: nz_per_row = 9
  integer, parameter :: n = 100
  integer :: i,j,ij,ij2,ii,jj,nz,ip, dx,dy,icase
  integer, dimension(n*n*nz_per_row) :: ilist,jlist
  PetscScalar :: aij
  PetscScalar, dimension(n*n*nz_per_row) :: alist
  logical :: isvalid_ii, isvalid_jj, is_diag

  PetscInt nrow
  PetscInt ncol
  PetscScalar, dimension(0:(n*n-1)) :: x, b
  real(8) :: err(NCASES)

  integer :: t1,t2,count_rate
  real :: ttime

  call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
  if (ierr .ne. 0) then
    print*,'Unable to initialize PETSc'
    stop
  endif
  nrow = n*n
  ncol = nrow

  nthreads = 1
!$omp parallel
!$omp master
!$ nthreads = omp_get_num_threads()
!$omp end master
!$omp end parallel
  print*,'nthreads = ', nthreads,' NCASES = ', NCASES

  call split_indices(NCASES,nthreads,ibeg,iend)
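  ! Thread ith handles test cases ibeg(ith)..iend(ith); the case ranges are
  ! disjoint, so each err(icase) is written by exactly one thread.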
!$omp parallel do &
!$omp private(ith,ierr) &
!$omp private(col_f_mat,col_f_vecb,col_f_vecx,col_f_ksp)
  do ith=1,nthreads
    col_f_mat => Mcol_f_mat(ith)
    col_f_vecb => Mcol_f_vecb(ith)
    col_f_vecx => Mcol_f_vecx(ith)
    col_f_ksp => Mcol_f_ksp(ith)

    call MatCreateSeqAIJ(PETSC_COMM_SELF,nrow,ncol,nz_per_row,PETSC_NULL_INTEGER,col_f_mat,ierr)
    call assert(ierr.eq.0,'matcreateseqaij return ',ierr)

    call VecCreateSeqWithArray(PETSC_COMM_SELF,1,nrow,PETSC_NULL_SCALAR,col_f_vecb,ierr)
    call assert(ierr.eq.0,'veccreateseqwitharray return ierr',ierr)

    call VecDuplicate(col_f_vecb,col_f_vecx,ierr)
    call assert(ierr.eq.0,'vecduplicate return ierr',ierr)

    call KSPCreate(PETSC_COMM_SELF,col_f_ksp,ierr)
    call assert(ierr.eq.0,'kspcreate return ierr',ierr)

  enddo

  ! -----------------------
  ! setup sparsity pattern
  ! -----------------------
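  ! Build, in COO triplet form (ilist,jlist,alist), a 9-point stencil on an
  ! n x n grid: each grid point couples to itself with value 4.0 and to each
  ! of its (up to 8) neighbours with value -1.0.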
  nz = 0
  do j=1,n
    do i=1,n
      ij = i + (j-1)*n
      do dx=-1,1
        do dy=-1,1
          ii = i + dx
          jj = j + dy

          ij2 = ii + (jj-1)*n
          isvalid_ii = (1 <= ii).and.(ii <= n)
          isvalid_jj = (1 <= jj).and.(jj <= n)
          if (isvalid_ii.and.isvalid_jj) then
            is_diag = (ij .eq. ij2)
            nz = nz + 1
            ilist(nz) = ij
            jlist(nz) = ij2
            if (is_diag) then
              aij = 4.0
            else
              aij = -1.0
            endif
            alist(nz) = aij
          endif
        enddo
      enddo
    enddo
  enddo

  print*,'nz = ', nz
  ! ----------------------------------
  ! convert from Fortran to C indexing
  ! ----------------------------------
  ilist(1:nz) = ilist(1:nz) - 1
  jlist(1:nz) = jlist(1:nz) - 1
  ! --------------
  ! setup matrices
  ! --------------
  call system_clock(t1,count_rate)

!$omp parallel do &
!$omp& private(ith,icase,ip,i,j,ii,jj,aij,ierr,x,b) &
!$omp& private(col_f_mat,col_f_vecb,col_f_vecx,col_f_ksp,pc)
  do ith=1,nthreads
    col_f_mat => Mcol_f_mat(ith)
    col_f_vecb => Mcol_f_vecb(ith)
    col_f_vecx => Mcol_f_vecx(ith)
    col_f_ksp => Mcol_f_ksp(ith)
    pc => MPC(ith)

    do icase=ibeg(ith),iend(ith)

      do ip=1,nz
        ii = ilist(ip)
        jj = jlist(ip)
        aij = alist(ip)
        call MatSetValue(col_f_mat,ii,jj,aij,INSERT_VALUES,ierr)
        call assert(ierr.eq.0,'matsetvalue return ierr',ierr)
      enddo
      call MatAssemblyBegin(col_f_mat,MAT_FINAL_ASSEMBLY,ierr)
      call assert(ierr.eq.0,'matassemblybegin return ierr',ierr)

      call MatAssemblyEnd(col_f_mat,MAT_FINAL_ASSEMBLY,ierr)
      call assert(ierr.eq.0,'matassemblyend return ierr',ierr)

      call KSPSetOperators(col_f_ksp,col_f_mat,col_f_mat,ierr)
      call assert(ierr.eq.0,'KSPSetOperators return ierr',ierr)

      ! set linear solver
      call KSPGetPC(col_f_ksp,pc,ierr)
      call assert(ierr.eq.0,'KSPGetPC return ierr ', ierr)

      call PCSetType(pc,PCLU,ierr)
      call assert(ierr.eq.0,'PCSetType return ierr ',ierr)

      ! associate petsc vector with allocated array
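      ! x and b are private to each thread (see the omp private clause above);
      ! the critical sections below serialize the VecPlaceArray/VecResetArray
      ! calls that attach and detach these buffers to each thread's vectors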
      x(:) = 1
!$omp critical
      call VecPlaceArray(col_f_vecx,x,ierr)
!$omp end critical
      call assert(ierr.eq.0,'VecPlaceArray col_f_vecx return ',ierr)
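      ! form the right-hand side b = A*x for the known solution x = 1
      ! directly from the COO triplets, then zero x so the solve must
      ! recover it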
      b(:) = 0
      do ip=1,nz
        i = ilist(ip)
        j = jlist(ip)
        aij = alist(ip)
        b(i) = b(i) + aij*x(j)
      enddo
      x = 0
!$omp critical
      call VecPlaceArray(col_f_vecb,b,ierr)
!$omp end critical
      call assert(ierr.eq.0,'VecPlaceArray col_f_vecb return ',ierr)

      ! -----------------------------------------------------------
      ! key test, need to solve multiple linear systems in parallel
      ! -----------------------------------------------------------
      call KSPSetFromOptions(col_f_ksp,ierr)
      call assert(ierr.eq.0,'KSPSetFromOptions return ierr ',ierr)

      call KSPSetUp(col_f_ksp,ierr)
      call assert(ierr.eq.0,'KSPSetup return ierr ',ierr)

      call KSPSolve(col_f_ksp,col_f_vecb,col_f_vecx,ierr)
      call assert(ierr.eq.0,'KSPSolve return ierr ',ierr)

      ! ------------
      ! check answer
      ! ------------
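      ! the exact solution is x = 1, so the max-norm of x-1 is the solve error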
      err(icase) = maxval(abs(x(:)-1))

!$omp critical
      call VecResetArray(col_f_vecx,ierr)
!$omp end critical
      call assert(ierr.eq.0,'VecResetArray return ierr ',ierr)

!$omp critical
      call VecResetArray(col_f_vecb,ierr)
!$omp end critical
      call assert(ierr.eq.0,'VecResetArray return ierr ',ierr)

    enddo
  enddo

!$omp parallel do &
!$omp private(ith,ierr) &
!$omp private(col_f_mat,col_f_vecb,col_f_vecx,col_f_ksp)
  do ith=1,nthreads
    col_f_mat => Mcol_f_mat(ith)
    col_f_vecb => Mcol_f_vecb(ith)
    col_f_vecx => Mcol_f_vecx(ith)
    col_f_ksp => Mcol_f_ksp(ith)

    call MatDestroy(col_f_mat, ierr)
    call assert(ierr.eq.0,'matdestroy return ',ierr)

    call VecDestroy(col_f_vecb, ierr)
    call assert(ierr.eq.0,'vecdestroy return ierr',ierr)

    call VecDestroy(col_f_vecx,ierr)
    call assert(ierr.eq.0,'vecdestroy return ierr',ierr)

    call KSPDestroy(col_f_ksp,ierr)
    call assert(ierr.eq.0,'kspdestroy return ierr',ierr)

  enddo

  call system_clock(t2,count_rate)
  ttime = real(t2-t1)/real(count_rate)
  write(*,*) 'total time is ',ttime

  write(*,*) 'maxval(err) ', maxval(err)
  do icase=1,NCASES
    if (err(icase) > tol) then
      write(*,*) 'icase,err(icase) ',icase,err(icase)
    endif
  enddo

  call PetscFinalize(ierr)
  call assert(ierr.eq.0,'petscfinalize return ierr',ierr)

end program tpetsc
!/*TEST
!
!   build:
!     requires: double !complex !define(PETSC_USE_64BIT_INDICES)
!
!   test:
!     output_file: output/ex61f_1.out
!     TODO: Need to determine how to test OpenMP code
!
!TEST*/