program main
!
! This example intends to show how DMDA is used to solve a PDE on a decomposed
! domain. The equation we are solving is not a PDE, but a toy example: van der
! Pol's 2-variable ODE duplicated onto a 3D grid:
!   dx/dt = y
!   dy/dt = mu*(1 - x**2)*y - x
!
! So we are solving the same equation on all grid points, with no spatial
! dependencies. Still we tell PETSc to communicate (stencil width > 0) so we
! have communication between different parts of the domain.
!
! The example is structured so that one can replace the RHS function and
! the forw_euler routine with a suitable RHS and a suitable time-integration
! scheme and make little or no modifications to the DMDA parts. In particular,
! the "inner" parts of the RHS and time-integration do not "know about" the
! decomposed domain.
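! (As the call inside the time loop below shows, forw_euler receives the
! ghosted array bounds ib1:ibn, jb1:jbn, kb1:kbn, the owned-cell counts
! imax, jmax, kmax, the number of components dof, the work array f and the
! RHS routine, so a replacement integrator can operate on plain Fortran
! arrays with the same interface; see ex13f90aux.F90 for the routines used
! here.)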
!
! See: http://dx.doi.org/10.6084/m9.figshare.1368581
!
! Contributed by Aasmund Ervik (asmunder at pvv.org)
!

  use ex13f90aux

#include <petsc/finclude/petscdmda.h>
  use petscdmda
  implicit none

  PetscErrorCode ierr
  PetscMPIInt rank,size
  MPI_Comm comm
  Vec Lvec,coords
  DM SolScal,CoordDM
  DMBoundaryType b_x,b_y,b_z
  PetscReal, pointer :: array(:,:,:,:)
  PetscReal :: t,tend,dt,xmin,xmax,ymin,ymax,zmin,zmax,xgmin,xgmax,ygmin,ygmax,zgmin,zgmax
  PetscReal, allocatable :: f(:,:,:,:), grid(:,:,:,:)
  PetscInt :: i,j,k,igmax,jgmax,kgmax,ib1,ibn,jb1,jbn,kb1,kbn,imax,jmax,kmax,itime,maxstep,nscreen,dof,stw,ndim

  ! Fire up PETSc:
  call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
  if (ierr .ne. 0) then
    print*,'Unable to initialize PETSc'
    stop
  endif

  comm = PETSC_COMM_WORLD
  call MPI_Comm_rank(comm,rank,ierr);CHKERRA(ierr)
  call MPI_Comm_size(comm,size,ierr);CHKERRA(ierr)
  if (rank == 0) then
    write(*,*) 'Hi! We are solving van der Pol using ',size,' processes.'
    write(*,*) ' '
    write(*,*) '    t         x1         x2'
  endif

  ! Set up the global grid
  igmax = 50
  jgmax = 50
  kgmax = 50
  xgmin = 0.0
  ygmin = 0.0
  zgmin = 0.0
  xgmax = 1.0
  ygmax = 1.0
  zgmax = 1.0
  stw = 1   ! stencil width
  dof = 2   ! number of variables in this DA
  ndim = 3  ! 3D code

  ! Get the BCs and create the DMDA
  call get_boundary_cond(b_x,b_y,b_z);CHKERRA(ierr)
  call DMDACreate3d(comm,b_x,b_y,b_z,DMDA_STENCIL_STAR,igmax,jgmax,kgmax,PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE,dof,stw, &
                    PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,SolScal,ierr);CHKERRA(ierr)
  call DMSetFromOptions(SolScal,ierr);CHKERRA(ierr)
  call DMSetUp(SolScal,ierr);CHKERRA(ierr)
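  ! Notes on the DMDACreate3d call above: DMDA_STENCIL_STAR requests ghost
  ! values only along the coordinate directions (no corner ghosts), which is
  ! enough here since the RHS couples no neighbouring cells at all. The three
  ! PETSC_DECIDE arguments let PETSc choose the process grid in each
  ! direction, and the three PETSC_NULL_INTEGER arguments mean we do not
  ! prescribe how many cells each process owns per direction. Runtime options
  ! picked up by DMSetFromOptions (e.g. -da_grid_x) can still change the grid.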

  ! Set global coordinates and get a local work vector
  call DMDASetUniformCoordinates(SolScal,xgmin,xgmax,ygmin,ygmax,zgmin,zgmax,ierr);CHKERRA(ierr)
  call DMCreateLocalVector(SolScal,Lvec,ierr);CHKERRA(ierr)

  ! Get ib1,imax,ibn etc. of the local grid.
  ! Our convention is:
  !   the first local ghost cell is ib1
  !   the first local cell is 1
  !   the last local cell is imax
  !   the last local ghost cell is ibn.
  !
  ! i,j,k must be in this call, but are not used
  call DMDAGetCorners(SolScal,i,j,k,imax,jmax,kmax,ierr);CHKERRA(ierr)
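  ! (DMDAGetCorners returns the global indices of the first owned cell in
  ! i,j,k and the number of owned cells in each direction; here only the
  ! counts are used, as imax,jmax,kmax, which is why owned cells are
  ! numbered 1..imax etc. in the local convention above.)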
  ib1 = 1-stw
  jb1 = 1-stw
  kb1 = 1-stw
  ibn = imax+stw
  jbn = jmax+stw
  kbn = kmax+stw
  allocate(f(dof,ib1:ibn,jb1:jbn,kb1:kbn))
  allocate(grid(ndim,ib1:ibn,jb1:jbn,kb1:kbn))
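  ! (Both work arrays carry a ghost layer of width stw on every side; the
  ! first index is the solution component for f and the coordinate direction
  ! for grid.)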

  ! Get xmin,xmax etc. for the local grid
  ! The "coords" local vector here is borrowed, so we shall not destroy it.
  call DMGetCoordinatesLocal(SolScal,coords,ierr);CHKERRA(ierr)
  ! We need a new DM for coordinate stuff since PETSc supports unstructured grids
  call DMGetCoordinateDM(SolScal,CoordDM,ierr);CHKERRA(ierr)
  ! petsc_to_local and local_to_petsc are convenience functions, see
  ! ex13f90aux.F90.
  call petsc_to_local(CoordDM,coords,array,grid,ndim,stw);CHKERRA(ierr)
  xmin = grid(1,1,1,1)
  ymin = grid(2,1,1,1)
  zmin = grid(3,1,1,1)
  xmax = grid(1,imax,jmax,kmax)
  ymax = grid(2,imax,jmax,kmax)
  zmax = grid(3,imax,jmax,kmax)
  call local_to_petsc(CoordDM,coords,array,grid,ndim,stw);CHKERRA(ierr)

  ! Note that we never use xmin,xmax in this example, but the preceding way of
  ! getting the local xmin,xmax etc. from PETSc for a structured uniform grid
  ! is not documented in any other examples I could find.

  ! Set up the time-stepping
  t = 0.0
  tend = 100.0
  dt = 1e-3
  maxstep = ceiling((tend-t)/dt)
  ! Write output every second (of simulation time)
  nscreen = int(1.0/dt)+1

  ! Set initial condition
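  ! (For a DMDA with dof > 1, DMDAVecGetArrayF90 returns a 4D array whose
  ! first dimension is the component index starting at 0, which is why the
  ! components are addressed as array(0,...) and array(1,...) here, while the
  ! work array f uses 1-based component indices.)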
  call DMDAVecGetArrayF90(SolScal,Lvec,array,ierr);CHKERRA(ierr)
  array(0,:,:,:) = 0.5
  array(1,:,:,:) = 0.5
  call DMDAVecRestoreArrayF90(SolScal,Lvec,array,ierr);CHKERRA(ierr)

  ! Initial set-up finished.
  ! Time loop
  maxstep = 5  ! override the computed maxstep so this demo runs only a few steps
  do itime=1,maxstep

    ! Communicate such that everyone has the correct values in ghost cells
    call DMLocalToLocalBegin(SolScal,Lvec,INSERT_VALUES,Lvec,ierr);CHKERRA(ierr)
    call DMLocalToLocalEnd(SolScal,Lvec,INSERT_VALUES,Lvec,ierr);CHKERRA(ierr)
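    ! (DMLocalToLocalBegin/End fill the ghost region of a local vector
    ! directly from the owned values on neighbouring processes, without going
    ! through a global vector; the Begin/End pair allows the communication to
    ! be overlapped with computation if desired.)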

    ! Get the old solution from the PETSc data structures
    call petsc_to_local(SolScal,Lvec,array,f,dof,stw);CHKERRA(ierr)

    ! Do the time step
    call forw_euler(t,dt,ib1,ibn,jb1,jbn,kb1,kbn,imax,jmax,kmax,dof,f,dfdt_vdp)
    t = t+dt

    ! Write result to screen (if main process and it's time to)
    if (rank == 0 .and. mod(itime,nscreen) == 0) then
      write(*,101) t,f(1,1,1,1),f(2,1,1,1)
    endif

    ! Put our new solution in the PETSc data structures
    call local_to_petsc(SolScal,Lvec,array,f,dof,stw)
  end do

  ! Deallocate and finalize
  ! Lvec was created with DMCreateLocalVector, so it is destroyed with VecDestroy
  call VecDestroy(Lvec,ierr);CHKERRA(ierr)
  call DMDestroy(SolScal,ierr);CHKERRA(ierr)
  deallocate(f)
  deallocate(grid)
  call PetscFinalize(ierr)

  ! Format for writing output to screen
101 format(F5.1,2F11.6)

end program main

!/*TEST
!
!   build:
!     requires: !complex
!     depends: ex13f90aux.F90
!
!TEST*/