Actual source code: ex13f90.F90

petsc-3.13.6 2020-09-29
program main
!
! This example intends to show how DMDA is used to solve a PDE on a decomposed
! domain. The equation we are solving is not a PDE, but a toy example: van der
! Pol's 2-variable ODE duplicated onto a 3D grid:
! dx/dt = y
! dy/dt = mu(1-x**2)y - x
!
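! The forw_euler routine (from the ex13f90aux module) advances this system
! with a plain explicit (forward Euler) step; as a sketch, each grid point is
! updated as
!   x_new = x_old + dt*y_old
!   y_new = y_old + dt*(mu*(1 - x_old**2)*y_old - x_old)
!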
! So we are solving the same equation on all grid points, with no spatial
! dependencies. Still, we tell PETSc to communicate (stencil width > 0) so we
! have communication between different parts of the domain.
!
! The example is structured so that one can replace the RHS function and
! the forw_euler routine with a suitable RHS and a suitable time-integration
! scheme and make little or no modifications to the DMDA parts. In particular,
! the "inner" parts of the RHS and time-integration do not "know about" the
! decomposed domain.
!
!     See:     http://dx.doi.org/10.6084/m9.figshare.1368581
!
!     Contributed by Aasmund Ervik (asmunder at pvv.org)
!


  use ex13f90aux

#include <petsc/finclude/petscdmda.h>
  use petscdmda
  implicit none

  PetscErrorCode   ierr
  PetscMPIInt      rank,size
  MPI_Comm         comm
  Vec              Lvec,coords
  DM               SolScal,CoordDM
  DMBoundaryType b_x,b_y,b_z
  PetscReal, pointer :: array(:,:,:,:)
  PetscReal :: t,tend,dt,xmin,xmax,ymin,ymax,zmin,zmax,xgmin,xgmax,ygmin,ygmax,zgmin,zgmax
  PetscReal, allocatable :: f(:,:,:,:), grid(:,:,:,:)
  PetscInt :: i,j,k,igmax,jgmax,kgmax,ib1,ibn,jb1,jbn,kb1,kbn,imax,jmax,kmax,itime,maxstep,nscreen,dof,stw,ndim

  ! Fire up PETSc:
  call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
  if (ierr .ne. 0) then
    print*,'Unable to initialize PETSc'
    stop
  endif
  comm = PETSC_COMM_WORLD
  call MPI_Comm_rank(comm,rank,ierr);CHKERRA(ierr)
  call MPI_Comm_size(comm,size,ierr);CHKERRA(ierr)
  if (rank == 0) then
    write(*,*) 'Hi! We are solving van der Pol using ',size,' processes.'
    write(*,*) ' '
    write(*,*) '  t     x1         x2'
  endif

  ! Set up the global grid
  igmax = 50
  jgmax = 50
  kgmax = 50
  xgmin = 0.0
  ygmin = 0.0
  zgmin = 0.0
  xgmax = 1.0
  ygmax = 1.0
  zgmax = 1.0
  stw = 1 ! stencil width
  dof = 2 ! number of variables in this DA
  ndim = 3 ! 3D code

  ! Get the BCs and create the DMDA
  call get_boundary_cond(b_x,b_y,b_z);CHKERRA(ierr)
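  ! DMDA_STENCIL_STAR with stencil width stw gives each process stw layers of
  ! ghost cells on the faces of its subdomain, and the three PETSC_DECIDE
  ! arguments let PETSc choose the process decomposition in each direction.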
  call DMDACreate3d(comm,b_x,b_y,b_z,DMDA_STENCIL_STAR,igmax,jgmax,kgmax,PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE,dof,stw,  &
                    PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,SolScal,ierr);CHKERRA(ierr)
  call DMSetFromOptions(SolScal,ierr);CHKERRA(ierr)
  call DMSetUp(SolScal,ierr);CHKERRA(ierr)

  ! Set global coordinates and create a local work vector
  call DMDASetUniformCoordinates(SolScal,xgmin,xgmax,ygmin,ygmax,zgmin,zgmax,ierr);CHKERRA(ierr)
  call DMCreateLocalVector(SolScal,Lvec,ierr);CHKERRA(ierr)

  ! Get ib1,imax,ibn etc. of the local grid.
  ! Our convention is:
  ! the first local ghost cell is ib1
  ! the first local       cell is 1
  ! the last  local       cell is imax
  ! the last  local ghost cell is ibn.
  !
  ! i,j,k are required arguments of this call, but we do not use them.
  call DMDAGetCorners(SolScal,i,j,k,imax,jmax,kmax,ierr);CHKERRA(ierr)
  ib1=1-stw
  jb1=1-stw
  kb1=1-stw
  ibn=imax+stw
  jbn=jmax+stw
  kbn=kmax+stw
  allocate(f(dof,ib1:ibn,jb1:jbn,kb1:kbn))
  allocate(grid(ndim,ib1:ibn,jb1:jbn,kb1:kbn))
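  ! For example, with stw = 1 and imax = 25 local cells in the x direction,
  ! ib1 = 0 and ibn = 26, so f(:,0:26,:,:) holds the 25 owned cells plus one
  ! ghost cell on each side.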

  ! Get xmin,xmax etc. for the local grid
  ! The "coords" local vector here is borrowed, so we shall not destroy it.
  call DMGetCoordinatesLocal(SolScal,coords,ierr);CHKERRA(ierr)
  ! We need a separate DM for the coordinates, since PETSc also supports
  ! unstructured grids.
  call DMGetCoordinateDM(SolScal,CoordDM,ierr);CHKERRA(ierr)
  ! petsc_to_local and local_to_petsc are convenience functions, see
  ! ex13f90aux.F90.
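  ! (As used here, petsc_to_local is expected to give us the PETSc array view
  ! of the local vector in "array" and copy it into the 1-based work array,
  ! while local_to_petsc copies the work array back and restores the PETSc
  ! array; see ex13f90aux.F90 for the actual implementation.)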
  call petsc_to_local(CoordDM,coords,array,grid,ndim,stw);CHKERRA(ierr)
  xmin=grid(1,1,1,1)
  ymin=grid(2,1,1,1)
  zmin=grid(3,1,1,1)
  xmax=grid(1,imax,jmax,kmax)
  ymax=grid(2,imax,jmax,kmax)
  zmax=grid(3,imax,jmax,kmax)
  call local_to_petsc(CoordDM,coords,array,grid,ndim,stw);CHKERRA(ierr)

  ! Note that we never use xmin,xmax in this example, but the preceding way of
  ! getting the local xmin,xmax etc. from PETSc for a structured uniform grid
  ! is not documented in any other examples I could find.

  ! Set up the time-stepping
  t = 0.0
  tend = 100.0
  dt = 1e-3
  maxstep=ceiling((tend-t)/dt)
  ! Write output approximately every second of simulation time
  nscreen = int(1.0/dt)+1

  ! Set initial condition
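  ! The array returned by DMDAVecGetArrayF90 uses a zero-based component (dof)
  ! index, so the two van der Pol variables are components 0 and 1; both are
  ! set to 0.5 here.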
  call DMDAVecGetArrayF90(SolScal,Lvec,array,ierr);CHKERRA(ierr)
  array(0,:,:,:) = 0.5
  array(1,:,:,:) = 0.5
  call DMDAVecRestoreArrayF90(SolScal,Lvec,array,ierr);CHKERRA(ierr)

  ! Initial set-up finished.
  ! Time loop
  ! Note: this overrides the maxstep computed above, so only 5 steps are taken;
  ! remove this line to integrate all the way to tend.
  maxstep = 5
  do itime=1,maxstep

    ! Communicate such that everyone has the correct values in ghost cells
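    ! The solution lives only in the local (ghosted) vector Lvec; the ghost
    ! values are exchanged directly between local vectors, without going
    ! through a global vector.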
    call DMLocalToLocalBegin(SolScal,Lvec,INSERT_VALUES,Lvec,ierr);CHKERRA(ierr)
    call DMLocalToLocalEnd(SolScal,Lvec,INSERT_VALUES,Lvec,ierr);CHKERRA(ierr)

    ! Get the old solution from the PETSc data structures
    call petsc_to_local(SolScal,Lvec,array,f,dof,stw);CHKERRA(ierr)

    ! Do the time step
    call forw_euler(t,dt,ib1,ibn,jb1,jbn,kb1,kbn,imax,jmax,kmax,dof,f,dfdt_vdp)
    t=t+dt

    ! Write result to screen (if main process and it's time to)
    if (rank == 0 .and. mod(itime,nscreen) == 0) then
      write(*,101) t,f(1,1,1,1),f(2,1,1,1)
    endif

    ! Put our new solution in the PETSc data structures
    call local_to_petsc(SolScal,Lvec,array,f,dof,stw)
  end do

  ! Deallocate and finalize
  call VecDestroy(Lvec,ierr);CHKERRA(ierr)
  call DMDestroy(SolScal,ierr);CHKERRA(ierr)
  deallocate(f)
  deallocate(grid)
  call PetscFinalize(ierr)

  ! Format for writing output to screen
101 format(F5.1,2F11.6)

end program main

!/*TEST
!
!   build:
!     requires: !complex
!     depends:  ex13f90aux.F90
!
!   test:
!     nsize: 4
!
!TEST*/
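!
! A typical way to run this example by hand, assuming the executable has been
! built as "ex13f90":
!   mpiexec -n 4 ./ex13f90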