Actual source code: ex8f.F90

!
! Description: Demonstrates using a local ordering to set values into a parallel vector
!

program main
#include <petsc/finclude/petscvec.h>
  use petscvec

  implicit none

  PetscErrorCode ierr
  PetscMPIInt rank
  PetscInt :: i, ng, rstart, rend, M
  PetscInt, pointer, dimension(:) :: gindices
  PetscScalar, parameter :: sone = 1.0
  Vec :: x
  ISLocalToGlobalMapping :: ltog
  PetscInt, parameter :: one = 1

  PetscCallA(PetscInitialize(ierr))
  PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD, rank, ierr))
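
  ! Note: rank is used below to give each process a different number of local
  ! entries (rank + 1), so the parallel layout is deliberately nonuniform.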

!
!     Create a parallel vector.
!      - In this case, we specify the size of each processor's local
!        portion, and PETSc computes the global size.  Alternatively,
!        PETSc could determine the vector's distribution if we specify
!        just the global size.
!
  PetscCallA(VecCreate(PETSC_COMM_WORLD, x, ierr))
  PetscCallA(VecSetSizes(x, rank + one, PETSC_DECIDE, ierr))
  PetscCallA(VecSetFromOptions(x, ierr))
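
  ! (Sketch of the alternative mentioned above, not executed here: prescribe
  !  only a global size, say Mglobal, and let PETSc choose each local portion;
  !  Mglobal is a hypothetical PetscInt, not declared in this program.)
  !    PetscCallA(VecSetSizes(x, PETSC_DECIDE, Mglobal, ierr))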

  PetscCallA(VecSet(x, sone, ierr))

!
!     Set the local-to-global ordering for the vector. Each processor
!     generates a list of the global indices for each local index. Note that
!     the local indices are simply whatever ordering is convenient for the
!     application. In this case we treat the vector as lying on a one-dimensional
!     grid and have one ghost point on each end of the block owned by each processor.
!

  PetscCallA(VecGetSize(x, M, ierr))
  PetscCallA(VecGetOwnershipRange(x, rstart, rend, ierr))
  ng = rend - rstart + 2
  allocate (gindices(0:ng - 1))
  gindices(0) = rstart - 1

  do i = 0, ng - 2
    gindices(i + 1) = gindices(i) + 1
  end do
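
  ! gindices now lists the contiguous global indices rstart-1, ..., rend:
  ! the locally owned range plus one ghost point on each side.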

! Map the first and last ghost points as periodic (wrap around the ends of the global grid)

  if (gindices(0) == -1) gindices(0) = M - 1

  if (gindices(ng - 1) == M) gindices(ng - 1) = 0
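
  ! Worked example for the 4-process test run below: the local sizes are
  ! 1, 2, 3, 4, so M = 10 and the ownership ranges are {0}, {1,2}, {3,4,5},
  ! {6,...,9}. Rank 0 builds gindices = (9, 0, 1) (left ghost wrapped from -1
  ! to 9), rank 1 builds (0, 1, 2, 3), and rank 3 builds (5, 6, 7, 8, 9, 0)
  ! (right ghost wrapped from 10 to 0).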

  PetscCallA(ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, one, ng, gindices, PETSC_COPY_VALUES, ltog, ierr))
  PetscCallA(VecSetLocalToGlobalMapping(x, ltog, ierr))
  PetscCallA(ISLocalToGlobalMappingDestroy(ltog, ierr))
  deallocate (gindices)
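
  ! Destroying ltog and freeing gindices here is safe: the mapping copied the
  ! index array (PETSC_COPY_VALUES), and the vector holds its own reference to
  ! the mapping after VecSetLocalToGlobalMapping().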

  ! Set the vector elements.
  ! - In this case we set the values using the local ordering.
  ! - Each processor can contribute any vector entries,
  !   regardless of which processor "owns" them; any nonlocal
  !   contributions will be transferred to the appropriate processor
  !   during the assembly process.
  ! - In this example, the flag ADD_VALUES indicates that all
  !   contributions will be added together.

  do i = 0, ng - 1
    PetscCallA(VecSetValuesLocal(x, one, [i], [sone], ADD_VALUES, ierr))
  end do
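
  ! After assembly, each entry holds 1.0 from the initial VecSet() plus 1.0
  ! from its owner plus 1.0 from every neighboring process that lists it as a
  ! ghost point. In the 4-process run, global entry 0, for instance, ends up
  ! as 1 + 3 = 4 (owner rank 0, rank 1's left ghost, and rank 3's wrapped
  ! right ghost).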

  !
  ! Assemble the vector, using the 2-step process:
  !   VecAssemblyBegin(), VecAssemblyEnd()
  ! Computations can be done while messages are in transit
  ! by placing code between these two statements.
  !
  PetscCallA(VecAssemblyBegin(x, ierr))
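  ! (Illustration of the overlap mentioned above: independent local work,
  !  e.g. filling a purely local work array, could be placed here while the
  !  off-process contributions are being communicated.)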
  PetscCallA(VecAssemblyEnd(x, ierr))
  !
  ! View the vector; then destroy it.
  !
  PetscCallA(VecView(x, PETSC_VIEWER_STDOUT_WORLD, ierr))
  PetscCallA(VecDestroy(x, ierr))
  PetscCallA(PetscFinalize(ierr))

end program

!/*TEST
!
!     test:
!       nsize: 4
!       output_file: output/ex8_1.out
!
!TEST*/