Actual source code: ex2f.F90

!
!
!  Description: Builds a parallel vector with 1 component on the first
!               processor, 2 on the second, etc.  Then each processor adds
!               one to all elements except the last 'rank' entries.
!
! -----------------------------------------------------------------------

program main
#include <petsc/finclude/petscvec.h>
  use petscvec
  implicit none

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                 Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

  Vec x
  PetscInt N, i, ione
  PetscErrorCode ierr
  PetscMPIInt rank
  PetscScalar one, value(1)
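
!  Note: PetscScalar is real or complex depending on how PETSc was
!  configured, and PetscMPIInt is the integer kind that MPI routines expect.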

  PetscCallA(PetscInitialize(ierr))
  one = 1.0
  PetscCallMPIA(MPI_Comm_rank(PETSC_COMM_WORLD, rank, ierr))
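
!  PetscCallA() calls the routine and aborts with an error trace if it
!  fails; PetscCallMPIA() does the same for MPI calls returning an MPI
!  error code.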

!  Create a parallel vector.
!   - In this case, we specify the size of the local portion on
!     each processor, and PETSc computes the global size.  Alternatively,
!     if we pass the global size and use PETSC_DECIDE for the
!     local size, PETSc will choose a reasonable partition, trying
!     to put nearly an equal number of elements on each processor
!     (a commented sketch of this variant follows the calls below).

  N = rank + 1
  ione = 1
  PetscCallA(VecCreateFromOptions(PETSC_COMM_WORLD, PETSC_NULL_CHARACTER, ione, N, PETSC_DECIDE, x, ierr))
  PetscCallA(VecGetSize(x, N, ierr))
  PetscCallA(VecSet(x, one, ierr))
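
!  A minimal sketch of the global-size variant mentioned above (not
!  executed, since x already exists): pass the global size N and let
!  PETSc decide each local size:
!
!    PetscCallA(VecCreateFromOptions(PETSC_COMM_WORLD, PETSC_NULL_CHARACTER, ione, PETSC_DECIDE, N, x, ierr))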

!  Set the vector elements.
!   - Note that VecSetValues() uses 0-based row and column numbers
!     in Fortran as well as in C.
!   - Always specify global locations of vector entries.
!   - Each processor can contribute any vector entries,
!     regardless of which processor "owns" them; any nonlocal
!     contributions will be transferred to the appropriate processor
!     during the assembly process.
!   - In this example, the flag ADD_VALUES indicates that all
!     contributions will be added together (see the note after the loop).

  ione = 1
  do i = 0, N - rank - 1
    PetscCallA(VecSetValues(x, ione, [i], [one], ADD_VALUES, ierr))
  end do
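
!  For contrast, a hypothetical INSERT_VALUES call would overwrite entries
!  rather than add to them; PETSc forbids mixing ADD_VALUES and
!  INSERT_VALUES on the same vector without an intervening assembly:
!
!    PetscCallA(VecSetValues(x, ione, [i], [one], INSERT_VALUES, ierr))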

!  Assemble vector, using the 2-step process:
!    VecAssemblyBegin(), VecAssemblyEnd()
!  Computations can be done while messages are in transit
!  by placing code between these two statements.

  PetscCallA(VecAssemblyBegin(x, ierr))
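!  (Purely local work that does not touch x could be placed here,
!   overlapping computation with the communication started by
!   VecAssemblyBegin().)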
  PetscCallA(VecAssemblyEnd(x, ierr))

!  Test VecGetValues() with scalar entries: fetch global entry 0, which
!  rank 0 owns (VecGetValues() can only access locally owned entries).
  if (rank == 0) then
    i = 0
    PetscCallA(VecGetValues(x, ione, [i], value, ierr))
  end if

!  View the vector; then destroy it.

  PetscCallA(VecView(x, PETSC_VIEWER_STDOUT_WORLD, ierr))
  PetscCallA(VecDestroy(x, ierr))

  PetscCallA(PetscFinalize(ierr))
end program main

!/*TEST
!
!     test:
!       nsize: 2
!       filter: grep -v " MPI process"
!
!TEST*/