Actual source code: ex8.c

static char help[] = "Demonstrates using a local ordering to set values into a parallel vector.\n\n";

/*T
   Concepts: vectors^assembling vectors with local ordering;
   Processors: n
T*/

/*
  Include "petscvec.h" so that we can use vectors.  Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines
     petscis.h     - index sets
     petscviewer.h - viewers
*/
#include <petscvec.h>

int main(int argc,char **argv)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank;
  PetscInt       i,N,ng,*gindices,rstart,rend,M;
  PetscScalar    one = 1.0;
  Vec            x;

  ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);

  /*
     Create a parallel vector.
      - In this case, we specify the size of each processor's local
        portion, and PETSc computes the global size.  Alternatively,
        PETSc could determine the vector's distribution if we specify
        just the global size.
  */
  VecCreate(PETSC_COMM_WORLD,&x);
  VecSetSizes(x,rank+1,PETSC_DECIDE);
  VecSetFromOptions(x);
  VecGetSize(x,&N);
  VecSet(x,one);
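  /*
     A sketch (not part of the original example) of the alternative mentioned
     above: give only a global size, here a hypothetical "Nglobal", and let
     PETSc choose each processor's share.

        VecCreate(PETSC_COMM_WORLD,&y);
        VecSetSizes(y,PETSC_DECIDE,Nglobal);
        VecSetFromOptions(y);
  */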

  /*
     Set the local-to-global ordering for the vector. Each processor
     generates a list of the global indices for each local index. Note that
     the local indices are just whatever is convenient for a particular application.
     In this case we treat the vector as lying on a one-dimensional grid and
     have one ghost point on each end of the blocks owned by each processor.
  */

  VecGetSize(x,&M);
  VecGetOwnershipRange(x,&rstart,&rend);
  ng   = rend - rstart + 2;
  PetscMalloc1(ng,&gindices);
  gindices[0] = rstart - 1;
  for (i=0; i<ng-1; i++) gindices[i+1] = gindices[i] + 1;
  /* map the first and last point as periodic */
  if (gindices[0]    == -1) gindices[0]    = M - 1;
  if (gindices[ng-1] == M)  gindices[ng-1] = 0;
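  /*
     Worked example (an addition, not in the original file): with 2 processes
     the local sizes are 1 and 2, so M = 3.  Rank 0 owns global index 0 and
     builds gindices = {2,0,1}; rank 1 owns global indices 1,2 and builds
     gindices = {0,1,2,0}.  That is, each rank lists its ghost point on the
     left, its owned points, and its ghost point on the right, with the ends
     wrapped periodically.
  */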
  {
    ISLocalToGlobalMapping ltog;
    ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,1,ng,gindices,PETSC_COPY_VALUES,&ltog);
    VecSetLocalToGlobalMapping(x,ltog);
    ISLocalToGlobalMappingDestroy(&ltog);
  }
  PetscFree(gindices);

  /*
     Set the vector elements.
      - In this case set the values using the local ordering
      - Each processor can contribute any vector entries,
        regardless of which processor "owns" them; any nonlocal
        contributions will be transferred to the appropriate processor
        during the assembly process.
      - In this example, the flag ADD_VALUES indicates that all
        contributions will be added together.
  */
  for (i=0; i<ng; i++) {
    VecSetValuesLocal(x,1,&i,&one,ADD_VALUES);
  }
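  /*
     Usage note (an addition, not in the original): VecSetValuesLocal()
     interprets the index i in the local numbering established by
     VecSetLocalToGlobalMapping() above; VecSetValues() is the companion
     routine that takes global indices instead.
  */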

  /*
     Assemble vector, using the 2-step process:
       VecAssemblyBegin(), VecAssemblyEnd()
     Computations can be done while messages are in transition
     by placing code between these two statements.
  */
  VecAssemblyBegin(x);
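  /*
     Illustrative placeholder (not in the original): purely local work that
     neither reads nor writes x could go here, overlapping with the
     communication started by VecAssemblyBegin().
  */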
  VecAssemblyEnd(x);

  /*
      View the vector; then destroy it.
  */
  VecView(x,PETSC_VIEWER_STDOUT_WORLD);
  VecDestroy(&x);

  ierr = PetscFinalize();
  return ierr;
}

/*TEST

     test:
       nsize: 4

TEST*/
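/*
   Note (an addition, not part of the original file): "nsize: 4" tells the
   PETSc test harness to run this example on 4 MPI ranks.  A comparable
   manual run, assuming the example has been built as "ex8", would be

       mpiexec -n 4 ./ex8
*/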