Actual source code: ex9.c (petsc-3.9.4, 2018-09-11)
static char help[] = "Demonstrates use of VecCreateGhost().\n\n";
/*T
   Concepts: vectors^assembling vectors;
   Concepts: vectors^ghost padding;
   Processors: n

   Description: Ghost padding is one way to handle local calculations that
   involve values from other processors. VecCreateGhost() provides
   a way to create vectors with extra room at the end of the vector
   array to contain the needed ghost values from other processors;
   vector computations are otherwise unaffected.
T*/
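/*
   A minimal sketch of the call pattern this example demonstrates
   (variable names match those used below):

      VecCreateGhost(comm,nlocal,PETSC_DECIDE,nghost,ifrom,&gx);  ghosted vector
      VecGhostGetLocalForm(gx,&lx);                               local view incl. ghost slots
      ... set and assemble values in gx ...
      VecGhostUpdateBegin(gx,INSERT_VALUES,SCATTER_FORWARD);      fill the ghost slots
      VecGhostUpdateEnd(gx,INSERT_VALUES,SCATTER_FORWARD);
      VecGhostRestoreLocalForm(gx,&lx);
*/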
/*
  Include "petscvec.h" so that we can use vectors. Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines
     petscis.h     - index sets
     petscviewer.h - viewers
*/
#include <petscvec.h>
int main(int argc,char **argv)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank,size;
  PetscInt       nlocal = 6,nghost = 2,ifrom[2],i,rstart,rend;
  PetscBool      flg,flg2;
  PetscScalar    value,*array,*tarray=0;
  Vec            lx,gx,gxs;

  ierr = PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  if (size != 2) SETERRQ(PETSC_COMM_SELF,1,"Must run example with two processors\n");
  /*
     Construct a two dimensional graph connecting nlocal degrees of
     freedom per processor. From this we will generate the global
     indices of needed ghost values.

     For simplicity we generate the entire graph on each processor:
     in a real application the graph would be stored in parallel, but this
     example is only to demonstrate the management of ghost padding
     with VecCreateGhost().

     In this example we consider the vector as representing
     degrees of freedom in a one dimensional grid with periodic
     boundary conditions.

        ----Processor 1---------  ----Processor 2 --------
         0    1   2   3   4   5    6   7   8   9   10  11
                              |----|
         |-------------------------------------------------|
  */
  if (!rank) {
    ifrom[0] = 11; ifrom[1] = 6;
  } else {
    ifrom[0] = 0;  ifrom[1] = 5;
  }
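  /*
     These ghost indices follow the periodic grid pictured above:
     rank 0 (owning global entries 0-5) needs global entry 11 (its
     periodic left neighbor) and global entry 6 (its right neighbor);
     rank 1 (owning 6-11) needs global entries 0 and 5.
  */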
  /*
     Create the vector with two slots for ghost points. Note that both
     the local vector (lx) and the global vector (gx) share the same
     array for storing vector values.
  */
  PetscOptionsHasName(NULL,NULL,"-allocate",&flg);
  PetscOptionsHasName(NULL,NULL,"-vecmpisetghost",&flg2);
  if (flg) {
    PetscMalloc1(nlocal+nghost,&tarray);
    VecCreateGhostWithArray(PETSC_COMM_WORLD,nlocal,PETSC_DECIDE,nghost,ifrom,tarray,&gxs);
  } else if (flg2) {
    VecCreate(PETSC_COMM_WORLD,&gxs);
    VecSetType(gxs,VECMPI);
    VecSetSizes(gxs,nlocal,PETSC_DECIDE);
    VecMPISetGhost(gxs,nghost,ifrom);
  } else {
    VecCreateGhost(PETSC_COMM_WORLD,nlocal,PETSC_DECIDE,nghost,ifrom,&gxs);
  }
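  /*
     All three branches above are expected to produce an equivalent
     ghosted vector; they differ only in how it is constructed. With
     -allocate the caller supplies the storage (of length nlocal+nghost)
     and remains responsible for freeing it after the vector is
     destroyed, as done at the end of this program.
  */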
  /*
     Test VecDuplicate()
  */
  VecDuplicate(gxs,&gx);
  VecDestroy(&gxs);
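  /*
     The duplicate gx inherits the ghost layout of gxs, which is what
     this test exercises; the rest of the example works with gx only.
  */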
  /*
     Access the local representation
  */
  VecGhostGetLocalForm(gx,&lx);
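  /*
     lx is a sequential vector of length nlocal+nghost that shares its
     array with gx: entries 0..nlocal-1 are the locally owned values
     and entries nlocal..nlocal+nghost-1 are the ghost slots.
  */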
  /*
     Set the values 0 through 11 into the "global" vector
  */
  VecGetOwnershipRange(gx,&rstart,&rend);
  for (i=rstart; i<rend; i++) {
    value = (PetscScalar) i;
    VecSetValues(gx,1,&i,&value,INSERT_VALUES);
  }
  VecAssemblyBegin(gx);
  VecAssemblyEnd(gx);
  VecGhostUpdateBegin(gx,INSERT_VALUES,SCATTER_FORWARD);
  VecGhostUpdateEnd(gx,INSERT_VALUES,SCATTER_FORWARD);
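  /*
     The forward scatter with INSERT_VALUES copies the current values
     of the global entries listed in ifrom from their owning processes
     into this process's ghost slots.
  */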
  /*
     Print out each vector, including the ghost padding region.
  */
  VecGetArray(lx,&array);
  for (i=0; i<nlocal+nghost; i++) {
    PetscSynchronizedPrintf(PETSC_COMM_WORLD,"%D %g\n",i,(double)PetscRealPart(array[i]));
  }
  VecRestoreArray(lx,&array);
  PetscSynchronizedFlush(PETSC_COMM_WORLD,PETSC_STDOUT);
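  /*
     Each rank should print its six owned values followed by its two
     ghost values, indexed by local position: e.g. rank 0 prints
     values 0-5 for local entries 0-5, then 11 and 6 for the ghost
     entries 6 and 7.
  */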
  VecGhostRestoreLocalForm(gx,&lx);
  VecDestroy(&gx);
  if (flg) {PetscFree(tarray);}
  ierr = PetscFinalize();
  return ierr;
}
/*TEST

   test:
      nsize: 2

   test:
      suffix: 2
      nsize: 2
      args: -allocate

   test:
      suffix: 3
      nsize: 2
      args: -vecmpisetghost

TEST*/
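/*
   To run by hand (assuming a standard MPI-enabled PETSc build and an
   executable named ex9), something like:

      mpiexec -n 2 ./ex9
      mpiexec -n 2 ./ex9 -allocate
      mpiexec -n 2 ./ex9 -vecmpisetghost
*/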