/* ex9.c — PETSc example (petsc-3.13.6, 2020-09-29) */
/* Help text printed by -help; also passed to PetscInitialize(). */
static char help[] = "Demonstrates use of VecCreateGhost().\n\n";
/*T
   Concepts: vectors^assembling vectors;
   Concepts: vectors^ghost padding;
   Processors: n

   Description: Ghost padding is one way to handle local calculations that
   involve values from other processors. VecCreateGhost() provides
   a way to create vectors with extra room at the end of the vector
   array to contain the needed ghost values from other processors,
   vector computations are otherwise unaffected.
T*/
/*
  Include "petscvec.h" so that we can use vectors. Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines   petscis.h - index sets
     petscviewer.h - viewers
*/
#include <petscvec.h>
24: int main(int argc,char **argv)
25: {
26: PetscMPIInt rank,size;
27: PetscInt nlocal = 6,nghost = 2,ifrom[2],i,rstart,rend;
29: PetscBool flg,flg2,flg3;
30: PetscScalar value,*array,*tarray=0;
31: Vec lx,gx,gxs;
33: PetscInitialize(&argc,&argv,(char*)0,help);if (ierr) return ierr;
34: MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
35: MPI_Comm_size(PETSC_COMM_WORLD,&size);
36: if (size != 2) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_WRONG_MPI_SIZE,"Must run example with two processors\n");
38: /*
39: Construct a two dimensional graph connecting nlocal degrees of
40: freedom per processor. From this we will generate the global
41: indices of needed ghost values
43: For simplicity we generate the entire graph on each processor:
44: in real Section 1.5 Writing Application Codes with PETSc the graph would stored in parallel, but this
45: example is only to demonstrate the management of ghost padding
46: with VecCreateGhost().
48: In this example we consider the vector as representing
49: degrees of freedom in a one dimensional grid with periodic
50: boundary conditions.
52: ----Processor 1--------- ----Processor 2 --------
53: 0 1 2 3 4 5 6 7 8 9 10 11
54: |----|
55: |-------------------------------------------------|
57: */
59: if (!rank) {
60: ifrom[0] = 11; ifrom[1] = 6;
61: } else {
62: ifrom[0] = 0; ifrom[1] = 5;
63: }
65: /*
66: Create the vector with two slots for ghost points. Note that both
67: the local vector (lx) and the global vector (gx) share the same
68: array for storing vector values.
69: */
70: PetscOptionsHasName(NULL,NULL,"-allocate",&flg);
71: PetscOptionsHasName(NULL,NULL,"-vecmpisetghost",&flg2);
72: PetscOptionsHasName(NULL,NULL,"-minvalues",&flg3);
73: if (flg) {
74: PetscMalloc1(nlocal+nghost,&tarray);
75: VecCreateGhostWithArray(PETSC_COMM_WORLD,nlocal,PETSC_DECIDE,nghost,ifrom,tarray,&gxs);
76: } else if (flg2) {
77: VecCreate(PETSC_COMM_WORLD,&gxs);
78: VecSetType(gxs,VECMPI);
79: VecSetSizes(gxs,nlocal,PETSC_DECIDE);
80: VecMPISetGhost(gxs,nghost,ifrom);
81: } else {
82: VecCreateGhost(PETSC_COMM_WORLD,nlocal,PETSC_DECIDE,nghost,ifrom,&gxs);
83: }
85: /*
86: Test VecDuplicate()
87: */
88: VecDuplicate(gxs,&gx);
89: VecDestroy(&gxs);
91: /*
92: Access the local representation
93: */
94: VecGhostGetLocalForm(gx,&lx);
96: /*
97: Set the values from 0 to 12 into the "global" vector
98: */
99: VecGetOwnershipRange(gx,&rstart,&rend);
100: for (i=rstart; i<rend; i++) {
101: value = (PetscScalar) i;
102: VecSetValues(gx,1,&i,&value,INSERT_VALUES);
103: }
104: VecAssemblyBegin(gx);
105: VecAssemblyEnd(gx);
107: VecGhostUpdateBegin(gx,INSERT_VALUES,SCATTER_FORWARD);
108: VecGhostUpdateEnd(gx,INSERT_VALUES,SCATTER_FORWARD);
110: /*
111: Print out each vector, including the ghost padding region.
112: */
113: VecGetArray(lx,&array);
114: for (i=0; i<nlocal+nghost; i++) {
115: PetscSynchronizedPrintf(PETSC_COMM_WORLD,"%D %g\n",i,(double)PetscRealPart(array[i]));
116: }
117: VecRestoreArray(lx,&array);
118: PetscSynchronizedFlush(PETSC_COMM_WORLD,PETSC_STDOUT);
119: VecGhostRestoreLocalForm(gx,&lx);
121: /* Another test that sets ghost values and then accumulates onto the owning processors using MIN_VALUES */
122: if (flg3) {
123: if (!rank){PetscSynchronizedPrintf(PETSC_COMM_WORLD,"\nTesting VecGhostUpdate with MIN_VALUES\n");}
124: VecGhostGetLocalForm(gx,&lx);
125: VecGetArray(lx,&array);
126: for (i=0; i<nghost; i++) array[nlocal+i] = rank ? (PetscScalar)4 : (PetscScalar)8;
127: VecRestoreArray(lx,&array);
128: VecGhostRestoreLocalForm(gx,&lx);
130: VecGhostUpdateBegin(gx,MIN_VALUES,SCATTER_REVERSE);
131: VecGhostUpdateEnd(gx,MIN_VALUES,SCATTER_REVERSE);
133: VecGhostGetLocalForm(gx,&lx);
134: VecGetArray(lx,&array);
136: for (i=0; i<nlocal+nghost; i++) {
137: PetscSynchronizedPrintf(PETSC_COMM_WORLD,"%D %g\n",i,(double)PetscRealPart(array[i]));
138: }
139: VecRestoreArray(lx,&array);
140: PetscSynchronizedFlush(PETSC_COMM_WORLD,PETSC_STDOUT);
141: VecGhostRestoreLocalForm(gx,&lx);
142: }
144: VecDestroy(&gx);
146: if (flg) {PetscFree(tarray);}
147: PetscFinalize();
148: return ierr;
149: }
/*TEST

   test:
      nsize: 2

   test:
      suffix: 2
      nsize: 2
      args: -allocate
      output_file: output/ex9_1.out

   test:
      suffix: 3
      nsize: 2
      args: -vecmpisetghost
      output_file: output/ex9_1.out

   test:
      suffix: 4
      nsize: 2
      args: -minvalues
      output_file: output/ex9_2.out
      requires: !complex

TEST*/