Actual source code: ex96.c (petsc-3.10.5, 2019-03-28)
static char help[] = "Tests sequential and parallel DMCreateMatrix(), MatMatMult() and MatPtAP()\n\
  -Mx <xg>, where <xg> = number of coarse grid points in the x-direction\n\
  -My <yg>, where <yg> = number of coarse grid points in the y-direction\n\
  -Mz <zg>, where <zg> = number of coarse grid points in the z-direction\n\
  -Npx <npx>, where <npx> = number of processors in the x-direction\n\
  -Npy <npy>, where <npy> = number of processors in the y-direction\n\
  -Npz <npz>, where <npz> = number of processors in the z-direction\n\n";

/*
   This test is modified from ~src/ksp/examples/tests/ex19.c.
   Example of usage: mpiexec -n 3 ./ex96 -Mx 10 -My 10 -Mz 10
*/
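/*
   Overview: the program builds matching fine and coarse DMDA grids, creates matrices
   on the fine grid with DMCreateMatrix(), builds the coarse-to-fine interpolation P
   with DMCreateInterpolation(), and then checks MatMatMult() (C = A*P) and MatPtAP()
   (C = P^T*A*P) by comparing C*x against the corresponding sequence of matrix-vector
   products for several random vectors x.
*/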
#include <petscdm.h>
#include <petscdmda.h>
/* User-defined application contexts */
typedef struct {
  PetscInt mx,my,mz;      /* number of grid points in the x, y and z directions */
  Vec      localX,localF; /* local vectors with ghost region */
  DM       da;
  Vec      x,b,r;         /* global vectors */
  Mat      J;             /* Jacobian on grid */
} GridCtx;

typedef struct {
  GridCtx  fine;
  GridCtx  coarse;
  PetscInt ratio;
  Mat      Ii;            /* interpolation from coarse to fine */
} AppCtx;
#define COARSE_LEVEL 0
#define FINE_LEVEL   1
/*
   Mm_ratio - ratio of grid lines between fine and coarse grids.
*/
int main(int argc,char **argv)
{
  AppCtx         user;
  PetscErrorCode ierr;
  PetscInt       Npx=PETSC_DECIDE,Npy=PETSC_DECIDE,Npz=PETSC_DECIDE;
  PetscMPIInt    size,rank;
  PetscInt       m,n,M,N,i,nrows;
  PetscScalar    one = 1.0;
  PetscReal      fill = 2.0;
  Mat            A,A_tmp,P,C,C1,C2;
  PetscScalar    *array,none = -1.0,alpha;
  Vec            x,v1,v2,v3,v4;
  PetscReal      norm,norm_tmp,norm_tmp1,tol = 100.*PETSC_MACHINE_EPSILON;
  PetscRandom    rdm;
  PetscBool      Test_MatMatMult=PETSC_TRUE,Test_MatPtAP=PETSC_TRUE,Test_3D=PETSC_FALSE,flg;
  const PetscInt *ia,*ja;
  ierr = PetscInitialize(&argc,&argv,NULL,help);if (ierr) return ierr;
  PetscOptionsGetReal(NULL,NULL,"-tol",&tol,NULL);
  user.ratio     = 2;
  user.coarse.mx = 2; user.coarse.my = 2; user.coarse.mz = 0;

  PetscOptionsGetInt(NULL,NULL,"-Mx",&user.coarse.mx,NULL);
  PetscOptionsGetInt(NULL,NULL,"-My",&user.coarse.my,NULL);
  PetscOptionsGetInt(NULL,NULL,"-Mz",&user.coarse.mz,NULL);
  PetscOptionsGetInt(NULL,NULL,"-ratio",&user.ratio,NULL);

  if (user.coarse.mz) Test_3D = PETSC_TRUE;

  user.fine.mx = user.ratio*(user.coarse.mx-1)+1;
  user.fine.my = user.ratio*(user.coarse.my-1)+1;
  user.fine.mz = user.ratio*(user.coarse.mz-1)+1;

  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  PetscOptionsGetInt(NULL,NULL,"-Npx",&Npx,NULL);
  PetscOptionsGetInt(NULL,NULL,"-Npy",&Npy,NULL);
  PetscOptionsGetInt(NULL,NULL,"-Npz",&Npz,NULL);
  /* Set up distributed array for fine grid */
  if (!Test_3D) {
    DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,user.fine.mx,user.fine.my,Npx,Npy,1,1,NULL,NULL,&user.fine.da);
  } else {
    DMDACreate3d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,user.fine.mx,user.fine.my,user.fine.mz,Npx,Npy,Npz,1,1,NULL,NULL,NULL,&user.fine.da);
  }
  DMSetFromOptions(user.fine.da);
  DMSetUp(user.fine.da);

  /* Test DMCreateMatrix()                                      */
  /*------------------------------------------------------------*/
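  /*
     DMCreateMatrix() is exercised with two matrix types: the AIJ matrix A and the
     BAIJ matrix C are created on the same fine-grid DMDA, C is converted to AIJ,
     and the two results are required to be identical.
  */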
  DMSetMatType(user.fine.da,MATAIJ);
  DMCreateMatrix(user.fine.da,&A);
  DMSetMatType(user.fine.da,MATBAIJ);
  DMCreateMatrix(user.fine.da,&C);

  MatConvert(C,MATAIJ,MAT_INITIAL_MATRIX,&A_tmp); /* does not work for MPISBAIJ matrices! */
  MatEqual(A,A_tmp,&flg);
  if (!flg) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NOTSAMETYPE,"A != C");
  MatDestroy(&C);
  MatDestroy(&A_tmp);

  /*------------------------------------------------------------*/

  MatGetLocalSize(A,&m,&n);
  MatGetSize(A,&M,&N);
  /* Set all stored (nonzero-pattern) entries of A to one */
  if (size == 1) {
    MatGetRowIJ(A,0,PETSC_FALSE,PETSC_FALSE,&nrows,&ia,&ja,&flg);
    if (flg) {
      MatSeqAIJGetArray(A,&array);
      for (i=0; i<ia[nrows]; i++) array[i] = one;
      MatSeqAIJRestoreArray(A,&array);
    }
    MatRestoreRowIJ(A,0,PETSC_FALSE,PETSC_FALSE,&nrows,&ia,&ja,&flg);
  } else {
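    /*
       In parallel, the MPIAIJ matrix stores its local rows as two SeqAIJ matrices:
       AA holds the diagonal block and AB the off-diagonal block. Setting the value
       arrays of both covers every locally stored nonzero of A.
    */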
    Mat AA,AB;
    MatMPIAIJGetSeqAIJ(A,&AA,&AB,NULL);
    MatGetRowIJ(AA,0,PETSC_FALSE,PETSC_FALSE,&nrows,&ia,&ja,&flg);
    if (flg) {
      MatSeqAIJGetArray(AA,&array);
      for (i=0; i<ia[nrows]; i++) array[i] = one;
      MatSeqAIJRestoreArray(AA,&array);
    }
    MatRestoreRowIJ(AA,0,PETSC_FALSE,PETSC_FALSE,&nrows,&ia,&ja,&flg);
    MatGetRowIJ(AB,0,PETSC_FALSE,PETSC_FALSE,&nrows,&ia,&ja,&flg);
    if (flg) {
      MatSeqAIJGetArray(AB,&array);
      for (i=0; i<ia[nrows]; i++) array[i] = one;
      MatSeqAIJRestoreArray(AB,&array);
    }
    MatRestoreRowIJ(AB,0,PETSC_FALSE,PETSC_FALSE,&nrows,&ia,&ja,&flg);
  }
  /* MatView(A, PETSC_VIEWER_STDOUT_WORLD); */
  /* Set up distributed array for coarse grid */
  if (!Test_3D) {
    DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,user.coarse.mx,user.coarse.my,Npx,Npy,1,1,NULL,NULL,&user.coarse.da);
  } else {
    DMDACreate3d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,user.coarse.mx,user.coarse.my,user.coarse.mz,Npx,Npy,Npz,
                 1,1,NULL,NULL,NULL,&user.coarse.da);
  }
  DMSetFromOptions(user.coarse.da);
  DMSetUp(user.coarse.da);

  /* Create interpolation between the levels */
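  /*
     P interpolates coarse-grid vectors to the fine grid, so A*P maps coarse-grid
     vectors to fine-grid vectors and P^T*A*P is the Galerkin coarse-grid operator
     familiar from multigrid; both products are tested below.
  */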
  DMCreateInterpolation(user.coarse.da,user.fine.da,&P,NULL);

  MatGetLocalSize(P,&m,&n);
  MatGetSize(P,&M,&N);

  /* Create vectors v1 and v2 that are compatible with A */
  VecCreate(PETSC_COMM_WORLD,&v1);
  MatGetLocalSize(A,&m,NULL);
  VecSetSizes(v1,m,PETSC_DECIDE);
  VecSetFromOptions(v1);
  VecDuplicate(v1,&v2);
  PetscRandomCreate(PETSC_COMM_WORLD,&rdm);
  PetscRandomSetFromOptions(rdm);

  /* Test MatMatMult(): C = A*P */
  /*----------------------------*/
  if (Test_MatMatMult) {
    MatDuplicate(A,MAT_COPY_VALUES,&A_tmp);
    MatMatMult(A_tmp,P,MAT_INITIAL_MATRIX,fill,&C);

    /* Test MAT_REUSE_MATRIX - reuse symbolic C */
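    /*
       With MAT_REUSE_MATRIX the nonzero pattern of C from the MAT_INITIAL_MATRIX call
       above is kept and only the numerical values are recomputed after A_tmp is rescaled.
    */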
    alpha = 1.0;
    for (i=0; i<2; i++) {
      alpha -= 0.1;
      MatScale(A_tmp,alpha);
      MatMatMult(A_tmp,P,MAT_REUSE_MATRIX,fill,&C);
    }

    /* Test MatDuplicate() */
    /*----------------------------*/
    MatDuplicate(C,MAT_COPY_VALUES,&C1);
    MatDuplicate(C1,MAT_COPY_VALUES,&C2);
    MatDestroy(&C1);
    MatDestroy(&C2);

    /* Create vector x that is compatible with P */
    VecCreate(PETSC_COMM_WORLD,&x);
    MatGetLocalSize(P,NULL,&n);
    VecSetSizes(x,n,PETSC_DECIDE);
    VecSetFromOptions(x);
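    /*
       Verify C = A*P: for several random vectors x compare v1 = C*x with
       v2 = A*(P*x) and record the largest relative difference in the 1-norm.
    */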
    norm = 0.0;
    for (i=0; i<10; i++) {
      VecSetRandom(x,rdm);
      MatMult(P,x,v1);
      MatMult(A_tmp,v1,v2); /* v2 = A*P*x */
      MatMult(C,x,v1);      /* v1 = C*x   */
      VecAXPY(v1,none,v2);
      VecNorm(v1,NORM_1,&norm_tmp);
      VecNorm(v2,NORM_1,&norm_tmp1);
      norm_tmp /= norm_tmp1;
      if (norm_tmp > norm) norm = norm_tmp;
    }
    if (norm >= tol && !rank) {
      PetscPrintf(PETSC_COMM_SELF,"Error: MatMatMult(), |v1 - v2|/|v2|: %g\n",(double)norm);
    }

    VecDestroy(&x);
    MatDestroy(&C);
    MatDestroy(&A_tmp);
  }
  /* Test P^T * A * P - MatPtAP() */
  /*------------------------------*/
  if (Test_MatPtAP) {
    MatPtAP(A,P,MAT_INITIAL_MATRIX,fill,&C);
    MatGetLocalSize(C,&m,&n);

    /* Test MAT_REUSE_MATRIX - reuse symbolic C */
    alpha = 1.0;
    for (i=0; i<1; i++) {
      alpha -= 0.1;
      MatScale(A,alpha);
      MatPtAP(A,P,MAT_REUSE_MATRIX,fill,&C);
    }

    /* Test MatDuplicate() */
    /*----------------------------*/
    MatDuplicate(C,MAT_COPY_VALUES,&C1);
    MatDuplicate(C1,MAT_COPY_VALUES,&C2);
    MatDestroy(&C1);
    MatDestroy(&C2);

    /* Create vector x that is compatible with P */
    VecCreate(PETSC_COMM_WORLD,&x);
    MatGetLocalSize(P,&m,&n);
    VecSetSizes(x,n,PETSC_DECIDE);
    VecSetFromOptions(x);

    VecCreate(PETSC_COMM_WORLD,&v3);
    VecSetSizes(v3,n,PETSC_DECIDE);
    VecSetFromOptions(v3);
    VecDuplicate(v3,&v4);
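    /*
       Verify C = P^T*A*P: for several random vectors x compare v4 = C*x with
       v3 = P^T*(A*(P*x)) and record the largest relative difference in the 1-norm.
    */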
    norm = 0.0;
    for (i=0; i<10; i++) {
      VecSetRandom(x,rdm);
      MatMult(P,x,v1);
      MatMult(A,v1,v2);          /* v2 = A*P*x    */

      MatMultTranspose(P,v2,v3); /* v3 = Pt*A*P*x */
      MatMult(C,x,v4);           /* v4 = C*x      */
      VecAXPY(v4,none,v3);
      VecNorm(v4,NORM_1,&norm_tmp);
      VecNorm(v3,NORM_1,&norm_tmp1);

      norm_tmp /= norm_tmp1;
      if (norm_tmp > norm) norm = norm_tmp;
    }
    if (norm >= tol && !rank) {
      PetscPrintf(PETSC_COMM_SELF,"Error: MatPtAP(), |v3 - v4|/|v3|: %g\n",(double)norm);
    }
    MatDestroy(&C);
    VecDestroy(&v3);
    VecDestroy(&v4);
    VecDestroy(&x);
  }
  /* Clean up */
  MatDestroy(&A);
  PetscRandomDestroy(&rdm);
  VecDestroy(&v1);
  VecDestroy(&v2);
  DMDestroy(&user.fine.da);
  DMDestroy(&user.coarse.da);
  MatDestroy(&P);
  ierr = PetscFinalize();
  return ierr;
}
/*TEST

   test:
      nsize: 3
      args: -Mx 10 -My 5

TEST*/