/*
   Actual source code: ex16.c
   petsc-3.14.6 2021-03-30
*/
/* Help text printed by -help; describes what this example exercises. */
static char help[] = "Tests MatDenseGetArray() and MatView()/MatLoad() with binary viewers.\n\n";
#include <petscmat.h>
#include <petscviewer.h>
6: static PetscErrorCode CheckValues(Mat A,PetscBool one)
7: {
8: const PetscScalar *array;
9: PetscInt M,N,rstart,rend,lda,i,j;
10: PetscErrorCode ierr;
13: MatDenseGetArrayRead(A,&array);
14: MatDenseGetLDA(A,&lda);
15: MatGetSize(A,&M,&N);
16: MatGetOwnershipRange(A,&rstart,&rend);
17: for (i=rstart; i<rend; i++) {
18: for (j=0; j<N; j++) {
19: PetscInt ii = i - rstart, jj = j;
20: PetscReal v = (PetscReal)(one ? 1 : (1 + i + j*M));
21: PetscReal w = PetscRealPart(array[ii + jj*lda]);
22: if (PetscAbsReal(v-w) > 0) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Matrix entry (%D,%D) should be %g, got %g",i,j,(double)v,(double)w);
23: }
24: }
25: MatDenseRestoreArrayRead(A,&array);
26: return(0);
27: }
/* Convenience wrappers for the two fill patterns checked in this example */
#define CheckValuesIJ(A)  CheckValues(A,PETSC_FALSE)
#define CheckValuesOne(A) CheckValues(A,PETSC_TRUE)
32: int main(int argc,char **args)
33: {
34: Mat A;
35: PetscInt i,j,M = 4,N = 3,rstart,rend;
37: PetscScalar *array;
38: char mattype[256];
39: PetscViewer view;
41: PetscInitialize(&argc,&args,NULL,help);if (ierr) return ierr;
42: PetscStrcpy(mattype,MATMPIDENSE);
43: PetscOptionsGetString(NULL,NULL,"-mat_type",mattype,sizeof(mattype),NULL);
44: /*
45: Create a parallel dense matrix shared by all processors
46: */
47: MatCreateDense(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,M,N,NULL,&A);
48: MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
49: MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
50: MatConvert(A,mattype,MAT_INPLACE_MATRIX,&A);
51: /*
52: Set values into the matrix
53: */
54: for (i=0; i<M; i++) {
55: for (j=0; j<N; j++) {
56: PetscScalar v = (PetscReal)(1 + i + j*M);
57: MatSetValues(A,1,&i,1,&j,&v,INSERT_VALUES);
58: }
59: }
60: MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
61: MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);
62: MatScale(A,2.0);
63: MatScale(A,1.0/2.0);
65: /*
66: Store the binary matrix to a file
67: */
68: PetscViewerBinaryOpen(PETSC_COMM_WORLD, "matrix.dat", FILE_MODE_WRITE, &view);
69: for (i=0; i<2; i++) {
70: MatView(A,view);
71: PetscViewerPushFormat(view,PETSC_VIEWER_NATIVE);
72: MatView(A,view);
73: PetscViewerPopFormat(view);
74: }
75: PetscViewerDestroy(&view);
76: MatDestroy(&A);
78: /*
79: Now reload the matrix and check its values
80: */
81: PetscViewerBinaryOpen(PETSC_COMM_WORLD,"matrix.dat",FILE_MODE_READ,&view);
82: MatCreate(PETSC_COMM_WORLD,&A);
83: MatSetType(A,mattype);
84: for (i=0; i<4; i++) {
85: if (i > 0) {MatZeroEntries(A);}
86: MatLoad(A,view);
87: CheckValuesIJ(A);
88: }
89: PetscViewerDestroy(&view);
91: MatGetOwnershipRange(A,&rstart,&rend);
92: PetscMalloc1((rend-rstart)*N,&array);
93: for (i=0; i<(rend-rstart)*N; i++) array[i] = (PetscReal)1;
94: MatDensePlaceArray(A,array);
95: MatScale(A,2.0);
96: MatScale(A,1.0/2.0);
97: CheckValuesOne(A);
98: PetscViewerBinaryOpen(PETSC_COMM_WORLD,"matrix.dat",FILE_MODE_WRITE,&view);
99: MatView(A,view);
100: MatDenseResetArray(A);
101: PetscFree(array);
102: CheckValuesIJ(A);
103: PetscViewerBinarySetSkipHeader(view,PETSC_TRUE);
104: MatView(A,view);
105: PetscViewerBinarySetSkipHeader(view,PETSC_FALSE);
106: PetscViewerDestroy(&view);
107: MatDestroy(&A);
109: MatCreate(PETSC_COMM_WORLD,&A);
110: MatSetType(A,mattype);
111: PetscViewerBinaryOpen(PETSC_COMM_WORLD,"matrix.dat",FILE_MODE_READ,&view);
112: MatLoad(A,view);
113: CheckValuesOne(A);
114: MatZeroEntries(A);
115: PetscViewerBinarySetSkipHeader(view,PETSC_TRUE);
116: MatLoad(A,view);
117: PetscViewerBinarySetSkipHeader(view,PETSC_FALSE);
118: CheckValuesIJ(A);
119: PetscViewerDestroy(&view);
120: MatDestroy(&A);
122: {
123: PetscInt m = PETSC_DECIDE, n = PETSC_DECIDE;
124: PetscSplitOwnership(PETSC_COMM_WORLD,&m,&M);
125: PetscSplitOwnership(PETSC_COMM_WORLD,&n,&N);
126: /* TODO: MatCreateDense requires data!=NULL at all processes! */
127: PetscMalloc1(m*N+1,&array);
129: PetscViewerBinaryOpen(PETSC_COMM_WORLD,"matrix.dat",FILE_MODE_READ,&view);
130: MatCreateDense(PETSC_COMM_WORLD,m,n,M,N,array,&A);
131: MatLoad(A,view);
132: CheckValuesOne(A);
133: PetscViewerBinarySetSkipHeader(view,PETSC_TRUE);
134: MatLoad(A,view);
135: PetscViewerBinarySetSkipHeader(view,PETSC_FALSE);
136: CheckValuesIJ(A);
137: MatDestroy(&A);
138: PetscViewerDestroy(&view);
140: MatCreateDense(PETSC_COMM_WORLD,m,n,M,N,array,&A);
141: CheckValuesIJ(A);
142: MatDestroy(&A);
144: PetscFree(array);
145: }
147: PetscFinalize();
148: return ierr;
149: }
/*TEST

   testset:
     args: -viewer_binary_mpiio 0
     output_file: output/ex16.out
     test:
       suffix: stdio_1
       nsize: 1
       args: -mat_type seqdense
     test:
       suffix: stdio_2
       nsize: 2
     test:
       suffix: stdio_3
       nsize: 3
     test:
       suffix: stdio_4
       nsize: 4
     test:
       suffix: stdio_5
       nsize: 5
     test:
       requires: cuda
       args: -mat_type seqdensecuda
       suffix: stdio_cuda_1
       nsize: 1
     test:
       requires: cuda
       args: -mat_type mpidensecuda
       suffix: stdio_cuda_2
       nsize: 2
     test:
       requires: cuda
       args: -mat_type mpidensecuda
       suffix: stdio_cuda_3
       nsize: 3
     test:
       requires: cuda
       args: -mat_type mpidensecuda
       suffix: stdio_cuda_4
       nsize: 4
     test:
       requires: cuda
       args: -mat_type mpidensecuda
       suffix: stdio_cuda_5
       nsize: 5

   testset:
     requires: mpiio
     args: -viewer_binary_mpiio 1
     output_file: output/ex16.out
     test:
       suffix: mpiio_1
       nsize: 1
     test:
       suffix: mpiio_2
       nsize: 2
     test:
       suffix: mpiio_3
       nsize: 3
     test:
       suffix: mpiio_4
       nsize: 4
     test:
       suffix: mpiio_5
       nsize: 5
     test:
       requires: cuda
       args: -mat_type mpidensecuda
       suffix: mpiio_cuda_1
       nsize: 1
     test:
       requires: cuda
       args: -mat_type mpidensecuda
       suffix: mpiio_cuda_2
       nsize: 2
     test:
       requires: cuda
       args: -mat_type mpidensecuda
       suffix: mpiio_cuda_3
       nsize: 3
     test:
       requires: cuda
       args: -mat_type mpidensecuda
       suffix: mpiio_cuda_4
       nsize: 4
     test:
       requires: cuda
       args: -mat_type mpidensecuda
       suffix: mpiio_cuda_5
       nsize: 5

TEST*/