Actual source code: ex134.c
petsc-3.14.6 2021-03-30
static const char help[] = "Test parallel assembly of SBAIJ matrices\n\n";

#include <petscmat.h>
PetscErrorCode Assemble(MPI_Comm comm,PetscInt bs,MatType mtype)
{
  const PetscInt    rc[]   = {0,1,2,3};
  const PetscScalar vals[] = {100, 2, 3, 4, 5, 600, 7, 8,
                                9,100,11,1200,13,14,15,1600,
                               17,18,19,20,21,22,23,24,
                               25,26,27,2800,29,30,31,32,
                               33,34,35,36,37,38,39,40,
                               41,42,43,44,45,46,47,48,
                               49,50,51,52,53,54,55,56,
                               57,58,49,60,61,62,63,64};
  Mat               A;
#if defined(PETSC_HAVE_MUMPS) || defined(PETSC_HAVE_MKL_CPARDISO)
  Mat               F;
  MatSolverType     stype;
  PetscRandom       rdm;
  Vec               b,x,y;
  PetscInt          i,j;
  PetscReal         norm2,tol = 100*PETSC_SMALL;
  PetscBool         issbaij;
#endif
  PetscViewer       viewer;
  PetscErrorCode    ierr;

  ierr = MatCreate(comm,&A);CHKERRQ(ierr);
  ierr = MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,4*bs,4*bs);CHKERRQ(ierr);
  ierr = MatSetType(A,mtype);CHKERRQ(ierr);
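  /* Both preallocation calls below are safe: each one is ignored unless A is of the
     matching type, so the same code path serves both MATMPIBAIJ and MATMPISBAIJ */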
  ierr = MatMPIBAIJSetPreallocation(A,bs,2,NULL,2,NULL);CHKERRQ(ierr);
  ierr = MatMPISBAIJSetPreallocation(A,bs,2,NULL,2,NULL);CHKERRQ(ierr);
  ierr = MatSetOption(A,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_TRUE);CHKERRQ(ierr);
  /* Every process contributes the same dense 4x4 block matrix; ADD_VALUES sums these
     contributions, and off-process entries are communicated during assembly */
  ierr = MatSetValuesBlocked(A,4,rc,4,rc,vals,ADD_VALUES);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
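  /* View the matrix twice: first with ASCII_INFO_DETAIL to show per-process storage
     and assembly statistics, then with the default format to show the entries */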
  ierr = PetscPrintf(comm,"Matrix %s(%D)\n",mtype,bs);CHKERRQ(ierr);
  ierr = PetscViewerASCIIGetStdout(comm,&viewer);CHKERRQ(ierr);
  ierr = PetscViewerPushFormat(viewer,PETSC_VIEWER_ASCII_INFO_DETAIL);CHKERRQ(ierr);
  ierr = MatView(A,viewer);CHKERRQ(ierr);
  ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
  ierr = MatView(A,viewer);CHKERRQ(ierr);
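  /* When an external direct solver is available, also factor A and verify MatSolve() */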
#if defined(PETSC_HAVE_MUMPS) || defined(PETSC_HAVE_MKL_CPARDISO)
  ierr = PetscStrcmp(mtype,MATMPISBAIJ,&issbaij);CHKERRQ(ierr);
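  /* In the BAIJ case, add 10 to the diagonal, presumably to keep the matrix safely
     nonsingular for the LU factorization below */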
  if (!issbaij) {
    ierr = MatShift(A,10);CHKERRQ(ierr);
  }
  ierr = PetscRandomCreate(PETSC_COMM_WORLD,&rdm);CHKERRQ(ierr);
  ierr = PetscRandomSetFromOptions(rdm);CHKERRQ(ierr);
  ierr = MatCreateVecs(A,&x,&y);CHKERRQ(ierr);
  ierr = VecDuplicate(x,&b);CHKERRQ(ierr);
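  /* Loop over the external solver packages: j==0 selects MUMPS and j==1 selects
     MKL CPARDISO; a package that was not configured in is skipped */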
  for (j=0; j<2; j++) {
#if defined(PETSC_HAVE_MUMPS)
    if (j==0) stype = MATSOLVERMUMPS;
#else
    if (j==0) continue;
#endif
#if defined(PETSC_HAVE_MKL_CPARDISO)
    if (j==1) stype = MATSOLVERMKL_CPARDISO;
#else
    if (j==1) continue;
#endif
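    /* SBAIJ stores only the upper triangle of a symmetric matrix, so factor it with
       Cholesky; for the general BAIJ case use LU */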
    if (issbaij) {
      ierr = MatGetFactor(A,stype,MAT_FACTOR_CHOLESKY,&F);CHKERRQ(ierr);
      ierr = MatCholeskyFactorSymbolic(F,A,NULL,NULL);CHKERRQ(ierr);
      ierr = MatCholeskyFactorNumeric(F,A,NULL);CHKERRQ(ierr);
    } else {
      ierr = MatGetFactor(A,stype,MAT_FACTOR_LU,&F);CHKERRQ(ierr);
      ierr = MatLUFactorSymbolic(F,A,NULL,NULL,NULL);CHKERRQ(ierr);
      ierr = MatLUFactorNumeric(F,A,NULL);CHKERRQ(ierr);
    }
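    /* Solve with 10 random right-hand sides and check the residual ||A*y - b|| */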
    for (i=0; i<10; i++) {
      ierr = VecSetRandom(b,rdm);CHKERRQ(ierr);
      ierr = MatSolve(F,b,y);CHKERRQ(ierr);
      /* Check the error */
      ierr = MatMult(A,y,x);CHKERRQ(ierr);
      ierr = VecAXPY(x,-1.0,b);CHKERRQ(ierr);
      ierr = VecNorm(x,NORM_2,&norm2);CHKERRQ(ierr);
      if (norm2 > tol) {
        ierr = PetscPrintf(PETSC_COMM_WORLD,"Error: MatSolve(), norm2: %g\n",(double)norm2);CHKERRQ(ierr);
      }
    }
    ierr = MatDestroy(&F);CHKERRQ(ierr);
  }
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  ierr = VecDestroy(&y);CHKERRQ(ierr);
  ierr = VecDestroy(&b);CHKERRQ(ierr);
  ierr = PetscRandomDestroy(&rdm);CHKERRQ(ierr);
#endif
  ierr = MatDestroy(&A);CHKERRQ(ierr);
  return 0;
}

int main(int argc,char *argv[])
{
  MPI_Comm       comm;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  ierr = PetscInitialize(&argc,&argv,NULL,help);if (ierr) return ierr;
  comm = PETSC_COMM_WORLD;
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (size != 2) SETERRQ(comm,PETSC_ERR_USER,"This example must be run with exactly two processes");
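  /* Exercise both block sizes for each matrix type */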
  ierr = Assemble(comm,2,MATMPIBAIJ);CHKERRQ(ierr);
  ierr = Assemble(comm,2,MATMPISBAIJ);CHKERRQ(ierr);
  ierr = Assemble(comm,1,MATMPIBAIJ);CHKERRQ(ierr);
  ierr = Assemble(comm,1,MATMPISBAIJ);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return ierr;
}

/*TEST

   test:
      nsize: 2
      args: -mat_ignore_lower_triangular -vecscatter_type sf
      filter: sed -e "s~mem [0-9]*~mem~g"

TEST*/
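
/*
   Notes on the test: -mat_ignore_lower_triangular makes the SBAIJ matrices silently drop
   the lower-triangular entries set above instead of raising an error, -vecscatter_type sf
   selects the PetscSF-based VecScatter implementation, and the sed filter strips the
   memory-usage numbers from the ASCII_INFO_DETAIL output so the reference output is
   reproducible. A manual run would look like (a sketch, assuming the executable is built
   as ex134 in the current directory):

     mpiexec -n 2 ./ex134 -mat_ignore_lower_triangular -vecscatter_type sf
*/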