Actual source code: ex81.c

#include <petsc.h>

static char help[] = "Solves a linear system with a MatNest and nested fields.\n\n";

#define Q 5 /* everything is hardwired for a 5x5 MatNest for now */

int main(int argc,char **args)
{
  KSP                ksp;
  PC                 pc;
  Mat                array[Q*Q],A,a;
  Vec                b,x,sub;
  IS                 rows[Q];
  PetscInt           i,j,*outer,M,N,found=Q;
  PetscMPIInt        size;
  PetscBool          flg;
  PetscRandom        rctx;
  PetscErrorCode     ierr;

  ierr = PetscInitialize(&argc,&args,NULL,help);if (ierr) return ierr;
  MPI_Comm_size(PETSC_COMM_WORLD,&size);
  PetscMalloc1(found,&outer);
  PetscOptionsGetIntArray(NULL,NULL,"-outer_fieldsplit_sizes",outer,&found,&flg);
  PetscRandomCreate(PETSC_COMM_WORLD,&rctx);
  if (flg) {
    if (found == 1) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_USER,"Must supply more than one field");
    j = 0;
    for (i=0; i<found; ++i) j += outer[i];
    if (j != Q) SETERRQ2(PETSC_COMM_WORLD,PETSC_ERR_USER,"Sum of outer fieldsplit sizes (%D) different from the number of blocks in MatNest (%D)",j,Q);
  }
  KSPCreate(PETSC_COMM_WORLD,&ksp);
  size = PetscMax(3,size);
  for (i=0; i<Q*Q; ++i) array[i] = NULL;
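  /* diagonal blocks (i,i): square AIJ, SBAIJ, or BAIJ matrices of increasing size, shifted so they are nonsingular; block (3,3) is wrapped as a MatHermitianTranspose */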
  for (i=0; i<Q; ++i) {
    if (i == 0) {
      MatCreateAIJ(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,size,size,1,NULL,0,NULL,array+(Q+1)*i);
    } else if (i == 1 || i == 3) {
      MatCreateSBAIJ(PETSC_COMM_WORLD,2,PETSC_DECIDE,PETSC_DECIDE,size,size,1,NULL,0,NULL,array+(Q+1)*i);
    } else if (i == 2 || i == 4) {
      MatCreateBAIJ(PETSC_COMM_WORLD,2,PETSC_DECIDE,PETSC_DECIDE,size,size,1,NULL,0,NULL,array+(Q+1)*i);
    }
    MatAssemblyBegin(array[(Q+1)*i],MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(array[(Q+1)*i],MAT_FINAL_ASSEMBLY);
    MatShift(array[(Q+1)*i],100+i+1);
    if (i == 3) {
      MatDuplicate(array[(Q+1)*i],MAT_COPY_VALUES,&a);
      MatDestroy(array+(Q+1)*i);
      MatCreateHermitianTranspose(a,array+(Q+1)*i);
      MatDestroy(&a);
    }
    size *= 2;
  }
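  /* off-diagonal blocks in the first row, (0,2) to (0,4): random rectangular AIJ or dense matrices; block (0,3) is stored as a MatHermitianTranspose */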
  MatGetSize(array[0],&M,NULL);
  for (i=2; i<Q; ++i) {
    MatGetSize(array[(Q+1)*i],NULL,&N);
    if (i != Q-1) {
      MatCreateAIJ(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,i==3?N:M,i==3?M:N,0,NULL,0,NULL,array+i);
    } else {
      MatCreateDense(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,M,N,NULL,array+i);
    }
    MatAssemblyBegin(array[i],MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(array[i],MAT_FINAL_ASSEMBLY);
    MatSetRandom(array[i],rctx);
    if (i == 3) {
      MatDuplicate(array[i],MAT_COPY_VALUES,&a);
      MatDestroy(array+i);
      MatCreateHermitianTranspose(a,array+i);
      MatDestroy(&a);
    }
  }
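  /* off-diagonal blocks in the first column, (2,0) and (4,0): a random AIJ matrix and a random dense matrix wrapped as a MatHermitianTranspose */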
  MatGetSize(array[0],NULL,&N);
  for (i=2; i<Q; i+=2) {
    MatGetSize(array[(Q+1)*i],&M,NULL);
    if (i != Q-1) {
      MatCreateAIJ(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,M,N,2,NULL,2,NULL,array+Q*i);
    } else {
      MatCreateDense(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,N,M,NULL,array+Q*i);
    }
    MatAssemblyBegin(array[Q*i],MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(array[Q*i],MAT_FINAL_ASSEMBLY);
    MatSetRandom(array[Q*i],rctx);
    if (i == Q-1) {
      MatDuplicate(array[Q*i],MAT_COPY_VALUES,&a);
      MatDestroy(array+Q*i);
      MatCreateHermitianTranspose(a,array+Q*i);
      MatDestroy(&a);
    }
  }
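  /* random AIJ blocks (3,1) and (3,2) */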
  MatGetSize(array[(Q+1)*3],&M,NULL);
  for (i=1; i<3; ++i) {
    MatGetSize(array[(Q+1)*i],NULL,&N);
    MatCreateAIJ(PETSC_COMM_WORLD,PETSC_DECIDE,PETSC_DECIDE,M,N,2,NULL,2,NULL,array+Q*3+i);
    MatAssemblyBegin(array[Q*3+i],MAT_FINAL_ASSEMBLY);
    MatAssemblyEnd(array[Q*3+i],MAT_FINAL_ASSEMBLY);
    MatSetRandom(array[Q*3+i],rctx);
  }
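  /* block (1,4): the Hermitian transpose of an empty BAIJ matrix, i.e., a structurally present but zero block; the diagonal block (4,4) is then removed from the nest */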
  MatGetSize(array[(Q+1)*1],NULL,&N);
  MatGetSize(array[(Q+1)*(Q-1)],&M,NULL);
  MatCreateBAIJ(PETSC_COMM_WORLD,2,PETSC_DECIDE,PETSC_DECIDE,M,N,0,NULL,0,NULL,&a);
  MatAssemblyBegin(a,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(a,MAT_FINAL_ASSEMBLY);
  MatCreateHermitianTranspose(a,array+Q+Q-1);
  MatDestroy(&a);
  MatDestroy(array+Q*Q-1);
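  /* assemble the 5x5 MatNest; the nest keeps its own references, so the individual blocks can now be destroyed */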
  MatCreateNest(PETSC_COMM_WORLD,Q,NULL,Q,NULL,array,&A);
  for (i=0; i<Q; ++i) {
    MatDestroy(array+(Q+1)*i);
  }
  for (i=2; i<Q; ++i) {
    MatDestroy(array+i);
    MatDestroy(array+Q*i);
  }
  for (i=1; i<3; ++i) {
    MatDestroy(array+Q*3+i);
  }
  MatDestroy(array+Q+Q-1);
  KSPSetOperators(ksp,A,A);
  MatNestGetISs(A,rows,NULL);
  KSPGetPC(ksp,&pc);
  PCSetType(pc,PCFIELDSPLIT);
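  /* group consecutive nest fields into outer splits: merge their row index sets with ISExpand and register each union with PCFieldSplitSetIS */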
  M = 0;
  for (j=0; j<found; ++j) {
    IS expand1,expand2;
    ISDuplicate(rows[M],&expand1);
    for (i=1; i<outer[j]; ++i) {
      ISExpand(expand1,rows[M+i],&expand2);
      ISDestroy(&expand1);
      expand1 = expand2;
    }
    M += outer[j];
    PCFieldSplitSetIS(pc,NULL,expand1);
    ISDestroy(&expand1);
  }
  KSPSetFromOptions(ksp);
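  /* optional checks with -test_matmult: the MatNest, MatDense, MatAIJ, and MatHermitianTranspose versions of A must give the same matrix-vector products */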
  flg = PETSC_FALSE;
  PetscOptionsGetBool(NULL,NULL,"-test_matmult",&flg,NULL);
  if (flg) {
    Mat D, E, F, H;
    MatConvert(A,MATDENSE,MAT_INITIAL_MATRIX,&D);
    MatMultEqual(A,D,10,&flg);
    if (!flg) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"MatDense != MatNest");
    MatZeroEntries(D);
    MatConvert(A,MATDENSE,MAT_REUSE_MATRIX,&D);
    MatMultEqual(A,D,10,&flg);
    if (!flg) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"MatDense != MatNest");
    MatConvert(D,MATAIJ,MAT_INITIAL_MATRIX,&E);
    MatMultEqual(E,D,10,&flg);
    if (!flg) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"MatDense != MatAIJ");
    MatZeroEntries(E);
    MatConvert(D,MATAIJ,MAT_REUSE_MATRIX,&E);
    MatMultEqual(E,D,10,&flg);
    if (!flg) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"MatDense != MatAIJ");
    MatConvert(E,MATDENSE,MAT_INPLACE_MATRIX,&E);
    MatMultEqual(A,E,10,&flg);
    if (!flg) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"MatAIJ != MatNest");
    MatConvert(D,MATAIJ,MAT_INPLACE_MATRIX,&D);
    MatMultEqual(A,D,10,&flg);
    if (!flg) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"MatDense != MatNest");
    MatDestroy(&E);
    MatCreateHermitianTranspose(D,&E);
    MatConvert(E,MATAIJ,MAT_INITIAL_MATRIX,&F);
    MatMultEqual(E,F,10,&flg);
    if (!flg) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"MatHermitianTranspose != MatAIJ");
    MatZeroEntries(F);
    MatConvert(E,MATAIJ,MAT_REUSE_MATRIX,&F);
    MatMultEqual(E,F,10,&flg);
    if (!flg) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"MatHermitianTranspose != MatAIJ");
    MatDestroy(&F);
    MatConvert(E,MATAIJ,MAT_INPLACE_MATRIX,&E);
    MatCreateHermitianTranspose(D,&H);
    MatMultEqual(E,H,10,&flg);
    if (!flg) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"MatHermitianTranspose != MatAIJ");
    MatDestroy(&H);
    MatDestroy(&E);
    MatDestroy(&D);
  }
  KSPSetUp(ksp);
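  /* random right-hand side, with the part associated with the last field (whose diagonal block was removed) set to zero */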
  MatCreateVecs(A,&b,&x);
  VecSetRandom(b,rctx);
  VecGetSubVector(b,rows[Q-1],&sub);
  VecSet(sub,0.0);
  VecRestoreSubVector(b,rows[Q-1],&sub);
  KSPSolve(ksp,b,x);
  VecDestroy(&b);
  VecDestroy(&x);
  PetscRandomDestroy(&rctx);
  MatDestroy(&A);
  KSPDestroy(&ksp);
  PetscFree(outer);
  ierr = PetscFinalize();
  return ierr;
}

/*TEST

   test:
      nsize: {{1 3}shared output}
      suffix: wo_explicit_schur
      filter: sed -e "s/seq/mpi/g" -e "s/CONVERGED_ATOL/CONVERGED_RTOL/g" -e "s/ 1 MPI processes/ 3 MPI processes/g" -e "s/iterations [4-5]/iterations 4/g"
      args: -outer_fieldsplit_sizes {{1,2,2 2,1,2 2,2,1}separate output} -ksp_view_mat -pc_type fieldsplit -ksp_converged_reason -fieldsplit_pc_type jacobi -test_matmult

   test:
      nsize: {{1 3}shared output}
      suffix: w_explicit_schur
      filter: sed -e "s/seq/mpi/g" -e "s/CONVERGED_ATOL/CONVERGED_RTOL/g" -e "s/ 1 MPI processes/ 3 MPI processes/g" -e "s/iterations [1-2]/iterations 1/g"
      args: -outer_fieldsplit_sizes {{1,4 2,3 3,2 4,1}separate output} -ksp_view_mat -pc_type fieldsplit -ksp_converged_reason -fieldsplit_pc_type jacobi -pc_fieldsplit_type schur -pc_fieldsplit_schur_fact_type full -pc_fieldsplit_schur_precondition full

TEST*/