/* Actual source code: telescope_coarsedm.c (petsc-3.11.4, 2019-09-28) */
#include <petsc/private/matimpl.h>
#include <petsc/private/pcimpl.h>
#include <petsc/private/dmimpl.h>
#include <petscksp.h>
#include <petscdm.h>
#include <petscdmda.h>
#include <petscdmshell.h>

#include "../src/ksp/pc/impls/telescope/telescope.h"

static PetscBool cited = PETSC_FALSE;
static const char citation[] =
"@inproceedings{MaySananRuppKnepleySmith2016,\n"
" title = {Extreme-Scale Multigrid Components within PETSc},\n"
" author = {Dave A. May and Patrick Sanan and Karl Rupp and Matthew G. Knepley and Barry F. Smith},\n"
" booktitle = {Proceedings of the Platform for Advanced Scientific Computing Conference},\n"
" series = {PASC '16},\n"
" isbn = {978-1-4503-4126-4},\n"
" location = {Lausanne, Switzerland},\n"
" pages = {5:1--5:12},\n"
" articleno = {5},\n"
" numpages = {12},\n"
" url = {http://doi.acm.org/10.1145/2929908.2929913},\n"
" doi = {10.1145/2929908.2929913},\n"
" acmid = {2929913},\n"
" publisher = {ACM},\n"
" address = {New York, NY, USA},\n"
" keywords = {GPU, HPC, agglomeration, coarse-level solver, multigrid, parallel computing, preconditioning},\n"
" year = {2016}\n"
"}\n";

typedef struct {
  DM   dm_fine,dm_coarse; /* these DMs should be topologically identical but use different communicators */
  Mat  permutation;
  Vec  xp;
  PetscErrorCode (*fp_dm_field_scatter)(DM,Vec,ScatterMode,DM,Vec);
  PetscErrorCode (*fp_dm_state_scatter)(DM,ScatterMode,DM);
  void *dmksp_context_determined;
  void *dmksp_context_user;
} PC_Telescope_CoarseDMCtx;
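
/*
   Note: the two function pointers above are filled in PCTelescopeSetUp_CoarseDM()
   from methods the application composes with the parent (fine) DM under the names
   "PCTelescopeFieldScatter" and "PCTelescopeStateScatter"; their expected shapes
   match the pointer types, i.e.

     PetscErrorCode FieldScatter(DM dm_fine,Vec x_fine,ScatterMode mode,DM dm_coarse,Vec x_coarse);
     PetscErrorCode StateScatter(DM dm_fine,ScatterMode mode,DM dm_coarse);

   SCATTER_FORWARD moves data from the fine DM to the coarse DM on the
   sub-communicator, SCATTER_REVERSE moves it back (see the sketches further below).
*/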

PetscErrorCode PCTelescopeSetUp_scatters_CoarseDM(PC pc,PC_Telescope sred,PC_Telescope_CoarseDMCtx *ctx)
{
  Vec        xred,yred,xtmp,x,xp;
  VecScatter scatter;
  IS         isin;
  Mat        B;
  PetscInt   m,bs,st,ed;
  MPI_Comm   comm;

  PetscObjectGetComm((PetscObject)pc,&comm);
  PCGetOperators(pc,NULL,&B);
  MatCreateVecs(B,&x,NULL);
  MatGetBlockSize(B,&bs);
  VecDuplicate(x,&xp);
  m    = 0;
  xred = NULL;
  yred = NULL;
  if (PCTelescope_isActiveRank(sred)) {
    DMCreateGlobalVector(ctx->dm_coarse,&xred);
    VecDuplicate(xred,&yred);
    VecGetOwnershipRange(xred,&st,&ed);
    ISCreateStride(comm,ed-st,st,1,&isin);
    VecGetLocalSize(xred,&m);
  } else {
    VecGetOwnershipRange(x,&st,&ed);
    ISCreateStride(comm,0,st,1,&isin);
  }
  ISSetBlockSize(isin,bs);
  VecCreate(comm,&xtmp);
  VecSetSizes(xtmp,m,PETSC_DECIDE);
  VecSetBlockSize(xtmp,bs);
  VecSetType(xtmp,((PetscObject)x)->type_name);
  VecScatterCreate(x,isin,xtmp,NULL,&scatter);
  sred->xred    = xred;
  sred->yred    = yred;
  sred->isin    = isin;
  sred->scatter = scatter;
  sred->xtmp    = xtmp;
  ctx->xp       = xp;
  VecDestroy(&x);
  return(0);
}

PetscErrorCode PCTelescopeSetUp_CoarseDM(PC pc,PC_Telescope sred)
{
  PC_Telescope_CoarseDMCtx *ctx;
  DM                       dm,dm_coarse = NULL;
  MPI_Comm                 comm;
  PetscBool                has_perm,has_kspcomputeoperators,using_kspcomputeoperators;
  PetscErrorCode           ierr;

  PetscInfo(pc,"PCTelescope: setup (CoarseDM)\n");
  PetscMalloc1(1,&ctx);
  PetscMemzero(ctx,sizeof(PC_Telescope_CoarseDMCtx));
  sred->dm_ctx = (void*)ctx;

  PetscObjectGetComm((PetscObject)pc,&comm);
  PCGetDM(pc,&dm);
  DMGetCoarseDM(dm,&dm_coarse);
  ctx->dm_fine   = dm;
  ctx->dm_coarse = dm_coarse;

  /* attach coarse dm to ksp on sub communicator */
  if (PCTelescope_isActiveRank(sred)) {
    KSPSetDM(sred->ksp,ctx->dm_coarse);
    if (sred->ignore_kspcomputeoperators) {
      KSPSetDMActive(sred->ksp,PETSC_FALSE);
    }
  }

  /* check if there is a method to provide a permutation */
  has_perm = PETSC_FALSE;
  has_kspcomputeoperators = PETSC_FALSE;
  using_kspcomputeoperators = PETSC_FALSE;

  /* if no permutation is provided, we must rely on KSPSetComputeOperators */
  {
    PetscErrorCode (*dmfine_kspfunc)(KSP,Mat,Mat,void*) = NULL;
    void *dmfine_kspctx = NULL,*dmcoarse_kspctx = NULL;
    void *dmfine_appctx = NULL,*dmcoarse_appctx = NULL;
    void *dmfine_shellctx = NULL,*dmcoarse_shellctx = NULL;

    DMKSPGetComputeOperators(dm,&dmfine_kspfunc,&dmfine_kspctx);
    if (dmfine_kspfunc) { has_kspcomputeoperators = PETSC_TRUE; }

    DMGetApplicationContext(ctx->dm_fine,&dmfine_appctx);
    DMShellGetContext(ctx->dm_fine,&dmfine_shellctx);

    /* need to define dmcoarse_kspctx */
    if (dmfine_kspfunc && !sred->ignore_kspcomputeoperators) {

      PetscInfo(pc,"PCTelescope: KSPSetComputeOperators fetched from parent DM\n");
      if (PCTelescope_isActiveRank(sred)) {
        DMGetApplicationContext(ctx->dm_coarse,&dmcoarse_appctx);
        DMShellGetContext(ctx->dm_coarse,&dmcoarse_shellctx);
      }

      /* Assume that if the fine operator didn't require any context, neither will the coarse */
      if (!dmfine_kspctx) {
        dmcoarse_kspctx = NULL;
        PetscInfo(pc,"PCTelescope: KSPSetComputeOperators using NULL context\n");
      } else {

        PetscInfo(pc,"PCTelescope: KSPSetComputeOperators detected non-NULL context from parent DM\n");
        if (PCTelescope_isActiveRank(sred)) {

          if (dmfine_kspctx == dmfine_appctx) {
            dmcoarse_kspctx = dmcoarse_appctx;
            PetscInfo(pc,"PCTelescope: KSPSetComputeOperators using context from DM->ApplicationContext\n");
            if (!dmcoarse_kspctx) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Non NULL dmfine->kspctx == dmfine->appctx. NULL dmcoarse->appctx found. Likely this is an error");
          } else if (dmfine_kspctx == dmfine_shellctx) {
            dmcoarse_kspctx = dmcoarse_shellctx;
            PetscInfo(pc,"PCTelescope: KSPSetComputeOperators using context from DMShell->Context\n");
            if (!dmcoarse_kspctx) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Non NULL dmfine->kspctx == dmfine.shell->ctx. NULL dmcoarse.shell->ctx found. Likely this is an error");
          }
          ctx->dmksp_context_determined = dmcoarse_kspctx;

          /* look for user provided method to fetch the context */
          {
            PetscErrorCode (*fp_get_coarsedm_context)(DM,void**) = NULL;
            void *dmcoarse_context_user = NULL;
            char dmcoarse_method[PETSC_MAX_PATH_LEN];

            PetscSNPrintf(dmcoarse_method,sizeof(dmcoarse_method),"PCTelescopeGetCoarseDMKSPContext");
            PetscObjectQueryFunction((PetscObject)ctx->dm_coarse,dmcoarse_method,&fp_get_coarsedm_context);
            if (fp_get_coarsedm_context) {
              PetscInfo(pc,"PCTelescope: Found composed method PCTelescopeGetCoarseDMKSPContext from coarse DM\n");
              fp_get_coarsedm_context(ctx->dm_coarse,&dmcoarse_context_user);
              ctx->dmksp_context_user = dmcoarse_context_user;
              dmcoarse_kspctx = dmcoarse_context_user;
            } else {
              PetscInfo(pc,"PCTelescope: Failed to find composed method PCTelescopeGetCoarseDMKSPContext from coarse DM\n");
            }
          }

          if (!dmcoarse_kspctx) {
            PetscInfo(pc,"PCTelescope: KSPSetComputeOperators failed to determine the context to use on sub-communicator\n");
            SETERRQ(PETSC_COMM_SELF,PETSC_ERR_USER,"Cannot determine which context to use for KSPSetComputeOperators() on sub-communicator");
          }
        }
      }
    }

    if (dmfine_kspfunc && !sred->ignore_kspcomputeoperators) {
      using_kspcomputeoperators = PETSC_TRUE;

      if (PCTelescope_isActiveRank(sred)) {
        /* sub ksp inherits dmksp_func and context provided by user */
        KSPSetComputeOperators(sred->ksp,dmfine_kspfunc,dmcoarse_kspctx);
        /*PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)ctx->dmrepart);*/
        KSPSetDMActive(sred->ksp,PETSC_TRUE);
      }
    }
  }

  if (!has_perm && has_kspcomputeoperators && !using_kspcomputeoperators) SETERRQ(comm,PETSC_ERR_SUP,"No method to permute an operator was found on the parent DM. A method for KSPSetComputeOperators() was provided but it was requested to be ignored. Telescope setup cannot proceed");
  if (!has_perm && !has_kspcomputeoperators) SETERRQ(comm,PETSC_ERR_SUP,"No method to permute an operator was found on the parent DM. No method for KSPSetComputeOperators() was provided. Telescope setup cannot proceed");

  {
    char dmfine_method[PETSC_MAX_PATH_LEN];

    PetscSNPrintf(dmfine_method,sizeof(dmfine_method),"PCTelescopeFieldScatter");
    PetscObjectQueryFunction((PetscObject)ctx->dm_fine,dmfine_method,&ctx->fp_dm_field_scatter);

    PetscSNPrintf(dmfine_method,sizeof(dmfine_method),"PCTelescopeStateScatter");
    PetscObjectQueryFunction((PetscObject)ctx->dm_fine,dmfine_method,&ctx->fp_dm_state_scatter);
  }

  if (ctx->fp_dm_state_scatter) {
    PetscInfo(pc,"PCTelescope: Found composed method PCTelescopeStateScatter from parent DM\n");
  } else {
    PetscInfo(pc,"PCTelescope: Failed to find composed method PCTelescopeStateScatter from parent DM\n");
  }

  if (ctx->fp_dm_field_scatter) {
    PetscInfo(pc,"PCTelescope: Found composed method PCTelescopeFieldScatter from parent DM\n");
  } else {
    PetscInfo(pc,"PCTelescope: Failed to find composed method PCTelescopeFieldScatter from parent DM\n");
    SETERRQ(comm,PETSC_ERR_SUP,"No method to scatter fields between the parent DM and coarse DM was found. Must call PetscObjectComposeFunction() with the parent DM. Telescope setup cannot proceed");
  }

  /*PCTelescopeSetUp_permutation_CoarseDM(pc,sred,ctx);*/
  PCTelescopeSetUp_scatters_CoarseDM(pc,sred,ctx);
  return(0);
}
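
/*
   Minimal wiring sketch (illustrative only, not part of this file). It shows how an
   application might expose the pieces that PCTelescopeSetUp_CoarseDM() above looks
   for, typically together with PCTelescopeSetUseCoarseDM()/-pc_telescope_use_coarse_dm
   which selects this code path. MyFieldScatter, MyStateScatter and
   MyGetCoarseDMKSPContext are placeholder names for user-defined routines; the
   composed-function strings are the ones queried above, and DMSetCoarseDM() is the
   setter counterpart of the DMGetCoarseDM() call made during setup.
*/
extern PetscErrorCode MyFieldScatter(DM,Vec,ScatterMode,DM,Vec);
extern PetscErrorCode MyStateScatter(DM,ScatterMode,DM);
extern PetscErrorCode MyGetCoarseDMKSPContext(DM,void**);

static PetscErrorCode MyTelescopeWiring(DM dm_fine,DM dm_coarse)
{
  /* dm_coarse lives on the sub-communicator; it is expected to be NULL on the remaining ranks */
  DMSetCoarseDM(dm_fine,dm_coarse);
  /* the field/state scatters are queried from the parent (fine) DM */
  PetscObjectComposeFunction((PetscObject)dm_fine,"PCTelescopeFieldScatter",MyFieldScatter);
  PetscObjectComposeFunction((PetscObject)dm_fine,"PCTelescopeStateScatter",MyStateScatter);
  /* optionally let the coarse DM provide the context handed to KSPSetComputeOperators() */
  if (dm_coarse) {
    PetscObjectComposeFunction((PetscObject)dm_coarse,"PCTelescopeGetCoarseDMKSPContext",MyGetCoarseDMKSPContext);
  }
  return(0);
}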

PetscErrorCode PCApply_Telescope_CoarseDM(PC pc,Vec x,Vec y)
{
  PC_Telescope             sred = (PC_Telescope)pc->data;
  PetscErrorCode           ierr;
  Vec                      xred,yred;
  PC_Telescope_CoarseDMCtx *ctx;

  ctx  = (PC_Telescope_CoarseDMCtx*)sred->dm_ctx;
  xred = sred->xred;
  yred = sred->yred;

  PetscCitationsRegister(citation,&cited);

  if (ctx->fp_dm_state_scatter) {
    ctx->fp_dm_state_scatter(ctx->dm_fine,SCATTER_FORWARD,ctx->dm_coarse);
  }

  ctx->fp_dm_field_scatter(ctx->dm_fine,x,SCATTER_FORWARD,ctx->dm_coarse,xred);

  /* solve */
  if (PCTelescope_isActiveRank(sred)) {
    KSPSolve(sred->ksp,xred,yred);
  }

  ctx->fp_dm_field_scatter(ctx->dm_fine,y,SCATTER_REVERSE,ctx->dm_coarse,yred);
  return(0);
}
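
/*
   Sketch of the field-scatter contract assumed by PCApply_Telescope_CoarseDM() above
   (illustrative only; MyFieldScatter is the same placeholder name used in the wiring
   sketch). The callback is invoked on every rank of the fine communicator, so on
   ranks outside the sub-communicator dm_coarse and x_coarse are NULL and any
   communication performed here must remain collective on the fine communicator.
*/
PetscErrorCode MyFieldScatter(DM dm_fine,Vec x_fine,ScatterMode mode,DM dm_coarse,Vec x_coarse)
{
  if (mode == SCATTER_FORWARD) {
    /* gather the fine-level field x_fine into x_coarse, which lives on the sub-communicator */
  } else if (mode == SCATTER_REVERSE) {
    /* spread the coarse-level solution x_coarse back onto the fine-level vector x_fine */
  }
  return(0);
}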

PetscErrorCode PCTelescopeSubNullSpaceCreate_CoarseDM(PC pc,PC_Telescope sred,MatNullSpace nullspace,MatNullSpace *sub_nullspace)
{
  PetscErrorCode           ierr;
  PetscBool                has_const;
  PetscInt                 k,n = 0;
  const Vec                *vecs;
  Vec                      *sub_vecs = NULL;
  MPI_Comm                 subcomm;
  PC_Telescope_CoarseDMCtx *ctx;

  ctx     = (PC_Telescope_CoarseDMCtx*)sred->dm_ctx;
  subcomm = sred->subcomm;
  MatNullSpaceGetVecs(nullspace,&has_const,&n,&vecs);

  if (PCTelescope_isActiveRank(sred)) {
    /* create new vectors */
    if (n) {
      VecDuplicateVecs(sred->xred,n,&sub_vecs);
    }
  }

  /* copy entries */
  for (k=0; k<n; k++) {
    ctx->fp_dm_field_scatter(ctx->dm_fine,vecs[k],SCATTER_FORWARD,ctx->dm_coarse,sub_vecs[k]);
  }

  if (PCTelescope_isActiveRank(sred)) {
    /* create new (near) nullspace for redundant object */
    MatNullSpaceCreate(subcomm,has_const,n,sub_vecs,sub_nullspace);
    VecDestroyVecs(n,&sub_vecs);
  }
  return(0);
}

PetscErrorCode PCTelescopeMatNullSpaceCreate_CoarseDM(PC pc,PC_Telescope sred,Mat sub_mat)
{
  PetscErrorCode           ierr;
  Mat                      B;
  PC_Telescope_CoarseDMCtx *ctx;

  ctx = (PC_Telescope_CoarseDMCtx*)sred->dm_ctx;
  PCGetOperators(pc,NULL,&B);
  {
    MatNullSpace nullspace,sub_nullspace;
    MatGetNullSpace(B,&nullspace);
    if (nullspace) {
      PetscInfo(pc,"PCTelescope: generating nullspace (CoarseDM)\n");
      PCTelescopeSubNullSpaceCreate_CoarseDM(pc,sred,nullspace,&sub_nullspace);

      /* attach any user nullspace removal methods and contexts */
      if (PCTelescope_isActiveRank(sred)) {
        void *context = NULL;
        if (nullspace->remove && !nullspace->rmctx){
          MatNullSpaceSetFunction(sub_nullspace,nullspace->remove,context);
        } else if (nullspace->remove && nullspace->rmctx) {
          char dmcoarse_method[PETSC_MAX_PATH_LEN];
          PetscErrorCode (*fp_get_coarsedm_context)(DM,void**) = NULL;

          PetscSNPrintf(dmcoarse_method,sizeof(dmcoarse_method),"PCTelescopeGetCoarseDMNullSpaceUserContext");
          PetscObjectQueryFunction((PetscObject)ctx->dm_coarse,dmcoarse_method,&fp_get_coarsedm_context);
          if (!context) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Propagation of user null-space removal method with non-NULL context requires the coarse DM be composed with a function named \"%s\"",dmcoarse_method);
          MatNullSpaceSetFunction(sub_nullspace,nullspace->remove,context);
        }
      }

      if (PCTelescope_isActiveRank(sred)) {
        MatSetNullSpace(sub_mat,sub_nullspace);
        MatNullSpaceDestroy(&sub_nullspace);
      }
    }
  }
  {
    MatNullSpace nearnullspace,sub_nearnullspace;
    MatGetNearNullSpace(B,&nearnullspace);
    if (nearnullspace) {
      PetscInfo(pc,"PCTelescope: generating near nullspace (CoarseDM)\n");
      PCTelescopeSubNullSpaceCreate_CoarseDM(pc,sred,nearnullspace,&sub_nearnullspace);

      /* attach any user nullspace removal methods and contexts */
      if (PCTelescope_isActiveRank(sred)) {
        void *context = NULL;
        if (nearnullspace->remove && !nearnullspace->rmctx){
          MatNullSpaceSetFunction(sub_nearnullspace,nearnullspace->remove,context);
        } else if (nearnullspace->remove && nearnullspace->rmctx) {
          char dmcoarse_method[PETSC_MAX_PATH_LEN];
          PetscErrorCode (*fp_get_coarsedm_context)(DM,void**) = NULL;

          PetscSNPrintf(dmcoarse_method,sizeof(dmcoarse_method),"PCTelescopeGetCoarseDMNearNullSpaceUserContext");
          PetscObjectQueryFunction((PetscObject)ctx->dm_coarse,dmcoarse_method,&fp_get_coarsedm_context);
          if (!context) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Propagation of user near null-space removal method with non-NULL context requires the coarse DM be composed with a function named \"%s\"",dmcoarse_method);
          MatNullSpaceSetFunction(sub_nearnullspace,nearnullspace->remove,context);
        }
      }

      if (PCTelescope_isActiveRank(sred)) {
        MatSetNearNullSpace(sub_mat,sub_nearnullspace);
        MatNullSpaceDestroy(&sub_nearnullspace);
      }
    }
  }
  return(0);
}

PetscErrorCode PCReset_Telescope_CoarseDM(PC pc)
{
  PetscErrorCode           ierr;
  PC_Telescope             sred = (PC_Telescope)pc->data;
  PC_Telescope_CoarseDMCtx *ctx;

  ctx = (PC_Telescope_CoarseDMCtx*)sred->dm_ctx;
  ctx->dm_fine     = NULL; /* the reference counts were never incremented, so simply drop the pointers */
  ctx->dm_coarse   = NULL;
  ctx->permutation = NULL; /* this is fetched from the DM, so there is no need to destroy it here */
  VecDestroy(&ctx->xp);
  ctx->fp_dm_field_scatter = NULL;
  ctx->fp_dm_state_scatter = NULL;
  ctx->dmksp_context_determined = NULL;
  ctx->dmksp_context_user = NULL;
  return(0);
}

PetscErrorCode PCApplyRichardson_Telescope_CoarseDM(PC pc,Vec x,Vec y,Vec w,PetscReal rtol,PetscReal abstol,PetscReal dtol,PetscInt its,PetscBool zeroguess,PetscInt *outits,PCRichardsonConvergedReason *reason)
{
  PC_Telescope             sred = (PC_Telescope)pc->data;
  PetscErrorCode           ierr;
  Vec                      yred = NULL;
  PetscBool                default_init_guess_value = PETSC_FALSE;
  PC_Telescope_CoarseDMCtx *ctx;

  ctx  = (PC_Telescope_CoarseDMCtx*)sred->dm_ctx;
  yred = sred->yred;

  if (its > 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PCApplyRichardson_Telescope_CoarseDM only supports max_it = 1");
  *reason = (PCRichardsonConvergedReason)0;

  if (!zeroguess) {
    PetscInfo(pc,"PCTelescopeCoarseDM: Scattering y for non-zero initial guess\n");
    ctx->fp_dm_field_scatter(ctx->dm_fine,y,SCATTER_FORWARD,ctx->dm_coarse,yred);
  }

  if (PCTelescope_isActiveRank(sred)) {
    KSPGetInitialGuessNonzero(sred->ksp,&default_init_guess_value);
    if (!zeroguess) KSPSetInitialGuessNonzero(sred->ksp,PETSC_TRUE);
  }

  PCApply_Telescope_CoarseDM(pc,x,y);

  if (PCTelescope_isActiveRank(sred)) {
    KSPSetInitialGuessNonzero(sred->ksp,default_init_guess_value);
  }

  if (!*reason) *reason = PCRICHARDSON_CONVERGED_ITS;
  *outits = 1;
  return(0);
}