/* dmshell.c — DMSHELL implementation (petsc-3.4.5, 2014-06-29) */
1: #include <petscdmshell.h> /*I "petscdmshell.h" I*/
2: #include <petscmat.h>
3: #include <petsc-private/dmimpl.h>
5: typedef struct {
6: Vec Xglobal;
7: Vec Xlocal;
8: Mat A;
9: VecScatter gtol;
10: VecScatter ltog;
11: } DM_Shell;
15: /*@
16: DMGlobalToLocalBeginDefaultShell - Uses the GlobalToLocal VecScatter context set by the user to begin a global to local scatter
17: Collective
19: Input Arguments:
20: + dm - shell DM
21: . g - global vector
22: . mode - InsertMode
23: - l - local vector
25: Level: advanced
27: Note: This is not normally called directly by user code, generally user code calls DMGlobalToLocalBegin() and DMGlobalToLocalEnd(). If the user provides their own custom routines to DMShellSetGlobalToLocal() then those routines might have reason to call this function.
29: .seealso: DMGlobalToLocalEndDefaultShell()
30: @*/
31: PetscErrorCode DMGlobalToLocalBeginDefaultShell(DM dm,Vec g,InsertMode mode,Vec l)
32: {
34: DM_Shell *shell = (DM_Shell*)dm->data;
37: if (!shell->gtol) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE, "Cannot be used without first setting the scatter context via DMShellSetGlobalToLocalVecScatter()");
38: VecScatterBegin(shell->gtol,g,l,mode,SCATTER_FORWARD);
39: return(0);
40: }
44: /*@
45: DMGlobalToLocalEndDefaultShell - Uses the GlobalToLocal VecScatter context set by the user to end a global to local scatter
46: Collective
48: Input Arguments:
49: + dm - shell DM
50: . g - global vector
51: . mode - InsertMode
52: - l - local vector
54: Level: advanced
56: .seealso: DMGlobalToLocalBeginDefaultShell()
57: @*/
58: PetscErrorCode DMGlobalToLocalEndDefaultShell(DM dm,Vec g,InsertMode mode,Vec l)
59: {
61: DM_Shell *shell = (DM_Shell*)dm->data;
64: if (!shell->gtol) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE, "Cannot be used without first setting the scatter context via DMShellSetGlobalToLocalVecScatter()");
65: VecScatterEnd(shell->gtol,g,l,mode,SCATTER_FORWARD);
66: return(0);
67: }
71: /*@
72: DMLocalToGlobalBeginDefaultShell - Uses the LocalToGlobal VecScatter context set by the user to begin a local to global scatter
73: Collective
75: Input Arguments:
76: + dm - shell DM
77: . l - local vector
78: . mode - InsertMode
79: - g - global vector
81: Level: advanced
83: Note: This is not normally called directly by user code, generally user code calls DMLocalToGlobalBegin() and DMLocalToGlobalEnd(). If the user provides their own custom routines to DMShellSetLocalToGlobal() then those routines might have reason to call this function.
85: .seealso: DMLocalToGlobalEndDefaultShell()
86: @*/
87: PetscErrorCode DMLocalToGlobalBeginDefaultShell(DM dm,Vec l,InsertMode mode,Vec g)
88: {
90: DM_Shell *shell = (DM_Shell*)dm->data;
93: if (!shell->ltog) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE, "Cannot be used without first setting the scatter context via DMShellSetLocalToGlobalVecScatter()");
94: VecScatterBegin(shell->ltog,l,g,mode,SCATTER_FORWARD);
95: return(0);
96: }
100: /*@
101: DMLocalToGlobalEndDefaultShell - Uses the LocalToGlobal VecScatter context set by the user to end a local to global scatter
102: Collective
104: Input Arguments:
105: + dm - shell DM
106: . l - local vector
107: . mode - InsertMode
108: - g - global vector
110: Level: advanced
112: .seealso: DMLocalToGlobalBeginDefaultShell()
113: @*/
114: PetscErrorCode DMLocalToGlobalEndDefaultShell(DM dm,Vec l,InsertMode mode,Vec g)
115: {
117: DM_Shell *shell = (DM_Shell*)dm->data;
120: if (!shell->ltog) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE, "Cannot be used without first setting the scatter context via DMShellSetLocalToGlobalVecScatter()");
121: VecScatterEnd(shell->ltog,l,g,mode,SCATTER_FORWARD);
122: return(0);
123: }
128: static PetscErrorCode DMCreateMatrix_Shell(DM dm,MatType mtype,Mat *J)
129: {
131: DM_Shell *shell = (DM_Shell*)dm->data;
132: Mat A;
137: if (!shell->A) {
138: if (shell->Xglobal) {
139: PetscInt m,M;
140: PetscInfo(dm,"Naively creating matrix using global vector distribution without preallocation");
141: VecGetSize(shell->Xglobal,&M);
142: VecGetLocalSize(shell->Xglobal,&m);
143: MatCreate(PetscObjectComm((PetscObject)dm),&shell->A);
144: MatSetSizes(shell->A,m,m,M,M);
145: if (mtype) {MatSetType(shell->A,mtype);}
146: MatSetUp(shell->A);
147: } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_USER,"Must call DMShellSetMatrix(), DMShellSetCreateMatrix(), or provide a vector");
148: }
149: A = shell->A;
150: /* the check below is tacky and incomplete */
151: if (mtype) {
152: PetscBool flg,aij,seqaij,mpiaij;
153: PetscObjectTypeCompare((PetscObject)A,mtype,&flg);
154: PetscObjectTypeCompare((PetscObject)A,MATSEQAIJ,&seqaij);
155: PetscObjectTypeCompare((PetscObject)A,MATMPIAIJ,&mpiaij);
156: PetscStrcmp(mtype,MATAIJ,&aij);
157: if (!flg) {
158: if (!(aij && (seqaij || mpiaij))) SETERRQ2(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_NOTSAMETYPE,"Requested matrix of type %s, but only %s available",mtype,((PetscObject)A)->type_name);
159: }
160: }
161: if (((PetscObject)A)->refct < 2) { /* We have an exclusive reference so we can give it out */
162: PetscObjectReference((PetscObject)A);
163: MatZeroEntries(A);
164: *J = A;
165: } else { /* Need to create a copy, could use MAT_SHARE_NONZERO_PATTERN in most cases */
166: MatDuplicate(A,MAT_DO_NOT_COPY_VALUES,J);
167: MatZeroEntries(*J);
168: }
169: return(0);
170: }
174: PetscErrorCode DMCreateGlobalVector_Shell(DM dm,Vec *gvec)
175: {
177: DM_Shell *shell = (DM_Shell*)dm->data;
178: Vec X;
183: *gvec = 0;
184: X = shell->Xglobal;
185: if (!X) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_USER,"Must call DMShellSetGlobalVector() or DMShellSetCreateGlobalVector()");
186: if (((PetscObject)X)->refct < 2) { /* We have an exclusive reference so we can give it out */
187: PetscObjectReference((PetscObject)X);
188: VecZeroEntries(X);
189: *gvec = X;
190: } else { /* Need to create a copy, could use MAT_SHARE_NONZERO_PATTERN in most cases */
191: VecDuplicate(X,gvec);
192: VecZeroEntries(*gvec);
193: }
194: VecSetDM(*gvec,dm);
195: return(0);
196: }
200: PetscErrorCode DMCreateLocalVector_Shell(DM dm,Vec *gvec)
201: {
203: DM_Shell *shell = (DM_Shell*)dm->data;
204: Vec X;
209: *gvec = 0;
210: X = shell->Xlocal;
211: if (!X) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_USER,"Must call DMShellSetLocalVector() or DMShellSetCreateLocalVector()");
212: if (((PetscObject)X)->refct < 2) { /* We have an exclusive reference so we can give it out */
213: PetscObjectReference((PetscObject)X);
214: VecZeroEntries(X);
215: *gvec = X;
216: } else { /* Need to create a copy, could use MAT_SHARE_NONZERO_PATTERN in most cases */
217: VecDuplicate(X,gvec);
218: VecZeroEntries(*gvec);
219: }
220: VecSetDM(*gvec,dm);
221: return(0);
222: }
226: /*@
227: DMShellSetMatrix - sets a template matrix associated with the DMShell
229: Collective
231: Input Arguments:
232: + dm - shell DM
233: - J - template matrix
235: Level: advanced
237: .seealso: DMCreateMatrix(), DMShellSetCreateMatrix()
238: @*/
239: PetscErrorCode DMShellSetMatrix(DM dm,Mat J)
240: {
241: DM_Shell *shell = (DM_Shell*)dm->data;
243: PetscBool isshell;
248: PetscObjectTypeCompare((PetscObject)dm,DMSHELL,&isshell);
249: if (!isshell) return(0);
250: PetscObjectReference((PetscObject)J);
251: MatDestroy(&shell->A);
252: shell->A = J;
253: return(0);
254: }
258: /*@C
259: DMShellSetCreateMatrix - sets the routine to create a matrix associated with the shell DM
261: Logically Collective on DM
263: Input Arguments:
264: + dm - the shell DM
265: - func - the function to create a matrix
267: Level: advanced
269: .seealso: DMCreateMatrix(), DMShellSetMatrix()
270: @*/
271: PetscErrorCode DMShellSetCreateMatrix(DM dm,PetscErrorCode (*func)(DM,MatType,Mat*))
272: {
276: dm->ops->creatematrix = func;
277: return(0);
278: }
282: /*@
283: DMShellSetGlobalVector - sets a template global vector associated with the DMShell
285: Logically Collective on DM
287: Input Arguments:
288: + dm - shell DM
289: - X - template vector
291: Level: advanced
293: .seealso: DMCreateGlobalVector(), DMShellSetMatrix(), DMShellSetCreateGlobalVector()
294: @*/
295: PetscErrorCode DMShellSetGlobalVector(DM dm,Vec X)
296: {
297: DM_Shell *shell = (DM_Shell*)dm->data;
299: PetscBool isshell;
304: PetscObjectTypeCompare((PetscObject)dm,DMSHELL,&isshell);
305: if (!isshell) return(0);
306: PetscObjectReference((PetscObject)X);
307: VecDestroy(&shell->Xglobal);
308: shell->Xglobal = X;
309: return(0);
310: }
314: /*@C
315: DMShellSetCreateGlobalVector - sets the routine to create a global vector associated with the shell DM
317: Logically Collective
319: Input Arguments:
320: + dm - the shell DM
321: - func - the creation routine
323: Level: advanced
325: .seealso: DMShellSetGlobalVector(), DMShellSetCreateMatrix()
326: @*/
327: PetscErrorCode DMShellSetCreateGlobalVector(DM dm,PetscErrorCode (*func)(DM,Vec*))
328: {
332: dm->ops->createglobalvector = func;
333: return(0);
334: }
338: /*@
339: DMShellSetLocalVector - sets a template local vector associated with the DMShell
341: Logically Collective on DM
343: Input Arguments:
344: + dm - shell DM
345: - X - template vector
347: Level: advanced
349: .seealso: DMCreateLocalVector(), DMShellSetMatrix(), DMShellSetCreateLocalVector()
350: @*/
351: PetscErrorCode DMShellSetLocalVector(DM dm,Vec X)
352: {
353: DM_Shell *shell = (DM_Shell*)dm->data;
355: PetscBool isshell;
360: PetscObjectTypeCompare((PetscObject)dm,DMSHELL,&isshell);
361: if (!isshell) return(0);
362: PetscObjectReference((PetscObject)X);
363: VecDestroy(&shell->Xlocal);
364: shell->Xlocal = X;
365: return(0);
366: }
370: /*@C
371: DMShellSetCreateLocalVector - sets the routine to create a local vector associated with the shell DM
373: Logically Collective
375: Input Arguments:
376: + dm - the shell DM
377: - func - the creation routine
379: Level: advanced
381: .seealso: DMShellSetLocalVector(), DMShellSetCreateMatrix()
382: @*/
383: PetscErrorCode DMShellSetCreateLocalVector(DM dm,PetscErrorCode (*func)(DM,Vec*))
384: {
388: dm->ops->createlocalvector = func;
389: return(0);
390: }
394: /*@C
395: DMShellSetGlobalToLocal - Sets the routines used to perform a global to local scatter
397: Logically Collective on DM
399: Input Arguments
400: + dm - the shell DM
401: . begin - the routine that begins the global to local scatter
402: - end - the routine that ends the global to local scatter
404: Notes: If these functions are not provided but DMShellSetGlobalToLocalVecScatter() is called then
405: DMGlobalToLocalBeginDefaultShell() and DMGlobalToLocalEndDefaultShell() are used to perform the transfers
407: Level: advanced
409: .seealso: DMShellSetLocalToGlobal(), DMGlobalToLocalBeginDefaultShell(), DMGlobalToLocalEndDefaultShell()
410: @*/
411: PetscErrorCode DMShellSetGlobalToLocal(DM dm,PetscErrorCode (*begin)(DM,Vec,InsertMode,Vec),PetscErrorCode (*end)(DM,Vec,InsertMode,Vec)) {
413: dm->ops->globaltolocalbegin = begin;
414: dm->ops->globaltolocalend = end;
415: return(0);
416: }
420: /*@C
421: DMShellSetLocalToGlobal - Sets the routines used to perform a local to global scatter
423: Logically Collective on DM
425: Input Arguments
426: + dm - the shell DM
427: . begin - the routine that begins the local to global scatter
428: - end - the routine that ends the local to global scatter
430: Level: advanced
432: .seealso: DMShellSetGlobalToLocal()
433: @*/
434: PetscErrorCode DMShellSetLocalToGlobal(DM dm,PetscErrorCode (*begin)(DM,Vec,InsertMode,Vec),PetscErrorCode (*end)(DM,Vec,InsertMode,Vec)) {
436: dm->ops->localtoglobalbegin = begin;
437: dm->ops->localtoglobalend = end;
438: return(0);
439: }
443: /*@
444: DMShellSetGlobalToLocalVecScatter - Sets a VecScatter context for global to local communication
446: Logically Collective on DM
448: Input Arguments
449: + dm - the shell DM
450: - gtol - the global to local VecScatter context
452: Level: advanced
454: .seealso: DMShellSetGlobalToLocal()
455: @*/
456: PetscErrorCode DMShellSetGlobalToLocalVecScatter(DM dm, VecScatter gtol)
457: {
458: DM_Shell *shell = (DM_Shell*)dm->data;
462: PetscObjectReference((PetscObject)gtol);
463: /* Call VecScatterDestroy() to avoid a memory leak in case of re-setting. */
464: VecScatterDestroy(&shell->gtol);
465: shell->gtol = gtol;
466: return(0);
467: }
471: /*@
472: DMShellSetLocalToGlobalVecScatter - Sets a VecScatter context for local to global communication
474: Logically Collective on DM
476: Input Arguments
477: + dm - the shell DM
478: - ltog - the local to global VecScatter context
480: Level: advanced
482: .seealso: DMShellSetLocalToGlobal()
483: @*/
484: PetscErrorCode DMShellSetLocalToGlobalVecScatter(DM dm, VecScatter ltog)
485: {
486: DM_Shell *shell = (DM_Shell*)dm->data;
490: PetscObjectReference((PetscObject)ltog);
491: /* Call VecScatterDestroy() to avoid a memory leak in case of re-setting. */
492: VecScatterDestroy(&shell->ltog);
493: shell->ltog = ltog;
494: return(0);
495: }
499: static PetscErrorCode DMDestroy_Shell(DM dm)
500: {
502: DM_Shell *shell = (DM_Shell*)dm->data;
505: MatDestroy(&shell->A);
506: VecDestroy(&shell->Xglobal);
507: VecDestroy(&shell->Xlocal);
508: VecScatterDestroy(&shell->gtol);
509: VecScatterDestroy(&shell->ltog);
510: /* This was originally freed in DMDestroy(), but that prevents reference counting of backend objects */
511: PetscFree(shell);
512: return(0);
513: }
517: static PetscErrorCode DMView_Shell(DM dm,PetscViewer v)
518: {
520: DM_Shell *shell = (DM_Shell*)dm->data;
523: VecView(shell->Xglobal,v);
524: return(0);
525: }
529: static PetscErrorCode DMLoad_Shell(DM dm,PetscViewer v)
530: {
532: DM_Shell *shell = (DM_Shell*)dm->data;
535: VecCreate(PetscObjectComm((PetscObject)dm),&shell->Xglobal);
536: VecLoad(shell->Xglobal,v);
537: return(0);
538: }
542: PETSC_EXTERN PetscErrorCode DMCreate_Shell(DM dm)
543: {
545: DM_Shell *shell;
548: PetscNewLog(dm,DM_Shell,&shell);
549: dm->data = shell;
551: PetscObjectChangeTypeName((PetscObject)dm,DMSHELL);
553: dm->ops->destroy = DMDestroy_Shell;
554: dm->ops->createglobalvector = DMCreateGlobalVector_Shell;
555: dm->ops->createlocalvector = DMCreateLocalVector_Shell;
556: dm->ops->creatematrix = DMCreateMatrix_Shell;
557: dm->ops->view = DMView_Shell;
558: dm->ops->load = DMLoad_Shell;
559: dm->ops->globaltolocalbegin = DMGlobalToLocalBeginDefaultShell;
560: dm->ops->globaltolocalend = DMGlobalToLocalEndDefaultShell;
561: dm->ops->localtoglobalbegin = DMLocalToGlobalBeginDefaultShell;
562: dm->ops->localtoglobalend = DMLocalToGlobalEndDefaultShell;
563: return(0);
564: }
568: /*@
569: DMShellCreate - Creates a shell DM object, used to manage user-defined problem data
571: Collective on MPI_Comm
573: Input Parameter:
574: . comm - the processors that will share the global vector
576: Output Parameters:
577: . shell - the shell DM
579: Level: advanced
581: .seealso DMDestroy(), DMCreateGlobalVector(), DMCreateLocalVector()
582: @*/
583: PetscErrorCode DMShellCreate(MPI_Comm comm,DM *dm)
584: {
589: DMCreate(comm,dm);
590: DMSetType(*dm,DMSHELL);
591: return(0);
592: }