Actual source code: dm.c
petsc-3.4.5 2014-06-29
1: #include <petsc-private/dmimpl.h> /*I "petscdm.h" I*/
2: #include <petscsf.h>
4: PetscClassId DM_CLASSID;
5: PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal;
9: /*
10: DMViewFromOptions - Processes command line options to determine if/how a DM is to be viewed.
12:    Collective on DM
14: Input Parameters:
15: + dm - the DM
16: . prefix - prefix to use for viewing, or NULL to use prefix of 'dm'
17: - optionname - option to activate viewing
19: Level: intermediate
21: .keywords: DM, view, options, database
22: .seealso: VecViewFromOptions(), MatViewFromOptions()
23: */
24: PetscErrorCode  DMViewFromOptions(DM dm,const char prefix[],const char optionname[])
25: {
26:   PetscErrorCode    ierr;
27:   PetscBool         flg;
28:   PetscViewer       viewer;
29:   PetscViewerFormat format;
    /* Look the viewer option up under the caller-supplied prefix, or fall back to the DM's own options prefix */
32:   if (prefix) {
33:     PetscOptionsGetViewer(PetscObjectComm((PetscObject)dm),prefix,optionname,&viewer,&format,&flg);
34:   } else {
35:     PetscOptionsGetViewer(PetscObjectComm((PetscObject)dm),((PetscObject)dm)->prefix,optionname,&viewer,&format,&flg);
36:   }
    /* Only view when the option was actually present; the viewer obtained above is destroyed here */
37:   if (flg) {
38:     PetscViewerPushFormat(viewer,format);
39:     DMView(dm,viewer);
40:     PetscViewerPopFormat(viewer);
41:     PetscViewerDestroy(&viewer);
42:   }
43:   return(0);
44: }
49: /*@
50: DMCreate - Creates an empty DM object. The type can then be set with DMSetType().
52: If you never call DMSetType() it will generate an
53: error when you try to use the vector.
55: Collective on MPI_Comm
57: Input Parameter:
58: . comm - The communicator for the DM object
60: Output Parameter:
61: . dm - The DM object
63: Level: beginner
65: .seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
66: @*/
67: PetscErrorCode  DMCreate(MPI_Comm comm,DM *dm)
68: {
69:   DM             v;
    /* Clear the output first so callers see NULL if creation fails partway */
74:   *dm = NULL;
75: #if !defined(PETSC_USE_DYNAMIC_LIBRARIES)
76:   VecInitializePackage();
77:   MatInitializePackage();
78:   DMInitializePackage();
79: #endif
81:   PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);
82:   PetscMemzero(v->ops, sizeof(struct _DMOps));
    /* Default state: no local-to-global maps yet, block size 1, global coloring */
85:   v->ltogmap      = NULL;
86:   v->ltogmapb     = NULL;
87:   v->bs           = 1;
88:   v->coloringtype = IS_COLORING_GLOBAL;
89:   PetscSFCreate(comm, &v->sf);
90:   PetscSFCreate(comm, &v->defaultSF);
91:   v->defaultSection       = NULL;
92:   v->defaultGlobalSection = NULL;
93:   {
94:     PetscInt i;
      /* NOTE(review): the constant 10 must match the declared size of nullspaceConstructors[];
         DMSetNullSpaceConstructor checks field >= 10 against the same bound */
95:     for (i = 0; i < 10; ++i) {
96:       v->nullspaceConstructors[i] = NULL;
97:     }
98:   }
99:   v->numFields = 0;
100:   v->fields    = NULL;
102:   *dm = v;
103:   return(0);
104: }
108: /*@C
109: DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()
111: Logically Collective on DMDA
113: Input Parameter:
114: + da - initial distributed array
115: . ctype - the vector type, currently either VECSTANDARD or VECCUSP
117: Options Database:
118: . -dm_vec_type ctype
120: Level: intermediate
122: .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType
123: @*/
124: PetscErrorCode  DMSetVecType(DM da,VecType ctype)
125: {
    /* Release any previously stored type name, then keep our own copy of the new one */
130:   PetscFree(da->vectype);
131:   PetscStrallocpy(ctype,(char**)&da->vectype);
132:   return(0);
133: }
137: /*@
138: VecGetDM - Gets the DM defining the data layout of the vector
140: Not collective
142: Input Parameter:
143: . v - The Vec
145: Output Parameter:
146: . dm - The DM
148: Level: intermediate
150: .seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
151: @*/
152: PetscErrorCode VecGetDM(Vec v, DM *dm)
153: {
    /* The DM is stored in the Vec's composed-object list under the key "__PETSc_dm";
       *dm comes back NULL when no DM was ever composed (see VecSetDM) */
159:   PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject*) dm);
160:   return(0);
161: }
165: /*@
166: VecSetDM - Sets the DM defining the data layout of the vector
168: Not collective
170: Input Parameters:
171: + v - The Vec
172: - dm - The DM
174: Level: intermediate
176: .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
177: @*/
178: PetscErrorCode VecSetDM(Vec v, DM dm)
179: {
    /* Attach the DM to the Vec under the same key VecGetDM queries */
185:   PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);
186:   return(0);
187: }
191: /*@C
192: DMSetMatType - Sets the type of matrix created with DMCreateMatrix()
194: Logically Collective on DM
196: Input Parameter:
197: + dm - the DM context
198: . ctype - the matrix type
200: Options Database:
201: . -dm_mat_type ctype
203: Level: intermediate
205: .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType
206: @*/
207: PetscErrorCode  DMSetMatType(DM dm,MatType ctype)
208: {
    /* Release any previously stored type name, then keep our own copy of the new one;
       DMCreateMatrix prefers this stored type over its mtype argument */
213:   PetscFree(dm->mattype);
214:   PetscStrallocpy(ctype,(char**)&dm->mattype);
215:   return(0);
216: }
220: /*@
221: MatGetDM - Gets the DM defining the data layout of the matrix
223: Not collective
225: Input Parameter:
226: . A - The Mat
228: Output Parameter:
229: . dm - The DM
231: Level: intermediate
233: .seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType()
234: @*/
235: PetscErrorCode MatGetDM(Mat A, DM *dm)
236: {
    /* The DM is stored in the Mat's composed-object list under "__PETSc_dm";
       *dm comes back NULL when no DM was ever composed (see MatSetDM) */
242:   PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject*) dm);
243:   return(0);
244: }
248: /*@
249: MatSetDM - Sets the DM defining the data layout of the matrix
251: Not collective
253: Input Parameters:
254: + A - The Mat
255: - dm - The DM
257: Level: intermediate
259: .seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType()
260: @*/
261: PetscErrorCode MatSetDM(Mat A, DM dm)
262: {
    /* Attach the DM to the Mat under the same key MatGetDM queries */
268:   PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);
269:   return(0);
270: }
274: /*@C
275: DMSetOptionsPrefix - Sets the prefix used for searching for all
276: DMDA options in the database.
278: Logically Collective on DMDA
280: Input Parameter:
281: + da - the DMDA context
282: - prefix - the prefix to prepend to all option names
284: Notes:
285: A hyphen (-) must NOT be given at the beginning of the prefix name.
286: The first character of all runtime options is AUTOMATICALLY the hyphen.
288: Level: advanced
290: .keywords: DMDA, set, options, prefix, database
292: .seealso: DMSetFromOptions()
293: @*/
294: PetscErrorCode  DMSetOptionsPrefix(DM dm,const char prefix[])
295: {
    /* Thin wrapper: the prefix lives on the PetscObject base, shared by all option lookups */
300:   PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);
301:   return(0);
302: }
306: /*@
307: DMDestroy - Destroys a vector packer or DMDA.
309: Collective on DM
311: Input Parameter:
312: . dm - the DM object to destroy
314: Level: developer
316: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
318: @*/
319: PetscErrorCode  DMDestroy(DM *dm)
320: {
321:   PetscInt       i, cnt = 0, f;
322:   DMNamedVecLink nlink,nnext;
326:   if (!*dm) return(0);
329:   /* count all the circular references of DM and its contained Vecs */
330:   for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
331:     if ((*dm)->localin[i])  cnt++;
332:     if ((*dm)->globalin[i]) cnt++;
333:   }
334:   for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++;
335:   for (nlink=(*dm)->namedlocal; nlink; nlink=nlink->next) cnt++;
336:   if ((*dm)->x) {
337:     DM obj;
338:     VecGetDM((*dm)->x, &obj);
339:     if (obj == *dm) cnt++;
340:   }
    /* Only references beyond the circular ones count as outside users; if any remain, just drop our reference */
342:   if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; return(0);}
343:   /*
344:      Need this test because the dm references the vectors that
345:      reference the dm, so destroying the dm calls destroy on the
346:      vectors that cause another destroy on the dm
347:   */
348:   if (((PetscObject)(*dm))->refct < 0) return(0);
349:   ((PetscObject) (*dm))->refct = 0;
    /* Cached local vectors are destroyed; a still-checked-out local vector is a caller error */
350:   for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
351:     if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()");
352:     VecDestroy(&(*dm)->localin[i]);
353:   }
354:   for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */
355:     nnext = nlink->next;
356:     if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
357:     PetscFree(nlink->name);
358:     VecDestroy(&nlink->X);
359:     PetscFree(nlink);
360:   }
361:   (*dm)->namedglobal = NULL;
363:   for (nlink=(*dm)->namedlocal; nlink; nlink=nnext) { /* Destroy the named local vectors */
364:     nnext = nlink->next;
365:     if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
366:     PetscFree(nlink->name);
367:     VecDestroy(&nlink->X);
368:     PetscFree(nlink);
369:   }
370:   (*dm)->namedlocal = NULL;
372:   /* Destroy the list of hooks */
373:   {
374:     DMCoarsenHookLink link,next;
375:     for (link=(*dm)->coarsenhook; link; link=next) {
376:       next = link->next;
377:       PetscFree(link);
378:     }
379:     (*dm)->coarsenhook = NULL;
380:   }
381:   {
382:     DMRefineHookLink link,next;
383:     for (link=(*dm)->refinehook; link; link=next) {
384:       next = link->next;
385:       PetscFree(link);
386:     }
387:     (*dm)->refinehook = NULL;
388:   }
389:   {
390:     DMSubDomainHookLink link,next;
391:     for (link=(*dm)->subdomainhook; link; link=next) {
392:       next = link->next;
393:       PetscFree(link);
394:     }
395:     (*dm)->subdomainhook = NULL;
396:   }
397:   {
398:     DMGlobalToLocalHookLink link,next;
399:     for (link=(*dm)->gtolhook; link; link=next) {
400:       next = link->next;
401:       PetscFree(link);
402:     }
403:     (*dm)->gtolhook = NULL;
404:   }
405:   /* Destroy the work arrays */
406:   {
407:     DMWorkLink link,next;
408:     if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out");
409:     for (link=(*dm)->workin; link; link=next) {
410:       next = link->next;
411:       PetscFree(link->mem);
412:       PetscFree(link);
413:     }
414:     (*dm)->workin = NULL;
415:   }
    /* Solver-attached contexts (KSP/SNES/TS) are plain PetscObjects here */
417:   PetscObjectDestroy(&(*dm)->dmksp);
418:   PetscObjectDestroy(&(*dm)->dmsnes);
419:   PetscObjectDestroy(&(*dm)->dmts);
421:   if ((*dm)->ctx && (*dm)->ctxdestroy) {
422:     (*(*dm)->ctxdestroy)(&(*dm)->ctx);
423:   }
424:   VecDestroy(&(*dm)->x);
425:   MatFDColoringDestroy(&(*dm)->fd);
426:   DMClearGlobalVectors(*dm);
427:   ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);
428:   ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);
429:   PetscFree((*dm)->vectype);
430:   PetscFree((*dm)->mattype);
432:   PetscSectionDestroy(&(*dm)->defaultSection);
433:   PetscSectionDestroy(&(*dm)->defaultGlobalSection);
434:   PetscLayoutDestroy(&(*dm)->map);
435:   PetscSFDestroy(&(*dm)->sf);
436:   PetscSFDestroy(&(*dm)->defaultSF);
438:   DMDestroy(&(*dm)->coordinateDM);
439:   VecDestroy(&(*dm)->coordinates);
440:   VecDestroy(&(*dm)->coordinatesLocal);
442:   for (f = 0; f < (*dm)->numFields; ++f) {
443:     PetscObjectDestroy(&(*dm)->fields[f]);
444:   }
445:   PetscFree((*dm)->fields);
446:   /* if memory was published with AMS then destroy it */
447:   PetscObjectAMSViewOff((PetscObject)*dm);
    /* Type-specific teardown runs last, before the shared header is freed */
449:   (*(*dm)->ops->destroy)(*dm);
450:   /* We do not destroy (*dm)->data here so that we can reference count backend objects */
451:   PetscHeaderDestroy(dm);
452:   return(0);
453: }
457: /*@
458: DMSetUp - sets up the data structures inside a DM object
460: Collective on DM
462: Input Parameter:
463: . dm - the DM object to setup
465: Level: developer
467: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
469: @*/
470: PetscErrorCode  DMSetUp(DM dm)
471: {
    /* Idempotent: a second call returns immediately without re-running type-specific setup */
476:   if (dm->setupcalled) return(0);
477:   if (dm->ops->setup) {
478:     (*dm->ops->setup)(dm);
479:   }
480:   dm->setupcalled = PETSC_TRUE;
481:   return(0);
482: }
486: /*@
487: DMSetFromOptions - sets parameters in a DM from the options database
489: Collective on DM
491: Input Parameter:
492: . dm - the DM object to set options for
494: Options Database:
495: + -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros
496: . -dm_vec_type <type> type of vector to create inside DM
497: . -dm_mat_type <type> type of matrix to create inside DM
498: - -dm_coloring_type <global or ghosted>
500: Level: developer
502: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
504: @*/
505: PetscErrorCode  DMSetFromOptions(DM dm)
506: {
507:   char           typeName[256] = MATAIJ;
508:   PetscBool      flg;
513:   PetscObjectOptionsBegin((PetscObject)dm);
514:   PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,NULL);
515:   PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);
516:   if (flg) {
517:     DMSetVecType(dm,typeName);
518:   }
    /* Default shown for -dm_mat_type is the stored mattype when one was set, else MATAIJ */
519:   PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype ? dm->mattype : typeName,typeName,sizeof(typeName),&flg);
520:   if (flg) {
521:     DMSetMatType(dm,typeName);
522:   }
523:   PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,NULL);
    /* Type-specific options come after the generic ones */
524:   if (dm->ops->setfromoptions) {
525:     (*dm->ops->setfromoptions)(dm);
526:   }
527:   /* process any options handlers added with PetscObjectAddOptionsHandler() */
528:   PetscObjectProcessOptionsHandlers((PetscObject) dm);
529:   PetscOptionsEnd();
530:   DMViewFromOptions(dm,NULL,"-dm_view");
531:   return(0);
532: }
536: /*@C
537: DMView - Views a vector packer or DMDA.
539: Collective on DM
541: Input Parameter:
542: + dm - the DM object to view
543: - v - the viewer
545: Level: developer
547: .seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
549: @*/
550: PetscErrorCode  DMView(DM dm,PetscViewer v)
551: {
553:   PetscBool      isbinary;
    /* No viewer supplied: default to ASCII stdout on the DM's communicator */
557:   if (!v) {
558:     PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm),&v);
559:   }
560:   PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);
561:   if (isbinary) {
562:     PetscInt classid = DM_FILE_CLASSID;
563:     char     type[256];
      /* Binary header: classid followed by a fixed 256-char type name record */
565:     PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);
566:     PetscStrncpy(type,((PetscObject)dm)->type_name,256);
567:     PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);
568:   }
569:   if (dm->ops->view) {
570:     (*dm->ops->view)(dm,v);
571:   }
572:   return(0);
573: }
577: /*@
578: DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object
580: Collective on DM
582: Input Parameter:
583: . dm - the DM object
585: Output Parameter:
586: . vec - the global vector
588: Level: beginner
590: .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
592: @*/
593: PetscErrorCode  DMCreateGlobalVector(DM dm,Vec *vec)
594: {
    /* Dispatch straight to the type's implementation.
       NOTE(review): ops->createglobalvector is not checked for NULL before the call — confirm all DM types set it */
599:   (*dm->ops->createglobalvector)(dm,vec);
600:   return(0);
601: }
605: /*@
606: DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object
608: Not Collective
610: Input Parameter:
611: . dm - the DM object
613: Output Parameter:
614: . vec - the local vector
616: Level: beginner
618: .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
620: @*/
621: PetscErrorCode  DMCreateLocalVector(DM dm,Vec *vec)
622: {
    /* Dispatch straight to the type's implementation.
       NOTE(review): ops->createlocalvector is not checked for NULL before the call — confirm all DM types set it */
627:   (*dm->ops->createlocalvector)(dm,vec);
628:   return(0);
629: }
633: /*@
634: DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM.
636: Collective on DM
638: Input Parameter:
639: . dm - the DM that provides the mapping
641: Output Parameter:
642: . ltog - the mapping
644: Level: intermediate
646: Notes:
647: This mapping can then be used by VecSetLocalToGlobalMapping() or
648: MatSetLocalToGlobalMapping().
650: .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock()
651: @*/
652: PetscErrorCode  DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog)
653: {
    /* Build the mapping lazily on first request and cache it in dm->ltogmap.
       Fixed here: HTML-mangled address-of operators ("&section" had become
       the section-sign character, "&ltog" had become "<og"). */
659:   if (!dm->ltogmap) {
660:     PetscSection section, sectionGlobal;
662:     DMGetDefaultSection(dm, &section);
663:     if (section) {
        /* Section-based path: enumerate global offsets for every local dof */
664:       PetscInt *ltog;
665:       PetscInt pStart, pEnd, size, p, l;
667:       DMGetDefaultGlobalSection(dm, &sectionGlobal);
668:       PetscSectionGetChart(section, &pStart, &pEnd);
669:       PetscSectionGetStorageSize(section, &size);
670:       PetscMalloc(size * sizeof(PetscInt), &ltog); /* We want the local+overlap size */
671:       for (p = pStart, l = 0; p < pEnd; ++p) {
672:         PetscInt dof, off, c;
674:         /* Should probably use constrained dofs */
675:         PetscSectionGetDof(section, p, &dof);
676:         PetscSectionGetOffset(sectionGlobal, p, &off);
677:         for (c = 0; c < dof; ++c, ++l) {
678:           ltog[l] = off+c;
679:         }
680:       }
        /* PETSC_OWN_POINTER: the mapping takes ownership of the ltog array */
681:       ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);
682:       PetscLogObjectParent(dm, dm->ltogmap);
683:     } else {
684:       if (!dm->ops->getlocaltoglobalmapping) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping");
685:       (*dm->ops->getlocaltoglobalmapping)(dm);
686:     }
687:   }
688:   *ltog = dm->ltogmap;
689:   return(0);
690: }
694: /*@
695: DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM.
697: Collective on DM
699: Input Parameter:
700: . da - the distributed array that provides the mapping
702: Output Parameter:
703: . ltog - the block mapping
705: Level: intermediate
707: Notes:
708: This mapping can then be used by VecSetLocalToGlobalMappingBlock() or
709: MatSetLocalToGlobalMappingBlock().
711: .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize()
712: @*/
713: PetscErrorCode  DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog)
714: {
    /* Built lazily and cached in dm->ltogmapb */
720:   if (!dm->ltogmapb) {
721:     PetscInt bs;
722:     DMGetBlockSize(dm,&bs);
723:     if (bs > 1) {
724:       if (!dm->ops->getlocaltoglobalmappingblock) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock");
725:       (*dm->ops->getlocaltoglobalmappingblock)(dm);
726:     } else {
        /* Block size 1: the blocked map equals the point map, so share it (extra reference taken) */
727:       DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);
728:       PetscObjectReference((PetscObject)dm->ltogmapb);
729:     }
730:   }
731:   *ltog = dm->ltogmapb;
732:   return(0);
733: }
737: /*@
738: DMGetBlockSize - Gets the inherent block size associated with a DM
740: Not Collective
742: Input Parameter:
743: . dm - the DM with block structure
745: Output Parameter:
746: . bs - the block size, 1 implies no exploitable block structure
748: Level: intermediate
750: .seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock()
751: @*/
752: PetscErrorCode  DMGetBlockSize(DM dm,PetscInt *bs)
753: {
    /* dm->bs defaults to 1 in DMCreate; a value < 1 means no implementation ever set it */
757:   if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet");
758:   *bs = dm->bs;
759:   return(0);
760: }
764: /*@
765: DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects
767: Collective on DM
769: Input Parameter:
770: + dm1 - the DM object
771: - dm2 - the second, finer DM object
773: Output Parameter:
774: + mat - the interpolation
775: - vec - the scaling (optional)
777: Level: developer
779: Notes: For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
780: DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation.
782: For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors
783: EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic.
786: .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen()
788: @*/
789: PetscErrorCode  DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec)
790: {
    /* Dispatch on the coarse DM (dm1).
       NOTE(review): ops->createinterpolation is not checked for NULL before the call */
796:   (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);
797:   return(0);
798: }
802: /*@
803: DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects
805: Collective on DM
807: Input Parameter:
808: + dm1 - the DM object
809: - dm2 - the second, finer DM object
811: Output Parameter:
812: . ctx - the injection
814: Level: developer
816: Notes: For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
817: DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection.
819: .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation()
821: @*/
822: PetscErrorCode  DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx)
823: {
    /* Dispatch on the coarse DM (dm1).
       NOTE(review): ops->getinjection is not checked for NULL before the call */
829:   (*dm1->ops->getinjection)(dm1,dm2,ctx);
830:   return(0);
831: }
835: /*@C
836: DMCreateColoring - Gets coloring for a DMDA or DMComposite
838: Collective on DM
840: Input Parameter:
841: + dm - the DM object
842: . ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL
843: - matype - either MATAIJ or MATBAIJ
845: Output Parameter:
846: . coloring - the coloring
848: Level: developer
850: .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix()
852: @*/
853: PetscErrorCode  DMCreateColoring(DM dm,ISColoringType ctype,MatType mtype,ISColoring *coloring)
854: {
    /* Unlike the other dispatchers in this file, the function pointer is checked here */
859:   if (!dm->ops->getcoloring) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No coloring for this type of DM yet");
860:   (*dm->ops->getcoloring)(dm,ctype,mtype,coloring);
861:   return(0);
862: }
866: /*@C
867: DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite
869: Collective on DM
871: Input Parameter:
872: + dm - the DM object
873: - mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, or
874: any type which inherits from one of these (such as MATAIJ)
876: Output Parameter:
877: . mat - the empty Jacobian
879: Level: beginner
881: Notes: This properly preallocates the number of nonzeros in the sparse matrix so you
882: do not need to do it yourself.
884: By default it also sets the nonzero structure and puts in the zero entries. To prevent setting
885: the nonzero pattern call DMDASetMatPreallocateOnly()
887: For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used
888: internally by PETSc.
890: For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires
891: the indices for the global numbering for DMDAs which is complicated.
893: .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
895: @*/
896: PetscErrorCode  DMCreateMatrix(DM dm,MatType mtype,Mat *mat)
897: {
902: #if !defined(PETSC_USE_DYNAMIC_LIBRARIES)
903:   MatInitializePackage();
904: #endif
    /* A type stored via DMSetMatType / -dm_mat_type takes precedence over the mtype argument */
907:   if (dm->mattype) {
908:     (*dm->ops->creatematrix)(dm,dm->mattype,mat);
909:   } else {
910:     (*dm->ops->creatematrix)(dm,mtype,mat);
911:   }
912:   return(0);
913: }
917: /*@
918: DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly
919: preallocated but the nonzero structure and zero values will not be set.
921: Logically Collective on DMDA
923: Input Parameter:
924: + dm - the DM
925: - only - PETSC_TRUE if only want preallocation
927: Level: developer
928: .seealso DMCreateMatrix()
929: @*/
930: PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only)
931: {
    /* Simple flag setter; consumed by matrix-creation code (see -dm_preallocate_only in DMSetFromOptions) */
934:   dm->prealloc_only = only;
935:   return(0);
936: }
940: /*@C
941: DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray()
943: Not Collective
945: Input Parameters:
946: + dm - the DM object
947: . count - The minimum size
948: - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)
950: Output Parameter:
951: . array - the work array
953: Level: developer
955: .seealso DMDestroy(), DMCreate()
956: @*/
957: PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
958: {
960:   DMWorkLink link;
961:   size_t     size;
    /* Reuse a free link from the workin list when available, otherwise allocate a fresh (zeroed) one */
966:   if (dm->workin) {
967:     link = dm->workin;
968:     dm->workin = dm->workin->next;
969:   } else {
970:     PetscNewLog(dm,struct _DMWorkLink,&link);
971:   }
972:   PetscDataTypeGetSize(dtype,&size);
    /* Grow the buffer only when the request exceeds its current capacity; it is never shrunk */
973:   if (size*count > link->bytes) {
974:     PetscFree(link->mem);
975:     PetscMalloc(size*count,&link->mem);
976:     link->bytes = size*count;
977:   }
    /* Move the link onto the checked-out (workout) list; DMRestoreWorkArray moves it back */
978:   link->next   = dm->workout;
979:   dm->workout  = link;
980:   *(void**)mem = link->mem;
981:   return(0);
982: }
986: /*@C
987: DMRestoreWorkArray - Restores a work array obtained with DMGetWorkArray()
989: Not Collective
991: Input Parameters:
992: + dm - the DM object
993: . count - The minimum size
994: - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)
996: Output Parameter:
997: . array - the work array
999: Level: developer
1001: .seealso DMDestroy(), DMCreate()
1002: @*/
1003: PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
1004: {
1005:   DMWorkLink *p,link;
    /* Find the checked-out link whose buffer matches *mem, unlink it from workout,
       and push it back onto the free (workin) list; *mem is nulled on success */
1010:   for (p=&dm->workout; (link=*p); p=&link->next) {
1011:     if (link->mem == *(void**)mem) {
1012:       *p           = link->next;
1013:       link->next   = dm->workin;
1014:       dm->workin   = link;
1015:       *(void**)mem = NULL;
1016:       return(0);
1017:     }
1018:   }
    /* Reaching here means the pointer was never handed out by DMGetWorkArray (or was restored twice) */
1019:   SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out");
1020:   return(0);
1021: }
1025: PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace))
1026: {
    /* 10 is the fixed capacity of dm->nullspaceConstructors (see the init loop in DMCreate).
       NOTE(review): a negative field index is not rejected here — confirm callers guarantee field >= 0 */
1029:   if (field >= 10) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field);
1030:   dm->nullspaceConstructors[field] = nullsp;
1031:   return(0);
1032: }
1036: /*@C
1037: DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field
1039: Not collective
1041: Input Parameter:
1042: . dm - the DM object
1044: Output Parameters:
1045: + numFields - The number of fields (or NULL if not requested)
1046: . fieldNames - The name for each field (or NULL if not requested)
1047: - fields - The global indices for each field (or NULL if not requested)
1049: Level: intermediate
1051: Notes:
1052: The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1053: PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with
1054: PetscFree().
1056: .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
1057: @*/
1058: PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields)
1059: {
1060:   PetscSection   section, sectionGlobal;
    /* Initialize every requested output first so all paths leave them defined.
       Fixed here: HTML-mangled address-of operators ("&section"/"&sectionGlobal"
       had become the section-sign character). */
1065:   if (numFields) {
1067:     *numFields = 0;
1068:   }
1069:   if (fieldNames) {
1071:     *fieldNames = NULL;
1072:   }
1073:   if (fields) {
1075:     *fields = NULL;
1076:   }
1077:   DMGetDefaultSection(dm, &section);
1078:   if (section) {
1079:     PetscInt *fieldSizes, **fieldIndices;
1080:     PetscInt nF, f, pStart, pEnd, p;
1082:     DMGetDefaultGlobalSection(dm, &sectionGlobal);
1083:     PetscSectionGetNumFields(section, &nF);
1084:     PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt*,&fieldIndices);
1085:     PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);
1086:     for (f = 0; f < nF; ++f) {
1087:       fieldSizes[f] = 0;
1088:     }
      /* First pass: count the unconstrained dofs of each field */
1089:     for (p = pStart; p < pEnd; ++p) {
1090:       PetscInt gdof;
1092:       PetscSectionGetDof(sectionGlobal, p, &gdof);
1093:       if (gdof > 0) {
1094:         for (f = 0; f < nF; ++f) {
1095:           PetscInt fdof, fcdof;
1097:           PetscSectionGetFieldDof(section, p, f, &fdof);
1098:           PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);
1099:           fieldSizes[f] += fdof-fcdof;
1100:         }
1101:       }
1102:     }
1103:     for (f = 0; f < nF; ++f) {
1104:       PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);
1105:       fieldSizes[f] = 0;
1106:     }
      /* Second pass: record the global index of each unconstrained field dof */
1107:     for (p = pStart; p < pEnd; ++p) {
1108:       PetscInt gdof, goff;
1110:       PetscSectionGetDof(sectionGlobal, p, &gdof);
1111:       if (gdof > 0) {
1112:         PetscSectionGetOffset(sectionGlobal, p, &goff);
1113:         for (f = 0; f < nF; ++f) {
1114:           PetscInt fdof, fcdof, fc;
1116:           PetscSectionGetFieldDof(section, p, f, &fdof);
1117:           PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);
1118:           for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) {
1119:             fieldIndices[f][fieldSizes[f]] = goff++;
1120:           }
1121:         }
1122:       }
1123:     }
1124:     if (numFields) *numFields = nF;
1125:     if (fieldNames) {
1126:       PetscMalloc(nF * sizeof(char*), fieldNames);
1127:       for (f = 0; f < nF; ++f) {
1128:         const char *fieldName;
1130:         PetscSectionGetFieldName(section, f, &fieldName);
1131:         PetscStrallocpy(fieldName, (char**) &(*fieldNames)[f]);
1132:       }
1133:     }
1134:     if (fields) {
1135:       PetscMalloc(nF * sizeof(IS), fields);
1136:       for (f = 0; f < nF; ++f) {
        /* PETSC_OWN_POINTER: each IS takes ownership of its index array */
1137:         ISCreateGeneral(PetscObjectComm((PetscObject)dm), fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);
1138:       }
1139:     } else {
      /* Fixed leak: when the caller did not request the ISs, nothing takes
         ownership of the per-field index arrays, so free them here */
      for (f = 0; f < nF; ++f) {
        PetscFree(fieldIndices[f]);
      }
    }
1140:     PetscFree2(fieldSizes,fieldIndices);
1141:   } else if (dm->ops->createfieldis) {
1142:     (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);
1143:   }
1144:   return(0);
1145: }
1150: /*@C
1151: DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems
1152: corresponding to different fields: each IS contains the global indices of the dofs of the
1153: corresponding field. The optional list of DMs define the DM for each subproblem.
1154: Generalizes DMCreateFieldIS().
1156: Not collective
1158: Input Parameter:
1159: . dm - the DM object
1161: Output Parameters:
1162: + len - The number of subproblems in the field decomposition (or NULL if not requested)
1163: . namelist - The name for each field (or NULL if not requested)
1164: . islist - The global indices for each field (or NULL if not requested)
1165: - dmlist - The DMs for each field subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined)
1167: Level: intermediate
1169: Notes:
1170: The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1171: PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1172: and all of the arrays should be freed with PetscFree().
1174: .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1175: @*/
1176: PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist)
1177: {
    /* All four outputs are optional; initialize the ones that were requested */
1182:   if (len) {
1184:     *len = 0;
1185:   }
1186:   if (namelist) {
1188:     *namelist = 0;
1189:   }
1190:   if (islist) {
1192:     *islist = 0;
1193:   }
1194:   if (dmlist) {
1196:     *dmlist = 0;
1197:   }
1198:   /*
1199:    Is it a good idea to apply the following check across all impls?
1200:    Perhaps some impls can have a well-defined decomposition before DMSetUp?
1201:    This, however, follows the general principle that accessors are not well-behaved until the object is set up.
1202:    */
1203:   if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp");
1204:   if (!dm->ops->createfielddecomposition) {
1205:     PetscSection section;
1206:     PetscInt     numFields, f;
      /* Fixed here: HTML-mangled "&section" (had become the section-sign character) */
1208:     DMGetDefaultSection(dm, &section);
1209:     if (section) {PetscSectionGetNumFields(section, &numFields);}
1210:     if (section && numFields && dm->ops->createsubdm) {
      /* Fixed NULL dereference: len is documented as optional and guarded everywhere
         else in this function, but was dereferenced unconditionally here */
      if (len) *len = numFields;
      /* NOTE(review): namelist/islist/dmlist are also documented as optional but are
         dereferenced unconditionally below — confirm callers pass all three on this path */
1212:       PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);
1213:       for (f = 0; f < numFields; ++f) {
1214:         const char *fieldName;
1216:         DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);
1217:         PetscSectionGetFieldName(section, f, &fieldName);
1218:         PetscStrallocpy(fieldName, (char**) &(*namelist)[f]);
1219:       }
1220:     } else {
1221:       DMCreateFieldIS(dm, len, namelist, islist);
1222:       /* By default there are no DMs associated with subproblems. */
1223:       if (dmlist) *dmlist = NULL;
1224:     }
1225:   } else {
1226:     (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist);
1227:   }
1228:   return(0);
1229: }
1233: /*@C
1234: DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in.
1235: The fields are defined by DMCreateFieldIS().
1237: Not collective
1239: Input Parameters:
1240: + dm - the DM object
1241: . numFields - number of fields in this subproblem
1242: - fields - The field numbers of the selected fields
1244: Output Parameters:
1245: + is - The global indices for the subproblem
1246: - subdm - The DM for the subproblem
1248: Level: intermediate
1250: .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1251: @*/
1252: PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm)
1253: {
1261:   if (dm->ops->createsubdm) { /* delegate to the type-specific implementation */
1262:     (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm);
1263:   } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined"); /* no fallback: the DM type must supply createsubdm */
1264:   return(0);
1265: }
1270: /*@C
1271: DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems
1272: corresponding to restrictions to pairs nested subdomains: each IS contains the global
1273: indices of the dofs of the corresponding subdomains. The inner subdomains conceptually
1274: define a nonoverlapping covering, while outer subdomains can overlap.
1275: The optional list of DMs define the DM for each subproblem.
1277: Not collective
1279: Input Parameter:
1280: . dm - the DM object
1282: Output Parameters:
1283: + len - The number of subproblems in the domain decomposition (or NULL if not requested)
1284: . namelist - The name for each subdomain (or NULL if not requested)
1285: . innerislist - The global indices for each inner subdomain (or NULL, if not requested)
1286: . outerislist - The global indices for each outer subdomain (or NULL, if not requested)
1287: - dmlist - The DMs for each subdomain subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined)
1289: Level: intermediate
1291: Notes:
1292: The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1293: PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1294: and all of the arrays should be freed with PetscFree().
1296: .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition()
1297: @*/
1298: PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist)
1299: {
1300:   PetscErrorCode ierr;
1301:   DMSubDomainHookLink link;
1302:   PetscInt i,l;
1311:   /*
1312:   Is it a good idea to apply the following check across all impls?
1313:   Perhaps some impls can have a well-defined decomposition before DMSetUp?
1314:   This, however, follows the general principle that accessors are not well-behaved until the object is set up.
1315:   */
1316:   if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp");
1317:   if (dm->ops->createdomaindecomposition) { /* optional: DMs without this op return without output */
1318:     (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist);
1319:     /* copy subdomain hooks and context over to the subdomain DMs */
1320:     if (dmlist) {
1321:       for (i = 0; i < l; i++) {
1322:         for (link=dm->subdomainhook; link; link=link->next) {
1323:           if (link->ddhook) {(*link->ddhook)(dm,(*dmlist)[i],link->ctx);}
1324:         }
1325:         (*dmlist)[i]->ctx = dm->ctx; /* subdomain DMs share the parent's user context */
1326:       }
1327:     }
1328:     if (len) *len = l; /* len is optional; only report the count if requested */
1329:   }
1330:   return(0);
1331: }
1336: /*@C
1337: DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector
1339: Not collective
1341: Input Parameters:
1342: + dm - the DM object
1343: . n - the number of subdomain scatters
1344: - subdms - the local subdomains
1346: Output Parameters:
1347: + n - the number of scatters returned
1348: . iscat - scatter from global vector to nonoverlapping global vector entries on subdomain
1349: . oscat - scatter from global vector to overlapping global vector entries on subdomain
1350: - gscat - scatter from global vector to local vector on subdomain (fills in ghosts)
1352: Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution
1353: of general nonlinear problems with overlapping subdomain methods. While merely having index sets that enable subsets
1354: of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of
1355: solution and residual data.
1357: Level: developer
1359: .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1360: @*/
1361: PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat)
1362: {
1368:   if (dm->ops->createddscatters) { /* delegate to the type-specific implementation */
1369:     (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat);
1370:   } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined"); /* no generic fallback exists for subdomain scatters */
1371:   return(0);
1372: }
1376: /*@
1377: DMRefine - Refines a DM object
1379: Collective on DM
1381: Input Parameter:
1382: + dm - the DM object
1383: - comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1385: Output Parameter:
1386: . dmf - the refined DM, or NULL
1388: Note: If no refinement was done, the return value is NULL
1390: Level: developer
1392: .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1393: @*/
1394: PetscErrorCode DMRefine(DM dm,MPI_Comm comm,DM *dmf)
1395: {
1396:   PetscErrorCode ierr;
1397:   DMRefineHookLink link;
1401:   (*dm->ops->refine)(dm,comm,dmf); /* type-specific refinement; may leave *dmf NULL if no refinement was done */
1402:   if (*dmf) {
1403:     (*dmf)->ops->creatematrix = dm->ops->creatematrix; /* fine DM inherits any custom matrix constructor */
1405:     PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);
1407:     (*dmf)->ctx       = dm->ctx; /* share the user context with the refined DM */
1408:     (*dmf)->leveldown = dm->leveldown;
1409:     (*dmf)->levelup   = dm->levelup + 1; /* one refinement level further from the original */
1411:     DMSetMatType(*dmf,dm->mattype);
1412:     for (link=dm->refinehook; link; link=link->next) { /* run hooks registered via DMRefineHookAdd() in order */
1413:       if (link->refinehook) {
1414:         (*link->refinehook)(dm,*dmf,link->ctx);
1415:       }
1416:     }
1417:   }
1418:   return(0);
1419: }
1423: /*@C
1424: DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid
1426: Logically Collective
1428: Input Arguments:
1429: + coarse - nonlinear solver context on which to run a hook when restricting to a coarser level
1430: . refinehook - function to run when setting up a coarser level
1431: . interphook - function to run to update data on finer levels (once per SNESSolve())
1432: - ctx - [optional] user-defined context for provide data for the hooks (may be NULL)
1434: Calling sequence of refinehook:
1435: $ refinehook(DM coarse,DM fine,void *ctx);
1437: + coarse - coarse level DM
1438: . fine - fine level DM to interpolate problem to
1439: - ctx - optional user-defined function context
1441: Calling sequence for interphook:
1442: $ interphook(DM coarse,Mat interp,DM fine,void *ctx)
1444: + coarse - coarse level DM
1445: . interp - matrix interpolating a coarse-level solution to the finer grid
1446: . fine - fine level DM to update
1447: - ctx - optional user-defined function context
1449: Level: advanced
1451: Notes:
1452: This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing
1454: If this function is called multiple times, the hooks will be run in the order they are added.
1456: This function is currently not available from Fortran.
1458: .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1459: @*/
1460: PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx)
1461: {
1462:   PetscErrorCode ierr;
1463:   DMRefineHookLink link,*p;
1467:   for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1468:   PetscMalloc(sizeof(struct _DMRefineHookLink),&link);
1469:   link->refinehook = refinehook;
1470:   link->interphook = interphook;
1471:   link->ctx        = ctx;
1472:   link->next       = NULL;
1473:   *p               = link; /* append at the tail so hooks run in registration order */
1474:   return(0);
1475: }
1479: /*@
1480: DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd()
1482: Collective if any hooks are
1484: Input Arguments:
1485: + coarse - coarser DM to use as a base
1486: . interp - interpolation matrix, apply using MatInterpolate()
1487: - fine - finer DM to update
1489: Level: developer
1491: .seealso: DMRefineHookAdd(), MatInterpolate()
1492: @*/
1493: PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine)
1494: {
1495:   PetscErrorCode ierr;
1496:   DMRefineHookLink link;
1499:   for (link=fine->refinehook; link; link=link->next) { /* hooks registered on the fine DM via DMRefineHookAdd() */
1500:     if (link->interphook) {
1501:       (*link->interphook)(coarse,interp,fine,link->ctx); /* each hook may update auxiliary data on the fine DM */
1502:     }
1503:   }
1504:   return(0);
1505: }
1509: /*@
1510: DMGetRefineLevel - Gets the number of refinements that have generated this DM.
1512: Not Collective
1514: Input Parameter:
1515: . dm - the DM object
1517: Output Parameter:
1518: . level - number of refinements
1520: Level: developer
1522: .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1524: @*/
1525: PetscErrorCode DMGetRefineLevel(DM dm,PetscInt *level)
1526: {
1529:   *level = dm->levelup; /* levelup is incremented by DMRefine() */
1530:   return(0);
1531: }
1535: /*@C
1536: DMGlobalToLocalHookAdd - adds a callback to be run when global to local is called
1538: Logically Collective
1540: Input Arguments:
1541: + dm - the DM
1542: . beginhook - function to run at the beginning of DMGlobalToLocalBegin()
1543: . endhook - function to run after DMGlobalToLocalEnd() has completed
1544: - ctx - [optional] user-defined context for provide data for the hooks (may be NULL)
1546: Calling sequence for beginhook:
1547: $ beginhook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx)
1549: + dm - global DM
1550: . g - global vector
1551: . mode - mode
1552: . l - local vector
1553: - ctx - optional user-defined function context
1556: Calling sequence for endhook:
1557: $ endhook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx)
1559: + global - global DM
1560: - ctx - optional user-defined function context
1562: Level: advanced
1564: .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1565: @*/
1566: PetscErrorCode DMGlobalToLocalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx)
1567: {
1568:   PetscErrorCode ierr;
1569:   DMGlobalToLocalHookLink link,*p;
1573:   for (p=&dm->gtolhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1574:   PetscMalloc(sizeof(struct _DMGlobalToLocalHookLink),&link);
1575:   link->beginhook = beginhook;
1576:   link->endhook   = endhook;
1577:   link->ctx       = ctx;
1578:   link->next      = NULL;
1579:   *p              = link; /* append at the tail so hooks run in registration order */
1580:   return(0);
1581: }
1585: /*@
1586: DMGlobalToLocalBegin - Begins updating local vectors from global vector
1588: Neighbor-wise Collective on DM
1590: Input Parameters:
1591: + dm - the DM object
1592: . g - the global vector
1593: . mode - INSERT_VALUES or ADD_VALUES
1594: - l - the local vector
1597: Level: beginner
1599: .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1601: @*/
1602: PetscErrorCode DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
1603: {
1604:   PetscSF sf;
1605:   PetscErrorCode ierr;
1606:   DMGlobalToLocalHookLink link;
1610:   for (link=dm->gtolhook; link; link=link->next) { /* run user begin-hooks before starting communication */
1611:     if (link->beginhook) {
1612:       (*link->beginhook)(dm,g,mode,l,link->ctx);
1613:     }
1614:   }
1615:   DMGetDefaultSF(dm, &sf);
1616:   if (sf) { /* a default star forest exists: communicate through PetscSF */
1617:     PetscScalar *lArray, *gArray;
1619:     if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); /* SF broadcast only supports insertion */
1620:     VecGetArray(l, &lArray);
1621:     VecGetArray(g, &gArray);
1622:     PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray); /* start root-to-leaf (global-to-local) broadcast */
1623:     VecRestoreArray(l, &lArray);
1624:     VecRestoreArray(g, &gArray);
1625:   } else {
1626:     (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l); /* map *_ALL_VALUES to plain modes for the implementation */
1627:   }
1628:   return(0);
1629: }
1633: /*@
1634: DMGlobalToLocalEnd - Ends updating local vectors from global vector
1636: Neighbor-wise Collective on DM
1638: Input Parameters:
1639: + dm - the DM object
1640: . g - the global vector
1641: . mode - INSERT_VALUES or ADD_VALUES
1642: - l - the local vector
1645: Level: beginner
1647: .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1649: @*/
1650: PetscErrorCode DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
1651: {
1652:   PetscSF sf;
1653:   PetscErrorCode ierr;
1654:   PetscScalar *lArray, *gArray;
1655:   DMGlobalToLocalHookLink link;
1659:   DMGetDefaultSF(dm, &sf);
1660:   if (sf) { /* finish the PetscSF broadcast started in DMGlobalToLocalBegin() */
1661:     if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode); /* SF broadcast only supports insertion */
1663:     VecGetArray(l, &lArray);
1664:     VecGetArray(g, &gArray);
1665:     PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);
1666:     VecRestoreArray(l, &lArray);
1667:     VecRestoreArray(g, &gArray);
1668:   } else {
1669:     (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l); /* map *_ALL_VALUES to plain modes for the implementation */
1670:   }
1671:   for (link=dm->gtolhook; link; link=link->next) { /* run user end-hooks after communication completes */
1672:     if (link->endhook) {(*link->endhook)(dm,g,mode,l,link->ctx);}
1673:   }
1674:   return(0);
1675: }
1679: /*@
1680: DMLocalToGlobalBegin - updates global vectors from local vectors
1682: Neighbor-wise Collective on DM
1684: Input Parameters:
1685: + dm - the DM object
1686: . l - the local vector
1687: . mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that
1688: base point.
1689: - g - the global vector
1691: Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. If you would like to simply add the non-ghosted values in the local
1692: array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work
1693: global array to the final global array with VecAXPY().
1695: Level: beginner
1697: .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()
1699: @*/
1700: PetscErrorCode DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
1701: {
1702:   PetscSF sf;
1707:   DMGetDefaultSF(dm, &sf);
1708:   if (sf) { /* a default star forest exists: communicate through PetscSF */
1709:     MPI_Op op;
1710:     PetscScalar *lArray, *gArray;
1712:     switch (mode) { /* translate the InsertMode into the matching MPI reduction */
1713:     case INSERT_VALUES:
1714:     case INSERT_ALL_VALUES:
1715:       op = MPIU_REPLACE; break;
1716:     case ADD_VALUES:
1717:     case ADD_ALL_VALUES:
1718:       op = MPI_SUM; break;
1719:     default:
1720:       SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1721:     }
1722:     VecGetArray(l, &lArray);
1723:     VecGetArray(g, &gArray);
1724:     PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op); /* start leaf-to-root (local-to-global) reduction */
1725:     VecRestoreArray(l, &lArray);
1726:     VecRestoreArray(g, &gArray);
1727:   } else {
1728:     (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g); /* map *_ALL_VALUES to plain modes for the implementation */
1729:   }
1730:   return(0);
1731: }
1735: /*@
1736: DMLocalToGlobalEnd - updates global vectors from local vectors
1738: Neighbor-wise Collective on DM
1740: Input Parameters:
1741: + dm - the DM object
1742: . l - the local vector
1743: . mode - INSERT_VALUES or ADD_VALUES
1744: - g - the global vector
1747: Level: beginner
1749: .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalEnd()
1751: @*/
1752: PetscErrorCode DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
1753: {
1754:   PetscSF sf;
1759:   DMGetDefaultSF(dm, &sf);
1760:   if (sf) { /* finish the PetscSF reduction started in DMLocalToGlobalBegin() */
1761:     MPI_Op op;
1762:     PetscScalar *lArray, *gArray;
1764:     switch (mode) { /* must match the op chosen in DMLocalToGlobalBegin() */
1765:     case INSERT_VALUES:
1766:     case INSERT_ALL_VALUES:
1767:       op = MPIU_REPLACE; break;
1768:     case ADD_VALUES:
1769:     case ADD_ALL_VALUES:
1770:       op = MPI_SUM; break;
1771:     default:
1772:       SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1773:     }
1774:     VecGetArray(l, &lArray);
1775:     VecGetArray(g, &gArray);
1776:     PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);
1777:     VecRestoreArray(l, &lArray);
1778:     VecRestoreArray(g, &gArray);
1779:   } else {
1780:     (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g); /* map *_ALL_VALUES to plain modes for the implementation */
1781:   }
1782:   return(0);
1783: }
1787: /*@
1788: DMCoarsen - Coarsens a DM object
1790: Collective on DM
1792: Input Parameter:
1793: + dm - the DM object
1794: - comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1796: Output Parameter:
1797: . dmc - the coarsened DM
1799: Level: developer
1801: .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1803: @*/
1804: PetscErrorCode DMCoarsen(DM dm, MPI_Comm comm, DM *dmc)
1805: {
1806:   PetscErrorCode ierr;
1807:   DMCoarsenHookLink link;
1811:   (*dm->ops->coarsen)(dm, comm, dmc); /* type-specific coarsening */
1812:   (*dmc)->ops->creatematrix = dm->ops->creatematrix; /* coarse DM inherits any custom matrix constructor */
1813:   PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);
1814:   (*dmc)->ctx       = dm->ctx; /* share the user context with the coarsened DM */
1815:   (*dmc)->levelup   = dm->levelup;
1816:   (*dmc)->leveldown = dm->leveldown + 1; /* one coarsening level further from the original */
1817:   DMSetMatType(*dmc,dm->mattype);
1818:   for (link=dm->coarsenhook; link; link=link->next) { /* run hooks registered via DMCoarsenHookAdd() in order */
1819:     if (link->coarsenhook) {(*link->coarsenhook)(dm,*dmc,link->ctx);}
1820:   }
1821:   return(0);
1822: }
1826: /*@C
1827: DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid
1829: Logically Collective
1831: Input Arguments:
1832: + fine - nonlinear solver context on which to run a hook when restricting to a coarser level
1833: . coarsenhook - function to run when setting up a coarser level
1834: . restricthook - function to run to update data on coarser levels (once per SNESSolve())
1835: - ctx - [optional] user-defined context for provide data for the hooks (may be NULL)
1837: Calling sequence of coarsenhook:
1838: $ coarsenhook(DM fine,DM coarse,void *ctx);
1840: + fine - fine level DM
1841: . coarse - coarse level DM to restrict problem to
1842: - ctx - optional user-defined function context
1844: Calling sequence for restricthook:
1845: $ restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)
1847: + fine - fine level DM
1848: . mrestrict - matrix restricting a fine-level solution to the coarse grid
1849: . rscale - scaling vector for restriction
1850: . inject - matrix restricting by injection
1851: . coarse - coarse level DM to update
1852: - ctx - optional user-defined function context
1854: Level: advanced
1856: Notes:
1857: This function is only needed if auxiliary data needs to be set up on coarse grids.
1859: If this function is called multiple times, the hooks will be run in the order they are added.
1861: In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1862: extract the finest level information from its context (instead of from the SNES).
1864: This function is currently not available from Fortran.
1866: .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1867: @*/
1868: PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
1869: {
1870:   PetscErrorCode ierr;
1871:   DMCoarsenHookLink link,*p;
1875:   for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1876:   PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);
1877:   link->coarsenhook  = coarsenhook;
1878:   link->restricthook = restricthook;
1879:   link->ctx          = ctx;
1880:   link->next         = NULL;
1881:   *p                 = link; /* append at the tail so hooks run in registration order */
1882:   return(0);
1883: }
1887: /*@
1888: DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()
1890: Collective if any hooks are
1892: Input Arguments:
1893: + fine - finer DM to use as a base
1894: . restrct - restriction matrix, apply using MatRestrict()
1895: . inject - injection matrix, also use MatRestrict()
1896: - coarse - coarser DM to update
1898: Level: developer
1900: .seealso: DMCoarsenHookAdd(), MatRestrict()
1901: @*/
1902: PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse)
1903: {
1904:   PetscErrorCode ierr;
1905:   DMCoarsenHookLink link;
1908:   for (link=fine->coarsenhook; link; link=link->next) { /* hooks registered on the fine DM via DMCoarsenHookAdd() */
1909:     if (link->restricthook) {
1910:       (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx); /* each hook may update auxiliary data on the coarse DM */
1911:     }
1912:   }
1913:   return(0);
1914: }
1918: /*@C
1919: DMSubDomainHookAdd - adds a callback to be run when restricting a problem to the coarse grid
1921: Logically Collective
1923: Input Arguments:
1924: + global - global DM
1925: . ddhook - function to run to pass data to the decomposition DM upon its creation
1926: . restricthook - function to run to update data on block solve (at the beginning of the block solve)
1927: - ctx - [optional] user-defined context for provide data for the hooks (may be NULL)
1930: Calling sequence for ddhook:
1931: $ ddhook(DM global,DM block,void *ctx)
1933: + global - global DM
1934: . block - block DM
1935: - ctx - optional user-defined function context
1937: Calling sequence for restricthook:
1938: $ restricthook(DM global,VecScatter out,VecScatter in,DM block,void *ctx)
1940: + global - global DM
1941: . out - scatter to the outer (with ghost and overlap points) block vector
1942: . in - scatter to block vector values only owned locally
1943: . block - block DM
1944: - ctx - optional user-defined function context
1946: Level: advanced
1948: Notes:
1949: This function is only needed if auxiliary data needs to be set up on subdomain DMs.
1951: If this function is called multiple times, the hooks will be run in the order they are added.
1953: In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1954: extract the global information from its context (instead of from the SNES).
1956: This function is currently not available from Fortran.
1958: .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1959: @*/
1960: PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
1961: {
1962:   PetscErrorCode ierr;
1963:   DMSubDomainHookLink link,*p;
1967:   for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1968:   PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);
1969:   link->restricthook = restricthook;
1970:   link->ddhook       = ddhook;
1971:   link->ctx          = ctx;
1972:   link->next         = NULL;
1973:   *p                 = link; /* append at the tail so hooks run in registration order */
1974:   return(0);
1975: }
1979: /*@
1980: DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd()
1982: Collective if any hooks are
1984: Input Arguments:
1985: + global - global DM to use as a base
1986: . oscatter - scatter from domain global vector filling subdomain global vector with overlap
1987: . gscatter - scatter from domain global vector filling subdomain local vector with ghosts
1988: - subdm - subdomain DM to update
1990: Level: developer
1992: .seealso: DMCoarsenHookAdd(), MatRestrict()
1993: @*/
1994: PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm)
1995: {
1996:   PetscErrorCode ierr;
1997:   DMSubDomainHookLink link;
2000:   for (link=global->subdomainhook; link; link=link->next) { /* hooks registered via DMSubDomainHookAdd() */
2001:     if (link->restricthook) {
2002:       (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx); /* each hook may update auxiliary data on the subdomain DM */
2003:     }
2004:   }
2005:   return(0);
2006: }
2010: /*@
2011: DMGetCoarsenLevel - Gets the number of coarsenings that have generated this DM.
2013: Not Collective
2015: Input Parameter:
2016: . dm - the DM object
2018: Output Parameter:
2019: . level - number of coarsenings
2021: Level: developer
2023: .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2025: @*/
2026: PetscErrorCode DMGetCoarsenLevel(DM dm,PetscInt *level)
2027: {
2030:   *level = dm->leveldown; /* leveldown is incremented by DMCoarsen() */
2031:   return(0);
2032: }
2038: /*@C
2039: DMRefineHierarchy - Refines a DM object, all levels at once
2041: Collective on DM
2043: Input Parameter:
2044: + dm - the DM object
2045: - nlevels - the number of levels of refinement
2047: Output Parameter:
2048: . dmf - the refined DM hierarchy
2050: Level: developer
2052: .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2054: @*/
2055: PetscErrorCode DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[])
2056: {
2061:   if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2062:   if (nlevels == 0) return(0); /* nothing to do */
2063:   if (dm->ops->refinehierarchy) { /* prefer an all-at-once implementation when available */
2064:     (*dm->ops->refinehierarchy)(dm,nlevels,dmf);
2065:   } else if (dm->ops->refine) { /* otherwise build the hierarchy one DMRefine() at a time */
2066:     PetscInt i;
2068:     DMRefine(dm,PetscObjectComm((PetscObject)dm),&dmf[0]);
2069:     for (i=1; i<nlevels; i++) {
2070:       DMRefine(dmf[i-1],PetscObjectComm((PetscObject)dm),&dmf[i]);
2071:     }
2072:   } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No RefineHierarchy for this DM yet");
2073:   return(0);
2074: }
2078: /*@C
2079: DMCoarsenHierarchy - Coarsens a DM object, all levels at once
2081: Collective on DM
2083: Input Parameter:
2084: + dm - the DM object
2085: - nlevels - the number of levels of coarsening
2087: Output Parameter:
2088: . dmc - the coarsened DM hierarchy
2090: Level: developer
2092: .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2094: @*/
2095: PetscErrorCode DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
2096: {
2101:   if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2102:   if (nlevels == 0) return(0); /* nothing to do */
2104:   if (dm->ops->coarsenhierarchy) { /* prefer an all-at-once implementation when available */
2105:     (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);
2106:   } else if (dm->ops->coarsen) { /* otherwise build the hierarchy one DMCoarsen() at a time */
2107:     PetscInt i;
2109:     DMCoarsen(dm,PetscObjectComm((PetscObject)dm),&dmc[0]);
2110:     for (i=1; i<nlevels; i++) {
2111:       DMCoarsen(dmc[i-1],PetscObjectComm((PetscObject)dm),&dmc[i]);
2112:     }
2113:   } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
2114:   return(0);
2115: }
2119: /*@
2120: DMCreateAggregates - Gets the aggregates that map between
2121: grids associated with two DMs.
2123: Collective on DM
2125: Input Parameters:
2126: + dmc - the coarse grid DM
2127: - dmf - the fine grid DM
2129: Output Parameters:
2130: . rest - the restriction matrix (transpose of the projection matrix)
2132: Level: intermediate
2134: .keywords: interpolation, restriction, multigrid
2136: .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation()
2137: @*/
2138: PetscErrorCode DMCreateAggregates(DM dmc, DM dmf, Mat *rest)
2139: {
2145:   (*dmc->ops->getaggregates)(dmc, dmf, rest); /* dispatch to the coarse DM's implementation */
2146:   return(0);
2147: }
2151: /*@C
2152: DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed
2154: Not Collective
2156: Input Parameters:
2157: + dm - the DM object
2158: - destroy - the destroy function
2160: Level: intermediate
2162: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2164: @*/
2165: PetscErrorCode DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**))
2166: {
2169:   dm->ctxdestroy = destroy; /* invoked on dm->ctx when the DM is destroyed */
2170:   return(0);
2171: }
2175: /*@
2176: DMSetApplicationContext - Set a user context into a DM object
2178: Not Collective
2180: Input Parameters:
2181: + dm - the DM object
2182: - ctx - the user context
2184: Level: intermediate
2186: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2188: @*/
2189: PetscErrorCode DMSetApplicationContext(DM dm,void *ctx)
2190: {
2193:   dm->ctx = ctx; /* stored pointer only; the DM does not copy the context */
2194:   return(0);
2195: }
2199: /*@
2200: DMGetApplicationContext - Gets a user context from a DM object
2202: Not Collective
2204: Input Parameter:
2205: . dm - the DM object
2207: Output Parameter:
2208: . ctx - the user context
2210: Level: intermediate
2212: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2214: @*/
2215: PetscErrorCode DMGetApplicationContext(DM dm,void *ctx)
2216: {
2219:   *(void**)ctx = dm->ctx; /* ctx is the address of a pointer variable to receive the context */
2220:   return(0);
2221: }
2225: /*@C
2226: DMSetVariableBounds - sets a function to compute the lower and upper bound vectors for SNESVI.
2228: Logically Collective on DM
2230: Input Parameter:
2231: + dm - the DM object
2232: - f - the function that computes variable bounds used by SNESVI (use NULL to cancel a previous function that was set)
2234: Level: intermediate
2236: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
2237: DMSetJacobian()
2239: @*/
2240: PetscErrorCode DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
2241: {
2243:   dm->ops->computevariablebounds = f; /* used later by DMComputeVariableBounds() */
2244:   return(0);
2245: }
2249: /*@
2250: DMHasVariableBounds - does the DM object have a variable bounds function?
2252: Not Collective
2254: Input Parameter:
2255: . dm - the DM object to destroy
2257: Output Parameter:
2258: . flg - PETSC_TRUE if the variable bounds function exists
2260: Level: developer
2262: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2264: @*/
2265: PetscErrorCode DMHasVariableBounds(DM dm,PetscBool *flg)
2266: {
2268:   *flg = (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE; /* true iff a bounds function was set */
2269:   return(0);
2270: }
2274: /*@C
2275: DMComputeVariableBounds - compute variable bounds used by SNESVI.
2277: Logically Collective on DM
2279: Input Parameters:
2280: + dm - the DM object to destroy
2281: - x - current solution at which the bounds are computed
2283: Output parameters:
2284: + xl - lower bound
2285: - xu - upper bound
2287: Level: intermediate
2289: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2291: @*/
2292: PetscErrorCode DMComputeVariableBounds(DM dm, Vec xl, Vec xu)
2293: {
2299:   if (dm->ops->computevariablebounds) { /* set via DMSetVariableBounds() */
2300:     (*dm->ops->computevariablebounds)(dm, xl,xu);
2301:   } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds.");
2302:   return(0);
2303: }
2307: /*@
2308: DMHasColoring - does the DM object have a method of providing a coloring?
2310: Not Collective
2312: Input Parameter:
2313: . dm - the DM object
2315: Output Parameter:
2316: . flg - PETSC_TRUE if the DM has facilities for DMCreateColoring().
2318: Level: developer
2320: .seealso DMHasFunction(), DMCreateColoring()
2322: @*/
2323: PetscErrorCode DMHasColoring(DM dm,PetscBool *flg)
2324: {
2326:   *flg = (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE; /* true iff the type implements DMCreateColoring() */
2327:   return(0);
2328: }
2330: #undef __FUNCT__
2332: /*@C
2333: DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear.
2335: Collective on DM
2337: Input Parameter:
2338: + dm - the DM object
2339: - x - location to compute residual and Jacobian, if NULL is passed to those routines; will be NULL for linear problems.
2341: Level: developer
2343: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2345: @*/
2346: PetscErrorCode DMSetVec(DM dm,Vec x)
2347: {
2351: if (x) {
2352: if (!dm->x) {
2353: DMCreateGlobalVector(dm,&dm->x);
2354: }
2355: VecCopy(x,dm->x);
2356: } else if (dm->x) {
2357: VecDestroy(&dm->x);
2358: }
2359: return(0);
2360: }
2362: PetscFunctionList DMList = NULL;
2363: PetscBool DMRegisterAllCalled = PETSC_FALSE;
2367: /*@C
2368: DMSetType - Builds a DM, for a particular DM implementation.
2370: Collective on DM
2372: Input Parameters:
2373: + dm - The DM object
2374: - method - The name of the DM type
2376: Options Database Key:
2377: . -dm_type <type> - Sets the DM type; use -help for a list of available types
2379: Notes:
2380: See "petsc/include/petscdm.h" for available DM types (for instance, DM1D, DM2D, or DM3D).
2382: Level: intermediate
2384: .keywords: DM, set, type
2385: .seealso: DMGetType(), DMCreate()
2386: @*/
2387: PetscErrorCode DMSetType(DM dm, DMType method)
2388: {
2389: PetscErrorCode (*r)(DM);
2390: PetscBool match;
2395: PetscObjectTypeCompare((PetscObject) dm, method, &match);
2396: if (match) return(0);
2398: if (!DMRegisterAllCalled) {DMRegisterAll();}
2399: PetscFunctionListFind(DMList,method,&r);
2400: if (!r) SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);
2402: if (dm->ops->destroy) {
2403: (*dm->ops->destroy)(dm);
2404: dm->ops->destroy = NULL;
2405: }
2406: (*r)(dm);
2407: PetscObjectChangeTypeName((PetscObject)dm,method);
2408: return(0);
2409: }
2413: /*@C
2414: DMGetType - Gets the DM type name (as a string) from the DM.
2416: Not Collective
2418: Input Parameter:
2419: . dm - The DM
2421: Output Parameter:
2422: . type - The DM type name
2424: Level: intermediate
2426: .keywords: DM, get, type, name
2427: .seealso: DMSetType(), DMCreate()
2428: @*/
2429: PetscErrorCode DMGetType(DM dm, DMType *type)
2430: {
2436: if (!DMRegisterAllCalled) {
2437: DMRegisterAll();
2438: }
2439: *type = ((PetscObject)dm)->type_name;
2440: return(0);
2441: }
2445: /*@C
2446: DMConvert - Converts a DM to another DM, either of the same or different type.
2448: Collective on DM
2450: Input Parameters:
2451: + dm - the DM
2452: - newtype - new DM type (use "same" for the same type)
2454: Output Parameter:
2455: . M - pointer to new DM
2457: Notes:
2458: Cannot be used to convert a sequential DM to parallel or parallel to sequential,
2459: the MPI communicator of the generated DM is always the same as the communicator
2460: of the input DM.
2462: Level: intermediate
2464: .seealso: DMCreate()
2465: @*/
2466: PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
2467: {
2468: DM B;
2469: char convname[256];
2470: PetscBool sametype, issame;
2477: PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);
2478: PetscStrcmp(newtype, "same", &issame);
2479: {
2480: PetscErrorCode (*conv)(DM, DMType, DM*) = NULL;
2482: /*
2483: Order of precedence:
2484: 1) See if a specialized converter is known to the current DM.
2485: 2) See if a specialized converter is known to the desired DM class.
2486: 3) See if a good general converter is registered for the desired class
2487: 4) See if a good general converter is known for the current matrix.
2488: 5) Use a really basic converter.
2489: */
2491: /* 1) See if a specialized converter is known to the current DM and the desired class */
2492: PetscStrcpy(convname,"DMConvert_");
2493: PetscStrcat(convname,((PetscObject) dm)->type_name);
2494: PetscStrcat(convname,"_");
2495: PetscStrcat(convname,newtype);
2496: PetscStrcat(convname,"_C");
2497: PetscObjectQueryFunction((PetscObject)dm,convname,&conv);
2498: if (conv) goto foundconv;
2500: /* 2) See if a specialized converter is known to the desired DM class. */
2501: DMCreate(PetscObjectComm((PetscObject)dm), &B);
2502: DMSetType(B, newtype);
2503: PetscStrcpy(convname,"DMConvert_");
2504: PetscStrcat(convname,((PetscObject) dm)->type_name);
2505: PetscStrcat(convname,"_");
2506: PetscStrcat(convname,newtype);
2507: PetscStrcat(convname,"_C");
2508: PetscObjectQueryFunction((PetscObject)B,convname,&conv);
2509: if (conv) {
2510: DMDestroy(&B);
2511: goto foundconv;
2512: }
2514: #if 0
2515: /* 3) See if a good general converter is registered for the desired class */
2516: conv = B->ops->convertfrom;
2517: DMDestroy(&B);
2518: if (conv) goto foundconv;
2520: /* 4) See if a good general converter is known for the current matrix */
2521: if (dm->ops->convert) {
2522: conv = dm->ops->convert;
2523: }
2524: if (conv) goto foundconv;
2525: #endif
2527: /* 5) Use a really basic converter. */
2528: SETERRQ2(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);
2530: foundconv:
2531: PetscLogEventBegin(DM_Convert,dm,0,0,0);
2532: (*conv)(dm,newtype,M);
2533: PetscLogEventEnd(DM_Convert,dm,0,0,0);
2534: }
2535: PetscObjectStateIncrease((PetscObject) *M);
2536: return(0);
2537: }
2539: /*--------------------------------------------------------------------------------------------------------------------*/
2543: /*@C
2544: DMRegister - Adds a new DM component implementation
2546: Not Collective
2548: Input Parameters:
2549: + name - The name of a new user-defined creation routine
2550: - create_func - The creation routine itself
2552: Notes:
2553: DMRegister() may be called multiple times to add several user-defined DMs
2556: Sample usage:
2557: .vb
2558: DMRegister("my_da", MyDMCreate);
2559: .ve
2561: Then, your DM type can be chosen with the procedural interface via
2562: .vb
2563: DMCreate(MPI_Comm, DM *);
2564: DMSetType(DM,"my_da");
2565: .ve
2566: or at runtime via the option
2567: .vb
2568: -da_type my_da
2569: .ve
2571: Level: advanced
2573: .keywords: DM, register
2574: .seealso: DMRegisterAll(), DMRegisterDestroy()
2576: @*/
2577: PetscErrorCode DMRegister(const char sname[],PetscErrorCode (*function)(DM))
2578: {
2582: PetscFunctionListAdd(&DMList,sname,function);
2583: return(0);
2584: }
2588: /*@C
2589: DMLoad - Loads a DM that has been stored in binary with DMView().
2591: Collective on PetscViewer
2593: Input Parameters:
2594: + newdm - the newly loaded DM, this needs to have been created with DMCreate() or
2595: some related function before a call to DMLoad().
2596: - viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
2597: HDF5 file viewer, obtained from PetscViewerHDF5Open()
2599: Level: intermediate
2601: Notes:
2602: The type is determined by the data in the file, any type set into the DM before this call is ignored.
2604: Notes for advanced users:
2605: Most users should not need to know the details of the binary storage
2606: format, since DMLoad() and DMView() completely hide these details.
2607: But for anyone who's interested, the standard binary matrix storage
2608: format is
2609: .vb
2610: has not yet been determined
2611: .ve
2613: .seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
2614: @*/
2615: PetscErrorCode DMLoad(DM newdm, PetscViewer viewer)
2616: {
2618: PetscBool isbinary;
2619: PetscInt classid;
2620: char type[256];
2625: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
2626: if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");
2628: PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);
2629: if (classid != DM_FILE_CLASSID) SETERRQ(PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not DM next in file");
2630: PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);
2631: DMSetType(newdm, type);
2632: if (newdm->ops->load) {
2633: (*newdm->ops->load)(newdm,viewer);
2634: }
2635: return(0);
2636: }
2638: /******************************** FEM Support **********************************/
2642: PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[])
2643: {
2644: PetscInt f;
2648: PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);
2649: for (f = 0; f < len; ++f) {
2650: PetscPrintf(PETSC_COMM_SELF, " | %G |\n", PetscRealPart(x[f]));
2651: }
2652: return(0);
2653: }
2657: PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[])
2658: {
2659: PetscInt f, g;
2663: PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);
2664: for (f = 0; f < rows; ++f) {
2665: PetscPrintf(PETSC_COMM_SELF, " |");
2666: for (g = 0; g < cols; ++g) {
2667: PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));
2668: }
2669: PetscPrintf(PETSC_COMM_SELF, " |\n");
2670: }
2671: return(0);
2672: }
2676: /*@
2677: DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.
2679: Input Parameter:
2680: . dm - The DM
2682: Output Parameter:
2683: . section - The PetscSection
2685: Level: intermediate
2687: Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
2689: .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2690: @*/
2691: PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section)
2692: {
2696: *section = dm->defaultSection;
2697: return(0);
2698: }
2702: /*@
2703: DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.
2705: Input Parameters:
2706: + dm - The DM
2707: - section - The PetscSection
2709: Level: intermediate
2711: Note: Any existing Section will be destroyed
2713: .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2714: @*/
2715: PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section)
2716: {
2717: PetscInt numFields;
2718: PetscInt f;
2724: PetscObjectReference((PetscObject)section);
2725: PetscSectionDestroy(&dm->defaultSection);
2726: dm->defaultSection = section;
2727: PetscSectionGetNumFields(dm->defaultSection, &numFields);
2728: if (numFields) {
2729: DMSetNumFields(dm, numFields);
2730: for (f = 0; f < numFields; ++f) {
2731: const char *name;
2733: PetscSectionGetFieldName(dm->defaultSection, f, &name);
2734: PetscObjectSetName(dm->fields[f], name);
2735: }
2736: }
2737: /* The global section will be rebuilt in the next call to DMGetDefaultGlobalSection(). */
2738: PetscSectionDestroy(&dm->defaultGlobalSection);
2739: return(0);
2740: }
2744: /*@
2745: DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.
2747: Collective on DM
2749: Input Parameter:
2750: . dm - The DM
2752: Output Parameter:
2753: . section - The PetscSection
2755: Level: intermediate
2757: Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
2759: .seealso: DMSetDefaultSection(), DMGetDefaultSection()
2760: @*/
2761: PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section)
2762: {
2768: if (!dm->defaultGlobalSection) {
2769: if (!dm->defaultSection || !dm->sf) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection");
2770: PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);
2771: PetscLayoutDestroy(&dm->map);
2772: PetscSectionGetValueLayout(PetscObjectComm((PetscObject)dm),dm->defaultGlobalSection,&dm->map);
2773: }
2774: *section = dm->defaultGlobalSection;
2775: return(0);
2776: }
2780: /*@
2781: DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.
2783: Input Parameters:
2784: + dm - The DM
2785: - section - The PetscSection, or NULL
2787: Level: intermediate
2789: Note: Any existing Section will be destroyed
2791: .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection()
2792: @*/
2793: PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section)
2794: {
2800: PetscObjectReference((PetscObject)section);
2801: PetscSectionDestroy(&dm->defaultGlobalSection);
2802: dm->defaultGlobalSection = section;
2803: return(0);
2804: }
2808: /*@
2809: DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set,
2810: it is created from the default PetscSection layouts in the DM.
2812: Input Parameter:
2813: . dm - The DM
2815: Output Parameter:
2816: . sf - The PetscSF
2818: Level: intermediate
2820: Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
2822: .seealso: DMSetDefaultSF(), DMCreateDefaultSF()
2823: @*/
2824: PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf)
2825: {
2826: PetscInt nroots;
2832: PetscSFGetGraph(dm->defaultSF, &nroots, NULL, NULL, NULL);
2833: if (nroots < 0) {
2834: PetscSection section, gSection;
2836: DMGetDefaultSection(dm, §ion);
2837: if (section) {
2838: DMGetDefaultGlobalSection(dm, &gSection);
2839: DMCreateDefaultSF(dm, section, gSection);
2840: } else {
2841: *sf = NULL;
2842: return(0);
2843: }
2844: }
2845: *sf = dm->defaultSF;
2846: return(0);
2847: }
2851: /*@
2852: DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM
2854: Input Parameters:
2855: + dm - The DM
2856: - sf - The PetscSF
2858: Level: intermediate
2860: Note: Any previous SF is destroyed
2862: .seealso: DMGetDefaultSF(), DMCreateDefaultSF()
2863: @*/
2864: PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf)
2865: {
2871: PetscSFDestroy(&dm->defaultSF);
2872: dm->defaultSF = sf;
2873: return(0);
2874: }
2878: /*@C
2879: DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections
2880: describing the data layout.
2882: Input Parameters:
2883: + dm - The DM
2884: . localSection - PetscSection describing the local data layout
2885: - globalSection - PetscSection describing the global data layout
2887: Level: intermediate
2889: .seealso: DMGetDefaultSF(), DMSetDefaultSF()
2890: @*/
2891: PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection)
2892: {
2893: MPI_Comm comm;
2894: PetscLayout layout;
2895: const PetscInt *ranges;
2896: PetscInt *local;
2897: PetscSFNode *remote;
2898: PetscInt pStart, pEnd, p, nroots, nleaves, l;
2899: PetscMPIInt size, rank;
2903: PetscObjectGetComm((PetscObject)dm,&comm);
2905: MPI_Comm_size(comm, &size);
2906: MPI_Comm_rank(comm, &rank);
2907: PetscSectionGetChart(globalSection, &pStart, &pEnd);
2908: PetscSectionGetConstrainedStorageSize(globalSection, &nroots);
2909: PetscLayoutCreate(comm, &layout);
2910: PetscLayoutSetBlockSize(layout, 1);
2911: PetscLayoutSetLocalSize(layout, nroots);
2912: PetscLayoutSetUp(layout);
2913: PetscLayoutGetRanges(layout, &ranges);
2914: for (p = pStart, nleaves = 0; p < pEnd; ++p) {
2915: PetscInt gdof, gcdof;
2917: PetscSectionGetDof(globalSection, p, &gdof);
2918: PetscSectionGetConstraintDof(globalSection, p, &gcdof);
2919: nleaves += gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
2920: }
2921: PetscMalloc(nleaves * sizeof(PetscInt), &local);
2922: PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);
2923: for (p = pStart, l = 0; p < pEnd; ++p) {
2924: const PetscInt *cind;
2925: PetscInt dof, cdof, off, gdof, gcdof, goff, gsize, d, c;
2927: PetscSectionGetDof(localSection, p, &dof);
2928: PetscSectionGetOffset(localSection, p, &off);
2929: PetscSectionGetConstraintDof(localSection, p, &cdof);
2930: PetscSectionGetConstraintIndices(localSection, p, &cind);
2931: PetscSectionGetDof(globalSection, p, &gdof);
2932: PetscSectionGetConstraintDof(globalSection, p, &gcdof);
2933: PetscSectionGetOffset(globalSection, p, &goff);
2934: if (!gdof) continue; /* Censored point */
2935: gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
2936: if (gsize != dof-cdof) {
2937: if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof);
2938: cdof = 0; /* Ignore constraints */
2939: }
2940: for (d = 0, c = 0; d < dof; ++d) {
2941: if ((c < cdof) && (cind[c] == d)) {++c; continue;}
2942: local[l+d-c] = off+d;
2943: }
2944: if (gdof < 0) {
2945: for (d = 0; d < gsize; ++d, ++l) {
2946: PetscInt offset = -(goff+1) + d, r;
2948: PetscFindInt(offset,size,ranges,&r);
2949: if (r < 0) r = -(r+2);
2950: remote[l].rank = r;
2951: remote[l].index = offset - ranges[r];
2952: }
2953: } else {
2954: for (d = 0; d < gsize; ++d, ++l) {
2955: remote[l].rank = rank;
2956: remote[l].index = goff+d - ranges[rank];
2957: }
2958: }
2959: }
2960: if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves);
2961: PetscLayoutDestroy(&layout);
2962: PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);
2963: return(0);
2964: }
2968: /*@
2969: DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM.
2971: Input Parameter:
2972: . dm - The DM
2974: Output Parameter:
2975: . sf - The PetscSF
2977: Level: intermediate
2979: Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
2981: .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
2982: @*/
2983: PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf)
2984: {
2988: *sf = dm->sf;
2989: return(0);
2990: }
2994: /*@
2995: DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM.
2997: Input Parameters:
2998: + dm - The DM
2999: - sf - The PetscSF
3001: Level: intermediate
3003: .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3004: @*/
3005: PetscErrorCode DMSetPointSF(DM dm, PetscSF sf)
3006: {
3012: PetscSFDestroy(&dm->sf);
3013: PetscObjectReference((PetscObject) sf);
3014: dm->sf = sf;
3015: return(0);
3016: }
3020: PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields)
3021: {
3025: *numFields = dm->numFields;
3026: return(0);
3027: }
3031: PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields)
3032: {
3033: PetscInt f;
3038: for (f = 0; f < dm->numFields; ++f) {
3039: PetscObjectDestroy(&dm->fields[f]);
3040: }
3041: PetscFree(dm->fields);
3042: dm->numFields = numFields;
3043: PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);
3044: for (f = 0; f < dm->numFields; ++f) {
3045: PetscContainerCreate(PetscObjectComm((PetscObject)dm), (PetscContainer*) &dm->fields[f]);
3046: }
3047: return(0);
3048: }
3052: PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field)
3053: {
3057: if (!dm->fields) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()");
3058: if ((f < 0) || (f >= dm->numFields)) SETERRQ3(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields);
3059: *field = dm->fields[f];
3060: return(0);
3061: }
3065: /*@
3066: DMSetCoordinates - Sets into the DM a global vector that holds the coordinates
3068: Collective on DM
3070: Input Parameters:
3071: + dm - the DM
3072: - c - coordinate vector
3074: Note:
3075: The coordinates do include those for ghost points, which are in the local vector
3077: Level: intermediate
3079: .keywords: distributed array, get, corners, nodes, local indices, coordinates
3080: .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLoca(), DMGetCoordinateDM()
3081: @*/
3082: PetscErrorCode DMSetCoordinates(DM dm, Vec c)
3083: {
3089: PetscObjectReference((PetscObject) c);
3090: VecDestroy(&dm->coordinates);
3091: dm->coordinates = c;
3092: VecDestroy(&dm->coordinatesLocal);
3093: return(0);
3094: }
3098: /*@
3099: DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates
3101: Collective on DM
3103: Input Parameters:
3104: + dm - the DM
3105: - c - coordinate vector
3107: Note:
3108: The coordinates of ghost points can be set using DMSetCoordinates()
3109: followed by DMGetCoordinatesLocal(). This is intended to enable the
3110: setting of ghost coordinates outside of the domain.
3112: Level: intermediate
3114: .keywords: distributed array, get, corners, nodes, local indices, coordinates
3115: .seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM()
3116: @*/
3117: PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c)
3118: {
3124: PetscObjectReference((PetscObject) c);
3125: VecDestroy(&dm->coordinatesLocal);
3127: dm->coordinatesLocal = c;
3129: VecDestroy(&dm->coordinates);
3130: return(0);
3131: }
3135: /*@
3136: DMGetCoordinates - Gets a global vector with the coordinates associated with the DM.
3138: Not Collective
3140: Input Parameter:
3141: . dm - the DM
3143: Output Parameter:
3144: . c - global coordinate vector
3146: Note:
3147: This is a borrowed reference, so the user should NOT destroy this vector
3149: Each process has only the local coordinates (does NOT have the ghost coordinates).
3151: For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3152: and (x_0,y_0,z_0,x_1,y_1,z_1...)
3154: Level: intermediate
3156: .keywords: distributed array, get, corners, nodes, local indices, coordinates
3157: .seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
3158: @*/
3159: PetscErrorCode DMGetCoordinates(DM dm, Vec *c)
3160: {
3166: if (!dm->coordinates && dm->coordinatesLocal) {
3167: DM cdm = NULL;
3169: DMGetCoordinateDM(dm, &cdm);
3170: DMCreateGlobalVector(cdm, &dm->coordinates);
3171: PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");
3172: DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);
3173: DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);
3174: }
3175: *c = dm->coordinates;
3176: return(0);
3177: }
3181: /*@
3182: DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM.
3184: Collective on DM
3186: Input Parameter:
3187: . dm - the DM
3189: Output Parameter:
3190: . c - coordinate vector
3192: Note:
3193: This is a borrowed reference, so the user should NOT destroy this vector
3195: Each process has the local and ghost coordinates
3197: For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3198: and (x_0,y_0,z_0,x_1,y_1,z_1...)
3200: Level: intermediate
3202: .keywords: distributed array, get, corners, nodes, local indices, coordinates
3203: .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM()
3204: @*/
3205: PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c)
3206: {
3212: if (!dm->coordinatesLocal && dm->coordinates) {
3213: DM cdm = NULL;
3215: DMGetCoordinateDM(dm, &cdm);
3216: DMCreateLocalVector(cdm, &dm->coordinatesLocal);
3217: PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");
3218: DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);
3219: DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);
3220: }
3221: *c = dm->coordinatesLocal;
3222: return(0);
3223: }
3227: /*@
3228: DMGetCoordinateDM - Gets the DM that scatters between global and local coordinates
3230: Collective on DM
3232: Input Parameter:
3233: . dm - the DM
3235: Output Parameter:
3236: . cdm - coordinate DM
3238: Level: intermediate
3240: .keywords: distributed array, get, corners, nodes, local indices, coordinates
3241: .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3242: @*/
3243: PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm)
3244: {
3250: if (!dm->coordinateDM) {
3251: if (!dm->ops->createcoordinatedm) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unable to create coordinates for this DM");
3252: (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);
3253: }
3254: *cdm = dm->coordinateDM;
3255: return(0);
3256: }
3260: /*@
3261: DMLocatePoints - Locate the points in v in the mesh and return an IS of the containing cells
3263: Not collective
3265: Input Parameters:
3266: + dm - The DM
3267: - v - The Vec of points
3269: Output Parameter:
3270: . cells - The local cell numbers for cells which contain the points
3272: Level: developer
3274: .keywords: point location, mesh
3275: .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3276: @*/
3277: PetscErrorCode DMLocatePoints(DM dm, Vec v, IS *cells)
3278: {
3285: if (dm->ops->locatepoints) {
3286: (*dm->ops->locatepoints)(dm,v,cells);
3287: } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Point location not available for this DM");
3288: return(0);
3289: }