Actual source code: dm.c
petsc-3.5.2 2014-09-08
1: #include <petsc-private/dmimpl.h> /*I "petscdm.h" I*/
2: #include <petscsf.h>
3: #include <petscds.h>
5: PetscClassId DM_CLASSID;
6: PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal, DM_LocalToLocal;
8: const char *const DMBoundaryTypes[] = {"NONE","GHOSTED","MIRROR","PERIODIC","TWIST","DM_BOUNDARY_",0};
12: /*@
13: DMCreate - Creates an empty DM object. The type can then be set with DMSetType().
15: If you never call DMSetType() it will generate an
16: error when you try to use the DM.
18: Collective on MPI_Comm
20: Input Parameter:
21: . comm - The communicator for the DM object
23: Output Parameter:
24: . dm - The DM object
26: Level: beginner
28: .seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
29: @*/
30: PetscErrorCode DMCreate(MPI_Comm comm,DM *dm)
31: {
32: DM v;
37: *dm = NULL;
38: PetscSysInitializePackage();
39: VecInitializePackage();
40: MatInitializePackage();
41: DMInitializePackage();
43: PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);
44: PetscMemzero(v->ops, sizeof(struct _DMOps));
47: v->ltogmap = NULL;
48: v->bs = 1;
49: v->coloringtype = IS_COLORING_GLOBAL;
50: PetscSFCreate(comm, &v->sf);
51: PetscSFCreate(comm, &v->defaultSF);
52: v->defaultSection = NULL;
53: v->defaultGlobalSection = NULL;
54: v->L = NULL;
55: v->maxCell = NULL;
56: {
57: PetscInt i;
58: for (i = 0; i < 10; ++i) {
59: v->nullspaceConstructors[i] = NULL;
60: }
61: }
62: PetscDSCreate(comm, &v->prob);
63: v->dmBC = NULL;
64: v->outputSequenceNum = -1;
65: v->outputSequenceVal = 0.0;
66: DMSetVecType(v,VECSTANDARD);
67: DMSetMatType(v,MATAIJ);
68: *dm = v;
69: return(0);
70: }
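/*
   Editorial usage sketch (not part of dm.c): the typical life cycle of a DM.
   Type-specific setup calls (e.g. sizes for a DMDA) would follow DMSetType();
   error checking with ierr/CHKERRQ is omitted for brevity.

     DM dm;
     DMCreate(PETSC_COMM_WORLD,&dm);
     DMSetType(dm,DMDA);        // or DMPLEX, DMCOMPOSITE, ...
     DMSetFromOptions(dm);
     DMSetUp(dm);
     // ... use the DM ...
     DMDestroy(&dm);
*/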
74: /*@
75: DMClone - Creates a DM object with the same topology as the original.
77: Collective on MPI_Comm
79: Input Parameter:
80: . dm - The original DM object
82: Output Parameter:
83: . newdm - The new DM object
85: Level: beginner
87: .keywords: DM, topology, create
88: @*/
89: PetscErrorCode DMClone(DM dm, DM *newdm)
90: {
91: PetscSF sf;
92: Vec coords;
93: void *ctx;
99: DMCreate(PetscObjectComm((PetscObject)dm), newdm);
100: if (dm->ops->clone) {
101: (*dm->ops->clone)(dm, newdm);
102: }
103: (*newdm)->setupcalled = PETSC_TRUE;
104: DMGetPointSF(dm, &sf);
105: DMSetPointSF(*newdm, sf);
106: DMGetApplicationContext(dm, &ctx);
107: DMSetApplicationContext(*newdm, ctx);
108: DMGetCoordinatesLocal(dm, &coords);
109: if (coords) {
110: DMSetCoordinatesLocal(*newdm, coords);
111: } else {
112: DMGetCoordinates(dm, &coords);
113: if (coords) {DMSetCoordinates(*newdm, coords);}
114: }
115: if (dm->maxCell) {
116: const PetscReal *maxCell, *L;
117: DMGetPeriodicity(dm, &maxCell, &L);
118: DMSetPeriodicity(*newdm, maxCell, L);
119: }
120: return(0);
121: }
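/*
   Editorial usage sketch (not part of dm.c): DMClone() gives a second DM sharing
   the topology and coordinates of the original, e.g. to carry a different field
   layout on the same mesh.  Error checking omitted.

     DM newdm;
     DMClone(dm,&newdm);
     // attach a different default PetscSection or discretization to newdm here
     DMDestroy(&newdm);
*/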
125: /*@C
126: DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()
128: Logically Collective on DMDA
130: Input Parameter:
131: + da - initial distributed array
132: - ctype - the vector type, currently either VECSTANDARD or VECCUSP
134: Options Database:
135: . -dm_vec_type ctype
137: Level: intermediate
139: .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType, DMGetVecType()
140: @*/
141: PetscErrorCode DMSetVecType(DM da,VecType ctype)
142: {
147: PetscFree(da->vectype);
148: PetscStrallocpy(ctype,(char**)&da->vectype);
149: return(0);
150: }
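/*
   Editorial usage sketch (not part of dm.c): selecting the vector type before
   creating vectors from the DM; VECCUSP is only available in a CUSP-enabled
   build.  Error checking omitted.

     Vec x;
     DMSetVecType(dm,VECSTANDARD);   // or VECCUSP, or -dm_vec_type at run time
     DMCreateGlobalVector(dm,&x);    // x is created with the requested VecType
*/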
154: /*@C
155: DMGetVecType - Gets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()
157: Logically Collective on DMDA
159: Input Parameter:
160: . da - initial distributed array
162: Output Parameter:
163: . ctype - the vector type
165: Level: intermediate
167: .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType
168: @*/
169: PetscErrorCode DMGetVecType(DM da,VecType *ctype)
170: {
173: *ctype = da->vectype;
174: return(0);
175: }
179: /*@
180: VecGetDM - Gets the DM defining the data layout of the vector
182: Not collective
184: Input Parameter:
185: . v - The Vec
187: Output Parameter:
188: . dm - The DM
190: Level: intermediate
192: .seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
193: @*/
194: PetscErrorCode VecGetDM(Vec v, DM *dm)
195: {
201: PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject*) dm);
202: return(0);
203: }
207: /*@
208: VecSetDM - Sets the DM defining the data layout of the vector
210: Not collective
212: Input Parameters:
213: + v - The Vec
214: - dm - The DM
216: Level: intermediate
218: .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
219: @*/
220: PetscErrorCode VecSetDM(Vec v, DM dm)
221: {
227: PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);
228: return(0);
229: }
233: /*@C
234: DMSetMatType - Sets the type of matrix created with DMCreateMatrix()
236: Logically Collective on DM
238: Input Parameter:
239: + dm - the DM context
240: - ctype - the matrix type
242: Options Database:
243: . -dm_mat_type ctype
245: Level: intermediate
247: .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType, DMGetMatType()
248: @*/
249: PetscErrorCode DMSetMatType(DM dm,MatType ctype)
250: {
255: PetscFree(dm->mattype);
256: PetscStrallocpy(ctype,(char**)&dm->mattype);
257: return(0);
258: }
262: /*@C
263: DMGetMatType - Gets the type of matrix created with DMCreateMatrix()
265: Logically Collective on DM
267: Input Parameter:
268: . dm - the DM context
270: Output Parameter:
271: . ctype - the matrix type
273: Options Database:
274: . -dm_mat_type ctype
276: Level: intermediate
278: .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType, DMSetMatType()
279: @*/
280: PetscErrorCode DMGetMatType(DM dm,MatType *ctype)
281: {
284: *ctype = dm->mattype;
285: return(0);
286: }
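/*
   Editorial usage sketch (not part of dm.c): the matrix type set here is used by
   DMCreateMatrix().  Error checking omitted.

     Mat J;
     DMSetMatType(dm,MATBAIJ);       // or -dm_mat_type baij at run time
     DMCreateMatrix(dm,&J);          // J is created and preallocated with that type
     MatDestroy(&J);
*/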
290: /*@
291: MatGetDM - Gets the DM defining the data layout of the matrix
293: Not collective
295: Input Parameter:
296: . A - The Mat
298: Output Parameter:
299: . dm - The DM
301: Level: intermediate
303: .seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType()
304: @*/
305: PetscErrorCode MatGetDM(Mat A, DM *dm)
306: {
312: PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject*) dm);
313: return(0);
314: }
318: /*@
319: MatSetDM - Sets the DM defining the data layout of the matrix
321: Not collective
323: Input Parameters:
324: + A - The Mat
325: - dm - The DM
327: Level: intermediate
329: .seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType()
330: @*/
331: PetscErrorCode MatSetDM(Mat A, DM dm)
332: {
338: PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);
339: return(0);
340: }
344: /*@C
345: DMSetOptionsPrefix - Sets the prefix used for searching for all
346: DMDA options in the database.
348: Logically Collective on DMDA
350: Input Parameter:
351: + da - the DMDA context
352: - prefix - the prefix to prepend to all option names
354: Notes:
355: A hyphen (-) must NOT be given at the beginning of the prefix name.
356: The first character of all runtime options is AUTOMATICALLY the hyphen.
358: Level: advanced
360: .keywords: DMDA, set, options, prefix, database
362: .seealso: DMSetFromOptions()
363: @*/
364: PetscErrorCode DMSetOptionsPrefix(DM dm,const char prefix[])
365: {
370: PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);
371: return(0);
372: }
376: /*@
377: DMDestroy - Destroys a vector packer or DMDA.
379: Collective on DM
381: Input Parameter:
382: . dm - the DM object to destroy
384: Level: developer
386: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
388: @*/
389: PetscErrorCode DMDestroy(DM *dm)
390: {
391: PetscInt i, cnt = 0, Nf = 0, f;
392: DMNamedVecLink nlink,nnext;
396: if (!*dm) return(0);
399: if ((*dm)->prob) {
400: PetscObject disc;
402: /* I think it makes sense to dump all attached things when you are destroyed, which also eliminates circular references */
403: PetscDSGetNumFields((*dm)->prob, &Nf);
404: for (f = 0; f < Nf; ++f) {
405: PetscDSGetDiscretization((*dm)->prob, f, &disc);
406: PetscObjectCompose(disc, "pmat", NULL);
407: PetscObjectCompose(disc, "nullspace", NULL);
408: PetscObjectCompose(disc, "nearnullspace", NULL);
409: }
410: }
411: /* count all the circular references of DM and its contained Vecs */
412: for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
413: if ((*dm)->localin[i]) cnt++;
414: if ((*dm)->globalin[i]) cnt++;
415: }
416: for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++;
417: for (nlink=(*dm)->namedlocal; nlink; nlink=nlink->next) cnt++;
418: if ((*dm)->x) {
419: DM obj;
420: VecGetDM((*dm)->x, &obj);
421: if (obj == *dm) cnt++;
422: }
424: if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; return(0);}
425: /*
426: Need this test because the dm references the vectors that
427: reference the dm, so destroying the dm calls destroy on the
428: vectors that cause another destroy on the dm
429: */
430: if (((PetscObject)(*dm))->refct < 0) return(0);
431: ((PetscObject) (*dm))->refct = 0;
432: for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
433: if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()");
434: VecDestroy(&(*dm)->localin[i]);
435: }
436: nnext=(*dm)->namedglobal;
437: (*dm)->namedglobal = NULL;
438: for (nlink=nnext; nlink; nlink=nnext) { /* Destroy the named vectors */
439: nnext = nlink->next;
440: if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
441: PetscFree(nlink->name);
442: VecDestroy(&nlink->X);
443: PetscFree(nlink);
444: }
445: nnext=(*dm)->namedlocal;
446: (*dm)->namedlocal = NULL;
447: for (nlink=nnext; nlink; nlink=nnext) { /* Destroy the named local vectors */
448: nnext = nlink->next;
449: if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
450: PetscFree(nlink->name);
451: VecDestroy(&nlink->X);
452: PetscFree(nlink);
453: }
455: /* Destroy the list of hooks */
456: {
457: DMCoarsenHookLink link,next;
458: for (link=(*dm)->coarsenhook; link; link=next) {
459: next = link->next;
460: PetscFree(link);
461: }
462: (*dm)->coarsenhook = NULL;
463: }
464: {
465: DMRefineHookLink link,next;
466: for (link=(*dm)->refinehook; link; link=next) {
467: next = link->next;
468: PetscFree(link);
469: }
470: (*dm)->refinehook = NULL;
471: }
472: {
473: DMSubDomainHookLink link,next;
474: for (link=(*dm)->subdomainhook; link; link=next) {
475: next = link->next;
476: PetscFree(link);
477: }
478: (*dm)->subdomainhook = NULL;
479: }
480: {
481: DMGlobalToLocalHookLink link,next;
482: for (link=(*dm)->gtolhook; link; link=next) {
483: next = link->next;
484: PetscFree(link);
485: }
486: (*dm)->gtolhook = NULL;
487: }
488: /* Destroy the work arrays */
489: {
490: DMWorkLink link,next;
491: if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out");
492: for (link=(*dm)->workin; link; link=next) {
493: next = link->next;
494: PetscFree(link->mem);
495: PetscFree(link);
496: }
497: (*dm)->workin = NULL;
498: }
500: PetscObjectDestroy(&(*dm)->dmksp);
501: PetscObjectDestroy(&(*dm)->dmsnes);
502: PetscObjectDestroy(&(*dm)->dmts);
504: if ((*dm)->ctx && (*dm)->ctxdestroy) {
505: (*(*dm)->ctxdestroy)(&(*dm)->ctx);
506: }
507: VecDestroy(&(*dm)->x);
508: MatFDColoringDestroy(&(*dm)->fd);
509: DMClearGlobalVectors(*dm);
510: ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);
511: PetscFree((*dm)->vectype);
512: PetscFree((*dm)->mattype);
514: PetscSectionDestroy(&(*dm)->defaultSection);
515: PetscSectionDestroy(&(*dm)->defaultGlobalSection);
516: PetscLayoutDestroy(&(*dm)->map);
517: PetscSFDestroy(&(*dm)->sf);
518: PetscSFDestroy(&(*dm)->defaultSF);
520: DMDestroy(&(*dm)->coordinateDM);
521: VecDestroy(&(*dm)->coordinates);
522: VecDestroy(&(*dm)->coordinatesLocal);
523: PetscFree((*dm)->maxCell);
524: PetscFree((*dm)->L);
526: PetscDSDestroy(&(*dm)->prob);
527: DMDestroy(&(*dm)->dmBC);
528: /* if memory was published with SAWs then destroy it */
529: PetscObjectSAWsViewOff((PetscObject)*dm);
531: (*(*dm)->ops->destroy)(*dm);
532: /* We do not destroy (*dm)->data here so that we can reference count backend objects */
533: PetscHeaderDestroy(dm);
534: return(0);
535: }
539: /*@
540: DMSetUp - sets up the data structures inside a DM object
542: Collective on DM
544: Input Parameter:
545: . dm - the DM object to setup
547: Level: developer
549: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
551: @*/
552: PetscErrorCode DMSetUp(DM dm)
553: {
558: if (dm->setupcalled) return(0);
559: if (dm->ops->setup) {
560: (*dm->ops->setup)(dm);
561: }
562: dm->setupcalled = PETSC_TRUE;
563: return(0);
564: }
568: /*@
569: DMSetFromOptions - sets parameters in a DM from the options database
571: Collective on DM
573: Input Parameter:
574: . dm - the DM object to set options for
576: Options Database:
577: + -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros
578: . -dm_vec_type <type> type of vector to create inside DM
579: . -dm_mat_type <type> type of matrix to create inside DM
580: - -dm_coloring_type <global or ghosted>
582: Level: developer
584: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
586: @*/
587: PetscErrorCode DMSetFromOptions(DM dm)
588: {
589: char typeName[256];
590: PetscBool flg;
595: PetscObjectOptionsBegin((PetscObject)dm);
596: PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,NULL);
597: PetscOptionsFList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);
598: if (flg) {
599: DMSetVecType(dm,typeName);
600: }
601: PetscOptionsFList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype ? dm->mattype : typeName,typeName,sizeof(typeName),&flg);
602: if (flg) {
603: DMSetMatType(dm,typeName);
604: }
605: PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,NULL);
606: if (dm->ops->setfromoptions) {
607: (*dm->ops->setfromoptions)(dm);
608: }
609: /* process any options handlers added with PetscObjectAddOptionsHandler() */
610: PetscObjectProcessOptionsHandlers((PetscObject) dm);
611: PetscOptionsEnd();
612: return(0);
613: }
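/*
   Editorial usage sketch (not part of dm.c): an options prefix composes with the
   options listed above, so the same code can configure several DMs independently.
   The prefix "mesh_" and executable name are placeholders; error checking omitted.

     DMSetOptionsPrefix(dm,"mesh_");
     DMSetFromOptions(dm);
     // run time:  ./app -mesh_dm_vec_type standard -mesh_dm_mat_type aij
*/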
617: /*@C
618: DMView - Views a vector packer or DMDA.
620: Collective on DM
622: Input Parameter:
623: + dm - the DM object to view
624: - v - the viewer
626: Level: developer
628: .seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
630: @*/
631: PetscErrorCode DMView(DM dm,PetscViewer v)
632: {
634: PetscBool isbinary;
638: if (!v) {
639: PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm),&v);
640: }
641: PetscObjectPrintClassNamePrefixType((PetscObject)dm,v);
642: PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);
643: if (isbinary) {
644: PetscInt classid = DM_FILE_CLASSID;
645: char type[256];
647: PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);
648: PetscStrncpy(type,((PetscObject)dm)->type_name,256);
649: PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);
650: }
651: if (dm->ops->view) {
652: (*dm->ops->view)(dm,v);
653: }
654: return(0);
655: }
659: /*@
660: DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object
662: Collective on DM
664: Input Parameter:
665: . dm - the DM object
667: Output Parameter:
668: . vec - the global vector
670: Level: beginner
672: .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
674: @*/
675: PetscErrorCode DMCreateGlobalVector(DM dm,Vec *vec)
676: {
681: (*dm->ops->createglobalvector)(dm,vec);
682: return(0);
683: }
687: /*@
688: DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object
690: Not Collective
692: Input Parameter:
693: . dm - the DM object
695: Output Parameter:
696: . vec - the local vector
698: Level: beginner
700: .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
702: @*/
703: PetscErrorCode DMCreateLocalVector(DM dm,Vec *vec)
704: {
709: (*dm->ops->createlocalvector)(dm,vec);
710: return(0);
711: }
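/*
   Editorial usage sketch (not part of dm.c): a global vector holds only owned
   entries, a local vector also holds ghost entries; both are laid out by the DM.
   Error checking omitted.

     Vec g,l;
     DMCreateGlobalVector(dm,&g);
     DMCreateLocalVector(dm,&l);
     // ... fill g, then update ghosts in l with DMGlobalToLocalBegin/End() ...
     VecDestroy(&g);
     VecDestroy(&l);
*/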
715: /*@
716: DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM.
718: Collective on DM
720: Input Parameter:
721: . dm - the DM that provides the mapping
723: Output Parameter:
724: . ltog - the mapping
726: Level: intermediate
728: Notes:
729: This mapping can then be used by VecSetLocalToGlobalMapping() or
730: MatSetLocalToGlobalMapping().
732: .seealso: DMCreateLocalVector()
733: @*/
734: PetscErrorCode DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog)
735: {
741: if (!dm->ltogmap) {
742: PetscSection section, sectionGlobal;
744: DMGetDefaultSection(dm, &section);
745: if (section) {
746: PetscInt *ltog;
747: PetscInt pStart, pEnd, size, p, l;
749: DMGetDefaultGlobalSection(dm, &sectionGlobal);
750: PetscSectionGetChart(section, &pStart, &pEnd);
751: PetscSectionGetStorageSize(section, &size);
752: PetscMalloc1(size, &ltog); /* We want the local+overlap size */
753: for (p = pStart, l = 0; p < pEnd; ++p) {
754: PetscInt dof, off, c;
756: /* Should probably use constrained dofs */
757: PetscSectionGetDof(section, p, &dof);
758: PetscSectionGetOffset(sectionGlobal, p, &off);
759: for (c = 0; c < dof; ++c, ++l) {
760: ltog[l] = off+c;
761: }
762: }
763: ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, 1,size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);
764: PetscLogObjectParent((PetscObject)dm, (PetscObject)dm->ltogmap);
765: } else {
766: if (!dm->ops->getlocaltoglobalmapping) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping");
767: (*dm->ops->getlocaltoglobalmapping)(dm);
768: }
769: }
770: *ltog = dm->ltogmap;
771: return(0);
772: }
776: /*@
777: DMGetBlockSize - Gets the inherent block size associated with a DM
779: Not Collective
781: Input Parameter:
782: . dm - the DM with block structure
784: Output Parameter:
785: . bs - the block size, 1 implies no exploitable block structure
787: Level: intermediate
789: .seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMapping()
790: @*/
791: PetscErrorCode DMGetBlockSize(DM dm,PetscInt *bs)
792: {
796: if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet");
797: *bs = dm->bs;
798: return(0);
799: }
803: /*@
804: DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects
806: Collective on DM
808: Input Parameter:
809: + dm1 - the DM object
810: - dm2 - the second, finer DM object
812: Output Parameter:
813: + mat - the interpolation
814: - vec - the scaling (optional)
816: Level: developer
818: Notes: For DMDA objects this only works for "uniform refinement", that is, the refined mesh was obtained with DMRefine() or the coarse mesh was obtained by
819: DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation.
821: For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors
822: EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic.
825: .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen()
827: @*/
828: PetscErrorCode DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec)
829: {
835: (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);
836: return(0);
837: }
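/*
   Editorial usage sketch (not part of dm.c): interpolating a coarse-grid vector
   to a fine grid with the matrix returned here.  dmc/dmf/xc/xf are placeholder
   names for coarse/fine DMs and vectors; error checking omitted.

     Mat Interp;
     Vec scale;
     DMCreateInterpolation(dmc,dmf,&Interp,&scale);
     MatInterpolate(Interp,xc,xf);   // xf = interpolation of xc onto the fine grid
     MatDestroy(&Interp);
     VecDestroy(&scale);
*/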
841: /*@
842: DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects
844: Collective on DM
846: Input Parameter:
847: + dm1 - the DM object
848: - dm2 - the second, finer DM object
850: Output Parameter:
851: . ctx - the injection
853: Level: developer
855: Notes: For DMDA objects this only works for "uniform refinement", that is, the refined mesh was obtained with DMRefine() or the coarse mesh was obtained by
856: DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection.
858: .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation()
860: @*/
861: PetscErrorCode DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx)
862: {
868: (*dm1->ops->getinjection)(dm1,dm2,ctx);
869: return(0);
870: }
874: /*@
875: DMCreateColoring - Gets coloring for a DM
877: Collective on DM
879: Input Parameter:
880: + dm - the DM object
881: - ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL
883: Output Parameter:
884: . coloring - the coloring
886: Level: developer
888: .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix(), DMSetMatType()
890: @*/
891: PetscErrorCode DMCreateColoring(DM dm,ISColoringType ctype,ISColoring *coloring)
892: {
897: if (!dm->ops->getcoloring) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No coloring for this type of DM yet");
898: (*dm->ops->getcoloring)(dm,ctype,coloring);
899: return(0);
900: }
904: /*@
905: DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite
907: Collective on DM
909: Input Parameter:
910: . dm - the DM object
912: Output Parameter:
913: . mat - the empty Jacobian
915: Level: beginner
917: Notes: This properly preallocates the number of nonzeros in the sparse matrix so you
918: do not need to do it yourself.
920: By default it also sets the nonzero structure and puts in the zero entries. To prevent setting
921: the nonzero pattern, call DMSetMatrixPreallocateOnly()
923: For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used
924: internally by PETSc.
926: For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires
927: the indices for the global numbering for DMDAs which is complicated.
929: .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMSetMatType()
931: @*/
932: PetscErrorCode DMCreateMatrix(DM dm,Mat *mat)
933: {
938: MatInitializePackage();
941: (*dm->ops->creatematrix)(dm,mat);
942: return(0);
943: }
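/*
   Editorial usage sketch (not part of dm.c): obtaining a preallocated Jacobian;
   call DMSetMatrixPreallocateOnly() beforehand if the zero entries should not be
   inserted.  Error checking omitted.

     Mat J;
     DMCreateMatrix(dm,&J);
     // assemble with MatSetValuesStencil() or MatSetValuesLocal(), then
     MatAssemblyBegin(J,MAT_FINAL_ASSEMBLY);
     MatAssemblyEnd(J,MAT_FINAL_ASSEMBLY);
     MatDestroy(&J);
*/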
947: /*@
948: DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly
949: preallocated but the nonzero structure and zero values will not be set.
951: Logically Collective on DMDA
953: Input Parameter:
954: + dm - the DM
955: - only - PETSC_TRUE if only want preallocation
957: Level: developer
958: .seealso DMCreateMatrix()
959: @*/
960: PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only)
961: {
964: dm->prealloc_only = only;
965: return(0);
966: }
970: /*@C
971: DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray()
973: Not Collective
975: Input Parameters:
976: + dm - the DM object
977: . count - The minimum size
978: - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)
980: Output Parameter:
981: . array - the work array
983: Level: developer
985: .seealso DMDestroy(), DMCreate()
986: @*/
987: PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
988: {
990: DMWorkLink link;
991: size_t size;
996: if (dm->workin) {
997: link = dm->workin;
998: dm->workin = dm->workin->next;
999: } else {
1000: PetscNewLog(dm,&link);
1001: }
1002: PetscDataTypeGetSize(dtype,&size);
1003: if (size*count > link->bytes) {
1004: PetscFree(link->mem);
1005: PetscMalloc(size*count,&link->mem);
1006: link->bytes = size*count;
1007: }
1008: link->next = dm->workout;
1009: dm->workout = link;
1010: *(void**)mem = link->mem;
1011: return(0);
1012: }
1016: /*@C
1017: DMRestoreWorkArray - Restores a work array obtained with DMGetWorkArray()
1019: Not Collective
1021: Input Parameters:
1022: + dm - the DM object
1023: . count - The minimum size
1024: - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)
1026: Output Parameter:
1027: . array - the work array
1029: Level: developer
1031: .seealso DMDestroy(), DMCreate()
1032: @*/
1033: PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
1034: {
1035: DMWorkLink *p,link;
1040: for (p=&dm->workout; (link=*p); p=&link->next) {
1041: if (link->mem == *(void**)mem) {
1042: *p = link->next;
1043: link->next = dm->workin;
1044: dm->workin = link;
1045: *(void**)mem = NULL;
1046: return(0);
1047: }
1048: }
1049: SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out");
1050: return(0);
1051: }
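/*
   Editorial usage sketch (not part of dm.c): checking a scalar work array in and
   out; n is a placeholder for the required length.  Error checking omitted.

     PetscScalar *work;
     DMGetWorkArray(dm,n,PETSC_SCALAR,&work);
     // ... use work[0..n-1] ...
     DMRestoreWorkArray(dm,n,PETSC_SCALAR,&work);
*/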
1055: PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace))
1056: {
1059: if (field >= 10) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field);
1060: dm->nullspaceConstructors[field] = nullsp;
1061: return(0);
1062: }
1066: /*@C
1067: DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field
1069: Not collective
1071: Input Parameter:
1072: . dm - the DM object
1074: Output Parameters:
1075: + numFields - The number of fields (or NULL if not requested)
1076: . fieldNames - The name for each field (or NULL if not requested)
1077: - fields - The global indices for each field (or NULL if not requested)
1079: Level: intermediate
1081: Notes:
1082: The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1083: PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with
1084: PetscFree().
1086: .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
1087: @*/
1088: PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields)
1089: {
1090: PetscSection section, sectionGlobal;
1095: if (numFields) {
1097: *numFields = 0;
1098: }
1099: if (fieldNames) {
1101: *fieldNames = NULL;
1102: }
1103: if (fields) {
1105: *fields = NULL;
1106: }
1107: DMGetDefaultSection(dm, &section);
1108: if (section) {
1109: PetscInt *fieldSizes, **fieldIndices;
1110: PetscInt nF, f, pStart, pEnd, p;
1112: DMGetDefaultGlobalSection(dm, &sectionGlobal);
1113: PetscSectionGetNumFields(section, &nF);
1114: PetscMalloc2(nF,&fieldSizes,nF,&fieldIndices);
1115: PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);
1116: for (f = 0; f < nF; ++f) {
1117: fieldSizes[f] = 0;
1118: }
1119: for (p = pStart; p < pEnd; ++p) {
1120: PetscInt gdof;
1122: PetscSectionGetDof(sectionGlobal, p, &gdof);
1123: if (gdof > 0) {
1124: for (f = 0; f < nF; ++f) {
1125: PetscInt fdof, fcdof;
1127: PetscSectionGetFieldDof(section, p, f, &fdof);
1128: PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);
1129: fieldSizes[f] += fdof-fcdof;
1130: }
1131: }
1132: }
1133: for (f = 0; f < nF; ++f) {
1134: PetscMalloc1(fieldSizes[f], &fieldIndices[f]);
1135: fieldSizes[f] = 0;
1136: }
1137: for (p = pStart; p < pEnd; ++p) {
1138: PetscInt gdof, goff;
1140: PetscSectionGetDof(sectionGlobal, p, &gdof);
1141: if (gdof > 0) {
1142: PetscSectionGetOffset(sectionGlobal, p, &goff);
1143: for (f = 0; f < nF; ++f) {
1144: PetscInt fdof, fcdof, fc;
1146: PetscSectionGetFieldDof(section, p, f, &fdof);
1147: PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);
1148: for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) {
1149: fieldIndices[f][fieldSizes[f]] = goff++;
1150: }
1151: }
1152: }
1153: }
1154: if (numFields) *numFields = nF;
1155: if (fieldNames) {
1156: PetscMalloc1(nF, fieldNames);
1157: for (f = 0; f < nF; ++f) {
1158: const char *fieldName;
1160: PetscSectionGetFieldName(section, f, &fieldName);
1161: PetscStrallocpy(fieldName, (char**) &(*fieldNames)[f]);
1162: }
1163: }
1164: if (fields) {
1165: PetscMalloc1(nF, fields);
1166: for (f = 0; f < nF; ++f) {
1167: ISCreateGeneral(PetscObjectComm((PetscObject)dm), fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);
1168: }
1169: }
1170: PetscFree2(fieldSizes,fieldIndices);
1171: } else if (dm->ops->createfieldis) {
1172: (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);
1173: }
1174: return(0);
1175: }
1180: /*@C
1181: DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems
1182: corresponding to different fields: each IS contains the global indices of the dofs of the
1183: corresponding field. The optional list of DMs define the DM for each subproblem.
1184: Generalizes DMCreateFieldIS().
1186: Not collective
1188: Input Parameter:
1189: . dm - the DM object
1191: Output Parameters:
1192: + len - The number of subproblems in the field decomposition (or NULL if not requested)
1193: . namelist - The name for each field (or NULL if not requested)
1194: . islist - The global indices for each field (or NULL if not requested)
1195: - dmlist - The DMs for each field subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined)
1197: Level: intermediate
1199: Notes:
1200: The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1201: PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1202: and all of the arrays should be freed with PetscFree().
1204: .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1205: @*/
1206: PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist)
1207: {
1212: if (len) {
1214: *len = 0;
1215: }
1216: if (namelist) {
1218: *namelist = 0;
1219: }
1220: if (islist) {
1222: *islist = 0;
1223: }
1224: if (dmlist) {
1226: *dmlist = 0;
1227: }
1228: /*
1229: Is it a good idea to apply the following check across all impls?
1230: Perhaps some impls can have a well-defined decomposition before DMSetUp?
1231: This, however, follows the general principle that accessors are not well-behaved until the object is set up.
1232: */
1233: if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp");
1234: if (!dm->ops->createfielddecomposition) {
1235: PetscSection section;
1236: PetscInt numFields, f;
1238: DMGetDefaultSection(dm, &section);
1239: if (section) {PetscSectionGetNumFields(section, &numFields);}
1240: if (section && numFields && dm->ops->createsubdm) {
1241: *len = numFields;
1242: if (namelist) {PetscMalloc1(numFields,namelist);}
1243: if (islist) {PetscMalloc1(numFields,islist);}
1244: if (dmlist) {PetscMalloc1(numFields,dmlist);}
1245: for (f = 0; f < numFields; ++f) {
1246: const char *fieldName;
1248: DMCreateSubDM(dm, 1, &f, islist ? &(*islist)[f] : NULL, dmlist ? &(*dmlist)[f] : NULL);
1249: if (namelist) {
1250: PetscSectionGetFieldName(section, f, &fieldName);
1251: PetscStrallocpy(fieldName, (char**) &(*namelist)[f]);
1252: }
1253: }
1254: } else {
1255: DMCreateFieldIS(dm, len, namelist, islist);
1256: /* By default there are no DMs associated with subproblems. */
1257: if (dmlist) *dmlist = NULL;
1258: }
1259: } else {
1260: (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist);
1261: }
1262: return(0);
1263: }
1267: /*@C
1268: DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in.
1269: The fields are defined by DMCreateFieldIS().
1271: Not collective
1273: Input Parameters:
1274: + dm - the DM object
1275: . numFields - number of fields in this subproblem
1276: - fields - The field numbers defining the subproblem
1278: Output Parameters:
1279: . is - The global indices for the subproblem
1280: - subdm - The DM for the subproblem
1282: Level: intermediate
1284: .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1285: @*/
1286: PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm)
1287: {
1295: if (dm->ops->createsubdm) {
1296: (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm);
1297: } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined");
1298: return(0);
1299: }
1304: /*@C
1305: DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems
1306: corresponding to restrictions to pairs nested subdomains: each IS contains the global
1307: indices of the dofs of the corresponding subdomains. The inner subdomains conceptually
1308: define a nonoverlapping covering, while outer subdomains can overlap.
1309: The optional list of DMs define the DM for each subproblem.
1311: Not collective
1313: Input Parameter:
1314: . dm - the DM object
1316: Output Parameters:
1317: + len - The number of subproblems in the domain decomposition (or NULL if not requested)
1318: . namelist - The name for each subdomain (or NULL if not requested)
1319: . innerislist - The global indices for each inner subdomain (or NULL, if not requested)
1320: . outerislist - The global indices for each outer subdomain (or NULL, if not requested)
1321: - dmlist - The DMs for each subdomain subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined)
1323: Level: intermediate
1325: Notes:
1326: The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1327: PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1328: and all of the arrays should be freed with PetscFree().
1330: .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition()
1331: @*/
1332: PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist)
1333: {
1334: PetscErrorCode ierr;
1335: DMSubDomainHookLink link;
1336: PetscInt i,l;
1345: /*
1346: Is it a good idea to apply the following check across all impls?
1347: Perhaps some impls can have a well-defined decomposition before DMSetUp?
1348: This, however, follows the general principle that accessors are not well-behaved until the object is set up.
1349: */
1350: if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp");
1351: if (dm->ops->createdomaindecomposition) {
1352: (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist);
1353: /* copy subdomain hooks and context over to the subdomain DMs */
1354: if (dmlist) {
1355: for (i = 0; i < l; i++) {
1356: for (link=dm->subdomainhook; link; link=link->next) {
1357: if (link->ddhook) {(*link->ddhook)(dm,(*dmlist)[i],link->ctx);}
1358: }
1359: (*dmlist)[i]->ctx = dm->ctx;
1360: }
1361: }
1362: if (len) *len = l;
1363: }
1364: return(0);
1365: }
1370: /*@C
1371: DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector
1373: Not collective
1375: Input Parameters:
1376: + dm - the DM object
1377: . n - the number of subdomain scatters
1378: - subdms - the local subdomains
1380: Output Parameters:
1381: + n - the number of scatters returned
1382: . iscat - scatter from global vector to nonoverlapping global vector entries on subdomain
1383: . oscat - scatter from global vector to overlapping global vector entries on subdomain
1384: - gscat - scatter from global vector to local vector on subdomain (fills in ghosts)
1386: Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allows for the solution
1387: of general nonlinear problems with overlapping subdomain methods. While merely having index sets that enable subsets
1388: of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of
1389: solution and residual data.
1391: Level: developer
1393: .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1394: @*/
1395: PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat)
1396: {
1402: if (dm->ops->createddscatters) {
1403: (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat);
1404: } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined");
1405: return(0);
1406: }
1410: /*@
1411: DMRefine - Refines a DM object
1413: Collective on DM
1415: Input Parameter:
1416: + dm - the DM object
1417: - comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1419: Output Parameter:
1420: . dmf - the refined DM, or NULL
1422: Note: If no refinement was done, the return value is NULL
1424: Level: developer
1426: .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1427: @*/
1428: PetscErrorCode DMRefine(DM dm,MPI_Comm comm,DM *dmf)
1429: {
1430: PetscErrorCode ierr;
1431: DMRefineHookLink link;
1435: (*dm->ops->refine)(dm,comm,dmf);
1436: if (*dmf) {
1437: (*dmf)->ops->creatematrix = dm->ops->creatematrix;
1439: PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);
1441: (*dmf)->ctx = dm->ctx;
1442: (*dmf)->leveldown = dm->leveldown;
1443: (*dmf)->levelup = dm->levelup + 1;
1445: DMSetMatType(*dmf,dm->mattype);
1446: for (link=dm->refinehook; link; link=link->next) {
1447: if (link->refinehook) {
1448: (*link->refinehook)(dm,*dmf,link->ctx);
1449: }
1450: }
1451: }
1452: return(0);
1453: }
1457: /*@C
1458: DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid
1460: Logically Collective
1462: Input Arguments:
1463: + coarse - DM on which to run a hook when interpolating to a finer level
1464: . refinehook - function to run when setting up the finer level
1465: . interphook - function to run to update data on finer levels (once per SNESSolve())
1466: - ctx - [optional] user-defined context to provide data for the hooks (may be NULL)
1468: Calling sequence of refinehook:
1469: $ refinehook(DM coarse,DM fine,void *ctx);
1471: + coarse - coarse level DM
1472: . fine - fine level DM to interpolate problem to
1473: - ctx - optional user-defined function context
1475: Calling sequence for interphook:
1476: $ interphook(DM coarse,Mat interp,DM fine,void *ctx)
1478: + coarse - coarse level DM
1479: . interp - matrix interpolating a coarse-level solution to the finer grid
1480: . fine - fine level DM to update
1481: - ctx - optional user-defined function context
1483: Level: advanced
1485: Notes:
1486: This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing
1488: If this function is called multiple times, the hooks will be run in the order they are added.
1490: This function is currently not available from Fortran.
1492: .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1493: @*/
1494: PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx)
1495: {
1496: PetscErrorCode ierr;
1497: DMRefineHookLink link,*p;
1501: for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1502: PetscMalloc(sizeof(struct _DMRefineHookLink),&link);
1503: link->refinehook = refinehook;
1504: link->interphook = interphook;
1505: link->ctx = ctx;
1506: link->next = NULL;
1507: *p = link;
1508: return(0);
1509: }
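/*
   Editorial usage sketch (not part of dm.c): registering refine/interpolation
   hooks; MyRefineHook and MyInterpHook are placeholder user callbacks.  Error
   checking omitted.

     static PetscErrorCode MyRefineHook(DM coarse,DM fine,void *ctx)
     {
       // set up auxiliary data on the fine DM
       return 0;
     }
     static PetscErrorCode MyInterpHook(DM coarse,Mat interp,DM fine,void *ctx)
     {
       // interpolate auxiliary fields from coarse to fine
       return 0;
     }

     DMRefineHookAdd(dm,MyRefineHook,MyInterpHook,NULL);
*/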
1513: /*@
1514: DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd()
1516: Collective if any hooks are
1518: Input Arguments:
1519: + coarse - coarser DM to use as a base
1520: . interp - interpolation matrix, apply using MatInterpolate()
1521: - fine - finer DM to update
1523: Level: developer
1525: .seealso: DMRefineHookAdd(), MatInterpolate()
1526: @*/
1527: PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine)
1528: {
1529: PetscErrorCode ierr;
1530: DMRefineHookLink link;
1533: for (link=fine->refinehook; link; link=link->next) {
1534: if (link->interphook) {
1535: (*link->interphook)(coarse,interp,fine,link->ctx);
1536: }
1537: }
1538: return(0);
1539: }
1543: /*@
1544: DMGetRefineLevel - Gets the number of refinements that have generated this DM.
1546: Not Collective
1548: Input Parameter:
1549: . dm - the DM object
1551: Output Parameter:
1552: . level - number of refinements
1554: Level: developer
1556: .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1558: @*/
1559: PetscErrorCode DMGetRefineLevel(DM dm,PetscInt *level)
1560: {
1563: *level = dm->levelup;
1564: return(0);
1565: }
1569: /*@C
1570: DMGlobalToLocalHookAdd - adds a callback to be run when global to local is called
1572: Logically Collective
1574: Input Arguments:
1575: + dm - the DM
1576: . beginhook - function to run at the beginning of DMGlobalToLocalBegin()
1577: . endhook - function to run after DMGlobalToLocalEnd() has completed
1578: - ctx - [optional] user-defined context to provide data for the hooks (may be NULL)
1580: Calling sequence for beginhook:
1581: $ beginhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)
1583: + dm - global DM
1584: . g - global vector
1585: . mode - mode
1586: . l - local vector
1587: - ctx - optional user-defined function context
1590: Calling sequence for endhook:
1591: $ endhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)
1593: + dm - global DM
1594: - ctx - optional user-defined function context
1596: Level: advanced
1598: .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1599: @*/
1600: PetscErrorCode DMGlobalToLocalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx)
1601: {
1602: PetscErrorCode ierr;
1603: DMGlobalToLocalHookLink link,*p;
1607: for (p=&dm->gtolhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1608: PetscMalloc(sizeof(struct _DMGlobalToLocalHookLink),&link);
1609: link->beginhook = beginhook;
1610: link->endhook = endhook;
1611: link->ctx = ctx;
1612: link->next = NULL;
1613: *p = link;
1614: return(0);
1615: }
1619: /*@
1620: DMGlobalToLocalBegin - Begins updating local vectors from global vector
1622: Neighbor-wise Collective on DM
1624: Input Parameters:
1625: + dm - the DM object
1626: . g - the global vector
1627: . mode - INSERT_VALUES or ADD_VALUES
1628: - l - the local vector
1631: Level: beginner
1633: .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1635: @*/
1636: PetscErrorCode DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
1637: {
1638: PetscSF sf;
1639: PetscErrorCode ierr;
1640: DMGlobalToLocalHookLink link;
1644: for (link=dm->gtolhook; link; link=link->next) {
1645: if (link->beginhook) {
1646: (*link->beginhook)(dm,g,mode,l,link->ctx);
1647: }
1648: }
1649: DMGetDefaultSF(dm, &sf);
1650: if (sf) {
1651: PetscScalar *lArray, *gArray;
1653: if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1654: VecGetArray(l, &lArray);
1655: VecGetArray(g, &gArray);
1656: PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);
1657: VecRestoreArray(l, &lArray);
1658: VecRestoreArray(g, &gArray);
1659: } else {
1660: (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);
1661: }
1662: return(0);
1663: }
1667: /*@
1668: DMGlobalToLocalEnd - Ends updating local vectors from global vector
1670: Neighbor-wise Collective on DM
1672: Input Parameters:
1673: + dm - the DM object
1674: . g - the global vector
1675: . mode - INSERT_VALUES or ADD_VALUES
1676: - l - the local vector
1679: Level: beginner
1681: .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1683: @*/
1684: PetscErrorCode DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
1685: {
1686: PetscSF sf;
1687: PetscErrorCode ierr;
1688: PetscScalar *lArray, *gArray;
1689: DMGlobalToLocalHookLink link;
1693: DMGetDefaultSF(dm, &sf);
1694: if (sf) {
1695: if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1697: VecGetArray(l, &lArray);
1698: VecGetArray(g, &gArray);
1699: PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);
1700: VecRestoreArray(l, &lArray);
1701: VecRestoreArray(g, &gArray);
1702: } else {
1703: (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);
1704: }
1705: for (link=dm->gtolhook; link; link=link->next) {
1706: if (link->endhook) {(*link->endhook)(dm,g,mode,l,link->ctx);}
1707: }
1708: return(0);
1709: }
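/*
   Editorial usage sketch (not part of dm.c): the begin/end pair updates a local
   (ghosted) vector from a global vector.  Error checking omitted.

     DMGlobalToLocalBegin(dm,g,INSERT_VALUES,l);
     DMGlobalToLocalEnd(dm,g,INSERT_VALUES,l);
     // l now holds the owned values of g plus up-to-date ghost values
*/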
1713: /*@
1714: DMLocalToGlobalBegin - updates global vectors from local vectors
1716: Neighbor-wise Collective on DM
1718: Input Parameters:
1719: + dm - the DM object
1720: . l - the local vector
1721: . mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that
1722: base point.
1723: - g - the global vector
1725: Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. If you would like to simply add the non-ghosted values in the local
1726: array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work
1727: global array to the final global array with VecAXPY().
1729: Level: beginner
1731: .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()
1733: @*/
1734: PetscErrorCode DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
1735: {
1736: PetscSF sf;
1741: DMGetDefaultSF(dm, &sf);
1742: if (sf) {
1743: MPI_Op op;
1744: PetscScalar *lArray, *gArray;
1746: switch (mode) {
1747: case INSERT_VALUES:
1748: case INSERT_ALL_VALUES:
1749: op = MPIU_REPLACE; break;
1750: case ADD_VALUES:
1751: case ADD_ALL_VALUES:
1752: op = MPI_SUM; break;
1753: default:
1754: SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1755: }
1756: VecGetArray(l, &lArray);
1757: VecGetArray(g, &gArray);
1758: PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);
1759: VecRestoreArray(l, &lArray);
1760: VecRestoreArray(g, &gArray);
1761: } else {
1762: (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);
1763: }
1764: return(0);
1765: }
1769: /*@
1770: DMLocalToGlobalEnd - updates global vectors from local vectors
1772: Neighbor-wise Collective on DM
1774: Input Parameters:
1775: + dm - the DM object
1776: . l - the local vector
1777: . mode - INSERT_VALUES or ADD_VALUES
1778: - g - the global vector
1781: Level: beginner
1783: .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalEnd()
1785: @*/
1786: PetscErrorCode DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
1787: {
1788: PetscSF sf;
1793: DMGetDefaultSF(dm, &sf);
1794: if (sf) {
1795: MPI_Op op;
1796: PetscScalar *lArray, *gArray;
1798: switch (mode) {
1799: case INSERT_VALUES:
1800: case INSERT_ALL_VALUES:
1801: op = MPIU_REPLACE; break;
1802: case ADD_VALUES:
1803: case ADD_ALL_VALUES:
1804: op = MPI_SUM; break;
1805: default:
1806: SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1807: }
1808: VecGetArray(l, &lArray);
1809: VecGetArray(g, &gArray);
1810: PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);
1811: VecRestoreArray(l, &lArray);
1812: VecRestoreArray(g, &gArray);
1813: } else {
1814: (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);
1815: }
1816: return(0);
1817: }
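/*
   Editorial usage sketch (not part of dm.c): accumulating local contributions
   into a global vector, following the Notes for DMLocalToGlobalBegin(); the
   global vector is zeroed first so that ghost-point contributions from
   neighboring processes accumulate correctly.  Error checking omitted.

     VecSet(g,0.0);
     DMLocalToGlobalBegin(dm,l,ADD_VALUES,g);
     DMLocalToGlobalEnd(dm,l,ADD_VALUES,g);
*/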
1821: /*@
1822: DMLocalToLocalBegin - Maps from a local vector (including ghost points
1823: that contain irrelevant values) to another local vector where the ghost
1824: points in the second are set correctly. Must be followed by DMLocalToLocalEnd().
1826: Neighbor-wise Collective on DM and Vec
1828: Input Parameters:
1829: + dm - the DM object
1830: . g - the original local vector
1831: - mode - one of INSERT_VALUES or ADD_VALUES
1833: Output Parameter:
1834: . l - the local vector with correct ghost values
1836: Level: intermediate
1838: Notes:
1839: The local vectors used here need not be the same as those
1840: obtained from DMCreateLocalVector(), BUT they
1841: must have the same parallel data layout; they could, for example, be
1842: obtained with VecDuplicate() from the DM originating vectors.
1844: .keywords: DM, local-to-local, begin
1845: .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalEnd(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1847: @*/
1848: PetscErrorCode DMLocalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
1849: {
1850: PetscErrorCode ierr;
1854: (*dm->ops->localtolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);
1855: return(0);
1856: }
1860: /*@
1861: DMLocalToLocalEnd - Maps from a local vector (including ghost points
1862: that contain irrelevant values) to another local vector where the ghost
1863: points in the second are set correctly. Must be preceded by DMLocalToLocalBegin().
1865: Neighbor-wise Collective on DM and Vec
1867: Input Parameters:
1868: + da - the DM object
1869: . g - the original local vector
1870: - mode - one of INSERT_VALUES or ADD_VALUES
1872: Output Parameter:
1873: . l - the local vector with correct ghost values
1875: Level: intermediate
1877: Notes:
1878: The local vectors used here need not be the same as those
1879: obtained from DMCreateLocalVector(), BUT they
1880: must have the same parallel data layout; they could, for example, be
1881: obtained with VecDuplicate() from the DM originating vectors.
1883: .keywords: DM, local-to-local, end
1884: .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalBegin(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1886: @*/
1887: PetscErrorCode DMLocalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
1888: {
1889: PetscErrorCode ierr;
1893: (*dm->ops->localtolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);
1894: return(0);
1895: }
1900: /*@
1901: DMCoarsen - Coarsens a DM object
1903: Collective on DM
1905: Input Parameter:
1906: + dm - the DM object
1907: - comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1909: Output Parameter:
1910: . dmc - the coarsened DM
1912: Level: developer
1914: .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1916: @*/
1917: PetscErrorCode DMCoarsen(DM dm, MPI_Comm comm, DM *dmc)
1918: {
1919: PetscErrorCode ierr;
1920: DMCoarsenHookLink link;
1924: (*dm->ops->coarsen)(dm, comm, dmc);
1925: (*dmc)->ops->creatematrix = dm->ops->creatematrix;
1926: PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);
1927: (*dmc)->ctx = dm->ctx;
1928: (*dmc)->levelup = dm->levelup;
1929: (*dmc)->leveldown = dm->leveldown + 1;
1930: DMSetMatType(*dmc,dm->mattype);
1931: for (link=dm->coarsenhook; link; link=link->next) {
1932: if (link->coarsenhook) {(*link->coarsenhook)(dm,*dmc,link->ctx);}
1933: }
1934: return(0);
1935: }
1939: /*@C
1940: DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid
1942: Logically Collective
1944: Input Arguments:
1945: + fine - nonlinear solver context on which to run a hook when restricting to a coarser level
1946: . coarsenhook - function to run when setting up a coarser level
1947: . restricthook - function to run to update data on coarser levels (once per SNESSolve())
1948: - ctx - [optional] user-defined context to provide data for the hooks (may be NULL)
1950: Calling sequence of coarsenhook:
1951: $ coarsenhook(DM fine,DM coarse,void *ctx);
1953: + fine - fine level DM
1954: . coarse - coarse level DM to restrict problem to
1955: - ctx - optional user-defined function context
1957: Calling sequence for restricthook:
1958: $ restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)
1960: + fine - fine level DM
1961: . mrestrict - matrix restricting a fine-level solution to the coarse grid
1962: . rscale - scaling vector for restriction
1963: . inject - matrix restricting by injection
1964: . coarse - coarse level DM to update
1965: - ctx - optional user-defined function context
1967: Level: advanced
1969: Notes:
1970: This function is only needed if auxiliary data needs to be set up on coarse grids.
1972: If this function is called multiple times, the hooks will be run in the order they are added.
1974: In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1975: extract the finest level information from its context (instead of from the SNES).
1977: This function is currently not available from Fortran.
1979: .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1980: @*/
1981: PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
1982: {
1983: PetscErrorCode ierr;
1984: DMCoarsenHookLink link,*p;
1988: for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1989: PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);
1990: link->coarsenhook = coarsenhook;
1991: link->restricthook = restricthook;
1992: link->ctx = ctx;
1993: link->next = NULL;
1994: *p = link;
1995: return(0);
1996: }
2000: /*@
2001: DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()
2003: Collective if any hooks are
2005: Input Arguments:
2006: + fine - finer DM to use as a base
2007: . restrct - restriction matrix, apply using MatRestrict()
2008: . inject - injection matrix, also use MatRestrict()
2009: - coarse - coarser DM to update
2011: Level: developer
2013: .seealso: DMCoarsenHookAdd(), MatRestrict()
2014: @*/
2015: PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse)
2016: {
2017: PetscErrorCode ierr;
2018: DMCoarsenHookLink link;
2021: for (link=fine->coarsenhook; link; link=link->next) {
2022: if (link->restricthook) {
2023: (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);
2024: }
2025: }
2026: return(0);
2027: }
2031: /*@C
2032: DMSubDomainHookAdd - adds a callback to be run when restricting a problem to the coarse grid
2034: Logically Collective
2036: Input Arguments:
2037: + global - global DM
2038: . ddhook - function to run to pass data to the decomposition DM upon its creation
2039: . restricthook - function to run to update data on block solve (at the beginning of the block solve)
2040: - ctx - [optional] user-defined context to provide data for the hooks (may be NULL)
2043: Calling sequence for ddhook:
2044: $ ddhook(DM global,DM block,void *ctx)
2046: + global - global DM
2047: . block - block DM
2048: - ctx - optional user-defined function context
2050: Calling sequence for restricthook:
2051: $ restricthook(DM global,VecScatter out,VecScatter in,DM block,void *ctx)
2053: + global - global DM
2054: . out - scatter to the outer (with ghost and overlap points) block vector
2055: . in - scatter to block vector values only owned locally
2056: . block - block DM
2057: - ctx - optional user-defined function context
2059: Level: advanced
2061: Notes:
2062: This function is only needed if auxiliary data needs to be set up on subdomain DMs.
2064: If this function is called multiple times, the hooks will be run in the order they are added.
2066: In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
2067: extract the global information from its context (instead of from the SNES).
2069: This function is currently not available from Fortran.
2071: .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
2072: @*/
2073: PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
2074: {
2075: PetscErrorCode ierr;
2076: DMSubDomainHookLink link,*p;
2080: for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
2081: PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);
2082: link->restricthook = restricthook;
2083: link->ddhook = ddhook;
2084: link->ctx = ctx;
2085: link->next = NULL;
2086: *p = link;
2087: return(0);
2088: }
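/*
   A minimal usage sketch of the subdomain hooks above; MySubDomainHook() and
   MySubDomainRestrict() are hypothetical user code, and dm is a previously created global DM:

.vb
   static PetscErrorCode MySubDomainHook(DM global, DM block, void *ctx)
   {
     // runs when each subdomain (block) DM is created
     return 0;
   }

   static PetscErrorCode MySubDomainRestrict(DM global, VecScatter out, VecScatter in, DM block, void *ctx)
   {
     // runs at the start of each block solve; use the scatters to move auxiliary data into the block
     return 0;
   }

   DMSubDomainHookAdd(dm, MySubDomainHook, MySubDomainRestrict, NULL);
.ve
*/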
2092: /*@
2093: DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd()
2095: Collective if any hooks are run
2097: Input Arguments:
2098: + global - global DM to use as a base
2099: . oscatter - scatter from domain global vector filling subdomain global vector with overlap
2100: . gscatter - scatter from domain global vector filling subdomain local vector with ghosts
2101: - subdm - subdomain DM to update
2103: Level: developer
2105: .seealso: DMCoarsenHookAdd(), MatRestrict()
2106: @*/
2107: PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm)
2108: {
2109: PetscErrorCode ierr;
2110: DMSubDomainHookLink link;
2113: for (link=global->subdomainhook; link; link=link->next) {
2114: if (link->restricthook) {
2115: (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx);
2116: }
2117: }
2118: return(0);
2119: }
2123: /*@
2124: DMGetCoarsenLevel - Gets the number of coarsenings that have generated this DM.
2126: Not Collective
2128: Input Parameter:
2129: . dm - the DM object
2131: Output Parameter:
2132: . level - number of coarsenings
2134: Level: developer
2136: .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2138: @*/
2139: PetscErrorCode DMGetCoarsenLevel(DM dm,PetscInt *level)
2140: {
2143: *level = dm->leveldown;
2144: return(0);
2145: }
2151: /*@C
2152: DMRefineHierarchy - Refines a DM object, all levels at once
2154: Collective on DM
2156: Input Parameters:
2157: + dm - the DM object
2158: - nlevels - the number of levels of refinement
2160: Output Parameter:
2161: . dmf - the refined DM hierarchy
2163: Level: developer
2165: .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2167: @*/
2168: PetscErrorCode DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[])
2169: {
2174: if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2175: if (nlevels == 0) return(0);
2176: if (dm->ops->refinehierarchy) {
2177: (*dm->ops->refinehierarchy)(dm,nlevels,dmf);
2178: } else if (dm->ops->refine) {
2179: PetscInt i;
2181: DMRefine(dm,PetscObjectComm((PetscObject)dm),&dmf[0]);
2182: for (i=1; i<nlevels; i++) {
2183: DMRefine(dmf[i-1],PetscObjectComm((PetscObject)dm),&dmf[i]);
2184: }
2185: } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No RefineHierarchy for this DM yet");
2186: return(0);
2187: }
2191: /*@C
2192: DMCoarsenHierarchy - Coarsens a DM object, all levels at once
2194: Collective on DM
2196: Input Parameters:
2197: + dm - the DM object
2198: - nlevels - the number of levels of coarsening
2200: Output Parameter:
2201: . dmc - the coarsened DM hierarchy
2203: Level: developer
2205: .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2207: @*/
2208: PetscErrorCode DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
2209: {
2214: if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2215: if (nlevels == 0) return(0);
2217: if (dm->ops->coarsenhierarchy) {
2218: (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);
2219: } else if (dm->ops->coarsen) {
2220: PetscInt i;
2222: DMCoarsen(dm,PetscObjectComm((PetscObject)dm),&dmc[0]);
2223: for (i=1; i<nlevels; i++) {
2224: DMCoarsen(dmc[i-1],PetscObjectComm((PetscObject)dm),&dmc[i]);
2225: }
2226: } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
2227: return(0);
2228: }
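/*
   A small usage sketch for the hierarchy routines above; dm is assumed to be a previously
   created and set-up DM, and the returned DMs are typically destroyed by the caller when no
   longer needed:

.vb
   PetscInt nlevels = 3, i;
   DM       dmc[3];

   DMCoarsenHierarchy(dm, nlevels, dmc);   // dmc[0] is one level coarser than dm
   // ... use the hierarchy, e.g. to configure a multigrid solver ...
   for (i = 0; i < nlevels; i++) DMDestroy(&dmc[i]);
.ve
*/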
2232: /*@
2233: DMCreateAggregates - Gets the aggregates that map between
2234: grids associated with two DMs.
2236: Collective on DM
2238: Input Parameters:
2239: + dmc - the coarse grid DM
2240: - dmf - the fine grid DM
2242: Output Parameter:
2243: . rest - the restriction matrix (transpose of the projection matrix)
2245: Level: intermediate
2247: .keywords: interpolation, restriction, multigrid
2249: .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation()
2250: @*/
2251: PetscErrorCode DMCreateAggregates(DM dmc, DM dmf, Mat *rest)
2252: {
2258: (*dmc->ops->getaggregates)(dmc, dmf, rest);
2259: return(0);
2260: }
2264: /*@C
2265: DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed
2267: Not Collective
2269: Input Parameters:
2270: + dm - the DM object
2271: - destroy - the destroy function
2273: Level: intermediate
2275: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2277: @*/
2278: PetscErrorCode DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**))
2279: {
2282: dm->ctxdestroy = destroy;
2283: return(0);
2284: }
2288: /*@
2289: DMSetApplicationContext - Set a user context into a DM object
2291: Not Collective
2293: Input Parameters:
2294: + dm - the DM object
2295: - ctx - the user context
2297: Level: intermediate
2299: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2301: @*/
2302: PetscErrorCode DMSetApplicationContext(DM dm,void *ctx)
2303: {
2306: dm->ctx = ctx;
2307: return(0);
2308: }
2312: /*@
2313: DMGetApplicationContext - Gets a user context from a DM object
2315: Not Collective
2317: Input Parameter:
2318: . dm - the DM object
2320: Output Parameter:
2321: . ctx - the user context
2323: Level: intermediate
2325: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2327: @*/
2328: PetscErrorCode DMGetApplicationContext(DM dm,void *ctx)
2329: {
2332: *(void**)ctx = dm->ctx;
2333: return(0);
2334: }
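/*
   A small usage sketch for the application-context routines above, assuming a hypothetical
   AppCtx structure holding problem parameters and a previously created DM dm:

.vb
   typedef struct { PetscReal nu; } AppCtx;
   AppCtx  user;
   AppCtx *ctx;

   user.nu = 0.1;
   DMSetApplicationContext(dm, &user);
   // ... later, in code that only has access to the DM:
   DMGetApplicationContext(dm, &ctx);
   // ctx now points to user, so ctx->nu == 0.1
.ve
*/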
2338: /*@C
2339: DMSetVariableBounds - sets a function to compute the lower and upper bound vectors for SNESVI.
2341: Logically Collective on DM
2343: Input Parameters:
2344: + dm - the DM object
2345: - f - the function that computes variable bounds used by SNESVI (use NULL to cancel a previous function that was set)
2347: Level: intermediate
2349: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
2350: DMSetJacobian()
2352: @*/
2353: PetscErrorCode DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
2354: {
2356: dm->ops->computevariablebounds = f;
2357: return(0);
2358: }
2362: /*@
2363: DMHasVariableBounds - does the DM object have a variable bounds function?
2365: Not Collective
2367: Input Parameter:
2368: . dm - the DM object
2370: Output Parameter:
2371: . flg - PETSC_TRUE if the variable bounds function exists
2373: Level: developer
2375: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2377: @*/
2378: PetscErrorCode DMHasVariableBounds(DM dm,PetscBool *flg)
2379: {
2381: *flg = (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE;
2382: return(0);
2383: }
2387: /*@C
2388: DMComputeVariableBounds - compute variable bounds used by SNESVI.
2390: Logically Collective on DM
2392: Input Parameter:
2393: . dm - the DM object
2396: Output Parameters:
2397: + xl - lower bound
2398: - xu - upper bound
2400: Level: intermediate
2402: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2404: @*/
2405: PetscErrorCode DMComputeVariableBounds(DM dm, Vec xl, Vec xu)
2406: {
2412: if (dm->ops->computevariablebounds) {
2413: (*dm->ops->computevariablebounds)(dm, xl,xu);
2414: } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds.");
2415: return(0);
2416: }
2420: /*@
2421: DMHasColoring - does the DM object have a method of providing a coloring?
2423: Not Collective
2425: Input Parameter:
2426: . dm - the DM object
2428: Output Parameter:
2429: . flg - PETSC_TRUE if the DM has facilities for DMCreateColoring().
2431: Level: developer
2433: .seealso DMHasFunction(), DMCreateColoring()
2435: @*/
2436: PetscErrorCode DMHasColoring(DM dm,PetscBool *flg)
2437: {
2439: *flg = (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE;
2440: return(0);
2441: }
2445: /*@C
2446: DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear.
2448: Collective on DM
2450: Input Parameters:
2451: + dm - the DM object
2452: - x - the vector at which to compute the residual and Jacobian when NULL is passed to those routines; will be NULL for linear problems
2454: Level: developer
2456: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2458: @*/
2459: PetscErrorCode DMSetVec(DM dm,Vec x)
2460: {
2464: if (x) {
2465: if (!dm->x) {
2466: DMCreateGlobalVector(dm,&dm->x);
2467: }
2468: VecCopy(x,dm->x);
2469: } else if (dm->x) {
2470: VecDestroy(&dm->x);
2471: }
2472: return(0);
2473: }
2475: PetscFunctionList DMList = NULL;
2476: PetscBool DMRegisterAllCalled = PETSC_FALSE;
2480: /*@C
2481: DMSetType - Builds a DM, for a particular DM implementation.
2483: Collective on DM
2485: Input Parameters:
2486: + dm - The DM object
2487: - method - The name of the DM type
2489: Options Database Key:
2490: . -dm_type <type> - Sets the DM type; use -help for a list of available types
2492: Notes:
2493: See "petsc/include/petscdm.h" for available DM types (for instance, DMDA, DMPLEX, or DMCOMPOSITE).
2495: Level: intermediate
2497: .keywords: DM, set, type
2498: .seealso: DMGetType(), DMCreate()
2499: @*/
2500: PetscErrorCode DMSetType(DM dm, DMType method)
2501: {
2502: PetscErrorCode (*r)(DM);
2503: PetscBool match;
2508: PetscObjectTypeCompare((PetscObject) dm, method, &match);
2509: if (match) return(0);
2511: if (!DMRegisterAllCalled) {DMRegisterAll();}
2512: PetscFunctionListFind(DMList,method,&r);
2513: if (!r) SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);
2515: if (dm->ops->destroy) {
2516: (*dm->ops->destroy)(dm);
2517: dm->ops->destroy = NULL;
2518: }
2519: (*r)(dm);
2520: PetscObjectChangeTypeName((PetscObject)dm,method);
2521: return(0);
2522: }
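/*
   A small usage sketch: creating a DM and selecting its implementation at run time.
   DMPLEX here is only an example; any registered type name can be used:

.vb
   DM dm;

   DMCreate(PETSC_COMM_WORLD, &dm);
   DMSetType(dm, DMPLEX);          // or DMDA, DMCOMPOSITE, a user-registered type, ...
   // ... type-specific setup (sizes, mesh, discretization) ...
   DMDestroy(&dm);
.ve
*/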
2526: /*@C
2527: DMGetType - Gets the DM type name (as a string) from the DM.
2529: Not Collective
2531: Input Parameter:
2532: . dm - The DM
2534: Output Parameter:
2535: . type - The DM type name
2537: Level: intermediate
2539: .keywords: DM, get, type, name
2540: .seealso: DMSetType(), DMCreate()
2541: @*/
2542: PetscErrorCode DMGetType(DM dm, DMType *type)
2543: {
2549: if (!DMRegisterAllCalled) {
2550: DMRegisterAll();
2551: }
2552: *type = ((PetscObject)dm)->type_name;
2553: return(0);
2554: }
2558: /*@C
2559: DMConvert - Converts a DM to another DM, either of the same or different type.
2561: Collective on DM
2563: Input Parameters:
2564: + dm - the DM
2565: - newtype - new DM type (use "same" for the same type)
2567: Output Parameter:
2568: . M - pointer to new DM
2570: Notes:
2571: Cannot be used to convert a sequential DM to a parallel one or vice versa;
2572: the MPI communicator of the generated DM is always the same as the communicator
2573: of the input DM.
2575: Level: intermediate
2577: .seealso: DMCreate()
2578: @*/
2579: PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
2580: {
2581: DM B;
2582: char convname[256];
2583: PetscBool sametype, issame;
2590: PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);
2591: PetscStrcmp(newtype, "same", &issame);
2592: {
2593: PetscErrorCode (*conv)(DM, DMType, DM*) = NULL;
2595: /*
2596: Order of precedence:
2597: 1) See if a specialized converter is known to the current DM.
2598: 2) See if a specialized converter is known to the desired DM class.
2599: 3) See if a good general converter is registered for the desired class
2600: 4) See if a good general converter is known for the current DM.
2601: 5) Use a really basic converter.
2602: */
2604: /* 1) See if a specialized converter is known to the current DM and the desired class */
2605: PetscStrcpy(convname,"DMConvert_");
2606: PetscStrcat(convname,((PetscObject) dm)->type_name);
2607: PetscStrcat(convname,"_");
2608: PetscStrcat(convname,newtype);
2609: PetscStrcat(convname,"_C");
2610: PetscObjectQueryFunction((PetscObject)dm,convname,&conv);
2611: if (conv) goto foundconv;
2613: /* 2) See if a specialized converter is known to the desired DM class. */
2614: DMCreate(PetscObjectComm((PetscObject)dm), &B);
2615: DMSetType(B, newtype);
2616: PetscStrcpy(convname,"DMConvert_");
2617: PetscStrcat(convname,((PetscObject) dm)->type_name);
2618: PetscStrcat(convname,"_");
2619: PetscStrcat(convname,newtype);
2620: PetscStrcat(convname,"_C");
2621: PetscObjectQueryFunction((PetscObject)B,convname,&conv);
2622: if (conv) {
2623: DMDestroy(&B);
2624: goto foundconv;
2625: }
2627: #if 0
2628: /* 3) See if a good general converter is registered for the desired class */
2629: conv = B->ops->convertfrom;
2630: DMDestroy(&B);
2631: if (conv) goto foundconv;
2633: /* 4) See if a good general converter is known for the current DM */
2634: if (dm->ops->convert) {
2635: conv = dm->ops->convert;
2636: }
2637: if (conv) goto foundconv;
2638: #endif
2640: /* 5) Use a really basic converter. */
2641: SETERRQ2(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);
2643: foundconv:
2644: PetscLogEventBegin(DM_Convert,dm,0,0,0);
2645: (*conv)(dm,newtype,M);
2646: PetscLogEventEnd(DM_Convert,dm,0,0,0);
2647: }
2648: PetscObjectStateIncrease((PetscObject) *M);
2649: return(0);
2650: }
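/*
   The specialized converters in steps 1) and 2) above are looked up by the composed-function
   name "DMConvert_<from type>_<to type>_C". A hedged sketch of attaching such a converter to a
   particular DM; MyConvert_plex_da is hypothetical user code and dm is a DMPLEX:

.vb
   extern PetscErrorCode MyConvert_plex_da(DM, DMType, DM*);

   PetscObjectComposeFunction((PetscObject)dm, "DMConvert_plex_da_C", MyConvert_plex_da);
   // a subsequent DMConvert(dm, DMDA, &da) on this DM will find and call it
.ve
*/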
2652: /*--------------------------------------------------------------------------------------------------------------------*/
2656: /*@C
2657: DMRegister - Adds a new DM component implementation
2659: Not Collective
2661: Input Parameters:
2662: + sname - The name of the new user-defined DM type
2663: - function - The creation routine for the new DM type
2665: Notes:
2666: DMRegister() may be called multiple times to add several user-defined DMs
2669: Sample usage:
2670: .vb
2671: DMRegister("my_da", MyDMCreate);
2672: .ve
2674: Then, your DM type can be chosen with the procedural interface via
2675: .vb
2676: DMCreate(MPI_Comm, DM *);
2677: DMSetType(DM,"my_da");
2678: .ve
2679: or at runtime via the option
2680: .vb
2681: -dm_type my_da
2682: .ve
2684: Level: advanced
2686: .keywords: DM, register
2687: .seealso: DMRegisterAll(), DMRegisterDestroy()
2689: @*/
2690: PetscErrorCode DMRegister(const char sname[],PetscErrorCode (*function)(DM))
2691: {
2695: PetscFunctionListAdd(&DMList,sname,function);
2696: return(0);
2697: }
2701: /*@C
2702: DMLoad - Loads a DM that has been stored in binary with DMView().
2704: Collective on PetscViewer
2706: Input Parameters:
2707: + newdm - the newly loaded DM; it must have been created with DMCreate() or
2708: some related function before a call to DMLoad().
2709: - viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
2710: HDF5 file viewer, obtained from PetscViewerHDF5Open()
2712: Level: intermediate
2714: Notes:
2715: The type is determined by the data in the file, any type set into the DM before this call is ignored.
2717: Notes for advanced users:
2718: Most users should not need to know the details of the binary storage
2719: format, since DMLoad() and DMView() completely hide these details.
2720: But for anyone who's interested, the standard binary DM storage
2721: format is
2722: .vb
2723: has not yet been determined
2724: .ve
2726: .seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
2727: @*/
2728: PetscErrorCode DMLoad(DM newdm, PetscViewer viewer)
2729: {
2730: PetscBool isbinary, ishdf5;
2736: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
2737: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5);
2738: if (isbinary) {
2739: PetscInt classid;
2740: char type[256];
2742: PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);
2743: if (classid != DM_FILE_CLASSID) SETERRQ1(PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not DM next in file, classid found %d",(int)classid);
2744: PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);
2745: DMSetType(newdm, type);
2746: if (newdm->ops->load) {(*newdm->ops->load)(newdm,viewer);}
2747: } else if (ishdf5) {
2748: if (newdm->ops->load) {(*newdm->ops->load)(newdm,viewer);}
2749: } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen() or PetscViewerHDF5Open()");
2750: return(0);
2751: }
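/*
   A minimal loading sketch; the file name "dm.bin" is a placeholder:

.vb
   DM          dm;
   PetscViewer viewer;

   PetscViewerBinaryOpen(PETSC_COMM_WORLD, "dm.bin", FILE_MODE_READ, &viewer);
   DMCreate(PETSC_COMM_WORLD, &dm);
   DMLoad(dm, viewer);             // the type recorded in the file overrides any preset type
   PetscViewerDestroy(&viewer);
.ve
*/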
2753: /******************************** FEM Support **********************************/
2757: PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[])
2758: {
2759: PetscInt f;
2763: PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);
2764: for (f = 0; f < len; ++f) {
2765: PetscPrintf(PETSC_COMM_SELF, " | %g |\n", (double)PetscRealPart(x[f]));
2766: }
2767: return(0);
2768: }
2772: PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[])
2773: {
2774: PetscInt f, g;
2778: PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);
2779: for (f = 0; f < rows; ++f) {
2780: PetscPrintf(PETSC_COMM_SELF, " |");
2781: for (g = 0; g < cols; ++g) {
2782: PetscPrintf(PETSC_COMM_SELF, " % 9.5g", PetscRealPart(A[f*cols+g]));
2783: }
2784: PetscPrintf(PETSC_COMM_SELF, " |\n");
2785: }
2786: return(0);
2787: }
2791: PetscErrorCode DMPrintLocalVec(DM dm, const char name[], PetscReal tol, Vec X)
2792: {
2793: PetscMPIInt rank, numProcs;
2794: PetscInt p;
2798: MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);
2799: MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);
2800: PetscPrintf(PetscObjectComm((PetscObject) dm), "%s:\n", name);
2801: for (p = 0; p < numProcs; ++p) {
2802: if (p == rank) {
2803: Vec x;
2805: VecDuplicate(X, &x);
2806: VecCopy(X, x);
2807: VecChop(x, tol);
2808: VecView(x, PETSC_VIEWER_STDOUT_SELF);
2809: VecDestroy(&x);
2810: PetscViewerFlush(PETSC_VIEWER_STDOUT_SELF);
2811: }
2812: PetscBarrier((PetscObject) dm);
2813: }
2814: return(0);
2815: }
2819: /*@
2820: DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.
2822: Input Parameter:
2823: . dm - The DM
2825: Output Parameter:
2826: . section - The PetscSection
2828: Level: intermediate
2830: Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
2832: .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2833: @*/
2834: PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section)
2835: {
2841: if (!dm->defaultSection && dm->ops->createdefaultsection) {(*dm->ops->createdefaultsection)(dm);}
2842: *section = dm->defaultSection;
2843: return(0);
2844: }
2848: /*@
2849: DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.
2851: Input Parameters:
2852: + dm - The DM
2853: - section - The PetscSection
2855: Level: intermediate
2857: Note: Any existing Section will be destroyed
2859: .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2860: @*/
2861: PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section)
2862: {
2863: PetscInt numFields;
2864: PetscInt f;
2870: PetscObjectReference((PetscObject)section);
2871: PetscSectionDestroy(&dm->defaultSection);
2872: dm->defaultSection = section;
2873: PetscSectionGetNumFields(dm->defaultSection, &numFields);
2874: if (numFields) {
2875: DMSetNumFields(dm, numFields);
2876: for (f = 0; f < numFields; ++f) {
2877: PetscObject disc;
2878: const char *name;
2880: PetscSectionGetFieldName(dm->defaultSection, f, &name);
2881: DMGetField(dm, f, &disc);
2882: PetscObjectSetName(disc, name);
2883: }
2884: }
2885: /* The global section will be rebuilt in the next call to DMGetDefaultGlobalSection(). */
2886: PetscSectionDestroy(&dm->defaultGlobalSection);
2887: return(0);
2888: }
2892: /*@
2893: DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.
2895: Collective on DM
2897: Input Parameter:
2898: . dm - The DM
2900: Output Parameter:
2901: . section - The PetscSection
2903: Level: intermediate
2905: Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
2907: .seealso: DMSetDefaultSection(), DMGetDefaultSection()
2908: @*/
2909: PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section)
2910: {
2916: if (!dm->defaultGlobalSection) {
2917: PetscSection s;
2919: DMGetDefaultSection(dm, &s);
2920: if (!s) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection in order to create a global PetscSection");
2921: if (!dm->sf) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSF in order to create a global PetscSection");
2922: PetscSectionCreateGlobalSection(s, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);
2923: PetscLayoutDestroy(&dm->map);
2924: PetscSectionGetValueLayout(PetscObjectComm((PetscObject)dm), dm->defaultGlobalSection, &dm->map);
2925: }
2926: *section = dm->defaultGlobalSection;
2927: return(0);
2928: }
2932: /*@
2933: DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.
2935: Input Parameters:
2936: + dm - The DM
2937: - section - The PetscSection, or NULL
2939: Level: intermediate
2941: Note: Any existing Section will be destroyed
2943: .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection()
2944: @*/
2945: PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section)
2946: {
2952: PetscObjectReference((PetscObject)section);
2953: PetscSectionDestroy(&dm->defaultGlobalSection);
2954: dm->defaultGlobalSection = section;
2955: return(0);
2956: }
2960: /*@
2961: DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set,
2962: it is created from the default PetscSection layouts in the DM.
2964: Input Parameter:
2965: . dm - The DM
2967: Output Parameter:
2968: . sf - The PetscSF
2970: Level: intermediate
2972: Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
2974: .seealso: DMSetDefaultSF(), DMCreateDefaultSF()
2975: @*/
2976: PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf)
2977: {
2978: PetscInt nroots;
2984: PetscSFGetGraph(dm->defaultSF, &nroots, NULL, NULL, NULL);
2985: if (nroots < 0) {
2986: PetscSection section, gSection;
2988: DMGetDefaultSection(dm, &section);
2989: if (section) {
2990: DMGetDefaultGlobalSection(dm, &gSection);
2991: DMCreateDefaultSF(dm, section, gSection);
2992: } else {
2993: *sf = NULL;
2994: return(0);
2995: }
2996: }
2997: *sf = dm->defaultSF;
2998: return(0);
2999: }
3003: /*@
3004: DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM
3006: Input Parameters:
3007: + dm - The DM
3008: - sf - The PetscSF
3010: Level: intermediate
3012: Note: Any previous SF is destroyed
3014: .seealso: DMGetDefaultSF(), DMCreateDefaultSF()
3015: @*/
3016: PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf)
3017: {
3023: PetscSFDestroy(&dm->defaultSF);
3024: dm->defaultSF = sf;
3025: return(0);
3026: }
3030: /*@C
3031: DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections
3032: describing the data layout.
3034: Input Parameters:
3035: + dm - The DM
3036: . localSection - PetscSection describing the local data layout
3037: - globalSection - PetscSection describing the global data layout
3039: Level: intermediate
3041: .seealso: DMGetDefaultSF(), DMSetDefaultSF()
3042: @*/
3043: PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection)
3044: {
3045: MPI_Comm comm;
3046: PetscLayout layout;
3047: const PetscInt *ranges;
3048: PetscInt *local;
3049: PetscSFNode *remote;
3050: PetscInt pStart, pEnd, p, nroots, nleaves = 0, l;
3051: PetscMPIInt size, rank;
3055: PetscObjectGetComm((PetscObject)dm,&comm);
3057: MPI_Comm_size(comm, &size);
3058: MPI_Comm_rank(comm, &rank);
3059: PetscSectionGetChart(globalSection, &pStart, &pEnd);
3060: PetscSectionGetConstrainedStorageSize(globalSection, &nroots);
3061: PetscLayoutCreate(comm, &layout);
3062: PetscLayoutSetBlockSize(layout, 1);
3063: PetscLayoutSetLocalSize(layout, nroots);
3064: PetscLayoutSetUp(layout);
3065: PetscLayoutGetRanges(layout, &ranges);
3066: for (p = pStart; p < pEnd; ++p) {
3067: PetscInt gdof, gcdof;
3069: PetscSectionGetDof(globalSection, p, &gdof);
3070: PetscSectionGetConstraintDof(globalSection, p, &gcdof);
3071: if (gcdof > (gdof < 0 ? -(gdof+1) : gdof)) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Point %d has %d constraints > %d dof", p, gcdof, (gdof < 0 ? -(gdof+1) : gdof));
3072: nleaves += gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
3073: }
3074: PetscMalloc1(nleaves, &local);
3075: PetscMalloc1(nleaves, &remote);
3076: for (p = pStart, l = 0; p < pEnd; ++p) {
3077: const PetscInt *cind;
3078: PetscInt dof, cdof, off, gdof, gcdof, goff, gsize, d, c;
3080: PetscSectionGetDof(localSection, p, &dof);
3081: PetscSectionGetOffset(localSection, p, &off);
3082: PetscSectionGetConstraintDof(localSection, p, &cdof);
3083: PetscSectionGetConstraintIndices(localSection, p, &cind);
3084: PetscSectionGetDof(globalSection, p, &gdof);
3085: PetscSectionGetConstraintDof(globalSection, p, &gcdof);
3086: PetscSectionGetOffset(globalSection, p, &goff);
3087: if (!gdof) continue; /* Censored point */
3088: gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
3089: if (gsize != dof-cdof) {
3090: if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof);
3091: cdof = 0; /* Ignore constraints */
3092: }
3093: for (d = 0, c = 0; d < dof; ++d) {
3094: if ((c < cdof) && (cind[c] == d)) {++c; continue;}
3095: local[l+d-c] = off+d;
3096: }
3097: if (gdof < 0) {
3098: for (d = 0; d < gsize; ++d, ++l) {
3099: PetscInt offset = -(goff+1) + d, r;
3101: PetscFindInt(offset,size+1,ranges,&r);
3102: if (r < 0) r = -(r+2);
3103: if ((r < 0) || (r >= size)) SETERRQ4(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Point %d mapped to invalid process %d (%d, %d)", p, r, gdof, goff);
3104: remote[l].rank = r;
3105: remote[l].index = offset - ranges[r];
3106: }
3107: } else {
3108: for (d = 0; d < gsize; ++d, ++l) {
3109: remote[l].rank = rank;
3110: remote[l].index = goff+d - ranges[rank];
3111: }
3112: }
3113: }
3114: if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves);
3115: PetscLayoutDestroy(&layout);
3116: PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);
3117: return(0);
3118: }
3122: /*@
3123: DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM.
3125: Input Parameter:
3126: . dm - The DM
3128: Output Parameter:
3129: . sf - The PetscSF
3131: Level: intermediate
3133: Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
3135: .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3136: @*/
3137: PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf)
3138: {
3142: *sf = dm->sf;
3143: return(0);
3144: }
3148: /*@
3149: DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM.
3151: Input Parameters:
3152: + dm - The DM
3153: - sf - The PetscSF
3155: Level: intermediate
3157: .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3158: @*/
3159: PetscErrorCode DMSetPointSF(DM dm, PetscSF sf)
3160: {
3166: PetscSFDestroy(&dm->sf);
3167: PetscObjectReference((PetscObject) sf);
3168: dm->sf = sf;
3169: return(0);
3170: }
3174: /*@
3175: DMGetDS - Get the PetscDS
3177: Input Parameter:
3178: . dm - The DM
3180: Output Parameter:
3181: . prob - The PetscDS
3183: Level: developer
3185: .seealso: DMSetDS()
3186: @*/
3187: PetscErrorCode DMGetDS(DM dm, PetscDS *prob)
3188: {
3192: *prob = dm->prob;
3193: return(0);
3194: }
3198: /*@
3199: DMSetDS - Set the PetscDS
3201: Input Parameters:
3202: + dm - The DM
3203: - prob - The PetscDS
3205: Level: developer
3207: .seealso: DMGetDS()
3208: @*/
3209: PetscErrorCode DMSetDS(DM dm, PetscDS prob)
3210: {
3216: PetscDSDestroy(&dm->prob);
3217: dm->prob = prob;
3218: PetscObjectReference((PetscObject) dm->prob);
3219: return(0);
3220: }
3224: PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields)
3225: {
3230: PetscDSGetNumFields(dm->prob, numFields);
3231: return(0);
3232: }
3236: PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields)
3237: {
3238: PetscInt Nf, f;
3243: PetscDSGetNumFields(dm->prob, &Nf);
3244: for (f = Nf; f < numFields; ++f) {
3245: PetscContainer obj;
3247: PetscContainerCreate(PetscObjectComm((PetscObject) dm), &obj);
3248: PetscDSSetDiscretization(dm->prob, f, (PetscObject) obj);
3249: PetscContainerDestroy(&obj);
3250: }
3251: return(0);
3252: }
3256: /*@
3257: DMGetField - Return the discretization object for a given DM field
3259: Not collective
3261: Input Parameters:
3262: + dm - The DM
3263: - f - The field number
3265: Output Parameter:
3266: . field - The discretization object
3268: Level: developer
3270: .seealso: DMSetField()
3271: @*/
3272: PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field)
3273: {
3278: PetscDSGetDiscretization(dm->prob, f, field);
3279: return(0);
3280: }
3284: /*@
3285: DMSetField - Set the discretization object for a given DM field
3287: Logically collective on DM
3289: Input Parameters:
3290: + dm - The DM
3291: . f - The field number
3292: - field - The discretization object
3294: Level: developer
3296: .seealso: DMGetField()
3297: @*/
3298: PetscErrorCode DMSetField(DM dm, PetscInt f, PetscObject field)
3299: {
3304: PetscDSSetDiscretization(dm->prob, f, field);
3305: return(0);
3306: }
3310: PetscErrorCode DMRestrictHook_Coordinates(DM dm,DM dmc,void *ctx)
3311: {
3312: DM dm_coord,dmc_coord;
3314: Vec coords,ccoords;
3315: VecScatter scat;
3317: DMGetCoordinateDM(dm,&dm_coord);
3318: DMGetCoordinateDM(dmc,&dmc_coord);
3319: DMGetCoordinates(dm,&coords);
3320: DMGetCoordinates(dmc,&ccoords);
3321: if (coords && !ccoords) {
3322: DMCreateGlobalVector(dmc_coord,&ccoords);
3323: DMCreateInjection(dmc_coord,dm_coord,&scat);
3324: VecScatterBegin(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);
3325: VecScatterEnd(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);
3326: DMSetCoordinates(dmc,ccoords);
3327: VecScatterDestroy(&scat);
3328: VecDestroy(&ccoords);
3329: }
3330: return(0);
3331: }
3335: static PetscErrorCode DMSubDomainHook_Coordinates(DM dm,DM subdm,void *ctx)
3336: {
3337: DM dm_coord,subdm_coord;
3339: Vec coords,ccoords,clcoords;
3340: VecScatter *scat_i,*scat_g;
3342: DMGetCoordinateDM(dm,&dm_coord);
3343: DMGetCoordinateDM(subdm,&subdm_coord);
3344: DMGetCoordinates(dm,&coords);
3345: DMGetCoordinates(subdm,&ccoords);
3346: if (coords && !ccoords) {
3347: DMCreateGlobalVector(subdm_coord,&ccoords);
3348: DMCreateLocalVector(subdm_coord,&clcoords);
3349: DMCreateDomainDecompositionScatters(dm_coord,1,&subdm_coord,NULL,&scat_i,&scat_g);
3350: VecScatterBegin(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);
3351: VecScatterBegin(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);
3352: VecScatterEnd(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);
3353: VecScatterEnd(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);
3354: DMSetCoordinates(subdm,ccoords);
3355: DMSetCoordinatesLocal(subdm,clcoords);
3356: VecScatterDestroy(&scat_i[0]);
3357: VecScatterDestroy(&scat_g[0]);
3358: VecDestroy(&ccoords);
3359: VecDestroy(&clcoords);
3360: PetscFree(scat_i);
3361: PetscFree(scat_g);
3362: }
3363: return(0);
3364: }
3368: /*@
3369: DMSetCoordinates - Sets into the DM a global vector that holds the coordinates
3371: Collective on DM
3373: Input Parameters:
3374: + dm - the DM
3375: - c - coordinate vector
3377: Note:
3378: The coordinates do not include those for ghost points, which are in the local vector
3380: Level: intermediate
3382: .keywords: distributed array, get, corners, nodes, local indices, coordinates
3383: .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
3384: @*/
3385: PetscErrorCode DMSetCoordinates(DM dm, Vec c)
3386: {
3392: PetscObjectReference((PetscObject) c);
3393: VecDestroy(&dm->coordinates);
3394: dm->coordinates = c;
3395: VecDestroy(&dm->coordinatesLocal);
3396: DMCoarsenHookAdd(dm,DMRestrictHook_Coordinates,NULL,NULL);
3397: DMSubDomainHookAdd(dm,DMSubDomainHook_Coordinates,NULL,NULL);
3398: return(0);
3399: }
3403: /*@
3404: DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates
3406: Collective on DM
3408: Input Parameters:
3409: + dm - the DM
3410: - c - coordinate vector
3412: Note:
3413: The coordinates of ghost points can be set using DMSetCoordinates()
3414: followed by DMGetCoordinatesLocal(). This is intended to enable the
3415: setting of ghost coordinates outside of the domain.
3417: Level: intermediate
3419: .keywords: distributed array, get, corners, nodes, local indices, coordinates
3420: .seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM()
3421: @*/
3422: PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c)
3423: {
3429: PetscObjectReference((PetscObject) c);
3430: VecDestroy(&dm->coordinatesLocal);
3432: dm->coordinatesLocal = c;
3434: VecDestroy(&dm->coordinates);
3435: return(0);
3436: }
3440: /*@
3441: DMGetCoordinates - Gets a global vector with the coordinates associated with the DM.
3443: Not Collective
3445: Input Parameter:
3446: . dm - the DM
3448: Output Parameter:
3449: . c - global coordinate vector
3451: Note:
3452: This is a borrowed reference, so the user should NOT destroy this vector
3454: Each process has only the local coordinates (does NOT have the ghost coordinates).
3456: For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3457: and (x_0,y_0,z_0,x_1,y_1,z_1...)
3459: Level: intermediate
3461: .keywords: distributed array, get, corners, nodes, local indices, coordinates
3462: .seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
3463: @*/
3464: PetscErrorCode DMGetCoordinates(DM dm, Vec *c)
3465: {
3471: if (!dm->coordinates && dm->coordinatesLocal) {
3472: DM cdm = NULL;
3474: DMGetCoordinateDM(dm, &cdm);
3475: DMCreateGlobalVector(cdm, &dm->coordinates);
3476: PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");
3477: DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);
3478: DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);
3479: }
3480: *c = dm->coordinates;
3481: return(0);
3482: }
3486: /*@
3487: DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM.
3489: Collective on DM
3491: Input Parameter:
3492: . dm - the DM
3494: Output Parameter:
3495: . c - coordinate vector
3497: Note:
3498: This is a borrowed reference, so the user should NOT destroy this vector
3500: Each process has the local and ghost coordinates
3502: For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3503: and (x_0,y_0,z_0,x_1,y_1,z_1...)
3505: Level: intermediate
3507: .keywords: distributed array, get, corners, nodes, local indices, coordinates
3508: .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM()
3509: @*/
3510: PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c)
3511: {
3517: if (!dm->coordinatesLocal && dm->coordinates) {
3518: DM cdm = NULL;
3520: DMGetCoordinateDM(dm, &cdm);
3521: DMCreateLocalVector(cdm, &dm->coordinatesLocal);
3522: PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");
3523: DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);
3524: DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);
3525: }
3526: *c = dm->coordinatesLocal;
3527: return(0);
3528: }
3532: /*@
3533: DMGetCoordinateDM - Gets the DM that prescribes coordinate layout and scatters between global and local coordinates
3535: Collective on DM
3537: Input Parameter:
3538: . dm - the DM
3540: Output Parameter:
3541: . cdm - coordinate DM
3543: Level: intermediate
3545: .keywords: distributed array, get, corners, nodes, local indices, coordinates
3546: .seealso: DMSetCoordinateDM(), DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3547: @*/
3548: PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm)
3549: {
3555: if (!dm->coordinateDM) {
3556: if (!dm->ops->createcoordinatedm) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unable to create coordinates for this DM");
3557: (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);
3558: }
3559: *cdm = dm->coordinateDM;
3560: return(0);
3561: }
3565: /*@
3566: DMSetCoordinateDM - Sets the DM that prescribes coordinate layout and scatters between global and local coordinates
3568: Logically Collective on DM
3570: Input Parameters:
3571: + dm - the DM
3572: - cdm - coordinate DM
3574: Level: intermediate
3576: .keywords: distributed array, get, corners, nodes, local indices, coordinates
3577: .seealso: DMGetCoordinateDM(), DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3578: @*/
3579: PetscErrorCode DMSetCoordinateDM(DM dm, DM cdm)
3580: {
3586: DMDestroy(&dm->coordinateDM);
3587: dm->coordinateDM = cdm;
3588: PetscObjectReference((PetscObject) dm->coordinateDM);
3589: return(0);
3590: }
3594: /*@
3595: DMGetCoordinateSection - Retrieve the layout of coordinate values over the mesh.
3597: Not Collective
3599: Input Parameter:
3600: . dm - The DM object
3602: Output Parameter:
3603: . section - The PetscSection object
3605: Level: intermediate
3607: .keywords: mesh, coordinates
3608: .seealso: DMGetCoordinateDM(), DMGetDefaultSection(), DMSetDefaultSection()
3609: @*/
3610: PetscErrorCode DMGetCoordinateSection(DM dm, PetscSection *section)
3611: {
3612: DM cdm;
3618: DMGetCoordinateDM(dm, &cdm);
3619: DMGetDefaultSection(cdm, section);
3620: return(0);
3621: }
3625: /*@
3626: DMSetCoordinateSection - Set the layout of coordinate values over the mesh.
3628: Not Collective
3630: Input Parameters:
3631: + dm - The DM object
3632: - section - The PetscSection object
3634: Level: intermediate
3636: .keywords: mesh, coordinates
3637: .seealso: DMGetCoordinateSection(), DMGetDefaultSection(), DMSetDefaultSection()
3638: @*/
3639: PetscErrorCode DMSetCoordinateSection(DM dm, PetscSection section)
3640: {
3641: DM cdm;
3647: DMGetCoordinateDM(dm, &cdm);
3648: DMSetDefaultSection(cdm, section);
3649: return(0);
3650: }
3654: PetscErrorCode DMGetPeriodicity(DM dm, const PetscReal **maxCell, const PetscReal **L)
3655: {
3658: if (L) *L = dm->L;
3659: if (maxCell) *maxCell = dm->maxCell;
3660: return(0);
3661: }
3665: PetscErrorCode DMSetPeriodicity(DM dm, const PetscReal maxCell[], const PetscReal L[])
3666: {
3667: Vec coordinates;
3668: PetscInt dim, d;
3673: DMGetCoordinatesLocal(dm, &coordinates);
3674: VecGetBlockSize(coordinates, &dim);
3675: PetscMalloc1(dim,&dm->L);
3676: PetscMalloc1(dim,&dm->maxCell);
3677: for (d = 0; d < dim; ++d) {dm->L[d] = L[d]; dm->maxCell[d] = maxCell[d];}
3678: return(0);
3679: }
3683: /*@
3684: DMLocatePoints - Locate the points in v in the mesh and return an IS of the containing cells
3686: Not collective
3688: Input Parameters:
3689: + dm - The DM
3690: - v - The Vec of points
3692: Output Parameter:
3693: . cells - The local cell numbers for cells which contain the points
3695: Level: developer
3697: .keywords: point location, mesh
3698: .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3699: @*/
3700: PetscErrorCode DMLocatePoints(DM dm, Vec v, IS *cells)
3701: {
3708: if (dm->ops->locatepoints) {
3709: (*dm->ops->locatepoints)(dm,v,cells);
3710: } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Point location not available for this DM");
3711: return(0);
3712: }
3716: /*@
3717: DMGetOutputDM - Retrieve the DM associated with the layout for output
3719: Input Parameter:
3720: . dm - The original DM
3722: Output Parameter:
3723: . odm - The DM which provides the layout for output
3725: Level: intermediate
3727: .seealso: VecView(), DMGetDefaultGlobalSection()
3728: @*/
3729: PetscErrorCode DMGetOutputDM(DM dm, DM *odm)
3730: {
3731: PetscSection section;
3732: PetscBool hasConstraints;
3738: DMGetDefaultSection(dm, &section);
3739: PetscSectionHasConstraints(section, &hasConstraints);
3740: if (!hasConstraints) {
3741: *odm = dm;
3742: return(0);
3743: }
3744: if (!dm->dmBC) {
3745: PetscSection newSection, gsection;
3746: PetscSF sf;
3748: DMClone(dm, &dm->dmBC);
3749: PetscSectionClone(section, &newSection);
3750: DMSetDefaultSection(dm->dmBC, newSection);
3751: PetscSectionDestroy(&newSection);
3752: DMGetPointSF(dm->dmBC, &sf);
3753: PetscSectionCreateGlobalSection(section, sf, PETSC_TRUE, &gsection);
3754: DMSetDefaultGlobalSection(dm->dmBC, gsection);
3755: PetscSectionDestroy(&gsection);
3756: }
3757: *odm = dm->dmBC;
3758: return(0);
3759: }
3763: /*@
3764: DMGetOutputSequenceNumber - Retrieve the sequence number/value for output
3766: Input Parameter:
3767: . dm - The original DM
3769: Output Parameters:
3770: + num - The output sequence number
3771: - val - The output sequence value
3773: Level: intermediate
3775: Note: This is intended for output that should appear in sequence, for instance
3776: a set of timesteps in an HDF5 file, or a set of realizations of a stochastic system.
3778: .seealso: VecView()
3779: @*/
3780: PetscErrorCode DMGetOutputSequenceNumber(DM dm, PetscInt *num, PetscReal *val)
3781: {
3784: if (num) *num = dm->outputSequenceNum;
3785: if (val) *val = dm->outputSequenceVal;
3786: return(0);
3787: }
3791: /*@
3792: DMSetOutputSequenceNumber - Set the sequence number/value for output
3794: Input Parameters:
3795: + dm - The original DM
3796: . num - The output sequence number
3797: - val - The output sequence value
3799: Level: intermediate
3801: Note: This is intended for output that should appear in sequence, for instance
3802: a set of timesteps in an HDF5 file, or a set of realizations of a stochastic system.
3804: .seealso: VecView()
3805: @*/
3806: PetscErrorCode DMSetOutputSequenceNumber(DM dm, PetscInt num, PetscReal val)
3807: {
3810: dm->outputSequenceNum = num;
3811: dm->outputSequenceVal = val;
3812: return(0);
3813: }
3817: /*@C
3818: DMOutputSequenceLoad - Retrieve the sequence value from a Viewer
3820: Input Parameters:
3821: + dm - The original DM
3822: . viewer - The viewer to read from; currently this must be an HDF5 viewer
3823: . name - The sequence name
3824: - num - The output sequence number
3825: Output Parameter:
3826: . val - The output sequence value
3828: Level: intermediate
3830: Note: This is intended for output that should appear in sequence, for instance
3831: a set of timesteps in an HDF5 file, or a set of realizations of a stochastic system.
3833: .seealso: DMGetOutputSequenceNumber(), DMSetOutputSequenceNumber(), VecView()
3834: @*/
3835: PetscErrorCode DMOutputSequenceLoad(DM dm, PetscViewer viewer, const char *name, PetscInt num, PetscReal *val)
3836: {
3837: PetscBool ishdf5;
3844: PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERHDF5, &ishdf5);
3845: if (ishdf5) {
3846: #if defined(PETSC_HAVE_HDF5)
3847: PetscScalar value;
3849: DMSequenceLoad_HDF5(dm, name, num, &value, viewer);
3850: *val = PetscRealPart(value);
3851: #endif
3852: } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Invalid viewer; open viewer with PetscViewerHDF5Open()");
3853: return(0);
3854: }