Actual source code: dm.c
petsc-3.6.1 2015-08-06
1: #include <petsc/private/dmimpl.h> /*I "petscdm.h" I*/
2: #include <petscsf.h>
3: #include <petscds.h>
5: PetscClassId DM_CLASSID;
6: PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal, DM_LocalToLocal;
8: const char *const DMBoundaryTypes[] = {"NONE","GHOSTED","MIRROR","PERIODIC","TWIST","DM_BOUNDARY_",0};
12: /*@
13: DMCreate - Creates an empty DM object. The type can then be set with DMSetType().
15: If you never call DMSetType() it will generate an
16:    error when you try to use the DM.
18: Collective on MPI_Comm
20: Input Parameter:
21: . comm - The communicator for the DM object
23: Output Parameter:
24: . dm - The DM object
26: Level: beginner
28: .seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
29: @*/
30: PetscErrorCode DMCreate(MPI_Comm comm,DM *dm)
31: {
32: DM v;
37: *dm = NULL;
38: PetscSysInitializePackage();
39: VecInitializePackage();
40: MatInitializePackage();
41: DMInitializePackage();
43: PetscHeaderCreate(v, DM_CLASSID, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);
45: v->ltogmap = NULL;
46: v->bs = 1;
47: v->coloringtype = IS_COLORING_GLOBAL;
48: PetscSFCreate(comm, &v->sf);
49: PetscSFCreate(comm, &v->defaultSF);
50: v->defaultSection = NULL;
51: v->defaultGlobalSection = NULL;
52: v->defaultConstraintSection = NULL;
53: v->defaultConstraintMat = NULL;
54: v->L = NULL;
55: v->maxCell = NULL;
56: v->bdtype = NULL;
57: v->dimEmbed = PETSC_DEFAULT;
58: {
59: PetscInt i;
60: for (i = 0; i < 10; ++i) {
61: v->nullspaceConstructors[i] = NULL;
62: }
63: }
64: PetscDSCreate(comm, &v->prob);
65: v->dmBC = NULL;
66: v->outputSequenceNum = -1;
67: v->outputSequenceVal = 0.0;
68: DMSetVecType(v,VECSTANDARD);
69: DMSetMatType(v,MATAIJ);
70: *dm = v;
71: return(0);
72: }
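/* Illustrative usage (added sketch, not part of the original petsc-3.6.1 dm.c;
   error checking with ierr/CHKERRQ omitted): the typical DMCreate() life cycle.
   Type-specific setup (here for a DMDA) must precede DMSetUp(); convenience
   constructors such as DMDACreate2d() bundle these steps. */
  DM dm;
  DMCreate(PETSC_COMM_WORLD, &dm);
  DMSetType(dm, DMDA);            /* or DMPLEX, DMCOMPOSITE, ... */
  DMSetDimension(dm, 2);
  DMDASetSizes(dm, 16, 16, 1);    /* DMDA-specific setters */
  DMDASetDof(dm, 1);
  DMDASetStencilWidth(dm, 1);
  DMSetFromOptions(dm);
  DMSetUp(dm);
  /* ... use the DM ... */
  DMDestroy(&dm);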
76: /*@
77: DMClone - Creates a DM object with the same topology as the original.
79: Collective on MPI_Comm
81: Input Parameter:
82: . dm - The original DM object
84: Output Parameter:
85: . newdm - The new DM object
87: Level: beginner
89: .keywords: DM, topology, create
90: @*/
91: PetscErrorCode DMClone(DM dm, DM *newdm)
92: {
93: PetscSF sf;
94: Vec coords;
95: void *ctx;
96: PetscInt dim;
102: DMCreate(PetscObjectComm((PetscObject)dm), newdm);
103: DMGetDimension(dm, &dim);
104: DMSetDimension(*newdm, dim);
105: if (dm->ops->clone) {
106: (*dm->ops->clone)(dm, newdm);
107: }
108: (*newdm)->setupcalled = PETSC_TRUE;
109: DMGetPointSF(dm, &sf);
110: DMSetPointSF(*newdm, sf);
111: DMGetApplicationContext(dm, &ctx);
112: DMSetApplicationContext(*newdm, ctx);
113: DMGetCoordinatesLocal(dm, &coords);
114: if (coords) {
115: DMSetCoordinatesLocal(*newdm, coords);
116: } else {
117: DMGetCoordinates(dm, &coords);
118: if (coords) {DMSetCoordinates(*newdm, coords);}
119: }
120: if (dm->maxCell) {
121: const PetscReal *maxCell, *L;
122: const DMBoundaryType *bd;
123: DMGetPeriodicity(dm, &maxCell, &L, &bd);
124: DMSetPeriodicity(*newdm, maxCell, L, bd);
125: }
126: return(0);
127: }
131: /*@C
132: DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()
134: Logically Collective on DMDA
136: Input Parameter:
137: + da - initial distributed array
138: . ctype - the vector type, currently either VECSTANDARD or VECCUSP
140: Options Database:
141: . -dm_vec_type ctype
143: Level: intermediate
145: .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType, DMGetVecType()
146: @*/
147: PetscErrorCode DMSetVecType(DM da,VecType ctype)
148: {
153: PetscFree(da->vectype);
154: PetscStrallocpy(ctype,(char**)&da->vectype);
155: return(0);
156: }
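/* Illustrative usage (added sketch, not part of the original source; dm is a
   placeholder for an existing DM): choose the Vec implementation before any
   vectors are created from the DM. */
  Vec x;
  DMSetVecType(dm, VECSTANDARD);   /* or VECCUSP on CUSP-enabled builds */
  DMCreateGlobalVector(dm, &x);
  /* ... */
  VecDestroy(&x);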
160: /*@C
161: DMGetVecType - Gets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()
163: Logically Collective on DMDA
165: Input Parameter:
166: . da - initial distributed array
168: Output Parameter:
169: . ctype - the vector type
171: Level: intermediate
173: .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType
174: @*/
175: PetscErrorCode DMGetVecType(DM da,VecType *ctype)
176: {
179: *ctype = da->vectype;
180: return(0);
181: }
185: /*@
186: VecGetDM - Gets the DM defining the data layout of the vector
188: Not collective
190: Input Parameter:
191: . v - The Vec
193: Output Parameter:
194: . dm - The DM
196: Level: intermediate
198: .seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
199: @*/
200: PetscErrorCode VecGetDM(Vec v, DM *dm)
201: {
207: PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject*) dm);
208: return(0);
209: }
213: /*@
214: VecSetDM - Sets the DM defining the data layout of the vector
216: Not collective
218: Input Parameters:
219: + v - The Vec
220: - dm - The DM
222: Level: intermediate
224: .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
225: @*/
226: PetscErrorCode VecSetDM(Vec v, DM dm)
227: {
233: PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);
234: return(0);
235: }
239: /*@C
240: DMSetMatType - Sets the type of matrix created with DMCreateMatrix()
242: Logically Collective on DM
244: Input Parameter:
245: + dm - the DM context
246: . ctype - the matrix type
248: Options Database:
249: . -dm_mat_type ctype
251: Level: intermediate
253: .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType, DMGetMatType()
254: @*/
255: PetscErrorCode DMSetMatType(DM dm,MatType ctype)
256: {
261: PetscFree(dm->mattype);
262: PetscStrallocpy(ctype,(char**)&dm->mattype);
263: return(0);
264: }
268: /*@C
269: DMGetMatType - Gets the type of matrix created with DMCreateMatrix()
271: Logically Collective on DM
273: Input Parameter:
274: . dm - the DM context
276: Output Parameter:
277: . ctype - the matrix type
279: Options Database:
280: . -dm_mat_type ctype
282: Level: intermediate
284: .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType, DMSetMatType()
285: @*/
286: PetscErrorCode DMGetMatType(DM dm,MatType *ctype)
287: {
290: *ctype = dm->mattype;
291: return(0);
292: }
296: /*@
297: MatGetDM - Gets the DM defining the data layout of the matrix
299: Not collective
301: Input Parameter:
302: . A - The Mat
304: Output Parameter:
305: . dm - The DM
307: Level: intermediate
309: .seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType()
310: @*/
311: PetscErrorCode MatGetDM(Mat A, DM *dm)
312: {
318: PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject*) dm);
319: return(0);
320: }
324: /*@
325: MatSetDM - Sets the DM defining the data layout of the matrix
327: Not collective
329: Input Parameters:
330: + A - The Mat
331: - dm - The DM
333: Level: intermediate
335: .seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType()
336: @*/
337: PetscErrorCode MatSetDM(Mat A, DM dm)
338: {
344: PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);
345: return(0);
346: }
350: /*@C
351: DMSetOptionsPrefix - Sets the prefix used for searching for all
352: DMDA options in the database.
354: Logically Collective on DMDA
356: Input Parameter:
357: + da - the DMDA context
358: - prefix - the prefix to prepend to all option names
360: Notes:
361: A hyphen (-) must NOT be given at the beginning of the prefix name.
362: The first character of all runtime options is AUTOMATICALLY the hyphen.
364: Level: advanced
366: .keywords: DMDA, set, options, prefix, database
368: .seealso: DMSetFromOptions()
369: @*/
370: PetscErrorCode DMSetOptionsPrefix(DM dm,const char prefix[])
371: {
376: PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);
377: if (dm->sf) {
378: PetscObjectSetOptionsPrefix((PetscObject)dm->sf,prefix);
379: }
380: if (dm->defaultSF) {
381: PetscObjectSetOptionsPrefix((PetscObject)dm->defaultSF,prefix);
382: }
383: return(0);
384: }
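/* Illustrative usage (added sketch, not part of the original source; dm is a
   placeholder): with the prefix below, the DM's options are looked up as
   -mesh_dm_vec_type, -mesh_dm_mat_type, and so on in the options database. */
  DMSetOptionsPrefix(dm, "mesh_");
  DMSetFromOptions(dm);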
388: /*@
389: DMDestroy - Destroys a vector packer or DMDA.
391: Collective on DM
393: Input Parameter:
394: . dm - the DM object to destroy
396: Level: developer
398: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
400: @*/
401: PetscErrorCode DMDestroy(DM *dm)
402: {
403: PetscInt i, cnt = 0;
404: DMNamedVecLink nlink,nnext;
408: if (!*dm) return(0);
411: /* count all the circular references of DM and its contained Vecs */
412: for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
413: if ((*dm)->localin[i]) cnt++;
414: if ((*dm)->globalin[i]) cnt++;
415: }
416: for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++;
417: for (nlink=(*dm)->namedlocal; nlink; nlink=nlink->next) cnt++;
418: if ((*dm)->x) {
419: DM obj;
420: VecGetDM((*dm)->x, &obj);
421: if (obj == *dm) cnt++;
422: }
424: if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; return(0);}
425: /*
426: Need this test because the dm references the vectors that
427: reference the dm, so destroying the dm calls destroy on the
428: vectors that cause another destroy on the dm
429: */
430: if (((PetscObject)(*dm))->refct < 0) return(0);
431: ((PetscObject) (*dm))->refct = 0;
432: for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
433: if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()");
434: VecDestroy(&(*dm)->localin[i]);
435: }
436: nnext=(*dm)->namedglobal;
437: (*dm)->namedglobal = NULL;
438: for (nlink=nnext; nlink; nlink=nnext) { /* Destroy the named vectors */
439: nnext = nlink->next;
440: if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
441: PetscFree(nlink->name);
442: VecDestroy(&nlink->X);
443: PetscFree(nlink);
444: }
445: nnext=(*dm)->namedlocal;
446: (*dm)->namedlocal = NULL;
447: for (nlink=nnext; nlink; nlink=nnext) { /* Destroy the named local vectors */
448: nnext = nlink->next;
449: if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
450: PetscFree(nlink->name);
451: VecDestroy(&nlink->X);
452: PetscFree(nlink);
453: }
455: /* Destroy the list of hooks */
456: {
457: DMCoarsenHookLink link,next;
458: for (link=(*dm)->coarsenhook; link; link=next) {
459: next = link->next;
460: PetscFree(link);
461: }
462: (*dm)->coarsenhook = NULL;
463: }
464: {
465: DMRefineHookLink link,next;
466: for (link=(*dm)->refinehook; link; link=next) {
467: next = link->next;
468: PetscFree(link);
469: }
470: (*dm)->refinehook = NULL;
471: }
472: {
473: DMSubDomainHookLink link,next;
474: for (link=(*dm)->subdomainhook; link; link=next) {
475: next = link->next;
476: PetscFree(link);
477: }
478: (*dm)->subdomainhook = NULL;
479: }
480: {
481: DMGlobalToLocalHookLink link,next;
482: for (link=(*dm)->gtolhook; link; link=next) {
483: next = link->next;
484: PetscFree(link);
485: }
486: (*dm)->gtolhook = NULL;
487: }
488: {
489: DMLocalToGlobalHookLink link,next;
490: for (link=(*dm)->ltoghook; link; link=next) {
491: next = link->next;
492: PetscFree(link);
493: }
494: (*dm)->ltoghook = NULL;
495: }
496: /* Destroy the work arrays */
497: {
498: DMWorkLink link,next;
499: if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out");
500: for (link=(*dm)->workin; link; link=next) {
501: next = link->next;
502: PetscFree(link->mem);
503: PetscFree(link);
504: }
505: (*dm)->workin = NULL;
506: }
508: PetscObjectDestroy(&(*dm)->dmksp);
509: PetscObjectDestroy(&(*dm)->dmsnes);
510: PetscObjectDestroy(&(*dm)->dmts);
512: if ((*dm)->ctx && (*dm)->ctxdestroy) {
513: (*(*dm)->ctxdestroy)(&(*dm)->ctx);
514: }
515: VecDestroy(&(*dm)->x);
516: MatFDColoringDestroy(&(*dm)->fd);
517: DMClearGlobalVectors(*dm);
518: ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);
519: PetscFree((*dm)->vectype);
520: PetscFree((*dm)->mattype);
522: PetscSectionDestroy(&(*dm)->defaultSection);
523: PetscSectionDestroy(&(*dm)->defaultGlobalSection);
524: PetscLayoutDestroy(&(*dm)->map);
525: PetscSectionDestroy(&(*dm)->defaultConstraintSection);
526: MatDestroy(&(*dm)->defaultConstraintMat);
527: PetscSFDestroy(&(*dm)->sf);
528: PetscSFDestroy(&(*dm)->defaultSF);
530: DMDestroy(&(*dm)->coordinateDM);
531: VecDestroy(&(*dm)->coordinates);
532: VecDestroy(&(*dm)->coordinatesLocal);
533: PetscFree3((*dm)->L,(*dm)->maxCell,(*dm)->bdtype);
535: PetscDSDestroy(&(*dm)->prob);
536: DMDestroy(&(*dm)->dmBC);
537: /* if memory was published with SAWs then destroy it */
538: PetscObjectSAWsViewOff((PetscObject)*dm);
540: (*(*dm)->ops->destroy)(*dm);
541: /* We do not destroy (*dm)->data here so that we can reference count backend objects */
542: PetscHeaderDestroy(dm);
543: return(0);
544: }
548: /*@
549: DMSetUp - sets up the data structures inside a DM object
551: Collective on DM
553: Input Parameter:
554: . dm - the DM object to setup
556: Level: developer
558: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
560: @*/
561: PetscErrorCode DMSetUp(DM dm)
562: {
567: if (dm->setupcalled) return(0);
568: if (dm->ops->setup) {
569: (*dm->ops->setup)(dm);
570: }
571: dm->setupcalled = PETSC_TRUE;
572: return(0);
573: }
577: /*@
578: DMSetFromOptions - sets parameters in a DM from the options database
580: Collective on DM
582: Input Parameter:
583: . dm - the DM object to set options for
585: Options Database:
586: + -dm_preallocate_only - Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros
587: . -dm_vec_type <type> - type of vector to create inside DM
588: . -dm_mat_type <type> - type of matrix to create inside DM
589: - -dm_coloring_type - <global or ghosted>
591: Level: developer
593: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
595: @*/
596: PetscErrorCode DMSetFromOptions(DM dm)
597: {
598: char typeName[256];
599: PetscBool flg;
604: if (dm->sf) {
605: PetscSFSetFromOptions(dm->sf);
606: }
607: if (dm->defaultSF) {
608: PetscSFSetFromOptions(dm->defaultSF);
609: }
610: PetscObjectOptionsBegin((PetscObject)dm);
611: PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,NULL);
612: PetscOptionsFList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);
613: if (flg) {
614: DMSetVecType(dm,typeName);
615: }
616: PetscOptionsFList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype ? dm->mattype : typeName,typeName,sizeof(typeName),&flg);
617: if (flg) {
618: DMSetMatType(dm,typeName);
619: }
620: PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,NULL);
621: if (dm->ops->setfromoptions) {
622: (*dm->ops->setfromoptions)(PetscOptionsObject,dm);
623: }
624: /* process any options handlers added with PetscObjectAddOptionsHandler() */
625: PetscObjectProcessOptionsHandlers((PetscObject) dm);
626: PetscOptionsEnd();
627: return(0);
628: }
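/* Illustrative usage (added sketch, not part of the original source; dm is a
   placeholder): the options listed above take effect when DMSetFromOptions()
   is called before DMSetUp(), e.g. running the application with
   -dm_mat_type baij -dm_preallocate_only selects MATBAIJ matrices and skips
   filling the nonzero pattern. */
  DMSetFromOptions(dm);
  DMSetUp(dm);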
632: /*@C
633: DMView - Views a DM
635: Collective on DM
637: Input Parameter:
638: + dm - the DM object to view
639: - v - the viewer
641: Level: beginner
643: .seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
645: @*/
646: PetscErrorCode DMView(DM dm,PetscViewer v)
647: {
649: PetscBool isbinary;
653: if (!v) {
654: PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm),&v);
655: }
656: PetscObjectPrintClassNamePrefixType((PetscObject)dm,v);
657: PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);
658: if (isbinary) {
659: PetscInt classid = DM_FILE_CLASSID;
660: char type[256];
662: PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);
663: PetscStrncpy(type,((PetscObject)dm)->type_name,256);
664: PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);
665: }
666: if (dm->ops->view) {
667: (*dm->ops->view)(dm,v);
668: }
669: return(0);
670: }
674: /*@
675: DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object
677: Collective on DM
679: Input Parameter:
680: . dm - the DM object
682: Output Parameter:
683: . vec - the global vector
685: Level: beginner
687: .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
689: @*/
690: PetscErrorCode DMCreateGlobalVector(DM dm,Vec *vec)
691: {
696: (*dm->ops->createglobalvector)(dm,vec);
697: return(0);
698: }
702: /*@
703: DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object
705: Not Collective
707: Input Parameter:
708: . dm - the DM object
710: Output Parameter:
711: . vec - the local vector
713: Level: beginner
715: .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
717: @*/
718: PetscErrorCode DMCreateLocalVector(DM dm,Vec *vec)
719: {
724: (*dm->ops->createlocalvector)(dm,vec);
725: return(0);
726: }
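/* Illustrative usage (added sketch, not part of the original source; dm is a
   placeholder): a global vector has exactly one owner per entry and no ghost
   points, while a local vector includes this process's ghost points. */
  Vec gvec, lvec;
  DMCreateGlobalVector(dm, &gvec);
  DMCreateLocalVector(dm, &lvec);
  /* ... */
  VecDestroy(&lvec);
  VecDestroy(&gvec);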
730: /*@
731: DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM.
733: Collective on DM
735: Input Parameter:
736: . dm - the DM that provides the mapping
738: Output Parameter:
739: . ltog - the mapping
741: Level: intermediate
743: Notes:
744: This mapping can then be used by VecSetLocalToGlobalMapping() or
745: MatSetLocalToGlobalMapping().
747: .seealso: DMCreateLocalVector()
748: @*/
749: PetscErrorCode DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog)
750: {
756: if (!dm->ltogmap) {
757: PetscSection section, sectionGlobal;
759:     DMGetDefaultSection(dm, &section);
760: if (section) {
761: PetscInt *ltog;
762: PetscInt pStart, pEnd, size, p, l;
764:       DMGetDefaultGlobalSection(dm, &sectionGlobal);
765: PetscSectionGetChart(section, &pStart, &pEnd);
766: PetscSectionGetStorageSize(section, &size);
767:       PetscMalloc1(size, &ltog); /* We want the local+overlap size */
768: for (p = pStart, l = 0; p < pEnd; ++p) {
769: PetscInt dof, off, c;
771: /* Should probably use constrained dofs */
772: PetscSectionGetDof(section, p, &dof);
773: PetscSectionGetOffset(sectionGlobal, p, &off);
774: for (c = 0; c < dof; ++c, ++l) {
775: ltog[l] = off+c;
776: }
777: }
778: ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, 1,size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);
779: PetscLogObjectParent((PetscObject)dm, (PetscObject)dm->ltogmap);
780: } else {
781: if (!dm->ops->getlocaltoglobalmapping) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping");
782: (*dm->ops->getlocaltoglobalmapping)(dm);
783: }
784: }
785: *ltog = dm->ltogmap;
786: return(0);
787: }
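/* Illustrative usage (added sketch, not part of the original source; dm, x and A
   are placeholders for objects with matching layout): attach the mapping so
   VecSetValuesLocal()/MatSetValuesLocal() can be called with local indices.
   The mapping is owned by the DM and must not be destroyed by the caller. */
  ISLocalToGlobalMapping ltog;
  DMGetLocalToGlobalMapping(dm, &ltog);
  VecSetLocalToGlobalMapping(x, ltog);
  MatSetLocalToGlobalMapping(A, ltog, ltog);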
791: /*@
792: DMGetBlockSize - Gets the inherent block size associated with a DM
794: Not Collective
796: Input Parameter:
797: . dm - the DM with block structure
799: Output Parameter:
800: . bs - the block size, 1 implies no exploitable block structure
802: Level: intermediate
804: .seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMapping()
805: @*/
806: PetscErrorCode DMGetBlockSize(DM dm,PetscInt *bs)
807: {
811: if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet");
812: *bs = dm->bs;
813: return(0);
814: }
818: /*@
819: DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects
821: Collective on DM
823: Input Parameter:
824: + dm1 - the DM object
825: - dm2 - the second, finer DM object
827: Output Parameter:
828: + mat - the interpolation
829: - vec - the scaling (optional)
831: Level: developer
833:     Notes: For DMDA objects this only works for "uniform refinement", that is, the refined mesh was obtained with DMRefine() or the coarse mesh was obtained by
834: DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation.
836: For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors
837: EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic.
840: .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen()
842: @*/
843: PetscErrorCode DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec)
844: {
850: (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);
851: return(0);
852: }
856: /*@
857: DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects
859: Collective on DM
861: Input Parameter:
862: + dm1 - the DM object
863: - dm2 - the second, finer DM object
865: Output Parameter:
866: . mat - the injection
868: Level: developer
870:     Notes: For DMDA objects this only works for "uniform refinement", that is, the refined mesh was obtained with DMRefine() or the coarse mesh was obtained by
871: DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection.
873: .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation()
875: @*/
876: PetscErrorCode DMCreateInjection(DM dm1,DM dm2,Mat *mat)
877: {
883: (*dm1->ops->getinjection)(dm1,dm2,mat);
884: return(0);
885: }
889: /*@
890: DMCreateColoring - Gets coloring for a DM
892: Collective on DM
894: Input Parameter:
895: + dm - the DM object
896: - ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL
898: Output Parameter:
899: . coloring - the coloring
901: Level: developer
903: .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix(), DMSetMatType()
905: @*/
906: PetscErrorCode DMCreateColoring(DM dm,ISColoringType ctype,ISColoring *coloring)
907: {
912: if (!dm->ops->getcoloring) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No coloring for this type of DM yet");
913: (*dm->ops->getcoloring)(dm,ctype,coloring);
914: return(0);
915: }
919: /*@
920: DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite
922: Collective on DM
924: Input Parameter:
925: . dm - the DM object
927: Output Parameter:
928: . mat - the empty Jacobian
930: Level: beginner
932: Notes: This properly preallocates the number of nonzeros in the sparse matrix so you
933: do not need to do it yourself.
935: By default it also sets the nonzero structure and puts in the zero entries. To prevent setting
936:     the nonzero pattern call DMSetMatrixPreallocateOnly()
938: For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used
939: internally by PETSc.
941: For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires
942: the indices for the global numbering for DMDAs which is complicated.
944: .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMSetMatType()
946: @*/
947: PetscErrorCode DMCreateMatrix(DM dm,Mat *mat)
948: {
953: MatInitializePackage();
956: (*dm->ops->creatematrix)(dm,mat);
957: return(0);
958: }
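/* Illustrative usage (added sketch, not part of the original source; dm is a
   placeholder): create a preallocated Jacobian, fill it (e.g. with
   MatSetValuesStencil() for a DMDA), and assemble it. */
  Mat J;
  DMSetMatType(dm, MATAIJ);
  DMCreateMatrix(dm, &J);
  /* ... MatSetValuesStencil()/MatSetValuesLocal() calls ... */
  MatAssemblyBegin(J, MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(J, MAT_FINAL_ASSEMBLY);
  MatDestroy(&J);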
962: /*@
963: DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly
964: preallocated but the nonzero structure and zero values will not be set.
966: Logically Collective on DMDA
968: Input Parameter:
969: + dm - the DM
970: - only - PETSC_TRUE if only want preallocation
972: Level: developer
973: .seealso DMCreateMatrix()
974: @*/
975: PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only)
976: {
979: dm->prealloc_only = only;
980: return(0);
981: }
985: /*@C
986: DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray()
988: Not Collective
990: Input Parameters:
991: + dm - the DM object
992: .  count - The minimum size
993: - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)
995: Output Parameter:
996: . array - the work array
998: Level: developer
1000: .seealso DMDestroy(), DMCreate()
1001: @*/
1002: PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
1003: {
1005: DMWorkLink link;
1006: size_t dsize;
1011: if (dm->workin) {
1012: link = dm->workin;
1013: dm->workin = dm->workin->next;
1014: } else {
1015: PetscNewLog(dm,&link);
1016: }
1017: PetscDataTypeGetSize(dtype,&dsize);
1018: if (dsize*count > link->bytes) {
1019: PetscFree(link->mem);
1020: PetscMalloc(dsize*count,&link->mem);
1021: link->bytes = dsize*count;
1022: }
1023: link->next = dm->workout;
1024: dm->workout = link;
1025: *(void**)mem = link->mem;
1026: return(0);
1027: }
1031: /*@C
1032:     DMRestoreWorkArray - Restores a work array obtained with DMGetWorkArray()
1034: Not Collective
1036: Input Parameters:
1037: + dm - the DM object
1038: .  count - The minimum size
1039: - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)
1041: Output Parameter:
1042: . array - the work array
1044: Level: developer
1046: .seealso DMDestroy(), DMCreate()
1047: @*/
1048: PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
1049: {
1050: DMWorkLink *p,link;
1055: for (p=&dm->workout; (link=*p); p=&link->next) {
1056: if (link->mem == *(void**)mem) {
1057: *p = link->next;
1058: link->next = dm->workin;
1059: dm->workin = link;
1060: *(void**)mem = NULL;
1061: return(0);
1062: }
1063: }
1064: SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out");
1065: }
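/* Illustrative usage (added sketch, not part of the original source; dm and n are
   placeholders): check out a scalar work array of at least n entries and return
   it when done, using the same count and data type in both calls. */
  PetscScalar *work;
  DMGetWorkArray(dm, n, PETSC_SCALAR, &work);
  /* ... use work[0..n-1] ... */
  DMRestoreWorkArray(dm, n, PETSC_SCALAR, &work);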
1069: PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace))
1070: {
1073: if (field >= 10) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field);
1074: dm->nullspaceConstructors[field] = nullsp;
1075: return(0);
1076: }
1080: /*@C
1081: DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field
1083: Not collective
1085: Input Parameter:
1086: . dm - the DM object
1088: Output Parameters:
1089: + numFields - The number of fields (or NULL if not requested)
1090: . fieldNames - The name for each field (or NULL if not requested)
1091: - fields - The global indices for each field (or NULL if not requested)
1093: Level: intermediate
1095: Notes:
1096: The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1097: PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with
1098: PetscFree().
1100: .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
1101: @*/
1102: PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields)
1103: {
1104: PetscSection section, sectionGlobal;
1109: if (numFields) {
1111: *numFields = 0;
1112: }
1113: if (fieldNames) {
1115: *fieldNames = NULL;
1116: }
1117: if (fields) {
1119: *fields = NULL;
1120: }
1121:   DMGetDefaultSection(dm, &section);
1122: if (section) {
1123: PetscInt *fieldSizes, **fieldIndices;
1124: PetscInt nF, f, pStart, pEnd, p;
1126:     DMGetDefaultGlobalSection(dm, &sectionGlobal);
1127: PetscSectionGetNumFields(section, &nF);
1128: PetscMalloc2(nF,&fieldSizes,nF,&fieldIndices);
1129: PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);
1130: for (f = 0; f < nF; ++f) {
1131: fieldSizes[f] = 0;
1132: }
1133: for (p = pStart; p < pEnd; ++p) {
1134: PetscInt gdof;
1136: PetscSectionGetDof(sectionGlobal, p, &gdof);
1137: if (gdof > 0) {
1138: for (f = 0; f < nF; ++f) {
1139: PetscInt fdof, fcdof;
1141: PetscSectionGetFieldDof(section, p, f, &fdof);
1142: PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);
1143: fieldSizes[f] += fdof-fcdof;
1144: }
1145: }
1146: }
1147: for (f = 0; f < nF; ++f) {
1148: PetscMalloc1(fieldSizes[f], &fieldIndices[f]);
1149: fieldSizes[f] = 0;
1150: }
1151: for (p = pStart; p < pEnd; ++p) {
1152: PetscInt gdof, goff;
1154: PetscSectionGetDof(sectionGlobal, p, &gdof);
1155: if (gdof > 0) {
1156: PetscSectionGetOffset(sectionGlobal, p, &goff);
1157: for (f = 0; f < nF; ++f) {
1158: PetscInt fdof, fcdof, fc;
1160: PetscSectionGetFieldDof(section, p, f, &fdof);
1161: PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);
1162: for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) {
1163: fieldIndices[f][fieldSizes[f]] = goff++;
1164: }
1165: }
1166: }
1167: }
1168: if (numFields) *numFields = nF;
1169: if (fieldNames) {
1170: PetscMalloc1(nF, fieldNames);
1171: for (f = 0; f < nF; ++f) {
1172: const char *fieldName;
1174: PetscSectionGetFieldName(section, f, &fieldName);
1175: PetscStrallocpy(fieldName, (char**) &(*fieldNames)[f]);
1176: }
1177: }
1178: if (fields) {
1179: PetscMalloc1(nF, fields);
1180: for (f = 0; f < nF; ++f) {
1181: ISCreateGeneral(PetscObjectComm((PetscObject)dm), fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);
1182: }
1183: }
1184: PetscFree2(fieldSizes,fieldIndices);
1185: } else if (dm->ops->createfieldis) {
1186: (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);
1187: }
1188: return(0);
1189: }
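/* Illustrative usage (added sketch, not part of the original source; dm is a
   placeholder): retrieve the per-field index sets and free everything as
   described in the Notes above. */
  PetscInt nf, f;
  char   **names;
  IS      *fields;
  DMCreateFieldIS(dm, &nf, &names, &fields);
  for (f = 0; f < nf; ++f) {
    PetscFree(names[f]);
    ISDestroy(&fields[f]);
  }
  PetscFree(names);
  PetscFree(fields);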
1194: /*@C
1195: DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems
1196: corresponding to different fields: each IS contains the global indices of the dofs of the
1197: corresponding field. The optional list of DMs define the DM for each subproblem.
1198: Generalizes DMCreateFieldIS().
1200: Not collective
1202: Input Parameter:
1203: . dm - the DM object
1205: Output Parameters:
1206: + len - The number of subproblems in the field decomposition (or NULL if not requested)
1207: . namelist - The name for each field (or NULL if not requested)
1208: . islist - The global indices for each field (or NULL if not requested)
1209: - dmlist - The DMs for each field subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined)
1211: Level: intermediate
1213: Notes:
1214: The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1215: PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1216: and all of the arrays should be freed with PetscFree().
1218: .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1219: @*/
1220: PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist)
1221: {
1226: if (len) {
1228: *len = 0;
1229: }
1230: if (namelist) {
1232: *namelist = 0;
1233: }
1234: if (islist) {
1236: *islist = 0;
1237: }
1238: if (dmlist) {
1240: *dmlist = 0;
1241: }
1242: /*
1243: Is it a good idea to apply the following check across all impls?
1244: Perhaps some impls can have a well-defined decomposition before DMSetUp?
1245: This, however, follows the general principle that accessors are not well-behaved until the object is set up.
1246: */
1247: if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp");
1248: if (!dm->ops->createfielddecomposition) {
1249: PetscSection section;
1250: PetscInt numFields, f;
1252:     DMGetDefaultSection(dm, &section);
1253: if (section) {PetscSectionGetNumFields(section, &numFields);}
1254: if (section && numFields && dm->ops->createsubdm) {
1255: *len = numFields;
1256: if (namelist) {PetscMalloc1(numFields,namelist);}
1257: if (islist) {PetscMalloc1(numFields,islist);}
1258: if (dmlist) {PetscMalloc1(numFields,dmlist);}
1259: for (f = 0; f < numFields; ++f) {
1260: const char *fieldName;
1262: DMCreateSubDM(dm, 1, &f, islist ? &(*islist)[f] : NULL, dmlist ? &(*dmlist)[f] : NULL);
1263: if (namelist) {
1264: PetscSectionGetFieldName(section, f, &fieldName);
1265: PetscStrallocpy(fieldName, (char**) &(*namelist)[f]);
1266: }
1267: }
1268: } else {
1269: DMCreateFieldIS(dm, len, namelist, islist);
1270: /* By default there are no DMs associated with subproblems. */
1271: if (dmlist) *dmlist = NULL;
1272: }
1273: } else {
1274: (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist);
1275: }
1276: return(0);
1277: }
1281: /*@C
1282: DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in.
1283: The fields are defined by DMCreateFieldIS().
1285: Not collective
1287: Input Parameters:
1288: + dm - the DM object
1289: . numFields - number of fields in this subproblem
1290: -  fields - the indices of the fields defining the subproblem
1292: Output Parameters:
1293: +  is - The global indices for the subproblem
1294: -  subdm - The DM for the subproblem
1296: Level: intermediate
1298: .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1299: @*/
1300: PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm)
1301: {
1309: if (dm->ops->createsubdm) {
1310: (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm);
1311: } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined");
1312: return(0);
1313: }
1318: /*@C
1319: DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems
1320:                           corresponding to restrictions to pairs of nested subdomains: each IS contains the global
1321: indices of the dofs of the corresponding subdomains. The inner subdomains conceptually
1322: define a nonoverlapping covering, while outer subdomains can overlap.
1323: The optional list of DMs define the DM for each subproblem.
1325: Not collective
1327: Input Parameter:
1328: . dm - the DM object
1330: Output Parameters:
1331: + len - The number of subproblems in the domain decomposition (or NULL if not requested)
1332: . namelist - The name for each subdomain (or NULL if not requested)
1333: . innerislist - The global indices for each inner subdomain (or NULL, if not requested)
1334: . outerislist - The global indices for each outer subdomain (or NULL, if not requested)
1335: - dmlist - The DMs for each subdomain subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined)
1337: Level: intermediate
1339: Notes:
1340: The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1341: PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1342: and all of the arrays should be freed with PetscFree().
1344: .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition()
1345: @*/
1346: PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist)
1347: {
1348: PetscErrorCode ierr;
1349: DMSubDomainHookLink link;
1350: PetscInt i,l;
1359: /*
1360: Is it a good idea to apply the following check across all impls?
1361: Perhaps some impls can have a well-defined decomposition before DMSetUp?
1362: This, however, follows the general principle that accessors are not well-behaved until the object is set up.
1363: */
1364: if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp");
1365: if (dm->ops->createdomaindecomposition) {
1366: (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist);
1367: /* copy subdomain hooks and context over to the subdomain DMs */
1368: if (dmlist) {
1369: for (i = 0; i < l; i++) {
1370: for (link=dm->subdomainhook; link; link=link->next) {
1371: if (link->ddhook) {(*link->ddhook)(dm,(*dmlist)[i],link->ctx);}
1372: }
1373: (*dmlist)[i]->ctx = dm->ctx;
1374: }
1375: }
1376: if (len) *len = l;
1377: }
1378: return(0);
1379: }
1384: /*@C
1385: DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector
1387: Not collective
1389: Input Parameters:
1390: + dm - the DM object
1391: . n - the number of subdomain scatters
1392: - subdms - the local subdomains
1394: Output Parameters:
1395: +  iscat - scatter from global vector to nonoverlapping global vector entries on subdomain
1396: .  oscat - scatter from global vector to overlapping global vector entries on subdomain
1397: -  gscat - scatter from global vector to local vector on subdomain (fills in ghosts)
1400: Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution
1401: of general nonlinear problems with overlapping subdomain methods. While merely having index sets that enable subsets
1402: of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of
1403: solution and residual data.
1405: Level: developer
1407: .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1408: @*/
1409: PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat)
1410: {
1416: if (dm->ops->createddscatters) {
1417: (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat);
1418: } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined");
1419: return(0);
1420: }
1424: /*@
1425: DMRefine - Refines a DM object
1427: Collective on DM
1429: Input Parameter:
1430: + dm - the DM object
1431: - comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1433: Output Parameter:
1434: . dmf - the refined DM, or NULL
1436: Note: If no refinement was done, the return value is NULL
1438: Level: developer
1440: .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1441: @*/
1442: PetscErrorCode DMRefine(DM dm,MPI_Comm comm,DM *dmf)
1443: {
1444: PetscErrorCode ierr;
1445: DMRefineHookLink link;
1449: (*dm->ops->refine)(dm,comm,dmf);
1450: if (*dmf) {
1451: (*dmf)->ops->creatematrix = dm->ops->creatematrix;
1453: PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);
1455: (*dmf)->ctx = dm->ctx;
1456: (*dmf)->leveldown = dm->leveldown;
1457: (*dmf)->levelup = dm->levelup + 1;
1459: DMSetMatType(*dmf,dm->mattype);
1460: for (link=dm->refinehook; link; link=link->next) {
1461: if (link->refinehook) {
1462: (*link->refinehook)(dm,*dmf,link->ctx);
1463: }
1464: }
1465: }
1466: return(0);
1467: }
1471: /*@C
1472: DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid
1474: Logically Collective
1476: Input Arguments:
1477: +  coarse - DM on which to run a hook when interpolating to a finer grid
1478: . refinehook - function to run when setting up a coarser level
1479: . interphook - function to run to update data on finer levels (once per SNESSolve())
1480: -  ctx - [optional] user-defined context to provide data for the hooks (may be NULL)
1482: Calling sequence of refinehook:
1483: $ refinehook(DM coarse,DM fine,void *ctx);
1485: + coarse - coarse level DM
1486: . fine - fine level DM to interpolate problem to
1487: - ctx - optional user-defined function context
1489: Calling sequence for interphook:
1490: $ interphook(DM coarse,Mat interp,DM fine,void *ctx)
1492: + coarse - coarse level DM
1493: . interp - matrix interpolating a coarse-level solution to the finer grid
1494: . fine - fine level DM to update
1495: - ctx - optional user-defined function context
1497: Level: advanced
1499: Notes:
1500: This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing
1502: If this function is called multiple times, the hooks will be run in the order they are added.
1504: This function is currently not available from Fortran.
1506: .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1507: @*/
1508: PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx)
1509: {
1510: PetscErrorCode ierr;
1511: DMRefineHookLink link,*p;
1515: for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1516: PetscMalloc(sizeof(struct _DMRefineHookLink),&link);
1517: link->refinehook = refinehook;
1518: link->interphook = interphook;
1519: link->ctx = ctx;
1520: link->next = NULL;
1521: *p = link;
1522: return(0);
1523: }
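/* Illustrative usage (added sketch, not part of the original source; dm and
   userctx are placeholders): register a hook that attaches auxiliary user data
   (here via DMSetApplicationContext()) to every DM produced by DMRefine(). */
  static PetscErrorCode MyRefineHook(DM coarse, DM fine, void *ctx)
  {
    return DMSetApplicationContext(fine, ctx);
  }
  /* ... after creating the coarse DM: */
  DMRefineHookAdd(dm, MyRefineHook, NULL, userctx);   /* userctx may be NULL */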
1527: /*@
1528: DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd()
1530: Collective if any hooks are
1532: Input Arguments:
1533: + coarse - coarser DM to use as a base
1534: .  interp - interpolation matrix, apply using MatInterpolate()
1535: - fine - finer DM to update
1537: Level: developer
1539: .seealso: DMRefineHookAdd(), MatInterpolate()
1540: @*/
1541: PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine)
1542: {
1543: PetscErrorCode ierr;
1544: DMRefineHookLink link;
1547: for (link=fine->refinehook; link; link=link->next) {
1548: if (link->interphook) {
1549: (*link->interphook)(coarse,interp,fine,link->ctx);
1550: }
1551: }
1552: return(0);
1553: }
1557: /*@
1558:     DMGetRefineLevel - Gets the number of refinements that have generated this DM.
1560: Not Collective
1562: Input Parameter:
1563: . dm - the DM object
1565: Output Parameter:
1566: . level - number of refinements
1568: Level: developer
1570: .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1572: @*/
1573: PetscErrorCode DMGetRefineLevel(DM dm,PetscInt *level)
1574: {
1577: *level = dm->levelup;
1578: return(0);
1579: }
1583: /*@C
1584: DMGlobalToLocalHookAdd - adds a callback to be run when global to local is called
1586: Logically Collective
1588: Input Arguments:
1589: + dm - the DM
1590: . beginhook - function to run at the beginning of DMGlobalToLocalBegin()
1591: . endhook - function to run after DMGlobalToLocalEnd() has completed
1592: -  ctx - [optional] user-defined context to provide data for the hooks (may be NULL)
1594: Calling sequence for beginhook:
1595: $    beginhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)
1597: + dm - global DM
1598: . g - global vector
1599: . mode - mode
1600: . l - local vector
1601: - ctx - optional user-defined function context
1604: Calling sequence for endhook:
1605: $    endhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)
1607: +  dm - global DM
1608: - ctx - optional user-defined function context
1610: Level: advanced
1612: .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1613: @*/
1614: PetscErrorCode DMGlobalToLocalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx)
1615: {
1616: PetscErrorCode ierr;
1617: DMGlobalToLocalHookLink link,*p;
1621: for (p=&dm->gtolhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1622: PetscMalloc(sizeof(struct _DMGlobalToLocalHookLink),&link);
1623: link->beginhook = beginhook;
1624: link->endhook = endhook;
1625: link->ctx = ctx;
1626: link->next = NULL;
1627: *p = link;
1628: return(0);
1629: }
1633: static PetscErrorCode DMGlobalToLocalHook_Constraints(DM dm, Vec g, InsertMode mode, Vec l, void *ctx)
1634: {
1635: Mat cMat;
1636: Vec cVec;
1637: PetscSection section, cSec;
1638: PetscInt pStart, pEnd, p, dof;
1643: DMGetDefaultConstraints(dm,&cSec,&cMat);
1644: if (cMat && (mode == INSERT_VALUES || mode == INSERT_ALL_VALUES || mode == INSERT_BC_VALUES)) {
1645:     DMGetDefaultSection(dm,&section);
1646: MatCreateVecs(cMat,NULL,&cVec);
1647: MatMult(cMat,l,cVec);
1648: PetscSectionGetChart(cSec,&pStart,&pEnd);
1649: for (p = pStart; p < pEnd; p++) {
1650: PetscSectionGetDof(cSec,p,&dof);
1651: if (dof) {
1652: PetscScalar *vals;
1653: VecGetValuesSection(cVec,cSec,p,&vals);
1654: VecSetValuesSection(l,section,p,vals,INSERT_ALL_VALUES);
1655: }
1656: }
1657: VecDestroy(&cVec);
1658: }
1659: return(0);
1660: }
1664: /*@
1665: DMGlobalToLocalBegin - Begins updating local vectors from global vector
1667: Neighbor-wise Collective on DM
1669: Input Parameters:
1670: + dm - the DM object
1671: . g - the global vector
1672: . mode - INSERT_VALUES or ADD_VALUES
1673: - l - the local vector
1676: Level: beginner
1678: .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1680: @*/
1681: PetscErrorCode DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
1682: {
1683: PetscSF sf;
1684: PetscErrorCode ierr;
1685: DMGlobalToLocalHookLink link;
1689: for (link=dm->gtolhook; link; link=link->next) {
1690: if (link->beginhook) {
1691: (*link->beginhook)(dm,g,mode,l,link->ctx);
1692: }
1693: }
1694: DMGetDefaultSF(dm, &sf);
1695: if (sf) {
1696: const PetscScalar *gArray;
1697: PetscScalar *lArray;
1699: if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1700: VecGetArray(l, &lArray);
1701: VecGetArrayRead(g, &gArray);
1702: PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);
1703: VecRestoreArray(l, &lArray);
1704: VecRestoreArrayRead(g, &gArray);
1705: } else {
1706: (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);
1707: }
1708: return(0);
1709: }
1713: /*@
1714: DMGlobalToLocalEnd - Ends updating local vectors from global vector
1716: Neighbor-wise Collective on DM
1718: Input Parameters:
1719: + dm - the DM object
1720: . g - the global vector
1721: . mode - INSERT_VALUES or ADD_VALUES
1722: - l - the local vector
1725: Level: beginner
1727: .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1729: @*/
1730: PetscErrorCode DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
1731: {
1732: PetscSF sf;
1733: PetscErrorCode ierr;
1734: const PetscScalar *gArray;
1735: PetscScalar *lArray;
1736: DMGlobalToLocalHookLink link;
1740: DMGetDefaultSF(dm, &sf);
1741: if (sf) {
1742: if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1744: VecGetArray(l, &lArray);
1745: VecGetArrayRead(g, &gArray);
1746: PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);
1747: VecRestoreArray(l, &lArray);
1748: VecRestoreArrayRead(g, &gArray);
1749: } else {
1750: (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);
1751: }
1752: DMGlobalToLocalHook_Constraints(dm,g,mode,l,NULL);
1753: for (link=dm->gtolhook; link; link=link->next) {
1754: if (link->endhook) {(*link->endhook)(dm,g,mode,l,link->ctx);}
1755: }
1756: return(0);
1757: }
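/* Illustrative usage (added sketch, not part of the original source; dm is a
   placeholder): the standard begin/end pattern for filling the ghost points of
   a local vector from a global vector. */
  Vec gvec, lvec;
  DMGetGlobalVector(dm, &gvec);
  DMGetLocalVector(dm, &lvec);
  /* ... fill gvec ... */
  DMGlobalToLocalBegin(dm, gvec, INSERT_VALUES, lvec);
  DMGlobalToLocalEnd(dm, gvec, INSERT_VALUES, lvec);
  /* ... use lvec, including its ghost points ... */
  DMRestoreLocalVector(dm, &lvec);
  DMRestoreGlobalVector(dm, &gvec);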
1761: /*@C
1762: DMLocalToGlobalHookAdd - adds a callback to be run when a local to global is called
1764: Logically Collective
1766: Input Arguments:
1767: + dm - the DM
1768: . beginhook - function to run at the beginning of DMLocalToGlobalBegin()
1769: . endhook - function to run after DMLocalToGlobalEnd() has completed
1770: -  ctx - [optional] user-defined context to provide data for the hooks (may be NULL)
1772: Calling sequence for beginhook:
1773: $ beginhook(DM fine,Vec l,InsertMode mode,Vec g,void *ctx)
1775: + dm - global DM
1776: . l - local vector
1777: . mode - mode
1778: . g - global vector
1779: - ctx - optional user-defined function context
1782: Calling sequence for endhook:
1783: $ endhook(DM fine,Vec l,InsertMode mode,Vec g,void *ctx)
1785: +  dm - global DM
1786: . l - local vector
1787: . mode - mode
1788: . g - global vector
1789: - ctx - optional user-defined function context
1791: Level: advanced
1793: .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1794: @*/
1795: PetscErrorCode DMLocalToGlobalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx)
1796: {
1797: PetscErrorCode ierr;
1798: DMLocalToGlobalHookLink link,*p;
1802: for (p=&dm->ltoghook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1803: PetscMalloc(sizeof(struct _DMLocalToGlobalHookLink),&link);
1804: link->beginhook = beginhook;
1805: link->endhook = endhook;
1806: link->ctx = ctx;
1807: link->next = NULL;
1808: *p = link;
1809: return(0);
1810: }
1814: static PetscErrorCode DMLocalToGlobalHook_Constraints(DM dm, Vec l, InsertMode mode, Vec g, void *ctx)
1815: {
1816: Mat cMat;
1817: Vec cVec;
1818: PetscSection section, cSec;
1819: PetscInt pStart, pEnd, p, dof;
1824: DMGetDefaultConstraints(dm,&cSec,&cMat);
1825: if (cMat && (mode == ADD_VALUES || mode == ADD_ALL_VALUES || mode == ADD_BC_VALUES)) {
1826:     DMGetDefaultSection(dm,&section);
1827: MatCreateVecs(cMat,NULL,&cVec);
1828: PetscSectionGetChart(cSec,&pStart,&pEnd);
1829: for (p = pStart; p < pEnd; p++) {
1830: PetscSectionGetDof(cSec,p,&dof);
1831: if (dof) {
1832: PetscInt d;
1833: PetscScalar *vals;
1834: VecGetValuesSection(l,section,p,&vals);
1835: VecSetValuesSection(cVec,cSec,p,vals,mode);
1836: /* for this to be the true transpose, we have to zero the values that
1837: * we just extracted */
1838: for (d = 0; d < dof; d++) {
1839: vals[d] = 0.;
1840: }
1841: }
1842: }
1843: MatMultTransposeAdd(cMat,cVec,l,l);
1844: VecDestroy(&cVec);
1845: }
1846: return(0);
1847: }
1851: /*@
1852: DMLocalToGlobalBegin - updates global vectors from local vectors
1854: Neighbor-wise Collective on DM
1856: Input Parameters:
1857: + dm - the DM object
1858: . l - the local vector
1859: . mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that base point.
1860: - g - the global vector
1862: Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation.
1863:            INSERT_VALUES is not supported for DMDA; in that case, simply compute the values directly into a global vector instead of a local one.
1865: Level: beginner
1867: .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()
1869: @*/
1870: PetscErrorCode DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
1871: {
1872: PetscSF sf;
1873: PetscSection s, gs;
1874: DMLocalToGlobalHookLink link;
1875: const PetscScalar *lArray;
1876: PetscScalar *gArray;
1877: PetscBool isInsert;
1878: PetscErrorCode ierr;
1882: for (link=dm->ltoghook; link; link=link->next) {
1883: if (link->beginhook) {
1884: (*link->beginhook)(dm,l,mode,g,link->ctx);
1885: }
1886: }
1887: DMLocalToGlobalHook_Constraints(dm,l,mode,g,NULL);
1888: DMGetDefaultSF(dm, &sf);
1889: DMGetDefaultSection(dm, &s);
1890: switch (mode) {
1891: case INSERT_VALUES:
1892: case INSERT_ALL_VALUES:
1893: isInsert = PETSC_TRUE; break;
1894: case ADD_VALUES:
1895: case ADD_ALL_VALUES:
1896: isInsert = PETSC_FALSE; break;
1897: default:
1898: SETERRQ1(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1899: }
1900: if (sf && !isInsert) {
1901: VecGetArrayRead(l, &lArray);
1902: VecGetArray(g, &gArray);
1903: PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, MPIU_SUM);
1904: VecRestoreArrayRead(l, &lArray);
1905: VecRestoreArray(g, &gArray);
1906: } else if (s && isInsert) {
1907: PetscInt gStart, pStart, pEnd, p;
1909: DMGetDefaultGlobalSection(dm, &gs);
1910: PetscSectionGetChart(s, &pStart, &pEnd);
1911: VecGetOwnershipRange(g, &gStart, NULL);
1912: VecGetArrayRead(l, &lArray);
1913: VecGetArray(g, &gArray);
1914: for (p = pStart; p < pEnd; ++p) {
1915: PetscInt dof, gdof, cdof, gcdof, off, goff, d, e;
1917: PetscSectionGetDof(s, p, &dof);
1918: PetscSectionGetDof(gs, p, &gdof);
1919: PetscSectionGetConstraintDof(s, p, &cdof);
1920: PetscSectionGetConstraintDof(gs, p, &gcdof);
1921: PetscSectionGetOffset(s, p, &off);
1922: PetscSectionGetOffset(gs, p, &goff);
1923: /* Ignore off-process data and points with no global data */
1924: if (!gdof || goff < 0) continue;
1925: if (dof != gdof) SETERRQ5(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Inconsistent sizes, p: %d dof: %d gdof: %d cdof: %d gcdof: %d", p, dof, gdof, cdof, gcdof);
1926: /* If no constraints are enforced in the global vector */
1927: if (!gcdof) {
1928: for (d = 0; d < dof; ++d) gArray[goff-gStart+d] = lArray[off+d];
1929: /* If constraints are enforced in the global vector */
1930: } else if (cdof == gcdof) {
1931: const PetscInt *cdofs;
1932: PetscInt cind = 0;
1934: PetscSectionGetConstraintIndices(s, p, &cdofs);
1935: for (d = 0, e = 0; d < dof; ++d) {
1936: if ((cind < cdof) && (d == cdofs[cind])) {++cind; continue;}
1937: gArray[goff-gStart+e++] = lArray[off+d];
1938: }
1939: } else SETERRQ5(PETSC_COMM_SELF, PETSC_ERR_ARG_SIZ, "Inconsistent sizes, p: %d dof: %d gdof: %d cdof: %d gcdof: %d", p, dof, gdof, cdof, gcdof);
1940: }
1941: VecRestoreArrayRead(l, &lArray);
1942: VecRestoreArray(g, &gArray);
1943: } else {
1944: (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);
1945: }
1946: return(0);
1947: }
1951: /*@
1952: DMLocalToGlobalEnd - updates global vectors from local vectors
1954: Neighbor-wise Collective on DM
1956: Input Parameters:
1957: + dm - the DM object
1958: . l - the local vector
1959: . mode - INSERT_VALUES or ADD_VALUES
1960: - g - the global vector
1963: Level: beginner
1965: .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalEnd()
1967: @*/
1968: PetscErrorCode DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
1969: {
1970: PetscSF sf;
1971: PetscSection s;
1972: DMLocalToGlobalHookLink link;
1973: PetscBool isInsert;
1974: PetscErrorCode ierr;
1978: DMGetDefaultSF(dm, &sf);
1979: DMGetDefaultSection(dm, &s);
1980: switch (mode) {
1981: case INSERT_VALUES:
1982: case INSERT_ALL_VALUES:
1983: isInsert = PETSC_TRUE; break;
1984: case ADD_VALUES:
1985: case ADD_ALL_VALUES:
1986: isInsert = PETSC_FALSE; break;
1987: default:
1988: SETERRQ1(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1989: }
1990: if (sf && !isInsert) {
1991: const PetscScalar *lArray;
1992: PetscScalar *gArray;
1994: VecGetArrayRead(l, &lArray);
1995: VecGetArray(g, &gArray);
1996: PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, MPIU_SUM);
1997: VecRestoreArrayRead(l, &lArray);
1998: VecRestoreArray(g, &gArray);
1999: } else if (s && isInsert) {
2000: } else {
2001: (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);
2002: }
2003: for (link=dm->ltoghook; link; link=link->next) {
2004:     if (link->endhook) {(*link->endhook)(dm,l,mode,g,link->ctx);}
2005: }
2006: return(0);
2007: }
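/* Illustrative usage (added sketch, not part of the original source; dm, lvec and
   gvec are placeholders): accumulate locally computed contributions into a
   global vector; with ADD_VALUES the target is normally zeroed first. */
  VecSet(gvec, 0.0);
  DMLocalToGlobalBegin(dm, lvec, ADD_VALUES, gvec);
  DMLocalToGlobalEnd(dm, lvec, ADD_VALUES, gvec);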
2011: /*@
2012: DMLocalToLocalBegin - Maps from a local vector (including ghost points
2013: that contain irrelevant values) to another local vector where the ghost
2014: points in the second are set correctly. Must be followed by DMLocalToLocalEnd().
2016: Neighbor-wise Collective on DM and Vec
2018: Input Parameters:
2019: + dm - the DM object
2020: . g - the original local vector
2021: - mode - one of INSERT_VALUES or ADD_VALUES
2023: Output Parameter:
2024: . l - the local vector with correct ghost values
2026: Level: intermediate
2028: Notes:
2029: The local vectors used here need not be the same as those
2030: obtained from DMCreateLocalVector(), BUT they
2031: must have the same parallel data layout; they could, for example, be
2032: obtained with VecDuplicate() from the DM originating vectors.
2034: .keywords: DM, local-to-local, begin
2035: .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalEnd(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
2037: @*/
2038: PetscErrorCode DMLocalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
2039: {
2040: PetscErrorCode ierr;
2044: (*dm->ops->localtolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);
2045: return(0);
2046: }
2050: /*@
2051: DMLocalToLocalEnd - Maps from a local vector (including ghost points
2052: that contain irrelevant values) to another local vector where the ghost
2053: points in the second are set correctly. Must be preceded by DMLocalToLocalBegin().
2055: Neighbor-wise Collective on DM and Vec
2057: Input Parameters:
2058: + dm - the DM object
2059: . g - the original local vector
2060: - mode - one of INSERT_VALUES or ADD_VALUES
2062: Output Parameter:
2063: . l - the local vector with correct ghost values
2065: Level: intermediate
2067: Notes:
2068: The local vectors used here need not be the same as those
2069: obtained from DMCreateLocalVector(), BUT they
2070: must have the same parallel data layout; they could, for example, be
2071: obtained with VecDuplicate() from the DM originating vectors.
2073: .keywords: DM, local-to-local, end
2074: .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalBegin(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
2076: @*/
2077: PetscErrorCode DMLocalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
2078: {
2079: PetscErrorCode ierr;
2083: (*dm->ops->localtolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);
2084: return(0);
2085: }
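/*
   Usage sketch (illustrative only): refreshing ghost values between two local
   vectors with the same layout, as described in the Notes above; "lsrc" and
   "ldst" are assumed names, e.g. obtained with VecDuplicate() of a DM local vector.

     DMLocalToLocalBegin(dm, lsrc, INSERT_VALUES, ldst);
     DMLocalToLocalEnd(dm, lsrc, INSERT_VALUES, ldst);
*/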
2090: /*@
2091: DMCoarsen - Coarsens a DM object
2093: Collective on DM
2095: Input Parameters:
2096: + dm - the DM object
2097: - comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
2099: Output Parameter:
2100: . dmc - the coarsened DM
2102: Level: developer
2104: .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2106: @*/
2107: PetscErrorCode DMCoarsen(DM dm, MPI_Comm comm, DM *dmc)
2108: {
2109: PetscErrorCode ierr;
2110: DMCoarsenHookLink link;
2114: (*dm->ops->coarsen)(dm, comm, dmc);
2115: if (!(*dmc)) SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "NULL coarse mesh produced");
2116: (*dmc)->ops->creatematrix = dm->ops->creatematrix;
2117: PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);
2118: (*dmc)->ctx = dm->ctx;
2119: (*dmc)->levelup = dm->levelup;
2120: (*dmc)->leveldown = dm->leveldown + 1;
2121: DMSetMatType(*dmc,dm->mattype);
2122: for (link=dm->coarsenhook; link; link=link->next) {
2123: if (link->coarsenhook) {(*link->coarsenhook)(dm,*dmc,link->ctx);}
2124: }
2125: return(0);
2126: }
2130: /*@C
2131: DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid
2133: Logically Collective
2135: Input Arguments:
2136: + fine - DM on which to run a hook when restricting to a coarser level
2137: . coarsenhook - function to run when setting up a coarser level
2138: . restricthook - function to run to update data on coarser levels (once per SNESSolve())
2139: - ctx - [optional] user-defined context to provide data for the hooks (may be NULL)
2141: Calling sequence of coarsenhook:
2142: $ coarsenhook(DM fine,DM coarse,void *ctx);
2144: + fine - fine level DM
2145: . coarse - coarse level DM to restrict problem to
2146: - ctx - optional user-defined function context
2148: Calling sequence for restricthook:
2149: $ restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)
2151: + fine - fine level DM
2152: . mrestrict - matrix restricting a fine-level solution to the coarse grid
2153: . rscale - scaling vector for restriction
2154: . inject - matrix restricting by injection
2155: . coarse - coarse level DM to update
2156: - ctx - optional user-defined function context
2158: Level: advanced
2160: Notes:
2161: This function is only needed if auxiliary data needs to be set up on coarse grids.
2163: If this function is called multiple times, the hooks will be run in the order they are added.
2165: In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
2166: extract the finest level information from its context (instead of from the SNES).
2168: This function is currently not available from Fortran.
2170: .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
2171: @*/
2172: PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
2173: {
2174: PetscErrorCode ierr;
2175: DMCoarsenHookLink link,*p;
2179: for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
2180: PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);
2181: link->coarsenhook = coarsenhook;
2182: link->restricthook = restricthook;
2183: link->ctx = ctx;
2184: link->next = NULL;
2185: *p = link;
2186: return(0);
2187: }
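/*
   Usage sketch (illustrative only): registering hooks with the calling sequences
   documented above; MyCoarsenHook, MyRestrictHook, and appctx are hypothetical names.

     static PetscErrorCode MyCoarsenHook(DM fine, DM coarse, void *ctx)
     {
       return 0;   e.g. attach auxiliary data to the coarse DM here
     }
     static PetscErrorCode MyRestrictHook(DM fine, Mat mrestrict, Vec rscale, Mat inject, DM coarse, void *ctx)
     {
       return 0;   e.g. restrict auxiliary fine-level data to the coarse DM here
     }
     ...
     DMCoarsenHookAdd(dm, MyCoarsenHook, MyRestrictHook, &appctx);
*/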
2191: /*@
2192: DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()
2194: Collective if any hooks are
2196: Input Arguments:
2197: + fine - finer DM to use as a base
2198: . restrct - restriction matrix, apply using MatRestrict()
. rscale - scaling vector for restriction
2199: . inject - injection matrix, also applied using MatRestrict()
2200: - coarse - coarser DM to update
2202: Level: developer
2204: .seealso: DMCoarsenHookAdd(), MatRestrict()
2205: @*/
2206: PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse)
2207: {
2208: PetscErrorCode ierr;
2209: DMCoarsenHookLink link;
2212: for (link=fine->coarsenhook; link; link=link->next) {
2213: if (link->restricthook) {
2214: (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);
2215: }
2216: }
2217: return(0);
2218: }
2222: /*@C
2223: DMSubDomainHookAdd - adds a callback to be run when restricting a problem to subdomain DMs
2225: Logically Collective
2227: Input Arguments:
2228: + global - global DM
2229: . ddhook - function to run to pass data to the decomposition DM upon its creation
2230: . restricthook - function to run to update data on block solve (at the beginning of the block solve)
2231: - ctx - [optional] user-defined context to provide data for the hooks (may be NULL)
2234: Calling sequence for ddhook:
2235: $ ddhook(DM global,DM block,void *ctx)
2237: + global - global DM
2238: . block - block DM
2239: - ctx - optional user-defined function context
2241: Calling sequence for restricthook:
2242: $ restricthook(DM global,VecScatter out,VecScatter in,DM block,void *ctx)
2244: + global - global DM
2245: . out - scatter to the outer (with ghost and overlap points) block vector
2246: . in - scatter to block vector values only owned locally
2247: . block - block DM
2248: - ctx - optional user-defined function context
2250: Level: advanced
2252: Notes:
2253: This function is only needed if auxiliary data needs to be set up on subdomain DMs.
2255: If this function is called multiple times, the hooks will be run in the order they are added.
2257: In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
2258: extract the global information from its context (instead of from the SNES).
2260: This function is currently not available from Fortran.
2262: .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
2263: @*/
2264: PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
2265: {
2266: PetscErrorCode ierr;
2267: DMSubDomainHookLink link,*p;
2271: for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
2272: PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);
2273: link->restricthook = restricthook;
2274: link->ddhook = ddhook;
2275: link->ctx = ctx;
2276: link->next = NULL;
2277: *p = link;
2278: return(0);
2279: }
2283: /*@
2284: DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd()
2286: Collective if any hooks are
2288: Input Arguments:
2289: + global - global DM to use as a base
2290: . oscatter - scatter from domain global vector filling subdomain global vector with overlap
2291: . gscatter - scatter from domain global vector filling subdomain local vector with ghosts
2292: - subdm - subdomain DM to update
2294: Level: developer
2296: .seealso: DMSubDomainHookAdd(), DMCoarsenHookAdd(), MatRestrict()
2297: @*/
2298: PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm)
2299: {
2300: PetscErrorCode ierr;
2301: DMSubDomainHookLink link;
2304: for (link=global->subdomainhook; link; link=link->next) {
2305: if (link->restricthook) {
2306: (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx);
2307: }
2308: }
2309: return(0);
2310: }
2314: /*@
2315: DMGetCoarsenLevel - Gets the number of coarsenings that have generated this DM.
2317: Not Collective
2319: Input Parameter:
2320: . dm - the DM object
2322: Output Parameter:
2323: . level - number of coarsenings
2325: Level: developer
2327: .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2329: @*/
2330: PetscErrorCode DMGetCoarsenLevel(DM dm,PetscInt *level)
2331: {
2334: *level = dm->leveldown;
2335: return(0);
2336: }
2342: /*@C
2343: DMRefineHierarchy - Refines a DM object, all levels at once
2345: Collective on DM
2347: Input Parameters:
2348: + dm - the DM object
2349: - nlevels - the number of levels of refinement
2351: Output Parameter:
2352: . dmf - the refined DM hierarchy
2354: Level: developer
2356: .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2358: @*/
2359: PetscErrorCode DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[])
2360: {
2365: if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2366: if (nlevels == 0) return(0);
2367: if (dm->ops->refinehierarchy) {
2368: (*dm->ops->refinehierarchy)(dm,nlevels,dmf);
2369: } else if (dm->ops->refine) {
2370: PetscInt i;
2372: DMRefine(dm,PetscObjectComm((PetscObject)dm),&dmf[0]);
2373: for (i=1; i<nlevels; i++) {
2374: DMRefine(dmf[i-1],PetscObjectComm((PetscObject)dm),&dmf[i]);
2375: }
2376: } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No RefineHierarchy for this DM yet");
2377: return(0);
2378: }
2382: /*@C
2383: DMCoarsenHierarchy - Coarsens a DM object, all levels at once
2385: Collective on DM
2387: Input Parameters:
2388: + dm - the DM object
2389: - nlevels - the number of levels of coarsening
2391: Output Parameter:
2392: . dmc - the coarsened DM hierarchy
2394: Level: developer
2396: .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2398: @*/
2399: PetscErrorCode DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
2400: {
2405: if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2406: if (nlevels == 0) return(0);
2408: if (dm->ops->coarsenhierarchy) {
2409: (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);
2410: } else if (dm->ops->coarsen) {
2411: PetscInt i;
2413: DMCoarsen(dm,PetscObjectComm((PetscObject)dm),&dmc[0]);
2414: for (i=1; i<nlevels; i++) {
2415: DMCoarsen(dmc[i-1],PetscObjectComm((PetscObject)dm),&dmc[i]);
2416: }
2417: } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
2418: return(0);
2419: }
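/*
   Usage sketch (illustrative only): building a hierarchy of coarser DMs;
   "NLEVELS" and the array "dmc" are hypothetical names supplied by the caller.

     DM       dmc[NLEVELS];
     PetscInt i;
     DMCoarsenHierarchy(dm, NLEVELS, dmc);
     ... use dmc[0] (first coarsening) through dmc[NLEVELS-1] ...
     for (i = 0; i < NLEVELS; i++) { DMDestroy(&dmc[i]); }
*/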
2423: /*@
2424: DMCreateAggregates - Gets the aggregates that map between
2425: grids associated with two DMs.
2427: Collective on DM
2429: Input Parameters:
2430: + dmc - the coarse grid DM
2431: - dmf - the fine grid DM
2433: Output Parameter:
2434: . rest - the restriction matrix (transpose of the projection matrix)
2436: Level: intermediate
2438: .keywords: interpolation, restriction, multigrid
2440: .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation()
2441: @*/
2442: PetscErrorCode DMCreateAggregates(DM dmc, DM dmf, Mat *rest)
2443: {
2449: (*dmc->ops->getaggregates)(dmc, dmf, rest);
2450: return(0);
2451: }
2455: /*@C
2456: DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed
2458: Not Collective
2460: Input Parameters:
2461: + dm - the DM object
2462: - destroy - the destroy function
2464: Level: intermediate
2466: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2468: @*/
2469: PetscErrorCode DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**))
2470: {
2473: dm->ctxdestroy = destroy;
2474: return(0);
2475: }
2479: /*@
2480: DMSetApplicationContext - Set a user context into a DM object
2482: Not Collective
2484: Input Parameters:
2485: + dm - the DM object
2486: - ctx - the user context
2488: Level: intermediate
2490: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2492: @*/
2493: PetscErrorCode DMSetApplicationContext(DM dm,void *ctx)
2494: {
2497: dm->ctx = ctx;
2498: return(0);
2499: }
2503: /*@
2504: DMGetApplicationContext - Gets a user context from a DM object
2506: Not Collective
2508: Input Parameter:
2509: . dm - the DM object
2511: Output Parameter:
2512: . ctx - the user context
2514: Level: intermediate
2516: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2518: @*/
2519: PetscErrorCode DMGetApplicationContext(DM dm,void *ctx)
2520: {
2523: *(void**)ctx = dm->ctx;
2524: return(0);
2525: }
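/*
   Usage sketch (illustrative only): attaching and retrieving a user context;
   "AppCtx", "user", and "check" are hypothetical names. Note that
   DMGetApplicationContext() fills in the address passed to it.

     AppCtx *user, *check;
     DMSetApplicationContext(dm, user);
     DMGetApplicationContext(dm, &check);   afterwards check == user
*/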
2529: /*@C
2530: DMSetVariableBounds - sets a function to compute the lower and upper bound vectors for SNESVI.
2532: Logically Collective on DM
2534: Input Parameters:
2535: + dm - the DM object
2536: - f - the function that computes variable bounds used by SNESVI (use NULL to cancel a previous function that was set)
2538: Level: intermediate
2540: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
2541: DMSetJacobian()
2543: @*/
2544: PetscErrorCode DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
2545: {
2547: dm->ops->computevariablebounds = f;
2548: return(0);
2549: }
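/*
   Usage sketch (illustrative only): providing bounds for SNESVI through the DM;
   "ComputeBounds" is a hypothetical function name.

     static PetscErrorCode ComputeBounds(DM dm, Vec xl, Vec xu)
     {
       VecSet(xl, 0.0);              lower bound: x >= 0
       VecSet(xu, PETSC_INFINITY);   no upper bound
       return 0;
     }
     ...
     DMSetVariableBounds(dm, ComputeBounds);
*/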
2553: /*@
2554: DMHasVariableBounds - does the DM object have a variable bounds function?
2556: Not Collective
2558: Input Parameter:
2559: . dm - the DM object
2561: Output Parameter:
2562: . flg - PETSC_TRUE if the variable bounds function exists
2564: Level: developer
2566: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2568: @*/
2569: PetscErrorCode DMHasVariableBounds(DM dm,PetscBool *flg)
2570: {
2572: *flg = (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE;
2573: return(0);
2574: }
2578: /*@C
2579: DMComputeVariableBounds - compute variable bounds used by SNESVI.
2581: Logically Collective on DM
2583: Input Parameter:
2584: . dm - the DM object
2586: Output Parameters:
2587: + xl - lower bound
2588: - xu - upper bound
2590: Level: advanced
2592: Notes: This is generally not called by users. It calls the function provided by the user with DMSetVariableBounds()
2594: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2596: @*/
2597: PetscErrorCode DMComputeVariableBounds(DM dm, Vec xl, Vec xu)
2598: {
2604: if (dm->ops->computevariablebounds) {
2605: (*dm->ops->computevariablebounds)(dm, xl,xu);
2606: } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds.");
2607: return(0);
2608: }
2612: /*@
2613: DMHasColoring - does the DM object have a method of providing a coloring?
2615: Not Collective
2617: Input Parameter:
2618: . dm - the DM object
2620: Output Parameter:
2621: . flg - PETSC_TRUE if the DM has facilities for DMCreateColoring().
2623: Level: developer
2625: .seealso DMHasFunction(), DMCreateColoring()
2627: @*/
2628: PetscErrorCode DMHasColoring(DM dm,PetscBool *flg)
2629: {
2631: *flg = (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE;
2632: return(0);
2633: }
2637: /*@C
2638: DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear.
2640: Collective on DM
2642: Input Parameter:
2643: + dm - the DM object
2644: - x - location to compute residual and Jacobian, if NULL is passed to those routines; will be NULL for linear problems.
2646: Level: developer
2648: .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2650: @*/
2651: PetscErrorCode DMSetVec(DM dm,Vec x)
2652: {
2656: if (x) {
2657: if (!dm->x) {
2658: DMCreateGlobalVector(dm,&dm->x);
2659: }
2660: VecCopy(x,dm->x);
2661: } else if (dm->x) {
2662: VecDestroy(&dm->x);
2663: }
2664: return(0);
2665: }
2667: PetscFunctionList DMList = NULL;
2668: PetscBool DMRegisterAllCalled = PETSC_FALSE;
2672: /*@C
2673: DMSetType - Builds a DM, for a particular DM implementation.
2675: Collective on DM
2677: Input Parameters:
2678: + dm - The DM object
2679: - method - The name of the DM type
2681: Options Database Key:
2682: . -dm_type <type> - Sets the DM type; use -help for a list of available types
2684: Notes:
2685: See "petsc/include/petscdm.h" for available DM types (for instance, DMDA, DMPLEX, or DMCOMPOSITE).
2687: Level: intermediate
2689: .keywords: DM, set, type
2690: .seealso: DMGetType(), DMCreate()
2691: @*/
2692: PetscErrorCode DMSetType(DM dm, DMType method)
2693: {
2694: PetscErrorCode (*r)(DM);
2695: PetscBool match;
2700: PetscObjectTypeCompare((PetscObject) dm, method, &match);
2701: if (match) return(0);
2703: DMRegisterAll();
2704: PetscFunctionListFind(DMList,method,&r);
2705: if (!r) SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);
2707: if (dm->ops->destroy) {
2708: (*dm->ops->destroy)(dm);
2709: dm->ops->destroy = NULL;
2710: }
2711: (*r)(dm);
2712: PetscObjectChangeTypeName((PetscObject)dm,method);
2713: return(0);
2714: }
2718: /*@C
2719: DMGetType - Gets the DM type name (as a string) from the DM.
2721: Not Collective
2723: Input Parameter:
2724: . dm - The DM
2726: Output Parameter:
2727: . type - The DM type name
2729: Level: intermediate
2731: .keywords: DM, get, type, name
2732: .seealso: DMSetType(), DMCreate()
2733: @*/
2734: PetscErrorCode DMGetType(DM dm, DMType *type)
2735: {
2741: DMRegisterAll();
2742: *type = ((PetscObject)dm)->type_name;
2743: return(0);
2744: }
2748: /*@C
2749: DMConvert - Converts a DM to another DM, either of the same or different type.
2751: Collective on DM
2753: Input Parameters:
2754: + dm - the DM
2755: - newtype - new DM type (use "same" for the same type)
2757: Output Parameter:
2758: . M - pointer to new DM
2760: Notes:
2761: Cannot be used to convert a sequential DM to a parallel one or vice versa;
2762: the MPI communicator of the generated DM is always the same as the communicator
2763: of the input DM.
2765: Level: intermediate
2767: .seealso: DMCreate()
2768: @*/
2769: PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
2770: {
2771: DM B;
2772: char convname[256];
2773: PetscBool sametype, issame;
2780: PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);
2781: PetscStrcmp(newtype, "same", &issame);
2782: {
2783: PetscErrorCode (*conv)(DM, DMType, DM*) = NULL;
2785: /*
2786: Order of precedence:
2787: 1) See if a specialized converter is known to the current DM.
2788: 2) See if a specialized converter is known to the desired DM class.
2789: 3) See if a good general converter is registered for the desired class
2791: 4) See if a good general converter is known for the current DM.
2791: 5) Use a really basic converter.
2792: */
2794: /* 1) See if a specialized converter is known to the current DM and the desired class */
2795: PetscStrcpy(convname,"DMConvert_");
2796: PetscStrcat(convname,((PetscObject) dm)->type_name);
2797: PetscStrcat(convname,"_");
2798: PetscStrcat(convname,newtype);
2799: PetscStrcat(convname,"_C");
2800: PetscObjectQueryFunction((PetscObject)dm,convname,&conv);
2801: if (conv) goto foundconv;
2803: /* 2) See if a specialized converter is known to the desired DM class. */
2804: DMCreate(PetscObjectComm((PetscObject)dm), &B);
2805: DMSetType(B, newtype);
2806: PetscStrcpy(convname,"DMConvert_");
2807: PetscStrcat(convname,((PetscObject) dm)->type_name);
2808: PetscStrcat(convname,"_");
2809: PetscStrcat(convname,newtype);
2810: PetscStrcat(convname,"_C");
2811: PetscObjectQueryFunction((PetscObject)B,convname,&conv);
2812: if (conv) {
2813: DMDestroy(&B);
2814: goto foundconv;
2815: }
2817: #if 0
2818: /* 3) See if a good general converter is registered for the desired class */
2819: conv = B->ops->convertfrom;
2820: DMDestroy(&B);
2821: if (conv) goto foundconv;
2823: /* 4) See if a good general converter is known for the current DM */
2824: if (dm->ops->convert) {
2825: conv = dm->ops->convert;
2826: }
2827: if (conv) goto foundconv;
2828: #endif
2830: /* 5) No specialized or general converter was found, so raise an error. */
2831: SETERRQ2(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);
2833: foundconv:
2834: PetscLogEventBegin(DM_Convert,dm,0,0,0);
2835: (*conv)(dm,newtype,M);
2836: PetscLogEventEnd(DM_Convert,dm,0,0,0);
2837: }
2838: PetscObjectStateIncrease((PetscObject) *M);
2839: return(0);
2840: }
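/*
   Usage sketch (illustrative only): converting a DM to another registered type,
   assuming a converter is available for the pair; DMPLEX is used here only as an
   example target type.

     DM dmNew;
     DMConvert(dm, DMPLEX, &dmNew);
     ... use dmNew ...
     DMDestroy(&dmNew);
*/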
2842: /*--------------------------------------------------------------------------------------------------------------------*/
2846: /*@C
2847: DMRegister - Adds a new DM component implementation
2849: Not Collective
2851: Input Parameters:
2852: + name - The name of a new user-defined creation routine
2853: - create_func - The creation routine itself
2855: Notes:
2856: DMRegister() may be called multiple times to add several user-defined DMs
2859: Sample usage:
2860: .vb
2861: DMRegister("my_da", MyDMCreate);
2862: .ve
2864: Then, your DM type can be chosen with the procedural interface via
2865: .vb
2866: DMCreate(MPI_Comm, DM *);
2867: DMSetType(DM,"my_da");
2868: .ve
2869: or at runtime via the option
2870: .vb
2871: -dm_type my_da
2872: .ve
2874: Level: advanced
2876: .keywords: DM, register
2877: .seealso: DMRegisterAll(), DMRegisterDestroy()
2879: @*/
2880: PetscErrorCode DMRegister(const char sname[],PetscErrorCode (*function)(DM))
2881: {
2885: PetscFunctionListAdd(&DMList,sname,function);
2886: return(0);
2887: }
2891: /*@C
2892: DMLoad - Loads a DM that has been stored in binary with DMView().
2894: Collective on PetscViewer
2896: Input Parameters:
2897: + newdm - the newly loaded DM, this needs to have been created with DMCreate() or
2898: some related function before a call to DMLoad().
2899: - viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
2900: HDF5 file viewer, obtained from PetscViewerHDF5Open()
2902: Level: intermediate
2904: Notes:
2905: The type is determined by the data in the file; any type set into the DM before this call is ignored.
2907: Notes for advanced users:
2908: Most users should not need to know the details of the binary storage
2909: format, since DMLoad() and DMView() completely hide these details.
2910: But for anyone who's interested, the standard binary DM storage
2911: format is
2912: .vb
2913: has not yet been determined
2914: .ve
2916: .seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
2917: @*/
2918: PetscErrorCode DMLoad(DM newdm, PetscViewer viewer)
2919: {
2920: PetscBool isbinary, ishdf5;
2926: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);
2927: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERHDF5,&ishdf5);
2928: if (isbinary) {
2929: PetscInt classid;
2930: char type[256];
2932: PetscViewerBinaryRead(viewer,&classid,1,NULL,PETSC_INT);
2933: if (classid != DM_FILE_CLASSID) SETERRQ1(PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not DM next in file, classid found %d",(int)classid);
2934: PetscViewerBinaryRead(viewer,type,256,NULL,PETSC_CHAR);
2935: DMSetType(newdm, type);
2936: if (newdm->ops->load) {(*newdm->ops->load)(newdm,viewer);}
2937: } else if (ishdf5) {
2938: if (newdm->ops->load) {(*newdm->ops->load)(newdm,viewer);}
2939: } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen() or PetscViewerHDF5Open()");
2940: return(0);
2941: }
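/*
   Usage sketch (illustrative only): loading a DM previously stored with DMView();
   "dm.bin" is a hypothetical file name.

     PetscViewer viewer;
     DM          dm;
     PetscViewerBinaryOpen(PETSC_COMM_WORLD, "dm.bin", FILE_MODE_READ, &viewer);
     DMCreate(PETSC_COMM_WORLD, &dm);
     DMLoad(dm, viewer);
     PetscViewerDestroy(&viewer);
*/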
2943: /******************************** FEM Support **********************************/
2947: PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[])
2948: {
2949: PetscInt f;
2953: PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);
2954: for (f = 0; f < len; ++f) {
2955: PetscPrintf(PETSC_COMM_SELF, " | %g |\n", (double)PetscRealPart(x[f]));
2956: }
2957: return(0);
2958: }
2962: PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[])
2963: {
2964: PetscInt f, g;
2968: PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);
2969: for (f = 0; f < rows; ++f) {
2970: PetscPrintf(PETSC_COMM_SELF, " |");
2971: for (g = 0; g < cols; ++g) {
2972: PetscPrintf(PETSC_COMM_SELF, " % 9.5g", (double)PetscRealPart(A[f*cols+g]));
2973: }
2974: PetscPrintf(PETSC_COMM_SELF, " |\n");
2975: }
2976: return(0);
2977: }
2981: PetscErrorCode DMPrintLocalVec(DM dm, const char name[], PetscReal tol, Vec X)
2982: {
2983: PetscMPIInt rank, numProcs;
2984: PetscInt p;
2988: MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);
2989: MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);
2990: PetscPrintf(PetscObjectComm((PetscObject) dm), "%s:\n", name);
2991: for (p = 0; p < numProcs; ++p) {
2992: if (p == rank) {
2993: Vec x;
2995: VecDuplicate(X, &x);
2996: VecCopy(X, x);
2997: VecChop(x, tol);
2998: VecView(x, PETSC_VIEWER_STDOUT_SELF);
2999: VecDestroy(&x);
3000: PetscViewerFlush(PETSC_VIEWER_STDOUT_SELF);
3001: }
3002: PetscBarrier((PetscObject) dm);
3003: }
3004: return(0);
3005: }
3009: /*@
3010: DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.
3012: Input Parameter:
3013: . dm - The DM
3015: Output Parameter:
3016: . section - The PetscSection
3018: Level: intermediate
3020: Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
3022: .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
3023: @*/
3024: PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section)
3025: {
3031: if (!dm->defaultSection && dm->ops->createdefaultsection) {
3032: (*dm->ops->createdefaultsection)(dm);
3033: PetscObjectViewFromOptions((PetscObject) dm->defaultSection, NULL, "-dm_petscsection_view");
3034: }
3035: *section = dm->defaultSection;
3036: return(0);
3037: }
3041: /*@
3042: DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.
3044: Input Parameters:
3045: + dm - The DM
3046: - section - The PetscSection
3048: Level: intermediate
3050: Note: Any existing Section will be destroyed
3052: .seealso: DMGetDefaultSection(), DMGetDefaultGlobalSection()
3053: @*/
3054: PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section)
3055: {
3056: PetscInt numFields = 0;
3057: PetscInt f;
3062: if (section) {
3064: PetscObjectReference((PetscObject)section);
3065: }
3066: PetscSectionDestroy(&dm->defaultSection);
3067: dm->defaultSection = section;
3068: if (section) {PetscSectionGetNumFields(dm->defaultSection, &numFields);}
3069: if (numFields) {
3070: DMSetNumFields(dm, numFields);
3071: for (f = 0; f < numFields; ++f) {
3072: PetscObject disc;
3073: const char *name;
3075: PetscSectionGetFieldName(dm->defaultSection, f, &name);
3076: DMGetField(dm, f, &disc);
3077: PetscObjectSetName(disc, name);
3078: }
3079: }
3080: /* The global section will be rebuilt in the next call to DMGetDefaultGlobalSection(). */
3081: PetscSectionDestroy(&dm->defaultGlobalSection);
3082: return(0);
3083: }
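/*
   Usage sketch (illustrative only): describing a layout with one dof per point on
   the chart [pStart, pEnd) and attaching it to the DM; the chart values are assumptions.

     PetscSection s;
     PetscInt     p;
     PetscSectionCreate(PetscObjectComm((PetscObject) dm), &s);
     PetscSectionSetChart(s, pStart, pEnd);
     for (p = pStart; p < pEnd; ++p) { PetscSectionSetDof(s, p, 1); }
     PetscSectionSetUp(s);
     DMSetDefaultSection(dm, s);
     PetscSectionDestroy(&s);   the DM keeps its own reference
*/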
3087: /*@
3088: DMGetDefaultConstraints - Get the PetscSection and Mat that specify the local constraint interpolation. See DMSetDefaultConstraints() for a description of the purpose of constraint interpolation.
3090: not collective
3092: Input Parameter:
3093: . dm - The DM
3095: Output Parameters:
3096: + section - The PetscSection describing the range of the constraint matrix: relates rows of the constraint matrix to dofs of the default section. Returns NULL if there are no local constraints.
3097: - mat - The Mat that interpolates local constraints: its width should be the layout size of the default section. Returns NULL if there are no local constraints.
3099: Level: advanced
3101: Note: This gets borrowed references, so the user should not destroy the PetscSection or the Mat.
3103: .seealso: DMSetDefaultConstraints()
3104: @*/
3105: PetscErrorCode DMGetDefaultConstraints(DM dm, PetscSection *section, Mat *mat)
3106: {
3111: if (!dm->defaultConstraintSection && !dm->defaultConstraintMat && dm->ops->createdefaultconstraints) {(*dm->ops->createdefaultconstraints)(dm);}
3112: if (section) {*section = dm->defaultConstraintSection;}
3113: if (mat) {*mat = dm->defaultConstraintMat;}
3114: return(0);
3115: }
3119: /*@
3120: DMSetDefaultConstraints - Set the PetscSection and Mat that specify the local constraint interpolation.
3122: If a constraint matrix is specified, then it is applied during DMGlobalToLocalEnd() when mode is INSERT_VALUES, INSERT_BC_VALUES, or INSERT_ALL_VALUES. Without a constraint matrix, the local vector l returned by DMGlobalToLocalEnd() contains values that have been scattered from a global vector without modification; with a constraint matrix A, l is modified by computing c = A * l, l[s[i]] = c[i], where the scatter s is defined by the PetscSection returned by DMGetDefaultConstraints().
3124: If a constraint matrix is specified, then its adjoint is applied during DMLocalToGlobalBegin() when mode is ADD_VALUES, ADD_BC_VALUES, or ADD_ALL_VALUES. Without a constraint matrix, the local vector l is accumulated into a global vector without modification; with a constraint matrix A, l is first modified by computing c[i] = l[s[i]], l[s[i]] = 0, l = l + A'*c, which is the adjoint of the operation described above.
3126: collective on dm
3128: Input Parameters:
3129: + dm - The DM
3130: . section - The PetscSection describing the range of the constraint matrix: relates rows of the constraint matrix to dofs of the default section. Must have a local communicator (PETSC_COMM_SELF or derivative).
3131: - mat - The Mat that interpolates local constraints: its width should be the layout size of the default section: NULL indicates no constraints. Must have a local communicator (PETSC_COMM_SELF or derivative).
3133: Level: advanced
3135: Note: This increments the references of the PetscSection and the Mat, so the user can destroy them
3137: .seealso: DMGetDefaultConstraints()
3138: @*/
3139: PetscErrorCode DMSetDefaultConstraints(DM dm, PetscSection section, Mat mat)
3140: {
3141: PetscMPIInt result;
3146: if (section) {
3148: MPI_Comm_compare(PETSC_COMM_SELF,PetscObjectComm((PetscObject)section),&result);
3149: if (result != MPI_CONGRUENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NOTSAMECOMM,"constraint section must have local communicator");
3150: }
3151: if (mat) {
3153: MPI_Comm_compare(PETSC_COMM_SELF,PetscObjectComm((PetscObject)mat),&result);
3154: if (result != MPI_CONGRUENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NOTSAMECOMM,"constraint matrix must have local communicator");
3155: }
3156: PetscObjectReference((PetscObject)section);
3157: PetscSectionDestroy(&dm->defaultConstraintSection);
3158: dm->defaultConstraintSection = section;
3159: PetscObjectReference((PetscObject)mat);
3160: MatDestroy(&dm->defaultConstraintMat);
3161: dm->defaultConstraintMat = mat;
3162: return(0);
3163: }
3165: #ifdef PETSC_USE_DEBUG
3168: /*
3169: DMDefaultSectionCheckConsistency_Internal - Check the consistency of the global and local sections.
3171: Input Parameters:
3172: + dm - The DM
3173: . localSection - PetscSection describing the local data layout
3174: - globalSection - PetscSection describing the global data layout
3176: Level: intermediate
3178: .seealso: DMGetDefaultSF(), DMSetDefaultSF()
3179: */
3180: static PetscErrorCode DMDefaultSectionCheckConsistency_Internal(DM dm, PetscSection localSection, PetscSection globalSection)
3181: {
3182: MPI_Comm comm;
3183: PetscLayout layout;
3184: const PetscInt *ranges;
3185: PetscInt pStart, pEnd, p, nroots;
3186: PetscMPIInt size, rank;
3187: PetscBool valid = PETSC_TRUE, gvalid;
3188: PetscErrorCode ierr;
3191: PetscObjectGetComm((PetscObject)dm,&comm);
3193: MPI_Comm_size(comm, &size);
3194: MPI_Comm_rank(comm, &rank);
3195: PetscSectionGetChart(globalSection, &pStart, &pEnd);
3196: PetscSectionGetConstrainedStorageSize(globalSection, &nroots);
3197: PetscLayoutCreate(comm, &layout);
3198: PetscLayoutSetBlockSize(layout, 1);
3199: PetscLayoutSetLocalSize(layout, nroots);
3200: PetscLayoutSetUp(layout);
3201: PetscLayoutGetRanges(layout, &ranges);
3202: for (p = pStart; p < pEnd; ++p) {
3203: PetscInt dof, cdof, off, gdof, gcdof, goff, gsize, d;
3205: PetscSectionGetDof(localSection, p, &dof);
3206: PetscSectionGetOffset(localSection, p, &off);
3207: PetscSectionGetConstraintDof(localSection, p, &cdof);
3208: PetscSectionGetDof(globalSection, p, &gdof);
3209: PetscSectionGetConstraintDof(globalSection, p, &gcdof);
3210: PetscSectionGetOffset(globalSection, p, &goff);
3211: if (!gdof) continue; /* Censored point */
3212: if ((gdof < 0 ? -(gdof+1) : gdof) != dof) {PetscSynchronizedPrintf(comm, "[%d]Global dof %d for point %d not equal to local dof %d\n", rank, gdof, p, dof); valid = PETSC_FALSE;}
3213: if (gcdof && (gcdof != cdof)) {PetscSynchronizedPrintf(comm, "[%d]Global constraints %d for point %d not equal to local constraints %d\n", rank, gcdof, p, cdof); valid = PETSC_FALSE;}
3214: if (gdof < 0) {
3215: gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
3216: for (d = 0; d < gsize; ++d) {
3217: PetscInt offset = -(goff+1) + d, r;
3219: PetscFindInt(offset,size+1,ranges,&r);
3220: if (r < 0) r = -(r+2);
3221: if ((r < 0) || (r >= size)) {PetscSynchronizedPrintf(comm, "[%d]Point %d mapped to invalid process %d (%d, %d)\n", rank, p, r, gdof, goff); valid = PETSC_FALSE;break;}
3222: }
3223: }
3224: }
3225: PetscLayoutDestroy(&layout);
3226: PetscSynchronizedFlush(comm, NULL);
3227: MPI_Allreduce(&valid, &gvalid, 1, MPIU_BOOL, MPI_LAND, comm);
3228: if (!gvalid) {
3229: DMView(dm, NULL);
3230: SETERRQ(comm, PETSC_ERR_ARG_WRONG, "Inconsistent local and global sections");
3231: }
3232: return(0);
3233: }
3234: #endif
3238: /*@
3239: DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.
3241: Collective on DM
3243: Input Parameter:
3244: . dm - The DM
3246: Output Parameter:
3247: . section - The PetscSection
3249: Level: intermediate
3251: Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
3253: .seealso: DMSetDefaultSection(), DMGetDefaultSection()
3254: @*/
3255: PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section)
3256: {
3262: if (!dm->defaultGlobalSection) {
3263: PetscSection s;
3265: DMGetDefaultSection(dm, &s);
3266: if (!s) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection in order to create a global PetscSection");
3267: if (!dm->sf) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSF in order to create a global PetscSection");
3268: PetscSectionCreateGlobalSection(s, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);
3269: PetscLayoutDestroy(&dm->map);
3270: PetscSectionGetValueLayout(PetscObjectComm((PetscObject)dm), dm->defaultGlobalSection, &dm->map);
3271: PetscSectionViewFromOptions(dm->defaultGlobalSection, NULL, "-global_section_view");
3272: }
3273: *section = dm->defaultGlobalSection;
3274: return(0);
3275: }
3279: /*@
3280: DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.
3282: Input Parameters:
3283: + dm - The DM
3284: - section - The PetscSection, or NULL
3286: Level: intermediate
3288: Note: Any existing Section will be destroyed
3290: .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection()
3291: @*/
3292: PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section)
3293: {
3299: PetscObjectReference((PetscObject)section);
3300: PetscSectionDestroy(&dm->defaultGlobalSection);
3301: dm->defaultGlobalSection = section;
3302: #ifdef PETSC_USE_DEBUG
3303: if (section) {DMDefaultSectionCheckConsistency_Internal(dm, dm->defaultSection, section);}
3304: #endif
3305: return(0);
3306: }
3310: /*@
3311: DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set,
3312: it is created from the default PetscSection layouts in the DM.
3314: Input Parameter:
3315: . dm - The DM
3317: Output Parameter:
3318: . sf - The PetscSF
3320: Level: intermediate
3322: Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
3324: .seealso: DMSetDefaultSF(), DMCreateDefaultSF()
3325: @*/
3326: PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf)
3327: {
3328: PetscInt nroots;
3334: PetscSFGetGraph(dm->defaultSF, &nroots, NULL, NULL, NULL);
3335: if (nroots < 0) {
3336: PetscSection section, gSection;
3338: DMGetDefaultSection(dm, &section);
3339: if (section) {
3340: DMGetDefaultGlobalSection(dm, &gSection);
3341: DMCreateDefaultSF(dm, section, gSection);
3342: } else {
3343: *sf = NULL;
3344: return(0);
3345: }
3346: }
3347: *sf = dm->defaultSF;
3348: return(0);
3349: }
3353: /*@
3354: DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM
3356: Input Parameters:
3357: + dm - The DM
3358: - sf - The PetscSF
3360: Level: intermediate
3362: Note: Any previous SF is destroyed
3364: .seealso: DMGetDefaultSF(), DMCreateDefaultSF()
3365: @*/
3366: PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf)
3367: {
3373: PetscSFDestroy(&dm->defaultSF);
3374: dm->defaultSF = sf;
3375: return(0);
3376: }
3380: /*@C
3381: DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections
3382: describing the data layout.
3384: Input Parameters:
3385: + dm - The DM
3386: . localSection - PetscSection describing the local data layout
3387: - globalSection - PetscSection describing the global data layout
3389: Level: intermediate
3391: .seealso: DMGetDefaultSF(), DMSetDefaultSF()
3392: @*/
3393: PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection)
3394: {
3395: MPI_Comm comm;
3396: PetscLayout layout;
3397: const PetscInt *ranges;
3398: PetscInt *local;
3399: PetscSFNode *remote;
3400: PetscInt pStart, pEnd, p, nroots, nleaves = 0, l;
3401: PetscMPIInt size, rank;
3405: PetscObjectGetComm((PetscObject)dm,&comm);
3407: MPI_Comm_size(comm, &size);
3408: MPI_Comm_rank(comm, &rank);
3409: PetscSectionGetChart(globalSection, &pStart, &pEnd);
3410: PetscSectionGetConstrainedStorageSize(globalSection, &nroots);
3411: PetscLayoutCreate(comm, &layout);
3412: PetscLayoutSetBlockSize(layout, 1);
3413: PetscLayoutSetLocalSize(layout, nroots);
3414: PetscLayoutSetUp(layout);
3415: PetscLayoutGetRanges(layout, &ranges);
3416: for (p = pStart; p < pEnd; ++p) {
3417: PetscInt gdof, gcdof;
3419: PetscSectionGetDof(globalSection, p, &gdof);
3420: PetscSectionGetConstraintDof(globalSection, p, &gcdof);
3421: if (gcdof > (gdof < 0 ? -(gdof+1) : gdof)) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Point %d has %d constraints > %d dof", p, gcdof, (gdof < 0 ? -(gdof+1) : gdof));
3422: nleaves += gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
3423: }
3424: PetscMalloc1(nleaves, &local);
3425: PetscMalloc1(nleaves, &remote);
3426: for (p = pStart, l = 0; p < pEnd; ++p) {
3427: const PetscInt *cind;
3428: PetscInt dof, cdof, off, gdof, gcdof, goff, gsize, d, c;
3430: PetscSectionGetDof(localSection, p, &dof);
3431: PetscSectionGetOffset(localSection, p, &off);
3432: PetscSectionGetConstraintDof(localSection, p, &cdof);
3433: PetscSectionGetConstraintIndices(localSection, p, &cind);
3434: PetscSectionGetDof(globalSection, p, &gdof);
3435: PetscSectionGetConstraintDof(globalSection, p, &gcdof);
3436: PetscSectionGetOffset(globalSection, p, &goff);
3437: if (!gdof) continue; /* Censored point */
3438: gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
3439: if (gsize != dof-cdof) {
3440: if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof);
3441: cdof = 0; /* Ignore constraints */
3442: }
3443: for (d = 0, c = 0; d < dof; ++d) {
3444: if ((c < cdof) && (cind[c] == d)) {++c; continue;}
3445: local[l+d-c] = off+d;
3446: }
3447: if (gdof < 0) {
3448: for (d = 0; d < gsize; ++d, ++l) {
3449: PetscInt offset = -(goff+1) + d, r;
3451: PetscFindInt(offset,size+1,ranges,&r);
3452: if (r < 0) r = -(r+2);
3453: if ((r < 0) || (r >= size)) SETERRQ4(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Point %d mapped to invalid process %d (%d, %d)", p, r, gdof, goff);
3454: remote[l].rank = r;
3455: remote[l].index = offset - ranges[r];
3456: }
3457: } else {
3458: for (d = 0; d < gsize; ++d, ++l) {
3459: remote[l].rank = rank;
3460: remote[l].index = goff+d - ranges[rank];
3461: }
3462: }
3463: }
3464: if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves);
3465: PetscLayoutDestroy(&layout);
3466: PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);
3467: return(0);
3468: }
3472: /*@
3473: DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM.
3475: Input Parameter:
3476: . dm - The DM
3478: Output Parameter:
3479: . sf - The PetscSF
3481: Level: intermediate
3483: Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
3485: .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3486: @*/
3487: PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf)
3488: {
3492: *sf = dm->sf;
3493: return(0);
3494: }
3498: /*@
3499: DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM.
3501: Input Parameters:
3502: + dm - The DM
3503: - sf - The PetscSF
3505: Level: intermediate
3507: .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3508: @*/
3509: PetscErrorCode DMSetPointSF(DM dm, PetscSF sf)
3510: {
3516: PetscSFDestroy(&dm->sf);
3517: PetscObjectReference((PetscObject) sf);
3518: dm->sf = sf;
3519: return(0);
3520: }
3524: /*@
3525: DMGetDS - Get the PetscDS
3527: Input Parameter:
3528: . dm - The DM
3530: Output Parameter:
3531: . prob - The PetscDS
3533: Level: developer
3535: .seealso: DMSetDS()
3536: @*/
3537: PetscErrorCode DMGetDS(DM dm, PetscDS *prob)
3538: {
3542: *prob = dm->prob;
3543: return(0);
3544: }
3548: /*@
3549: DMSetDS - Set the PetscDS
3551: Input Parameters:
3552: + dm - The DM
3553: - prob - The PetscDS
3555: Level: developer
3557: .seealso: DMGetDS()
3558: @*/
3559: PetscErrorCode DMSetDS(DM dm, PetscDS prob)
3560: {
3566: PetscDSDestroy(&dm->prob);
3567: dm->prob = prob;
3568: PetscObjectReference((PetscObject) dm->prob);
3569: return(0);
3570: }
3574: PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields)
3575: {
3580: PetscDSGetNumFields(dm->prob, numFields);
3581: return(0);
3582: }
3586: PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields)
3587: {
3588: PetscInt Nf, f;
3593: PetscDSGetNumFields(dm->prob, &Nf);
3594: for (f = Nf; f < numFields; ++f) {
3595: PetscContainer obj;
3597: PetscContainerCreate(PetscObjectComm((PetscObject) dm), &obj);
3598: PetscDSSetDiscretization(dm->prob, f, (PetscObject) obj);
3599: PetscContainerDestroy(&obj);
3600: }
3601: return(0);
3602: }
3606: /*@
3607: DMGetField - Return the discretization object for a given DM field
3609: Not collective
3611: Input Parameters:
3612: + dm - The DM
3613: - f - The field number
3615: Output Parameter:
3616: . field - The discretization object
3618: Level: developer
3620: .seealso: DMSetField()
3621: @*/
3622: PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field)
3623: {
3628: PetscDSGetDiscretization(dm->prob, f, field);
3629: return(0);
3630: }
3634: /*@
3635: DMSetField - Set the discretization object for a given DM field
3637: Logically collective on DM
3639: Input Parameters:
3640: + dm - The DM
3641: . f - The field number
3642: - field - The discretization object
3644: Level: developer
3646: .seealso: DMGetField()
3647: @*/
3648: PetscErrorCode DMSetField(DM dm, PetscInt f, PetscObject field)
3649: {
3654: PetscDSSetDiscretization(dm->prob, f, field);
3655: return(0);
3656: }
3660: PetscErrorCode DMRestrictHook_Coordinates(DM dm,DM dmc,void *ctx)
3661: {
3662: DM dm_coord,dmc_coord;
3664: Vec coords,ccoords;
3665: Mat inject;
3667: DMGetCoordinateDM(dm,&dm_coord);
3668: DMGetCoordinateDM(dmc,&dmc_coord);
3669: DMGetCoordinates(dm,&coords);
3670: DMGetCoordinates(dmc,&ccoords);
3671: if (coords && !ccoords) {
3672: DMCreateGlobalVector(dmc_coord,&ccoords);
3673: DMCreateInjection(dmc_coord,dm_coord,&inject);
3674: MatRestrict(inject,coords,ccoords);
3675: MatDestroy(&inject);
3676: DMSetCoordinates(dmc,ccoords);
3677: VecDestroy(&ccoords);
3678: }
3679: return(0);
3680: }
3684: static PetscErrorCode DMSubDomainHook_Coordinates(DM dm,DM subdm,void *ctx)
3685: {
3686: DM dm_coord,subdm_coord;
3688: Vec coords,ccoords,clcoords;
3689: VecScatter *scat_i,*scat_g;
3691: DMGetCoordinateDM(dm,&dm_coord);
3692: DMGetCoordinateDM(subdm,&subdm_coord);
3693: DMGetCoordinates(dm,&coords);
3694: DMGetCoordinates(subdm,&ccoords);
3695: if (coords && !ccoords) {
3696: DMCreateGlobalVector(subdm_coord,&ccoords);
3697: DMCreateLocalVector(subdm_coord,&clcoords);
3698: DMCreateDomainDecompositionScatters(dm_coord,1,&subdm_coord,NULL,&scat_i,&scat_g);
3699: VecScatterBegin(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);
3700: VecScatterBegin(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);
3701: VecScatterEnd(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);
3702: VecScatterEnd(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);
3703: DMSetCoordinates(subdm,ccoords);
3704: DMSetCoordinatesLocal(subdm,clcoords);
3705: VecScatterDestroy(&scat_i[0]);
3706: VecScatterDestroy(&scat_g[0]);
3707: VecDestroy(&ccoords);
3708: VecDestroy(&clcoords);
3709: PetscFree(scat_i);
3710: PetscFree(scat_g);
3711: }
3712: return(0);
3713: }
3717: /*@
3718: DMGetDimension - Return the topological dimension of the DM
3720: Not collective
3722: Input Parameter:
3723: . dm - The DM
3725: Output Parameter:
3726: . dim - The topological dimension
3728: Level: beginner
3730: .seealso: DMSetDimension(), DMCreate()
3731: @*/
3732: PetscErrorCode DMGetDimension(DM dm, PetscInt *dim)
3733: {
3737: *dim = dm->dim;
3738: return(0);
3739: }
3743: /*@
3744: DMSetDimension - Set the topological dimension of the DM
3746: Collective on dm
3748: Input Parameters:
3749: + dm - The DM
3750: - dim - The topological dimension
3752: Level: beginner
3754: .seealso: DMGetDimension(), DMCreate()
3755: @*/
3756: PetscErrorCode DMSetDimension(DM dm, PetscInt dim)
3757: {
3761: dm->dim = dim;
3762: return(0);
3763: }
3767: /*@
3768: DMGetDimPoints - Get the half-open interval for all points of a given dimension
3770: Collective on DM
3772: Input Parameters:
3773: + dm - the DM
3774: - dim - the dimension
3776: Output Parameters:
3777: + pStart - The first point of the given dimension
3778: - pEnd - The first point following points of the given dimension
3780: Note:
3781: The points are vertices in the Hasse diagram encoding the topology. This is explained in
3782: http://arxiv.org/abs/0908.4427. If no points exist of this dimension in the storage scheme,
3783: then the interval is empty.
3785: Level: intermediate
3787: .keywords: point, Hasse Diagram, dimension
3788: .seealso: DMPLEX, DMPlexGetDepthStratum(), DMPlexGetHeightStratum()
3789: @*/
3790: PetscErrorCode DMGetDimPoints(DM dm, PetscInt dim, PetscInt *pStart, PetscInt *pEnd)
3791: {
3792: PetscInt d;
3797: DMGetDimension(dm, &d);
3798: if ((dim < 0) || (dim > d)) SETERRQ2(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid dimension %d, must be in [0, %d]", dim, d);
3799: (*dm->ops->getdimpoints)(dm, dim, pStart, pEnd);
3800: return(0);
3801: }
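/*
   Usage sketch (illustrative only): iterating over all points of dimension 0 (vertices).

     PetscInt vStart, vEnd, v;
     DMGetDimPoints(dm, 0, &vStart, &vEnd);
     for (v = vStart; v < vEnd; ++v) {
       ... work on vertex v ...
     }
*/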
3805: /*@
3806: DMSetCoordinates - Sets into the DM a global vector that holds the coordinates
3808: Collective on DM
3810: Input Parameters:
3811: + dm - the DM
3812: - c - coordinate vector
3814: Note:
3815: The coordinates do not include those for ghost points, which are in the local vector
3817: Level: intermediate
3819: .keywords: distributed array, get, corners, nodes, local indices, coordinates
3820: .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
3821: @*/
3822: PetscErrorCode DMSetCoordinates(DM dm, Vec c)
3823: {
3829: PetscObjectReference((PetscObject) c);
3830: VecDestroy(&dm->coordinates);
3831: dm->coordinates = c;
3832: VecDestroy(&dm->coordinatesLocal);
3833: DMCoarsenHookAdd(dm,DMRestrictHook_Coordinates,NULL,NULL);
3834: DMSubDomainHookAdd(dm,DMSubDomainHook_Coordinates,NULL,NULL);
3835: return(0);
3836: }
3840: /*@
3841: DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates
3843: Collective on DM
3845: Input Parameters:
3846: + dm - the DM
3847: - c - coordinate vector
3849: Note:
3850: The coordinates of ghost points can be set using DMSetCoordinates()
3851: followed by DMGetCoordinatesLocal(). This is intended to enable the
3852: setting of ghost coordinates outside of the domain.
3854: Level: intermediate
3856: .keywords: distributed array, get, corners, nodes, local indices, coordinates
3857: .seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM()
3858: @*/
3859: PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c)
3860: {
3866: PetscObjectReference((PetscObject) c);
3867: VecDestroy(&dm->coordinatesLocal);
3869: dm->coordinatesLocal = c;
3871: VecDestroy(&dm->coordinates);
3872: return(0);
3873: }
3877: /*@
3878: DMGetCoordinates - Gets a global vector with the coordinates associated with the DM.
3880: Not Collective
3882: Input Parameter:
3883: . dm - the DM
3885: Output Parameter:
3886: . c - global coordinate vector
3888: Note:
3889: This is a borrowed reference, so the user should NOT destroy this vector
3891: Each process has only the local coordinates (does NOT have the ghost coordinates).
3893: For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3894: and (x_0,y_0,z_0,x_1,y_1,z_1...)
3896: Level: intermediate
3898: .keywords: distributed array, get, corners, nodes, local indices, coordinates
3899: .seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
3900: @*/
3901: PetscErrorCode DMGetCoordinates(DM dm, Vec *c)
3902: {
3908: if (!dm->coordinates && dm->coordinatesLocal) {
3909: DM cdm = NULL;
3911: DMGetCoordinateDM(dm, &cdm);
3912: DMCreateGlobalVector(cdm, &dm->coordinates);
3913: PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");
3914: DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);
3915: DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);
3916: }
3917: *c = dm->coordinates;
3918: return(0);
3919: }
3923: /*@
3924: DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM.
3926: Collective on DM
3928: Input Parameter:
3929: . dm - the DM
3931: Output Parameter:
3932: . c - coordinate vector
3934: Note:
3935: This is a borrowed reference, so the user should NOT destroy this vector
3937: Each process has the local and ghost coordinates
3939: For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3940: and (x_0,y_0,z_0,x_1,y_1,z_1...)
3942: Level: intermediate
3944: .keywords: distributed array, get, corners, nodes, local indices, coordinates
3945: .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM()
3946: @*/
3947: PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c)
3948: {
3954: if (!dm->coordinatesLocal && dm->coordinates) {
3955: DM cdm = NULL;
3957: DMGetCoordinateDM(dm, &cdm);
3958: DMCreateLocalVector(cdm, &dm->coordinatesLocal);
3959: PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");
3960: DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);
3961: DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);
3962: }
3963: *c = dm->coordinatesLocal;
3964: return(0);
3965: }
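/*
   Usage sketch (illustrative only): reading the coordinates owned by this process and
   the coordinates including ghost points; both vectors are borrowed references and
   must not be destroyed by the caller. "xg" and "xl" are assumed names.

     Vec xg, xl;
     DMGetCoordinates(dm, &xg);        global coordinates (no ghost points)
     DMGetCoordinatesLocal(dm, &xl);   local coordinates (includes ghost points)
*/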
3969: /*@
3970: DMGetCoordinateDM - Gets the DM that prescribes coordinate layout and scatters between global and local coordinates
3972: Collective on DM
3974: Input Parameter:
3975: . dm - the DM
3977: Output Parameter:
3978: . cdm - coordinate DM
3980: Level: intermediate
3982: .keywords: distributed array, get, corners, nodes, local indices, coordinates
3983: .seealso: DMSetCoordinateDM(), DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3984: @*/
3985: PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm)
3986: {
3992: if (!dm->coordinateDM) {
3993: if (!dm->ops->createcoordinatedm) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unable to create coordinates for this DM");
3994: (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);
3995: }
3996: *cdm = dm->coordinateDM;
3997: return(0);
3998: }
4002: /*@
4003: DMSetCoordinateDM - Sets the DM that prescribes coordinate layout and scatters between global and local coordinates
4005: Logically Collective on DM
4007: Input Parameters:
4008: + dm - the DM
4009: - cdm - coordinate DM
4011: Level: intermediate
4013: .keywords: distributed array, get, corners, nodes, local indices, coordinates
4014: .seealso: DMGetCoordinateDM(), DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
4015: @*/
4016: PetscErrorCode DMSetCoordinateDM(DM dm, DM cdm)
4017: {
4023: DMDestroy(&dm->coordinateDM);
4024: dm->coordinateDM = cdm;
4025: PetscObjectReference((PetscObject) dm->coordinateDM);
4026: return(0);
4027: }
4031: /*@
4032: DMGetCoordinateDim - Retrieve the dimension of embedding space for coordinate values.
4034: Not Collective
4036: Input Parameter:
4037: . dm - The DM object
4039: Output Parameter:
4040: . dim - The embedding dimension
4042: Level: intermediate
4044: .keywords: mesh, coordinates
4045: .seealso: DMSetCoordinateDim(), DMGetCoordinateSection(), DMGetCoordinateDM(), DMGetDefaultSection(), DMSetDefaultSection()
4046: @*/
4047: PetscErrorCode DMGetCoordinateDim(DM dm, PetscInt *dim)
4048: {
4052: if (dm->dimEmbed == PETSC_DEFAULT) {
4053: dm->dimEmbed = dm->dim;
4054: }
4055: *dim = dm->dimEmbed;
4056: return(0);
4057: }
4061: /*@
4062: DMSetCoordinateDim - Set the dimension of the embedding space for coordinate values.
4064: Not Collective
4066: Input Parameters:
4067: + dm - The DM object
4068: - dim - The embedding dimension
4070: Level: intermediate
4072: .keywords: mesh, coordinates
4073: .seealso: DMGetCoordinateDim(), DMSetCoordinateSection(), DMGetCoordinateSection(), DMGetDefaultSection(), DMSetDefaultSection()
4074: @*/
4075: PetscErrorCode DMSetCoordinateDim(DM dm, PetscInt dim)
4076: {
4079: dm->dimEmbed = dim;
4080: return(0);
4081: }
4085: /*@
4086: DMGetCoordinateSection - Retrieve the layout of coordinate values over the mesh.
4088: Not Collective
4090: Input Parameter:
4091: . dm - The DM object
4093: Output Parameter:
4094: . section - The PetscSection object
4096: Level: intermediate
4098: .keywords: mesh, coordinates
4099: .seealso: DMGetCoordinateDM(), DMGetDefaultSection(), DMSetDefaultSection()
4100: @*/
4101: PetscErrorCode DMGetCoordinateSection(DM dm, PetscSection *section)
4102: {
4103: DM cdm;
4109: DMGetCoordinateDM(dm, &cdm);
4110: DMGetDefaultSection(cdm, section);
4111: return(0);
4112: }
4116: /*@
4117: DMSetCoordinateSection - Set the layout of coordinate values over the mesh.
4119: Not Collective
4121: Input Parameters:
4122: + dm - The DM object
4123: . dim - The embedding dimension, or PETSC_DETERMINE
4124: - section - The PetscSection object
4126: Level: intermediate
4128: .keywords: mesh, coordinates
4129: .seealso: DMGetCoordinateSection(), DMGetDefaultSection(), DMSetDefaultSection()
4130: @*/
4131: PetscErrorCode DMSetCoordinateSection(DM dm, PetscInt dim, PetscSection section)
4132: {
4133: DM cdm;
4139: DMGetCoordinateDM(dm, &cdm);
4140: DMSetDefaultSection(cdm, section);
4141: if (dim == PETSC_DETERMINE) {
4142: PetscInt d = dim;
4143: PetscInt pStart, pEnd, vStart, vEnd, v, dd;
4145: PetscSectionGetChart(section, &pStart, &pEnd);
4146: DMGetDimPoints(dm, 0, &vStart, &vEnd);
4147: pStart = PetscMax(vStart, pStart);
4148: pEnd = PetscMin(vEnd, pEnd);
4149: for (v = pStart; v < pEnd; ++v) {
4150: PetscSectionGetDof(section, v, &dd);
4151: if (dd) {d = dd; break;}
4152: }
4153: DMSetCoordinateDim(dm, d);
4154: }
4155: return(0);
4156: }
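/*
   A minimal sketch of building a coordinate layout by hand and attaching it with
   DMSetCoordinateSection(): one dof per coordinate component at every vertex. The vertex
   range [vStart, vEnd) (e.g. from DMGetDimPoints(dm, 0, ...)), the DM "dm", and "ierr"
   are assumed to exist.

     PetscSection cs;
     PetscInt     dim, v;
     ierr = DMGetCoordinateDim(dm, &dim);CHKERRQ(ierr);
     ierr = PetscSectionCreate(PetscObjectComm((PetscObject) dm), &cs);CHKERRQ(ierr);
     ierr = PetscSectionSetChart(cs, vStart, vEnd);CHKERRQ(ierr);
     for (v = vStart; v < vEnd; ++v) {ierr = PetscSectionSetDof(cs, v, dim);CHKERRQ(ierr);}
     ierr = PetscSectionSetUp(cs);CHKERRQ(ierr);
     ierr = DMSetCoordinateSection(dm, PETSC_DETERMINE, cs);CHKERRQ(ierr);
     ierr = PetscSectionDestroy(&cs);CHKERRQ(ierr);
*/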
4160: /*@C
4161: DMGetPeriodicity - Get the description of mesh periodicity
4163: Input Parameter:
4164: . dm - The DM object
4165: Output Parameters:
4166: + maxCell - Over distances greater than this, we can assume a point has crossed over to another sheet, when trying to localize cell coordinates
4167: . L - If we assume the mesh is a torus, this is the length of each coordinate
4168: - bd - This describes the type of periodicity in each topological dimension
4169: Level: developer
4171: .seealso: DMSetPeriodicity()
4172: @*/
4173: PetscErrorCode DMGetPeriodicity(DM dm, const PetscReal **maxCell, const PetscReal **L, const DMBoundaryType **bd)
4174: {
4177: if (L) *L = dm->L;
4178: if (maxCell) *maxCell = dm->maxCell;
4179: if (bd) *bd = dm->bdtype;
4180: return(0);
4181: }
4185: /*@C
4186: DMSetPeriodicity - Set the description of mesh periodicity
4188: Input Parameters:
4189: + dm - The DM object
4190: . maxCell - Over distances greater than this, we can assume a point has crossed over to another sheet, when trying to localize cell coordinates
4191: . L - If we assume the mesh is a torus, this is the length of each coordinate
4192: - bd - This describes the type of periodicity in each topological dimension
4194: Level: developer
4196: .seealso: DMGetPeriodicity()
4197: @*/
4198: PetscErrorCode DMSetPeriodicity(DM dm, const PetscReal maxCell[], const PetscReal L[], const DMBoundaryType bd[])
4199: {
4200: PetscInt dim, d;
4206: PetscFree3(dm->L,dm->maxCell,dm->bdtype);
4207: DMGetDimension(dm, &dim);
4208: PetscMalloc3(dim,&dm->L,dim,&dm->maxCell,dim,&dm->bdtype);
4209: for (d = 0; d < dim; ++d) {dm->L[d] = L[d]; dm->maxCell[d] = maxCell[d]; dm->bdtype[d] = bd[d];}
4210: return(0);
4211: }
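/*
   A minimal usage sketch (assuming a 2-D DM "dm" and "ierr"): make the mesh periodic in x
   with period 1.0 and non-periodic in y; maxCell bounds the cell extent used when deciding
   whether a coordinate has wrapped around (the values below are illustrative).

     PetscReal      maxCell[2] = {0.1, 0.1};
     PetscReal      L[2]       = {1.0, 0.0};
     DMBoundaryType bd[2]      = {DM_BOUNDARY_PERIODIC, DM_BOUNDARY_NONE};
     ierr = DMSetPeriodicity(dm, maxCell, L, bd);CHKERRQ(ierr);
*/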
4215: /*@
4216: DMLocatePoints - Locate the points in v in the mesh and return an IS of the containing cells
4218: Not Collective
4220: Input Parameters:
4221: + dm - The DM
4222: - v - The Vec of points
4224: Output Parameter:
4225: . cells - The local cell numbers for cells which contain the points
4227: Level: developer
4229: .keywords: point location, mesh
4230: .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
4231: @*/
4232: PetscErrorCode DMLocatePoints(DM dm, Vec v, IS *cells)
4233: {
4240: if (dm->ops->locatepoints) {
4241: (*dm->ops->locatepoints)(dm,v,cells);
4242: } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Point location not available for this DM");
4243: return(0);
4244: }
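/*
   A minimal usage sketch (assuming a 2-D DM "dm" with point location support, e.g. DMPLEX,
   and "ierr"): locate two points and print the local numbers of the cells containing them.

     Vec         points;
     IS          cells;
     PetscScalar coords[4] = {0.1, 0.2, 0.7, 0.9};     two points, coordinates interleaved (x,y)
     ierr = VecCreateSeqWithArray(PETSC_COMM_SELF, 2, 4, coords, &points);CHKERRQ(ierr);
     ierr = DMLocatePoints(dm, points, &cells);CHKERRQ(ierr);
     ierr = ISView(cells, PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);
     ierr = ISDestroy(&cells);CHKERRQ(ierr);
     ierr = VecDestroy(&points);CHKERRQ(ierr);
*/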
4248: /*@
4249: DMGetOutputDM - Retrieve the DM associated with the layout for output
4251: Input Parameter:
4252: . dm - The original DM
4254: Output Parameter:
4255: . odm - The DM which provides the layout for output
4257: Level: intermediate
4259: .seealso: VecView(), DMGetDefaultGlobalSection()
4260: @*/
4261: PetscErrorCode DMGetOutputDM(DM dm, DM *odm)
4262: {
4263: PetscSection section;
4264: PetscBool hasConstraints;
4270: DMGetDefaultSection(dm, &section);
4271: PetscSectionHasConstraints(section, &hasConstraints);
4272: if (!hasConstraints) {
4273: *odm = dm;
4274: return(0);
4275: }
4276: if (!dm->dmBC) {
4277: PetscSection newSection, gsection;
4278: PetscSF sf;
4280: DMClone(dm, &dm->dmBC);
4281: PetscSectionClone(section, &newSection);
4282: DMSetDefaultSection(dm->dmBC, newSection);
4283: PetscSectionDestroy(&newSection);
4284: DMGetPointSF(dm->dmBC, &sf);
4285: PetscSectionCreateGlobalSection(section, sf, PETSC_TRUE, &gsection);
4286: DMSetDefaultGlobalSection(dm->dmBC, gsection);
4287: PetscSectionDestroy(&gsection);
4288: }
4289: *odm = dm->dmBC;
4290: return(0);
4291: }
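/*
   A minimal usage sketch (assuming a DM "dm" with a default section and "ierr"): fetch the
   output DM and its global section, whose layout includes constrained dofs and therefore
   matches the vectors written by VecView().

     DM           odm;
     PetscSection gs;
     ierr = DMGetOutputDM(dm, &odm);CHKERRQ(ierr);
     ierr = DMGetDefaultGlobalSection(odm, &gs);CHKERRQ(ierr);
*/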
4295: /*@
4296: DMGetOutputSequenceNumber - Retrieve the sequence number/value for output
4298: Input Parameter:
4299: . dm - The original DM
4301: Output Parameters:
4302: + num - The output sequence number
4303: - val - The output sequence value
4305: Level: intermediate
4307: Note: This is intended for output that should appear in sequence, for instance
4308: a set of timesteps in an HDF5 file, or a set of realizations of a stochastic system.
4310: .seealso: VecView()
4311: @*/
4312: PetscErrorCode DMGetOutputSequenceNumber(DM dm, PetscInt *num, PetscReal *val)
4313: {
4316: if (num) *num = dm->outputSequenceNum;
4317: if (val) *val = dm->outputSequenceVal;
4318: return(0);
4319: }
4323: /*@
4324: DMSetOutputSequenceNumber - Set the sequence number/value for output
4326: Input Parameters:
4327: + dm - The original DM
4328: . num - The output sequence number
4329: - val - The output sequence value
4331: Level: intermediate
4333: Note: This is intended for output that should appear in sequence, for instance
4334: a set of timesteps in an HDF5 file, or a set of realizations of a stochastic system.
4336: .seealso: VecView()
4337: @*/
4338: PetscErrorCode DMSetOutputSequenceNumber(DM dm, PetscInt num, PetscReal val)
4339: {
4342: dm->outputSequenceNum = num;
4343: dm->outputSequenceVal = val;
4344: return(0);
4345: }
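/*
   A minimal usage sketch (assuming a DM "dm" attached to a solution Vec "u", an HDF5 viewer
   "viewer" from PetscViewerHDF5Open(), and "ierr"): tag each state written to the file with
   its step number and time so that the file holds a sequence.

     for (step = 0; step < nsteps; ++step) {
       (advance u to time t here)
       ierr = DMSetOutputSequenceNumber(dm, step, t);CHKERRQ(ierr);
       ierr = VecView(u, viewer);CHKERRQ(ierr);
     }
*/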
4349: /*@C
4350: DMOutputSequenceLoad - Retrieve the sequence value from a Viewer
4352: Input Parameters:
4353: + dm - The original DM
4354: . viewer - The viewer containing the sequence, for example an HDF5 viewer
4355: . name - The sequence name
4356: - num - The output sequence number
4357: Output Parameter:
4358: . val - The output sequence value
4360: Level: intermediate
4362: Note: This is intended for output that should appear in sequence, for instance
4363: a set of timesteps in an HDF5 file, or a set of realizations of a stochastic system.
4365: .seealso: DMGetOutputSequenceNumber(), DMSetOutputSequenceNumber(), VecView()
4366: @*/
4367: PetscErrorCode DMOutputSequenceLoad(DM dm, PetscViewer viewer, const char *name, PetscInt num, PetscReal *val)
4368: {
4369: PetscBool ishdf5;
4376: PetscObjectTypeCompare((PetscObject) viewer, PETSCVIEWERHDF5, &ishdf5);
4377: if (ishdf5) {
4378: #if defined(PETSC_HAVE_HDF5)
4379: PetscScalar value;
4381: DMSequenceLoad_HDF5(dm, name, num, &value, viewer);
4382: *val = PetscRealPart(value);
4383: #endif
4384: } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Invalid viewer; open viewer with PetscViewerHDF5Open()");
4385: return(0);
4386: }
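/*
   A minimal usage sketch (assuming PETSc was built with HDF5, a DM "dm", and "ierr"): read back
   the sequence value stored for step 3. The file name "sol.h5" and the sequence name "time"
   are assumptions about how the data was written.

     PetscViewer viewer;
     PetscReal   t;
     ierr = PetscViewerHDF5Open(PETSC_COMM_WORLD, "sol.h5", FILE_MODE_READ, &viewer);CHKERRQ(ierr);
     ierr = DMOutputSequenceLoad(dm, viewer, "time", 3, &t);CHKERRQ(ierr);
     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
*/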