Actual source code: plexexodusii.c
1: #define PETSCDM_DLL
2: #include <petsc/private/dmpleximpl.h>
4: #if defined(PETSC_HAVE_EXODUSII)
5: #include <netcdf.h>
6: #include <exodusII.h>
7: #endif
9: #include <petsc/private/viewerimpl.h>
10: #include <petsc/private/viewerexodusiiimpl.h>
11: #if defined(PETSC_HAVE_EXODUSII)
12: /*
13: PETSC_VIEWER_EXODUSII_ - Creates an ExodusII PetscViewer shared by all processors in a communicator.
15: Collective
17: Input Parameter:
18: . comm - the MPI communicator to share the ExodusII PetscViewer
20: Level: intermediate
22: Notes:
23: This routine has no Fortran bindings.
26: Unlike almost all other PETSc routines, PETSC_VIEWER_EXODUSII_ does not return
27: an error code. The ExodusII PetscViewer is usually used in the form
28: $ XXXView(XXX object, PETSC_VIEWER_EXODUSII_(comm));
30: .seealso: PetscViewerExodusIIOpen(), PetscViewerType, PetscViewerCreate(), PetscViewerDestroy()
31: */
32: PetscViewer PETSC_VIEWER_EXODUSII_(MPI_Comm comm)
33: {
34: PetscViewer viewer;
38: PetscViewerExodusIIOpen(comm, "mesh.exo", FILE_MODE_WRITE, &viewer);
39: if (ierr) {PetscError(PETSC_COMM_SELF,__LINE__,"PETSC_VIEWER_EXODUSII_",__FILE__,PETSC_ERR_PLIB,PETSC_ERROR_INITIAL," ");return(0);}
40: PetscObjectRegisterDestroy((PetscObject) viewer);
41: if (ierr) {PetscError(PETSC_COMM_SELF,__LINE__,"PETSC_VIEWER_EXODUSII_",__FILE__,PETSC_ERR_PLIB,PETSC_ERROR_INITIAL," ");return(0);}
42: PetscFunctionReturn(viewer);
43: }
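/*
  Example (illustrative sketch, not part of the original source): the one-line form above can be
  used to dump an existing DM "dm" to the default file "mesh.exo" without managing a viewer:

    DMView(dm, PETSC_VIEWER_EXODUSII_(PETSC_COMM_WORLD));
*/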
45: static PetscErrorCode PetscViewerView_ExodusII(PetscViewer v, PetscViewer viewer)
46: {
47: PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *) v->data;
48: PetscErrorCode ierr;
51: if (exo->filename) {PetscViewerASCIIPrintf(viewer, "Filename: %s\n", exo->filename);}
52: if (exo->exoid) {PetscViewerASCIIPrintf(viewer, "exoid: %d\n", exo->exoid) ;}
53: if (exo->btype) {PetscViewerASCIIPrintf(viewer, "IO Mode: %d\n", exo->btype) ;}
54: if (exo->order) {PetscViewerASCIIPrintf(viewer, "Mesh order: %d\n", exo->order) ;}
55: return(0);
56: }
58: static PetscErrorCode PetscViewerSetFromOptions_ExodusII(PetscOptionItems *PetscOptionsObject, PetscViewer v)
59: {
63: PetscOptionsHead(PetscOptionsObject, "ExodusII PetscViewer Options");
64: PetscOptionsTail();
65: return(0);
66: }
68: static PetscErrorCode PetscViewerSetUp_ExodusII(PetscViewer viewer)
69: {
71: return(0);
72: }
74: static PetscErrorCode PetscViewerDestroy_ExodusII(PetscViewer viewer)
75: {
76: PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *) viewer->data;
77: PetscErrorCode ierr;
80: if (exo->exoid >= 0) {PetscStackCallStandard(ex_close,(exo->exoid));}
81: PetscFree(exo->filename);
82: PetscFree(exo);
83: PetscObjectComposeFunction((PetscObject)viewer,"PetscViewerFileSetName_C",NULL);
84: PetscObjectComposeFunction((PetscObject)viewer,"PetscViewerFileGetName_C",NULL);
85: PetscObjectComposeFunction((PetscObject)viewer,"PetscViewerFileSetMode_C",NULL);
86: PetscObjectComposeFunction((PetscObject)viewer,"PetscViewerGetId_C",NULL);
87: PetscObjectComposeFunction((PetscObject)viewer,"PetscViewerGetOrder_C",NULL);
88: PetscObjectComposeFunction((PetscObject)viewer,"PetscViewerSetOrder_C",NULL);
89: return(0);
90: }
92: static PetscErrorCode PetscViewerFileSetName_ExodusII(PetscViewer viewer, const char name[])
93: {
94: PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *) viewer->data;
95: PetscMPIInt rank;
96: int CPU_word_size, IO_word_size, EXO_mode;
97: PetscErrorCode ierr;
98: MPI_Info mpi_info = MPI_INFO_NULL;
99: float EXO_version;
101: MPI_Comm_rank(PetscObjectComm((PetscObject) viewer), &rank);
102: CPU_word_size = sizeof(PetscReal);
103: IO_word_size = sizeof(PetscReal);
106: if (exo->exoid >= 0) {
107: PetscStackCallStandard(ex_close,(exo->exoid));
108: exo->exoid = -1;
109: }
110: if (exo->filename) {PetscFree(exo->filename);}
111: PetscStrallocpy(name, &exo->filename);
112: switch (exo->btype) {
113: case FILE_MODE_READ:
114: EXO_mode = EX_READ;
115: break;
116: case FILE_MODE_APPEND:
117: case FILE_MODE_UPDATE:
118: case FILE_MODE_APPEND_UPDATE:
119: /* Will fail if the file does not already exist */
120: EXO_mode = EX_WRITE;
121: break;
122: case FILE_MODE_WRITE:
123: /*
124: exodus only allows writing geometry upon file creation, so we will let DMView create the file.
125: */
126: return(0);
127: break;
128: default:
129: SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ORDER, "Must call PetscViewerFileSetMode() before PetscViewerFileSetName()");
130: }
131: #if defined(PETSC_USE_64BIT_INDICES)
132: EXO_mode += EX_ALL_INT64_API;
133: #endif
134: exo->exoid = ex_open_par(name,EXO_mode,&CPU_word_size,&IO_word_size,&EXO_version,PETSC_COMM_WORLD,mpi_info);
135: if (exo->exoid < 0) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "ex_open_par failed for %s", name);
136: return(0);
137: }
139: static PetscErrorCode PetscViewerFileGetName_ExodusII(PetscViewer viewer, const char **name)
140: {
141: PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *) viewer->data;
144: *name = exo->filename;
145: return(0);
146: }
148: static PetscErrorCode PetscViewerFileSetMode_ExodusII(PetscViewer viewer, PetscFileMode type)
149: {
150: PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *) viewer->data;
153: exo->btype = type;
154: return(0);
155: }
157: static PetscErrorCode PetscViewerFileGetMode_ExodusII(PetscViewer viewer, PetscFileMode *type)
158: {
159: PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *) viewer->data;
162: *type = exo->btype;
163: return(0);
164: }
166: static PetscErrorCode PetscViewerExodusIIGetId_ExodusII(PetscViewer viewer, int *exoid)
167: {
168: PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *) viewer->data;
171: *exoid = exo->exoid;
172: return(0);
173: }
175: static PetscErrorCode PetscViewerExodusIIGetOrder_ExodusII(PetscViewer viewer, PetscInt *order)
176: {
177: PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *) viewer->data;
180: *order = exo->order;
181: return(0);
182: }
184: static PetscErrorCode PetscViewerExodusIISetOrder_ExodusII(PetscViewer viewer, PetscInt order)
185: {
186: PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *) viewer->data;
189: exo->order = order;
190: return(0);
191: }
193: /*MC
194: PETSCVIEWEREXODUSII - A viewer that writes to an Exodus II file
196: .seealso: PetscViewerExodusIIOpen(), PetscViewerCreate(), PETSCVIEWERBINARY, PETSCVIEWERHDF5, DMView(),
197: PetscViewerFileSetName(), PetscViewerFileSetMode(), PetscViewerFormat, PetscViewerType, PetscViewerSetType()
199: Level: beginner
200: M*/
202: PETSC_EXTERN PetscErrorCode PetscViewerCreate_ExodusII(PetscViewer v)
203: {
204: PetscViewer_ExodusII *exo;
205: PetscErrorCode ierr;
208: PetscNewLog(v,&exo);
210: v->data = (void*) exo;
211: v->ops->destroy = PetscViewerDestroy_ExodusII;
212: v->ops->setfromoptions = PetscViewerSetFromOptions_ExodusII;
213: v->ops->setup = PetscViewerSetUp_ExodusII;
214: v->ops->view = PetscViewerView_ExodusII;
215: v->ops->flush = 0;
216: exo->btype = (PetscFileMode) -1;
217: exo->filename = 0;
218: exo->exoid = -1;
220: PetscObjectComposeFunction((PetscObject)v,"PetscViewerFileSetName_C",PetscViewerFileSetName_ExodusII);
221: PetscObjectComposeFunction((PetscObject)v,"PetscViewerFileGetName_C",PetscViewerFileGetName_ExodusII);
222: PetscObjectComposeFunction((PetscObject)v,"PetscViewerFileSetMode_C",PetscViewerFileSetMode_ExodusII);
223: PetscObjectComposeFunction((PetscObject)v,"PetscViewerFileGetMode_C",PetscViewerFileGetMode_ExodusII);
224: PetscObjectComposeFunction((PetscObject)v,"PetscViewerGetId_C",PetscViewerExodusIIGetId_ExodusII);
225: PetscObjectComposeFunction((PetscObject)v,"PetscViewerSetOrder_C",PetscViewerExodusIISetOrder_ExodusII);
226: PetscObjectComposeFunction((PetscObject)v,"PetscViewerGetOrder_C",PetscViewerExodusIIGetOrder_ExodusII);
227: return(0);
228: }
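/*
  Example (illustrative sketch, assumed file name): instead of PetscViewerExodusIIOpen(), the
  viewer can be assembled step by step, mirroring the composed methods registered above:

    PetscViewer viewer;
    PetscViewerCreate(PETSC_COMM_WORLD, &viewer);
    PetscViewerSetType(viewer, PETSCVIEWEREXODUSII);
    PetscViewerFileSetMode(viewer, FILE_MODE_WRITE);
    PetscViewerFileSetName(viewer, "mesh.exo");
*/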
230: /*
231: EXOGetVarIndex - Locate a result in an exodus file based on its name
233: Collective
235: Input Parameters:
236: + exoid - the exodus id of a file (obtained from ex_open or ex_create for instance)
237: . obj_type - the type of entity for instance EX_NODAL, EX_ELEM_BLOCK
238: - name - the name of the result
240: Output Parameters:
241: . varIndex - the location in the exodus file of the result
243: Notes:
244: The exodus variable index is obtained by comparing name with the
245: names of the variables declared in the exodus file. For instance, if name is "V",
246: the location in the exodus file will be the first match of "V", "V_X", "V_XX", "V_1", or "V_11"
247: amongst all variables of type obj_type.
249: Level: beginner
251: .seealso: EXOGetVarIndex(),DMPlexView_ExodusII_Internal(),VecViewPlex_ExodusII_Nodal_Internal(),VecLoadNodal_PlexEXO(),VecLoadZonal_PlexEXO()
252: */
253: PetscErrorCode EXOGetVarIndex_Internal(int exoid, ex_entity_type obj_type, const char name[], int *varIndex)
254: {
255: int num_vars, i, j;
256: char ext_name[MAX_STR_LENGTH+1], var_name[MAX_STR_LENGTH+1];
257: const int num_suffix = 5;
258: char *suffix[5];
259: PetscBool flg;
263: suffix[0] = (char *) "";
264: suffix[1] = (char *) "_X";
265: suffix[2] = (char *) "_XX";
266: suffix[3] = (char *) "_1";
267: suffix[4] = (char *) "_11";
269: *varIndex = -1;
270: PetscStackCallStandard(ex_get_variable_param,(exoid, obj_type, &num_vars));
271: for (i = 0; i < num_vars; ++i) {
272: PetscStackCallStandard(ex_get_variable_name,(exoid, obj_type, i+1, var_name));
273: for (j = 0; j < num_suffix; ++j) {
274: PetscStrncpy(ext_name, name, MAX_STR_LENGTH);
275: PetscStrlcat(ext_name, suffix[j], MAX_STR_LENGTH);
276: PetscStrcasecmp(ext_name, var_name, &flg);
277: if (flg) {
278: *varIndex = i+1;
279: }
280: }
281: }
282: return(0);
283: }
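/*
  Example (illustrative sketch, assumed variable names): locating the index of a nodal variable
  whose components were declared in the file as "U_X", "U_Y", "U_Z":

    int idx = -1;
    EXOGetVarIndex_Internal(exoid, EX_NODAL, "U", &idx);
    if (idx < 0) ... the variable is not declared in the file ...
*/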
285: /*
286: DMView_PlexExodusII - Write a DM to disk in exodus format
288: Collective on dm
290: Input Parameters:
291: + dm - The dm to be written
292: - viewer - an exodusII viewer
294: Notes:
295: Not all DM can be written to disk this way. For instance, exodus assumes that element blocks (mapped to "Cell sets" labels)
296: consist of sequentially numbered cells. If this is not the case, the exodus file will be corrupted.
298: If the dm has been distributed, only the part of the DM on MPI rank 0 (including "ghost" cells and vertices)
299: will be written.
301: DMPlex only represents geometry while most post-processing software expects that a mesh also provides information
302: on the discretization space. This function assumes that the file represents Lagrange finite elements of order 1 or 2.
303: The order of the mesh shall be set using PetscViewerExodusIISetOrder().
304: It should be extended to use PetscFE objects.
306: This function will only handle TRI, TET, QUAD, and HEX cells.
307: Level: beginner
309: .seealso:
310: */
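/*
  Typical write sequence (a sketch under assumed names, not taken verbatim from PETSc): create an
  ExodusII viewer in FILE_MODE_WRITE, choose the element order, and let DMView() create the file
  and write the geometry:

    PetscViewer viewer;
    PetscViewerExodusIIOpen(PETSC_COMM_WORLD, "mesh.exo", FILE_MODE_WRITE, &viewer);
    PetscViewerExodusIISetOrder(viewer, 2);       (use 1 for P1, 2 for P2 Lagrange geometry)
    DMView(dm, viewer);                           (ends up in DMView_PlexExodusII for a DMPlex)
    PetscViewerDestroy(&viewer);
*/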
311: PetscErrorCode DMView_PlexExodusII(DM dm, PetscViewer viewer)
312: {
313: enum ElemType {TRI, QUAD, TET, HEX};
314: MPI_Comm comm;
315: PetscInt degree; /* the order of the mesh */
316: /* Connectivity Variables */
317: PetscInt cellsNotInConnectivity;
318: /* Cell Sets */
319: DMLabel csLabel;
320: IS csIS;
321: const PetscInt *csIdx;
322: PetscInt num_cs, cs;
323: enum ElemType *type;
324: PetscBool hasLabel;
325: /* Coordinate Variables */
326: DM cdm;
327: PetscSection coordSection;
328: Vec coord;
329: PetscInt **nodes;
330: PetscInt depth, d, dim, skipCells = 0;
331: PetscInt pStart, pEnd, p, cStart, cEnd, numCells, vStart, vEnd, numVertices, eStart, eEnd, numEdges, fStart, fEnd, numFaces, numNodes;
332: PetscInt num_vs, num_fs;
333: PetscMPIInt rank, size;
334: const char *dmName;
335: PetscInt nodesTriP1[4] = {3,0,0,0};
336: PetscInt nodesTriP2[4] = {3,3,0,0};
337: PetscInt nodesQuadP1[4] = {4,0,0,0};
338: PetscInt nodesQuadP2[4] = {4,4,0,1};
339: PetscInt nodesTetP1[4] = {4,0,0,0};
340: PetscInt nodesTetP2[4] = {4,6,0,0};
341: PetscInt nodesHexP1[4] = {8,0,0,0};
342: PetscInt nodesHexP2[4] = {8,12,6,1};
343: PetscErrorCode ierr;
345: int CPU_word_size, IO_word_size, EXO_mode;
346: float EXO_version;
348: PetscViewer_ExodusII *exo = (PetscViewer_ExodusII *) viewer->data;
351: PetscObjectGetComm((PetscObject)dm, &comm);
352: MPI_Comm_rank(comm, &rank);
353: MPI_Comm_size(comm, &size);
355: /*
356: Creating coordSection is a collective operation so we do it somewhat out of sequence
357: */
358: PetscSectionCreate(comm, &coordSection);
359: DMGetCoordinatesLocalSetUp(dm);
360: if (!rank) {
361: switch (exo->btype) {
362: case FILE_MODE_READ:
363: case FILE_MODE_APPEND:
364: case FILE_MODE_UPDATE:
365: case FILE_MODE_APPEND_UPDATE:
366: /* exodusII does not allow writing geometry to an existing file */
367: SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "cannot add geometry to existing file %s", exo->filename);
368: case FILE_MODE_WRITE:
369: /* Create an empty file if one already exists*/
370: EXO_mode = EX_CLOBBER;
371: #if defined(PETSC_USE_64BIT_INDICES)
372: EXO_mode += EX_ALL_INT64_API;
373: #endif
374: CPU_word_size = sizeof(PetscReal);
375: IO_word_size = sizeof(PetscReal);
376: exo->exoid = ex_create(exo->filename, EXO_mode, &CPU_word_size, &IO_word_size);
377: if (exo->exoid < 0) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "ex_create failed for %s", exo->filename);
379: break;
380: default:
381: SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ORDER, "Must call PetscViewerFileSetMode() before PetscViewerFileSetName()");
382: }
384: /* --- Get DM info --- */
385: PetscObjectGetName((PetscObject) dm, &dmName);
386: DMPlexGetDepth(dm, &depth);
387: DMGetDimension(dm, &dim);
388: DMPlexGetChart(dm, &pStart, &pEnd);
389: DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);
390: DMPlexGetHeightStratum(dm, 1, &fStart, &fEnd);
391: DMPlexGetDepthStratum(dm, 1, &eStart, &eEnd);
392: DMPlexGetDepthStratum(dm, 0, &vStart, &vEnd);
393: numCells = cEnd - cStart;
394: numEdges = eEnd - eStart;
395: numVertices = vEnd - vStart;
396: if (depth == 3) {numFaces = fEnd - fStart;}
397: else {numFaces = 0;}
398: DMGetLabelSize(dm, "Cell Sets", &num_cs);
399: DMGetLabelSize(dm, "Vertex Sets", &num_vs);
400: DMGetLabelSize(dm, "Face Sets", &num_fs);
401: DMGetCoordinatesLocal(dm, &coord);
402: DMGetCoordinateDM(dm, &cdm);
403: if (num_cs > 0) {
404: DMGetLabel(dm, "Cell Sets", &csLabel);
405: DMLabelGetValueIS(csLabel, &csIS);
406: ISGetIndices(csIS, &csIdx);
407: }
408: PetscMalloc1(num_cs, &nodes);
409: /* Set element type for each block and compute total number of nodes */
410: PetscMalloc1(num_cs, &type);
411: numNodes = numVertices;
413: PetscViewerExodusIIGetOrder(viewer, &degree);
414: if (degree == 2) {numNodes += numEdges;}
415: cellsNotInConnectivity = numCells;
416: for (cs = 0; cs < num_cs; ++cs) {
417: IS stratumIS;
418: const PetscInt *cells;
419: PetscScalar *xyz = NULL;
420: PetscInt csSize, closureSize;
422: DMLabelGetStratumIS(csLabel, csIdx[cs], &stratumIS);
423: ISGetIndices(stratumIS, &cells);
424: ISGetSize(stratumIS, &csSize);
425: DMPlexVecGetClosure(cdm, NULL, coord, cells[0], &closureSize, &xyz);
426: switch (dim) {
427: case 2:
428: if (closureSize == 3*dim) {type[cs] = TRI;}
429: else if (closureSize == 4*dim) {type[cs] = QUAD;}
430: else SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of vertices %D in dimension %D has no ExodusII type", closureSize/dim, dim);
431: break;
432: case 3:
433: if (closureSize == 4*dim) {type[cs] = TET;}
434: else if (closureSize == 8*dim) {type[cs] = HEX;}
435: else SETERRQ2(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Number of vertices %D in dimension %D has no ExodusII type", closureSize/dim, dim);
436: break;
437: default: SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Dimension %D not handled by ExodusII viewer", dim);
438: }
439: if ((degree == 2) && (type[cs] == QUAD)) {numNodes += csSize;}
440: if ((degree == 2) && (type[cs] == HEX)) {numNodes += csSize; numNodes += numFaces;}
441: DMPlexVecRestoreClosure(cdm, NULL, coord, cells[0], &closureSize, &xyz);
442: /* Set nodes and Element type */
443: if (type[cs] == TRI) {
444: if (degree == 1) nodes[cs] = nodesTriP1;
445: else if (degree == 2) nodes[cs] = nodesTriP2;
446: } else if (type[cs] == QUAD) {
447: if (degree == 1) nodes[cs] = nodesQuadP1;
448: else if (degree == 2) nodes[cs] = nodesQuadP2;
449: } else if (type[cs] == TET) {
450: if (degree == 1) nodes[cs] = nodesTetP1;
451: else if (degree == 2) nodes[cs] = nodesTetP2;
452: } else if (type[cs] == HEX) {
453: if (degree == 1) nodes[cs] = nodesHexP1;
454: else if (degree == 2) nodes[cs] = nodesHexP2;
455: }
456: /* Compute the number of cells not in the connectivity table */
457: cellsNotInConnectivity -= nodes[cs][3]*csSize;
459: ISRestoreIndices(stratumIS, &cells);
460: ISDestroy(&stratumIS);
461: }
462: if (num_cs > 0) {PetscStackCallStandard(ex_put_init,(exo->exoid, dmName, dim, numNodes, numCells, num_cs, num_vs, num_fs));}
463: /* --- Connectivity --- */
464: for (cs = 0; cs < num_cs; ++cs) {
465: IS stratumIS;
466: const PetscInt *cells;
467: PetscInt *connect, off = 0;
468: PetscInt edgesInClosure = 0, facesInClosure = 0, verticesInClosure = 0;
469: PetscInt csSize, c, connectSize, closureSize;
470: char *elem_type = NULL;
471: char elem_type_tri3[] = "TRI3", elem_type_quad4[] = "QUAD4";
472: char elem_type_tri6[] = "TRI6", elem_type_quad9[] = "QUAD9";
473: char elem_type_tet4[] = "TET4", elem_type_hex8[] = "HEX8";
474: char elem_type_tet10[] = "TET10", elem_type_hex27[] = "HEX27";
476: DMLabelGetStratumIS(csLabel, csIdx[cs], &stratumIS);
477: ISGetIndices(stratumIS, &cells);
478: ISGetSize(stratumIS, &csSize);
479: /* Set Element type */
480: if (type[cs] == TRI) {
481: if (degree == 1) elem_type = elem_type_tri3;
482: else if (degree == 2) elem_type = elem_type_tri6;
483: } else if (type[cs] == QUAD) {
484: if (degree == 1) elem_type = elem_type_quad4;
485: else if (degree == 2) elem_type = elem_type_quad9;
486: } else if (type[cs] == TET) {
487: if (degree == 1) elem_type = elem_type_tet4;
488: else if (degree == 2) elem_type = elem_type_tet10;
489: } else if (type[cs] == HEX) {
490: if (degree == 1) elem_type = elem_type_hex8;
491: else if (degree == 2) elem_type = elem_type_hex27;
492: }
493: connectSize = nodes[cs][0] + nodes[cs][1] + nodes[cs][2] + nodes[cs][3];
494: PetscMalloc1(PetscMax(27,connectSize)*csSize, &connect);
495: PetscStackCallStandard(ex_put_block,(exo->exoid, EX_ELEM_BLOCK, csIdx[cs], elem_type, csSize, connectSize, 0, 0, 1));
496: /* Find number of vertices, edges, and faces in the closure */
497: verticesInClosure = nodes[cs][0];
498: if (depth > 1) {
499: if (dim == 2) {
500: DMPlexGetConeSize(dm, cells[0], &edgesInClosure);
501: } else if (dim == 3) {
502: PetscInt *closure = NULL;
504: DMPlexGetConeSize(dm, cells[0], &facesInClosure);
505: DMPlexGetTransitiveClosure(dm, cells[0], PETSC_TRUE, &closureSize, &closure);
506: edgesInClosure = closureSize - facesInClosure - 1 - verticesInClosure;
507: DMPlexRestoreTransitiveClosure(dm, cells[0], PETSC_TRUE, &closureSize, &closure);
508: }
509: }
510: /* Get connectivity for each cell */
511: for (c = 0; c < csSize; ++c) {
512: PetscInt *closure = NULL;
513: PetscInt temp, i;
515: DMPlexGetTransitiveClosure(dm, cells[c], PETSC_TRUE, &closureSize, &closure);
516: for (i = 0; i < connectSize; ++i) {
517: if (i < nodes[cs][0]) {/* Vertices */
518: connect[i+off] = closure[(i+edgesInClosure+facesInClosure+1)*2] + 1;
519: connect[i+off] -= cellsNotInConnectivity;
520: } else if (i < nodes[cs][0]+nodes[cs][1]) { /* Edges */
521: connect[i+off] = closure[(i-verticesInClosure+facesInClosure+1)*2] + 1;
522: if (nodes[cs][2] == 0) connect[i+off] -= numFaces;
523: connect[i+off] -= cellsNotInConnectivity;
524: } else if (i < nodes[cs][0]+nodes[cs][1]+nodes[cs][3]) { /* Cells */
525: connect[i+off] = closure[0] + 1;
526: connect[i+off] -= skipCells;
527: } else if (i < nodes[cs][0]+nodes[cs][1]+nodes[cs][3]+nodes[cs][2]) { /* Faces */
528: connect[i+off] = closure[(i-edgesInClosure-verticesInClosure)*2] + 1;
529: connect[i+off] -= cellsNotInConnectivity;
530: } else {
531: connect[i+off] = -1;
532: }
533: }
534: /* Tetrahedra are inverted */
535: if (type[cs] == TET) {
536: temp = connect[0+off]; connect[0+off] = connect[1+off]; connect[1+off] = temp;
537: if (degree == 2) {
538: temp = connect[5+off]; connect[5+off] = connect[6+off]; connect[6+off] = temp;
539: temp = connect[7+off]; connect[7+off] = connect[8+off]; connect[8+off] = temp;
540: }
541: }
542: /* Hexahedra are inverted */
543: if (type[cs] == HEX) {
544: temp = connect[1+off]; connect[1+off] = connect[3+off]; connect[3+off] = temp;
545: if (degree == 2) {
546: temp = connect[8+off]; connect[8+off] = connect[11+off]; connect[11+off] = temp;
547: temp = connect[9+off]; connect[9+off] = connect[10+off]; connect[10+off] = temp;
548: temp = connect[16+off]; connect[16+off] = connect[17+off]; connect[17+off] = temp;
549: temp = connect[18+off]; connect[18+off] = connect[19+off]; connect[19+off] = temp;
551: temp = connect[12+off]; connect[12+off] = connect[16+off]; connect[16+off] = temp;
552: temp = connect[13+off]; connect[13+off] = connect[17+off]; connect[17+off] = temp;
553: temp = connect[14+off]; connect[14+off] = connect[18+off]; connect[18+off] = temp;
554: temp = connect[15+off]; connect[15+off] = connect[19+off]; connect[19+off] = temp;
556: temp = connect[23+off]; connect[23+off] = connect[26+off]; connect[26+off] = temp;
557: temp = connect[24+off]; connect[24+off] = connect[25+off]; connect[25+off] = temp;
558: temp = connect[25+off]; connect[25+off] = connect[26+off]; connect[26+off] = temp;
559: }
560: }
561: off += connectSize;
562: DMPlexRestoreTransitiveClosure(dm, cells[c], PETSC_TRUE, &closureSize, &closure);
563: }
564: PetscStackCallStandard(ex_put_conn,(exo->exoid, EX_ELEM_BLOCK, csIdx[cs], connect, 0, 0));
565: skipCells += (nodes[cs][3] == 0)*csSize;
566: PetscFree(connect);
567: ISRestoreIndices(stratumIS, &cells);
568: ISDestroy(&stratumIS);
569: }
570: PetscFree(type);
571: /* --- Coordinates --- */
572: PetscSectionSetChart(coordSection, pStart, pEnd);
573: if (num_cs) {
574: for (d = 0; d < depth; ++d) {
575: DMPlexGetDepthStratum(dm, d, &pStart, &pEnd);
576: for (p = pStart; p < pEnd; ++p) {
577: PetscSectionSetDof(coordSection, p, nodes[0][d] > 0);
578: }
579: }
580: }
581: for (cs = 0; cs < num_cs; ++cs) {
582: IS stratumIS;
583: const PetscInt *cells;
584: PetscInt csSize, c;
586: DMLabelGetStratumIS(csLabel, csIdx[cs], &stratumIS);
587: ISGetIndices(stratumIS, &cells);
588: ISGetSize(stratumIS, &csSize);
589: for (c = 0; c < csSize; ++c) {
590: PetscSectionSetDof(coordSection, cells[c], nodes[cs][3] > 0);
591: }
592: ISRestoreIndices(stratumIS, &cells);
593: ISDestroy(&stratumIS);
594: }
595: if (num_cs > 0) {
596: ISRestoreIndices(csIS, &csIdx);
597: ISDestroy(&csIS);
598: }
599: PetscFree(nodes);
600: PetscSectionSetUp(coordSection);
601: if (numNodes > 0) {
602: const char *coordNames[3] = {"x", "y", "z"};
603: PetscScalar *coords, *closure;
604: PetscReal *cval;
605: PetscInt hasDof, n = 0;
607: /* There can't be more than 24 values in the closure of a point for the coord coordSection */
608: PetscCalloc3(numNodes*3, &coords, dim, &cval, 24, &closure);
609: DMGetCoordinatesLocalNoncollective(dm, &coord);
610: DMPlexGetChart(dm, &pStart, &pEnd);
611: for (p = pStart; p < pEnd; ++p) {
612: PetscSectionGetDof(coordSection, p, &hasDof);
613: if (hasDof) {
614: PetscInt closureSize = 24, j;
616: DMPlexVecGetClosure(cdm, NULL, coord, p, &closureSize, &closure);
617: for (d = 0; d < dim; ++d) {
618: cval[d] = 0.0;
619: for (j = 0; j < closureSize/dim; j++) cval[d] += closure[j*dim+d];
620: coords[d*numNodes+n] = cval[d] * dim / closureSize;
621: }
622: ++n;
623: }
624: }
625: PetscStackCallStandard(ex_put_coord,(exo->exoid, &coords[0*numNodes], &coords[1*numNodes], &coords[2*numNodes]));
626: PetscFree3(coords, cval, closure);
627: PetscStackCallStandard(ex_put_coord_names,(exo->exoid, (char **) coordNames));
628: }
630: /* --- Node Sets/Vertex Sets --- */
631: DMHasLabel(dm, "Vertex Sets", &hasLabel);
632: if (hasLabel) {
633: PetscInt i, vs, vsSize;
634: const PetscInt *vsIdx, *vertices;
635: PetscInt *nodeList;
636: IS vsIS, stratumIS;
637: DMLabel vsLabel;
638: DMGetLabel(dm, "Vertex Sets", &vsLabel);
639: DMLabelGetValueIS(vsLabel, &vsIS);
640: ISGetIndices(vsIS, &vsIdx);
641: for (vs=0; vs<num_vs; ++vs) {
642: DMLabelGetStratumIS(vsLabel, vsIdx[vs], &stratumIS);
643: ISGetIndices(stratumIS, &vertices);
644: ISGetSize(stratumIS, &vsSize);
645: PetscMalloc1(vsSize, &nodeList);
646: for (i=0; i<vsSize; ++i) {
647: nodeList[i] = vertices[i] - skipCells + 1;
648: }
649: PetscStackCallStandard(ex_put_set_param,(exo->exoid, EX_NODE_SET, vsIdx[vs], vsSize, 0));
650: PetscStackCallStandard(ex_put_set,(exo->exoid, EX_NODE_SET, vsIdx[vs], nodeList, NULL));
651: ISRestoreIndices(stratumIS, &vertices);
652: ISDestroy(&stratumIS);
653: PetscFree(nodeList);
654: }
655: ISRestoreIndices(vsIS, &vsIdx);
656: ISDestroy(&vsIS);
657: }
658: /* --- Side Sets/Face Sets --- */
659: DMHasLabel(dm, "Face Sets", &hasLabel);
660: if (hasLabel) {
661: PetscInt i, j, fs, fsSize;
662: const PetscInt *fsIdx, *faces;
663: IS fsIS, stratumIS;
664: DMLabel fsLabel;
665: PetscInt numPoints, *points;
666: PetscInt elem_list_size = 0;
667: PetscInt *elem_list, *elem_ind, *side_list;
669: DMGetLabel(dm, "Face Sets", &fsLabel);
670: /* Compute size of Node List and Element List */
671: DMLabelGetValueIS(fsLabel, &fsIS);
672: ISGetIndices(fsIS, &fsIdx);
673: for (fs=0; fs<num_fs; ++fs) {
674: DMLabelGetStratumIS(fsLabel, fsIdx[fs], &stratumIS);
675: ISGetSize(stratumIS, &fsSize);
676: elem_list_size += fsSize;
677: ISDestroy(&stratumIS);
678: }
679: PetscMalloc3(num_fs, &elem_ind,
680: elem_list_size, &elem_list,
681: elem_list_size, &side_list);
682: elem_ind[0] = 0;
683: for (fs=0; fs<num_fs; ++fs) {
684: DMLabelGetStratumIS(fsLabel, fsIdx[fs], &stratumIS);
685: ISGetIndices(stratumIS, &faces);
686: ISGetSize(stratumIS, &fsSize);
687: /* Set Parameters */
688: PetscStackCallStandard(ex_put_set_param,(exo->exoid, EX_SIDE_SET, fsIdx[fs], fsSize, 0));
689: /* Indices */
690: if (fs<num_fs-1) {
691: elem_ind[fs+1] = elem_ind[fs] + fsSize;
692: }
694: for (i=0; i<fsSize; ++i) {
695: /* Element List */
696: points = NULL;
697: DMPlexGetTransitiveClosure(dm, faces[i], PETSC_FALSE, &numPoints, &points);
698: elem_list[elem_ind[fs] + i] = points[2] +1;
699: DMPlexRestoreTransitiveClosure(dm, faces[i], PETSC_FALSE, &numPoints, &points);
701: /* Side List */
702: points = NULL;
703: DMPlexGetTransitiveClosure(dm, elem_list[elem_ind[fs] + i]-1, PETSC_TRUE, &numPoints, &points);
704: for (j=1; j<numPoints; ++j) {
705: if (points[j*2]==faces[i]) {break;}
706: }
707: /* Convert HEX sides */
708: if (numPoints == 27) {
709: if (j == 1) {j = 5;}
710: else if (j == 2) {j = 6;}
711: else if (j == 3) {j = 1;}
712: else if (j == 4) {j = 3;}
713: else if (j == 5) {j = 2;}
714: else if (j == 6) {j = 4;}
715: }
716: /* Convert TET sides */
717: if (numPoints == 15) {
718: --j;
719: if (j == 0) {j = 4;}
720: }
721: side_list[elem_ind[fs] + i] = j;
722: DMPlexRestoreTransitiveClosure(dm, elem_list[elem_ind[fs] + i]-1, PETSC_TRUE, &numPoints, &points);
724: }
725: ISRestoreIndices(stratumIS, &faces);
726: ISDestroy(&stratumIS);
727: }
728: ISRestoreIndices(fsIS, &fsIdx);
729: ISDestroy(&fsIS);
731: /* Put side sets */
732: for (fs=0; fs<num_fs; ++fs) {
733: PetscStackCallStandard(ex_put_set,(exo->exoid, EX_SIDE_SET, fsIdx[fs], &elem_list[elem_ind[fs]], &side_list[elem_ind[fs]]));
734: }
735: PetscFree3(elem_ind,elem_list,side_list);
736: }
737: /*
738: close the exodus file
739: */
740: ex_close(exo->exoid);
741: exo->exoid = -1;
742: }
743: PetscSectionDestroy(&coordSection);
745: /*
746: reopen the file in parallel
747: */
748: EXO_mode = EX_WRITE;
749: #if defined(PETSC_USE_64BIT_INDICES)
750: EXO_mode += EX_ALL_INT64_API;
751: #endif
752: CPU_word_size = sizeof(PetscReal);
753: IO_word_size = sizeof(PetscReal);
754: exo->exoid = ex_open_par(exo->filename,EXO_mode,&CPU_word_size,&IO_word_size,&EXO_version,comm,MPI_INFO_NULL);
755: if (exo->exoid < 0) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "ex_open_par failed for %s", exo->filename);
756: return(0);
757: }
759: /*
760: VecView_PlexExodusII_Internal - Write a Vec to an exodus file
762: Collective on v
764: Input Parameters:
765: + v - The vector to be written
766: - viewer - The PetscViewerExodusII viewer associated with an exodus file
768: Notes:
769: The exodus result variable index is obtained by comparing the Vec name and the
770: names of variables declared in the exodus file. For instance for a Vec named "V"
771: the location in the exodus file will be the first match of "V", "V_X", "V_XX", "V_1", or "V_11"
772: amongst all variables.
773: In the event that both a nodal and a zonal variable match, the function will return an error instead of
774: possibly corrupting the file.
776: Level: beginner
778: .seealso: EXOGetVarIndex_Internal(),DMPlexView_ExodusII(),VecView_PlexExodusII()
779: */
780: PetscErrorCode VecView_PlexExodusII_Internal(Vec v, PetscViewer viewer)
781: {
782: DM dm;
783: MPI_Comm comm;
784: PetscMPIInt rank;
786: int exoid,offsetN = 0, offsetZ = 0;
787: const char *vecname;
788: PetscInt step;
789: PetscErrorCode ierr;
792: PetscObjectGetComm((PetscObject) v, &comm);
793: MPI_Comm_rank(comm, &rank);
794: PetscViewerExodusIIGetId(viewer,&exoid);
795: VecGetDM(v, &dm);
796: PetscObjectGetName((PetscObject) v, &vecname);
798: DMGetOutputSequenceNumber(dm,&step,NULL);
799: EXOGetVarIndex_Internal(exoid,EX_NODAL,vecname,&offsetN);
800: EXOGetVarIndex_Internal(exoid,EX_ELEM_BLOCK,vecname,&offsetZ);
801: if (offsetN > 0 && offsetZ > 0) SETERRQ1(comm, PETSC_ERR_FILE_UNEXPECTED, "Found both nodal and zonal variable %s in exodus file. \n", vecname);
802: if (offsetN > 0) {
803: VecViewPlex_ExodusII_Nodal_Internal(v,exoid,(int) step+1,offsetN);
804: } else if (offsetZ > 0) {
805: VecViewPlex_ExodusII_Zonal_Internal(v,exoid,(int) step+1,offsetZ);
806: } else SETERRQ1(comm, PETSC_ERR_FILE_UNEXPECTED, "Could not find nodal or zonal variable %s in exodus file. \n", vecname);
807: return(0);
808: }
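/*
  Example (illustrative sketch, assumed names): this routine is normally reached through VecView().
  The Vec must come from the DM, its name must match a variable already declared in the exodus
  file, and the exodus time step is taken from the DM output sequence number:

    PetscObjectSetName((PetscObject) u, "U");
    DMSetOutputSequenceNumber(dm, 0, 0.0);      (written as exodus step 1)
    VecView(u, viewer);
*/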
810: /*
811: VecLoad_PlexExodusII_Internal - Read a Vec from an exodus file
813: Collective on v
815: Input Parameters:
816: + v - The vector to be read
817: - viewer - The PetscViewerExodusII viewer associated with an exodus file
819: Notes:
820: The exodus result variable index is obtained by comparing the Vec name and the
821: names of variables declared in the exodus file. For instance for a Vec named "V"
822: the location in the exodus file will be the first match of "V", "V_X", "V_XX", "V_1", or "V_11"
823: amongst all variables.
824: In the event that both a nodal and a zonal variable match, the function will return an error instead of
825: possibly corrupting the file.
827: Level: beginner
829: .seealso: EXOGetVarIndex_Internal(),DMPlexView_ExodusII(),VecView_PlexExodusII()
830: */
831: PetscErrorCode VecLoad_PlexExodusII_Internal(Vec v, PetscViewer viewer)
832: {
833: DM dm;
834: MPI_Comm comm;
835: PetscMPIInt rank;
837: int exoid,offsetN = 0, offsetZ = 0;
838: const char *vecname;
839: PetscInt step;
840: PetscErrorCode ierr;
843: PetscObjectGetComm((PetscObject) v, &comm);
844: MPI_Comm_rank(comm, &rank);
845: PetscViewerExodusIIGetId(viewer,&exoid);
846: VecGetDM(v, &dm);
847: PetscObjectGetName((PetscObject) v, &vecname);
849: DMGetOutputSequenceNumber(dm,&step,NULL);
850: EXOGetVarIndex_Internal(exoid,EX_NODAL,vecname,&offsetN);
851: EXOGetVarIndex_Internal(exoid,EX_ELEM_BLOCK,vecname,&offsetZ);
852: if (offsetN > 0 && offsetZ > 0) SETERRQ1(comm, PETSC_ERR_FILE_UNEXPECTED, "Found both nodal and zonal variable %s in exodus file. \n", vecname);
853: if (offsetN > 0) {
854: VecLoadPlex_ExodusII_Nodal_Internal(v,exoid,(int) step+1,offsetN);
855: } else if (offsetZ > 0) {
856: VecLoadPlex_ExodusII_Zonal_Internal(v,exoid,(int) step+1,offsetZ);
857: } else SETERRQ1(comm, PETSC_ERR_FILE_UNEXPECTED, "Could not find nodal or zonal variable %s in exodus file. \n", vecname);
858: return(0);
859: }
861: /*
862: VecViewPlex_ExodusII_Nodal_Internal - Write a Vec corresponding to a nodal field to an exodus file
864: Collective on v
866: Input Parameters:
867: + v - The vector to be written
868: . exoid - the exodus id of a file (obtained from ex_open or ex_create for instance)
869: . step - the time step to write at (exodus steps are numbered starting from 1)
870: - offset - the location of the variable in the file
872: Notes:
873: The exodus result nodal variable index is obtained by comparing the Vec name and the
874: names of nodal variables declared in the exodus file. For instance for a Vec named "V"
875: the location in the exodus file will be the first match of "V", "V_X", "V_XX", "V_1", or "V_11"
876: amongst all nodal variables.
878: Level: beginner
880: .seealso: EXOGetVarIndex_Internal(),DMPlexView_ExodusII_Internal(),VecLoadNodal_PlexEXO(),VecViewZonal_PlexEXO(),VecLoadZonal_PlexEXO()
881: */
882: PetscErrorCode VecViewPlex_ExodusII_Nodal_Internal(Vec v, int exoid, int step, int offset)
883: {
884: MPI_Comm comm;
885: PetscMPIInt size;
886: DM dm;
887: Vec vNatural, vComp;
888: const PetscScalar *varray;
889: PetscInt xs, xe, bs;
890: PetscBool useNatural;
891: PetscErrorCode ierr;
894: PetscObjectGetComm((PetscObject) v, &comm);
895: MPI_Comm_size(comm, &size);
896: VecGetDM(v, &dm);
897: DMGetUseNatural(dm, &useNatural);
898: useNatural = useNatural && size > 1 ? PETSC_TRUE : PETSC_FALSE;
899: if (useNatural) {
900: DMGetGlobalVector(dm, &vNatural);
901: DMPlexGlobalToNaturalBegin(dm, v, vNatural);
902: DMPlexGlobalToNaturalEnd(dm, v, vNatural);
903: } else {
904: vNatural = v;
905: }
907: /* Write local chunk of the result in the exodus file
908: exodus stores each component of a vector-valued field as a separate variable.
909: We assume that they are stored sequentially */
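  /* Illustrative note (assumed layout): for a field with bs = 3 components written starting at
     exodus variable index "offset", component c is stored as exodus variable offset+c and is
     gathered from vNatural below with a stride-bs index set starting at xs+c. */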
910: VecGetOwnershipRange(vNatural, &xs, &xe);
911: VecGetBlockSize(vNatural, &bs);
912: if (bs == 1) {
913: VecGetArrayRead(vNatural, &varray);
914: PetscStackCallStandard(ex_put_partial_var,(exoid, step, EX_NODAL, offset, 1, xs+1, xe-xs, varray));
915: VecRestoreArrayRead(vNatural, &varray);
916: } else {
917: IS compIS;
918: PetscInt c;
920: ISCreateStride(comm, (xe-xs)/bs, xs, bs, &compIS);
921: for (c = 0; c < bs; ++c) {
922: ISStrideSetStride(compIS, (xe-xs)/bs, xs+c, bs);
923: VecGetSubVector(vNatural, compIS, &vComp);
924: VecGetArrayRead(vComp, &varray);
925: PetscStackCallStandard(ex_put_partial_var,(exoid, step, EX_NODAL, offset+c, 1, xs/bs+1, (xe-xs)/bs, varray));
926: VecRestoreArrayRead(vComp, &varray);
927: VecRestoreSubVector(vNatural, compIS, &vComp);
928: }
929: ISDestroy(&compIS);
930: }
931: if (useNatural) {DMRestoreGlobalVector(dm, &vNatural);}
932: return(0);
933: }
935: /*
936: VecLoadPlex_ExodusII_Nodal_Internal - Read a Vec corresponding to a nodal field from an exodus file
938: Collective on v
940: Input Parameters:
941: + v - The vector to be read
942: . exoid - the exodus id of a file (obtained from ex_open or ex_create for instance)
943: . step - the time step to read at (exodus steps are numbered starting from 1)
944: - offset - the location of the variable in the file
946: Notes:
947: The exodus result nodal variable index is obtained by comparing the Vec name and the
948: names of nodal variables declared in the exodus file. For instance for a Vec named "V"
949: the location in the exodus file will be the first match of "V", "V_X", "V_XX", "V_1", or "V_11"
950: amongst all nodal variables.
952: Level: beginner
954: .seealso: EXOGetVarIndex_Internal(), DMPlexView_ExodusII_Internal(), VecViewPlex_ExodusII_Nodal_Internal(), VecViewZonal_PlexEXO(), VecLoadZonal_PlexEXO()
955: */
956: PetscErrorCode VecLoadPlex_ExodusII_Nodal_Internal(Vec v, int exoid, int step, int offset)
957: {
958: MPI_Comm comm;
959: PetscMPIInt size;
960: DM dm;
961: Vec vNatural, vComp;
962: PetscScalar *varray;
963: PetscInt xs, xe, bs;
964: PetscBool useNatural;
968: PetscObjectGetComm((PetscObject) v, &comm);
969: MPI_Comm_size(comm, &size);
970: VecGetDM(v,&dm);
971: DMGetUseNatural(dm, &useNatural);
972: useNatural = useNatural && size > 1 ? PETSC_TRUE : PETSC_FALSE;
973: if (useNatural) {DMGetGlobalVector(dm,&vNatural);}
974: else {vNatural = v;}
976: /* Read local chunk from the file */
977: VecGetOwnershipRange(vNatural, &xs, &xe);
978: VecGetBlockSize(vNatural, &bs);
979: if (bs == 1) {
980: VecGetArray(vNatural, &varray);
981: PetscStackCallStandard(ex_get_partial_var,(exoid, step, EX_NODAL, offset, 1, xs+1, xe-xs, varray));
982: VecRestoreArray(vNatural, &varray);
983: } else {
984: IS compIS;
985: PetscInt c;
987: ISCreateStride(comm, (xe-xs)/bs, xs, bs, &compIS);
988: for (c = 0; c < bs; ++c) {
989: ISStrideSetStride(compIS, (xe-xs)/bs, xs+c, bs);
990: VecGetSubVector(vNatural, compIS, &vComp);
991: VecGetArray(vComp, &varray);
992: PetscStackCallStandard(ex_get_partial_var,(exoid, step, EX_NODAL, offset+c, 1, xs/bs+1, (xe-xs)/bs, varray));
993: VecRestoreArray(vComp, &varray);
994: VecRestoreSubVector(vNatural, compIS, &vComp);
995: }
996: ISDestroy(&compIS);
997: }
998: if (useNatural) {
999: DMPlexNaturalToGlobalBegin(dm, vNatural, v);
1000: DMPlexNaturalToGlobalEnd(dm, vNatural, v);
1001: DMRestoreGlobalVector(dm, &vNatural);
1002: }
1003: return(0);
1004: }
1006: /*
1007: VecViewPlex_ExodusII_Zonal_Internal - Write a Vec corresponding to a zonal (cell based) field to an exodus file
1009: Collective on v
1011: Input Parameters:
1012: + v - The vector to be written
1013: . exoid - the exodus id of a file (obtained from ex_open or ex_create for instance)
1014: . step - the time step to write at (exodus steps are numbered starting from 1)
1015: - offset - the location of the variable in the file
1017: Notes:
1018: The exodus result zonal variable index is obtained by comparing the Vec name and the
1019: names of zonal variables declared in the exodus file. For instance for a Vec named "V"
1020: the location in the exodus file will be the first match of "V", "V_X", "V_XX", "V_1", or "V_11"
1021: amongst all zonal variables.
1023: Level: beginner
1025: .seealso: EXOGetVarIndex_Internal(),DMPlexView_ExodusII_Internal(),VecViewPlex_ExodusII_Nodal_Internal(),VecLoadPlex_ExodusII_Nodal_Internal(),VecLoadPlex_ExodusII_Zonal_Internal()
1026: */
1027: PetscErrorCode VecViewPlex_ExodusII_Zonal_Internal(Vec v, int exoid, int step, int offset)
1028: {
1029: MPI_Comm comm;
1030: PetscMPIInt size;
1031: DM dm;
1032: Vec vNatural, vComp;
1033: const PetscScalar *varray;
1034: PetscInt xs, xe, bs;
1035: PetscBool useNatural;
1036: IS compIS;
1037: PetscInt *csSize, *csID;
1038: PetscInt numCS, set, csxs = 0;
1039: PetscErrorCode ierr;
1042: PetscObjectGetComm((PetscObject)v, &comm);
1043: MPI_Comm_size(comm, &size);
1044: VecGetDM(v, &dm);
1045: DMGetUseNatural(dm, &useNatural);
1046: useNatural = useNatural && size > 1 ? PETSC_TRUE : PETSC_FALSE;
1047: if (useNatural) {
1048: DMGetGlobalVector(dm, &vNatural);
1049: DMPlexGlobalToNaturalBegin(dm, v, vNatural);
1050: DMPlexGlobalToNaturalEnd(dm, v, vNatural);
1051: } else {
1052: vNatural = v;
1053: }
1055: /* Write local chunk of the result in the exodus file
1056: exodus stores each component of a vector-valued field as a separate variable.
1057: We assume that they are stored sequentially
1058: Zonal variables are accessed one element block at a time, so we loop through the cell sets,
1059: but once the vector has been reordered to the natural ordering, we cannot use the label information
1060: to figure out what to save where. */
1061: numCS = ex_inquire_int(exoid, EX_INQ_ELEM_BLK);
1062: PetscMalloc2(numCS, &csID, numCS, &csSize);
1063: PetscStackCallStandard(ex_get_ids,(exoid, EX_ELEM_BLOCK, csID));
1064: for (set = 0; set < numCS; ++set) {
1065: ex_block block;
1067: block.id = csID[set];
1068: block.type = EX_ELEM_BLOCK;
1069: PetscStackCallStandard(ex_get_block_param,(exoid, &block));
1070: csSize[set] = block.num_entry;
1071: }
1072: VecGetOwnershipRange(vNatural, &xs, &xe);
1073: VecGetBlockSize(vNatural, &bs);
1074: if (bs > 1) {ISCreateStride(comm, (xe-xs)/bs, xs, bs, &compIS);}
1075: for (set = 0; set < numCS; set++) {
1076: PetscInt csLocalSize, c;
1078: /* range of indices for set csID[set]: csxs:csxs + csSize[set]-1
1079: local slice of zonal values: xs/bs:xe/bs-1
1080: intersection: max(xs/bs,csxs):min(xe/bs-1,csxs + csSize[set]-1) */
1081: csLocalSize = PetscMax(0, PetscMin(xe/bs, csxs+csSize[set]) - PetscMax(xs/bs, csxs));
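    /* Worked example (illustrative, assumed values): with bs = 1, xs = 10, xe = 20, csxs = 15 and
       csSize[set] = 10, csLocalSize = min(20,25) - max(10,15) = 5, so this rank writes its natural
       entries 15..19 (varray[5..9]) as entries 1..5 of element block csID[set]. */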
1082: if (bs == 1) {
1083: VecGetArrayRead(vNatural, &varray);
1084: PetscStackCallStandard(ex_put_partial_var,(exoid, step, EX_ELEM_BLOCK, offset, csID[set], PetscMax(xs-csxs, 0)+1, csLocalSize, &varray[PetscMax(0, csxs-xs)]));
1085: VecRestoreArrayRead(vNatural, &varray);
1086: } else {
1087: for (c = 0; c < bs; ++c) {
1088: ISStrideSetStride(compIS, (xe-xs)/bs, xs+c, bs);
1089: VecGetSubVector(vNatural, compIS, &vComp);
1090: VecGetArrayRead(vComp, &varray);
1091: PetscStackCallStandard(ex_put_partial_var,(exoid, step, EX_ELEM_BLOCK, offset+c, csID[set], PetscMax(xs/bs-csxs, 0)+1, csLocalSize, &varray[PetscMax(0, csxs-xs/bs)]));
1092: VecRestoreArrayRead(vComp, &varray);
1093: VecRestoreSubVector(vNatural, compIS, &vComp);
1094: }
1095: }
1096: csxs += csSize[set];
1097: }
1098: PetscFree2(csID, csSize);
1099: if (bs > 1) {ISDestroy(&compIS);}
1100: if (useNatural) {DMRestoreGlobalVector(dm,&vNatural);}
1101: return(0);
1102: }
1104: /*
1105: VecLoadPlex_ExodusII_Zonal_Internal - Read a Vec corresponding to a zonal (cell based) field from an exodus file
1107: Collective on v
1109: Input Parameters:
1110: + v - The vector to be read
1111: . exoid - the exodus id of a file (obtained from ex_open or ex_create for instance)
1112: . step - the time step to read at (exodus steps are numbered starting from 1)
1113: - offset - the location of the variable in the file
1115: Notes:
1116: The exodus result zonal variable index is obtained by comparing the Vec name and the
1117: names of zonal variables declared in the exodus file. For instance for a Vec named "V"
1118: the location in the exodus file will be the first match of "V", "V_X", "V_XX", "V_1", or "V_11"
1119: amongst all zonal variables.
1121: Level: beginner
1123: .seealso: EXOGetVarIndex_Internal(), DMPlexView_ExodusII_Internal(), VecViewPlex_ExodusII_Nodal_Internal(), VecLoadPlex_ExodusII_Nodal_Internal(), VecLoadPlex_ExodusII_Zonal_Internal()
1124: */
1125: PetscErrorCode VecLoadPlex_ExodusII_Zonal_Internal(Vec v, int exoid, int step, int offset)
1126: {
1127: MPI_Comm comm;
1128: PetscMPIInt size;
1129: DM dm;
1130: Vec vNatural, vComp;
1131: PetscScalar *varray;
1132: PetscInt xs, xe, bs;
1133: PetscBool useNatural;
1134: IS compIS;
1135: PetscInt *csSize, *csID;
1136: PetscInt numCS, set, csxs = 0;
1137: PetscErrorCode ierr;
1140: PetscObjectGetComm((PetscObject)v,&comm);
1141: MPI_Comm_size(comm, &size);
1142: VecGetDM(v, &dm);
1143: DMGetUseNatural(dm, &useNatural);
1144: useNatural = useNatural && size > 1 ? PETSC_TRUE : PETSC_FALSE;
1145: if (useNatural) {DMGetGlobalVector(dm,&vNatural);}
1146: else {vNatural = v;}
1148: /* Read local chunk of the result in the exodus file
1149: exodus stores each component of a vector-valued field as a separate variable.
1150: We assume that they are stored sequentially
1151: Zonal variables are accessed one element block at a time, so we loop through the cell sets,
1152: but once the vector has been reordered to the natural ordering, we cannot use the label information
1153: to figure out what to read where. */
1154: numCS = ex_inquire_int(exoid, EX_INQ_ELEM_BLK);
1155: PetscMalloc2(numCS, &csID, numCS, &csSize);
1156: PetscStackCallStandard(ex_get_ids,(exoid, EX_ELEM_BLOCK, csID));
1157: for (set = 0; set < numCS; ++set) {
1158: ex_block block;
1160: block.id = csID[set];
1161: block.type = EX_ELEM_BLOCK;
1162: PetscStackCallStandard(ex_get_block_param,(exoid, &block));
1163: csSize[set] = block.num_entry;
1164: }
1165: VecGetOwnershipRange(vNatural, &xs, &xe);
1166: VecGetBlockSize(vNatural, &bs);
1167: if (bs > 1) {ISCreateStride(comm, (xe-xs)/bs, xs, bs, &compIS);}
1168: for (set = 0; set < numCS; ++set) {
1169: PetscInt csLocalSize, c;
1171: /* range of indices for set csID[set]: csxs:csxs + csSize[set]-1
1172: local slice of zonal values: xs/bs:xe/bs-1
1173: intersection: max(xs/bs,csxs):min(xe/bs-1,csxs + csSize[set]-1) */
1174: csLocalSize = PetscMax(0, PetscMin(xe/bs, csxs+csSize[set]) - PetscMax(xs/bs, csxs));
1175: if (bs == 1) {
1176: VecGetArray(vNatural, &varray);
1177: PetscStackCallStandard(ex_get_partial_var,(exoid, step, EX_ELEM_BLOCK, offset, csID[set], PetscMax(xs-csxs, 0)+1, csLocalSize, &varray[PetscMax(0, csxs-xs)]));
1178: VecRestoreArray(vNatural, &varray);
1179: } else {
1180: for (c = 0; c < bs; ++c) {
1181: ISStrideSetStride(compIS, (xe-xs)/bs, xs+c, bs);
1182: VecGetSubVector(vNatural, compIS, &vComp);
1183: VecGetArray(vComp, &varray);
1184: PetscStackCallStandard(ex_get_partial_var,(exoid, step, EX_ELEM_BLOCK, offset+c, csID[set], PetscMax(xs/bs-csxs, 0)+1, csLocalSize, &varray[PetscMax(0, csxs-xs/bs)]));
1185: VecRestoreArray(vComp, &varray);
1186: VecRestoreSubVector(vNatural, compIS, &vComp);
1187: }
1188: }
1189: csxs += csSize[set];
1190: }
1191: PetscFree2(csID, csSize);
1192: if (bs > 1) {ISDestroy(&compIS);}
1193: if (useNatural) {
1194: DMPlexNaturalToGlobalBegin(dm, vNatural, v);
1195: DMPlexNaturalToGlobalEnd(dm, vNatural, v);
1196: DMRestoreGlobalVector(dm, &vNatural);
1197: }
1198: return(0);
1199: }
1200: #endif
1202: /*@
1203: PetscViewerExodusIIGetId - Get the file id of the ExodusII file
1205: Logically Collective on PetscViewer
1207: Input Parameter:
1208: . viewer - the PetscViewer
1210: Output Parameter:
1211: . exoid - The ExodusII file id
1213: Level: intermediate
1215: .seealso: PetscViewerFileSetMode(), PetscViewerCreate(), PetscViewerSetType(), PetscViewerBinaryOpen()
1216: @*/
1217: PetscErrorCode PetscViewerExodusIIGetId(PetscViewer viewer, int *exoid)
1218: {
1223: PetscTryMethod(viewer, "PetscViewerGetId_C",(PetscViewer,int*),(viewer,exoid));
1224: return(0);
1225: }
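/*
  Example (illustrative sketch, assumed workflow): the raw exodus id lets callers use the ExodusII
  library directly, e.g. to declare a result variable before writing a Vec with VecView():

    int exoid = -1;
    PetscViewerExodusIIGetId(viewer, &exoid);
    ex_put_variable_param(exoid, EX_NODAL, 1);
*/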
1227: /*@
1228: PetscViewerExodusIISetOrder - Set the element order in the exodusII file.
1230: Collective
1232: Input Parameters:
1233: + viewer - the viewer
1234: - order - the element order
1238: Level: beginner
1242: .seealso: PetscViewerExodusIIGetId(), PetscViewerExodusIIGetOrder(), PetscViewerExodusIISetOrder()
1243: @*/
1244: PetscErrorCode PetscViewerExodusIISetOrder(PetscViewer viewer, PetscInt order)
1245: {
1250: PetscTryMethod(viewer, "PetscViewerSetOrder_C",(PetscViewer,PetscInt),(viewer,order));
1251: return(0);
1252: }
1254: /*@
1255: PetscViewerExodusIIGetOrder - Get the element order in the exodusII file.
1257: Collective
1259: Input Parameter:
1260: . viewer - the viewer
1263: Output Parameter:
1264: . order - the element order
1265: Level: beginner
1269: .seealso: PetscViewerExodusIIGetId(), PetscViewerExodusIIGetOrder(), PetscViewerExodusIISetOrder()
1270: @*/
1271: PetscErrorCode PetscViewerExodusIIGetOrder(PetscViewer viewer, PetscInt *order)
1272: {
1277: PetscTryMethod(viewer, "PetscViewerGetOrder_C",(PetscViewer,PetscInt*),(viewer,order));
1278: return(0);
1279: }
1281: /*@C
1282: PetscViewerExodusIIOpen - Opens a file for ExodusII input/output.
1284: Collective
1286: Input Parameters:
1287: + comm - MPI communicator
1288: . name - name of file
1289: - type - type of file
1290: $ FILE_MODE_WRITE - create new file for binary output
1291: $ FILE_MODE_READ - open existing file for binary input
1292: $ FILE_MODE_APPEND - open existing file for binary output
1294: Output Parameter:
1295: . exo - PetscViewer for Exodus II input/output to use with the specified file
1297: Level: beginner
1299: Note:
1300: This PetscViewer should be destroyed with PetscViewerDestroy().
1302: .seealso: PetscViewerPushFormat(), PetscViewerDestroy(),
1303: DMLoad(), PetscFileMode, PetscViewer, PetscViewerSetType(), PetscViewerFileSetMode(), PetscViewerFileSetName()
1304: @*/
1305: PetscErrorCode PetscViewerExodusIIOpen(MPI_Comm comm, const char name[], PetscFileMode type, PetscViewer *exo)
1306: {
1310: PetscViewerCreate(comm, exo);
1311: PetscViewerSetType(*exo, PETSCVIEWEREXODUSII);
1312: PetscViewerFileSetMode(*exo, type);
1313: PetscViewerFileSetName(*exo, name);
1314: PetscViewerSetFromOptions(*exo);
1315: return(0);
1316: }
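/*
  Example (illustrative sketch, assumed file name): opening an existing exodus file for reading
  and loading a previously stored field into a Vec obtained from the DM:

    PetscViewer viewer;
    PetscViewerExodusIIOpen(PETSC_COMM_WORLD, "restart.exo", FILE_MODE_READ, &viewer);
    VecLoad(u, viewer);
    PetscViewerDestroy(&viewer);
*/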
1318: /*@C
1319: DMPlexCreateExodusFromFile - Create a DMPlex mesh from an ExodusII file.
1321: Collective
1323: Input Parameters:
1324: + comm - The MPI communicator
1325: . filename - The name of the ExodusII file
1326: - interpolate - Create faces and edges in the mesh
1328: Output Parameter:
1329: . dm - The DM object representing the mesh
1331: Level: beginner
1333: .seealso: DMPLEX, DMCreate(), DMPlexCreateExodus()
1334: @*/
1335: PetscErrorCode DMPlexCreateExodusFromFile(MPI_Comm comm, const char filename[], PetscBool interpolate, DM *dm)
1336: {
1337: PetscMPIInt rank;
1339: #if defined(PETSC_HAVE_EXODUSII)
1340: int CPU_word_size = sizeof(PetscReal), IO_word_size = 0, exoid = -1;
1341: float version;
1342: #endif
1346: MPI_Comm_rank(comm, &rank);
1347: #if defined(PETSC_HAVE_EXODUSII)
1348: if (rank == 0) {
1349: exoid = ex_open(filename, EX_READ, &CPU_word_size, &IO_word_size, &version);
1350: if (exoid <= 0) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "ex_open(\"%s\",...) did not return a valid file ID", filename);
1351: }
1352: DMPlexCreateExodus(comm, exoid, interpolate, dm);
1353: if (rank == 0) {PetscStackCallStandard(ex_close,(exoid));}
1354: return(0);
1355: #else
1356: SETERRQ(comm, PETSC_ERR_SUP, "This method requires ExodusII support. Reconfigure using --download-exodusii");
1357: #endif
1358: }
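/*
  Example (illustrative sketch, assumed file name): reading an ExodusII mesh into an interpolated
  DMPlex:

    DM dm;
    DMPlexCreateExodusFromFile(PETSC_COMM_WORLD, "mesh.exo", PETSC_TRUE, &dm);
*/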
1360: #if defined(PETSC_HAVE_EXODUSII)
1361: static PetscErrorCode ExodusGetCellType_Internal(const char *elem_type, DMPolytopeType *ct)
1362: {
1363: PetscBool flg;
1367: *ct = DM_POLYTOPE_UNKNOWN;
1368: PetscStrcmp(elem_type, "TRI", &flg);
1369: if (flg) {*ct = DM_POLYTOPE_TRIANGLE; goto done;}
1370: PetscStrcmp(elem_type, "TRI3", &flg);
1371: if (flg) {*ct = DM_POLYTOPE_TRIANGLE; goto done;}
1372: PetscStrcmp(elem_type, "QUAD", &flg);
1373: if (flg) {*ct = DM_POLYTOPE_QUADRILATERAL; goto done;}
1374: PetscStrcmp(elem_type, "QUAD4", &flg);
1375: if (flg) {*ct = DM_POLYTOPE_QUADRILATERAL; goto done;}
1376: PetscStrcmp(elem_type, "SHELL4", &flg);
1377: if (flg) {*ct = DM_POLYTOPE_QUADRILATERAL; goto done;}
1378: PetscStrcmp(elem_type, "TETRA", &flg);
1379: if (flg) {*ct = DM_POLYTOPE_TETRAHEDRON; goto done;}
1380: PetscStrcmp(elem_type, "TET4", &flg);
1381: if (flg) {*ct = DM_POLYTOPE_TETRAHEDRON; goto done;}
1382: PetscStrcmp(elem_type, "WEDGE", &flg);
1383: if (flg) {*ct = DM_POLYTOPE_TRI_PRISM; goto done;}
1384: PetscStrcmp(elem_type, "HEX", &flg);
1385: if (flg) {*ct = DM_POLYTOPE_HEXAHEDRON; goto done;}
1386: PetscStrcmp(elem_type, "HEX8", &flg);
1387: if (flg) {*ct = DM_POLYTOPE_HEXAHEDRON; goto done;}
1388: PetscStrcmp(elem_type, "HEXAHEDRON", &flg);
1389: if (flg) {*ct = DM_POLYTOPE_HEXAHEDRON; goto done;}
1390: SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONG, "Unrecognized element type %s", elem_type);
1391: done:
1392: return(0);
1393: }
1394: #endif
1396: /*@
1397: DMPlexCreateExodus - Create a DMPlex mesh from an ExodusII file ID.
1399: Collective
1401: Input Parameters:
1402: + comm - The MPI communicator
1403: . exoid - The ExodusII id associated with a exodus file and obtained using ex_open
1404: - interpolate - Create faces and edges in the mesh
1406: Output Parameter:
1407: . dm - The DM object representing the mesh
1409: Level: beginner
1411: .seealso: DMPLEX, DMCreate()
1412: @*/
1413: PetscErrorCode DMPlexCreateExodus(MPI_Comm comm, PetscInt exoid, PetscBool interpolate, DM *dm)
1414: {
1415: #if defined(PETSC_HAVE_EXODUSII)
1416: PetscMPIInt num_proc, rank;
1417: DMLabel cellSets = NULL, faceSets = NULL, vertSets = NULL;
1418: PetscSection coordSection;
1419: Vec coordinates;
1420: PetscScalar *coords;
1421: PetscInt coordSize, v;
1423: /* Read from ex_get_init() */
1424: char title[PETSC_MAX_PATH_LEN+1];
1425: int dim = 0, dimEmbed = 0, numVertices = 0, numCells = 0, numHybridCells = 0;
1426: int num_cs = 0, num_vs = 0, num_fs = 0;
1427: #endif
1430: #if defined(PETSC_HAVE_EXODUSII)
1431: MPI_Comm_rank(comm, &rank);
1432: MPI_Comm_size(comm, &num_proc);
1433: DMCreate(comm, dm);
1434: DMSetType(*dm, DMPLEX);
1435: /* Open EXODUS II file and read basic information on rank 0, then broadcast to all processors */
1436: if (rank == 0) {
1437: PetscMemzero(title,PETSC_MAX_PATH_LEN+1);
1438: PetscStackCallStandard(ex_get_init,(exoid, title, &dimEmbed, &numVertices, &numCells, &num_cs, &num_vs, &num_fs));
1439: if (!num_cs) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Exodus file does not contain any cell set\n");
1440: }
1441: MPI_Bcast(title, PETSC_MAX_PATH_LEN+1, MPI_CHAR, 0, comm);
1442: MPI_Bcast(&dim, 1, MPI_INT, 0, comm);
1443: PetscObjectSetName((PetscObject) *dm, title);
1444: DMPlexSetChart(*dm, 0, numCells+numVertices);
1445: /* We do not want this label automatically computed, instead we compute it here */
1446: DMCreateLabel(*dm, "celltype");
1448: /* Read cell sets information */
1449: if (rank == 0) {
1450: PetscInt *cone;
1451: int c, cs, ncs, c_loc, v, v_loc;
1452: /* Read from ex_get_elem_blk_ids() */
1453: int *cs_id, *cs_order;
1454: /* Read from ex_get_elem_block() */
1455: char buffer[PETSC_MAX_PATH_LEN+1];
1456: int num_cell_in_set, num_vertex_per_cell, num_hybrid, num_attr;
1457: /* Read from ex_get_elem_conn() */
1458: int *cs_connect;
1460: /* Get cell sets IDs */
1461: PetscMalloc2(num_cs, &cs_id, num_cs, &cs_order);
1462: PetscStackCallStandard(ex_get_ids,(exoid, EX_ELEM_BLOCK, cs_id));
1463: /* Read the cell set connectivity table and build mesh topology
1464: The EXO standard requires that cells in a cell set be numbered sequentially and that cell sets be pairwise disjoint. */
1465: /* Check for a hybrid mesh */
1466: for (cs = 0, num_hybrid = 0; cs < num_cs; ++cs) {
1467: DMPolytopeType ct;
1468: char elem_type[PETSC_MAX_PATH_LEN];
1470: PetscArrayzero(elem_type, sizeof(elem_type));
1471: PetscStackCallStandard(ex_get_elem_type,(exoid, cs_id[cs], elem_type));
1472: ExodusGetCellType_Internal(elem_type, &ct);
1473: dim = PetscMax(dim, DMPolytopeTypeGetDim(ct));
1474: PetscStackCallStandard(ex_get_block,(exoid, EX_ELEM_BLOCK, cs_id[cs], buffer, &num_cell_in_set,&num_vertex_per_cell, 0, 0, &num_attr));
1475: switch (ct) {
1476: case DM_POLYTOPE_TRI_PRISM:
1477: cs_order[cs] = cs;
1478: numHybridCells += num_cell_in_set;
1479: ++num_hybrid;
1480: break;
1481: default:
1482: for (c = cs; c > cs-num_hybrid; --c) cs_order[c] = cs_order[c-1];
1483: cs_order[cs-num_hybrid] = cs;
1484: }
1485: }
1486: /* First set sizes */
1487: for (ncs = 0, c = 0; ncs < num_cs; ++ncs) {
1488: DMPolytopeType ct;
1489: char elem_type[PETSC_MAX_PATH_LEN];
1490: const PetscInt cs = cs_order[ncs];
1492: PetscArrayzero(elem_type, sizeof(elem_type));
1493: PetscStackCallStandard(ex_get_elem_type,(exoid, cs_id[cs], elem_type));
1494: ExodusGetCellType_Internal(elem_type, &ct);
1495: PetscStackCallStandard(ex_get_block,(exoid, EX_ELEM_BLOCK, cs_id[cs], buffer, &num_cell_in_set,&num_vertex_per_cell, 0, 0, &num_attr));
1496: for (c_loc = 0; c_loc < num_cell_in_set; ++c_loc, ++c) {
1497: DMPlexSetConeSize(*dm, c, num_vertex_per_cell);
1498: DMPlexSetCellType(*dm, c, ct);
1499: }
1500: }
1501: for (v = numCells; v < numCells+numVertices; ++v) {DMPlexSetCellType(*dm, v, DM_POLYTOPE_POINT);}
1502: DMSetUp(*dm);
1503: for (ncs = 0, c = 0; ncs < num_cs; ++ncs) {
1504: const PetscInt cs = cs_order[ncs];
1505: PetscStackCallStandard(ex_get_block,(exoid, EX_ELEM_BLOCK, cs_id[cs], buffer, &num_cell_in_set, &num_vertex_per_cell, 0, 0, &num_attr));
1506: PetscMalloc2(num_vertex_per_cell*num_cell_in_set,&cs_connect,num_vertex_per_cell,&cone);
1507: PetscStackCallStandard(ex_get_conn,(exoid, EX_ELEM_BLOCK, cs_id[cs], cs_connect,NULL,NULL));
1508: /* EXO uses Fortran-based (1-based) indexing, DMPlex uses C-style (0-based) indexing and numbers cells first, then vertices. */
1509: for (c_loc = 0, v = 0; c_loc < num_cell_in_set; ++c_loc, ++c) {
1510: DMPolytopeType ct;
1512: for (v_loc = 0; v_loc < num_vertex_per_cell; ++v_loc, ++v) {
1513: cone[v_loc] = cs_connect[v]+numCells-1;
1514: }
1515: DMPlexGetCellType(*dm, c, &ct);
1516: DMPlexInvertCell(ct, cone);
1517: DMPlexSetCone(*dm, c, cone);
1518: DMSetLabelValue_Fast(*dm, &cellSets, "Cell Sets", c, cs_id[cs]);
1519: }
1520: PetscFree2(cs_connect,cone);
1521: }
1522: PetscFree2(cs_id, cs_order);
1523: }
1524: {
1525: PetscInt ints[] = {dim, dimEmbed};
1527: MPI_Bcast(ints, 2, MPIU_INT, 0, comm);
1528: DMSetDimension(*dm, ints[0]);
1529: DMSetCoordinateDim(*dm, ints[1]);
1530: dim = ints[0];
1531: dimEmbed = ints[1];
1532: }
1533: DMPlexSymmetrize(*dm);
1534: DMPlexStratify(*dm);
1535: if (interpolate) {
1536: DM idm;
1538: DMPlexInterpolate(*dm, &idm);
1539: DMDestroy(dm);
1540: *dm = idm;
1541: }
1543: /* Create vertex set label */
1544: if (rank == 0 && (num_vs > 0)) {
1545: int vs, v;
1546: /* Read from ex_get_node_set_ids() */
1547: int *vs_id;
1548: /* Read from ex_get_node_set_param() */
1549: int num_vertex_in_set;
1550: /* Read from ex_get_node_set() */
1551: int *vs_vertex_list;
1553: /* Get vertex set ids */
1554: PetscMalloc1(num_vs, &vs_id);
1555: PetscStackCallStandard(ex_get_ids,(exoid, EX_NODE_SET, vs_id));
1556: for (vs = 0; vs < num_vs; ++vs) {
1557: PetscStackCallStandard(ex_get_set_param,(exoid, EX_NODE_SET, vs_id[vs], &num_vertex_in_set, NULL));
1558: PetscMalloc1(num_vertex_in_set, &vs_vertex_list);
1559: PetscStackCallStandard(ex_get_set,(exoid, EX_NODE_SET, vs_id[vs], vs_vertex_list, NULL));
1560: for (v = 0; v < num_vertex_in_set; ++v) {
1561: DMSetLabelValue_Fast(*dm, &vertSets, "Vertex Sets", vs_vertex_list[v]+numCells-1, vs_id[vs]);
1562: }
1563: PetscFree(vs_vertex_list);
1564: }
1565: PetscFree(vs_id);
1566: }
1567: /* Read coordinates */
1568: DMGetCoordinateSection(*dm, &coordSection);
1569: PetscSectionSetNumFields(coordSection, 1);
1570: PetscSectionSetFieldComponents(coordSection, 0, dimEmbed);
1571: PetscSectionSetChart(coordSection, numCells, numCells + numVertices);
1572: for (v = numCells; v < numCells+numVertices; ++v) {
1573: PetscSectionSetDof(coordSection, v, dimEmbed);
1574: PetscSectionSetFieldDof(coordSection, v, 0, dimEmbed);
1575: }
1576: PetscSectionSetUp(coordSection);
1577: PetscSectionGetStorageSize(coordSection, &coordSize);
1578: VecCreate(PETSC_COMM_SELF, &coordinates);
1579: PetscObjectSetName((PetscObject) coordinates, "coordinates");
1580: VecSetSizes(coordinates, coordSize, PETSC_DETERMINE);
1581: VecSetBlockSize(coordinates, dimEmbed);
1582: VecSetType(coordinates,VECSTANDARD);
1583: VecGetArray(coordinates, &coords);
1584: if (rank == 0) {
1585: PetscReal *x, *y, *z;
1587: PetscMalloc3(numVertices,&x,numVertices,&y,numVertices,&z);
1588: PetscStackCallStandard(ex_get_coord,(exoid, x, y, z));
1589: if (dimEmbed > 0) {
1590: for (v = 0; v < numVertices; ++v) coords[v*dimEmbed+0] = x[v];
1591: }
1592: if (dimEmbed > 1) {
1593: for (v = 0; v < numVertices; ++v) coords[v*dimEmbed+1] = y[v];
1594: }
1595: if (dimEmbed > 2) {
1596: for (v = 0; v < numVertices; ++v) coords[v*dimEmbed+2] = z[v];
1597: }
1598: PetscFree3(x,y,z);
1599: }
1600: VecRestoreArray(coordinates, &coords);
1601: DMSetCoordinatesLocal(*dm, coordinates);
1602: VecDestroy(&coordinates);
1604: /* Create side set label */
1605: if (rank == 0 && interpolate && (num_fs > 0)) {
1606: int fs, f, voff;
1607: /* Read from ex_get_side_set_ids() */
1608: int *fs_id;
1609: /* Read from ex_get_side_set_param() */
1610: int num_side_in_set;
1611: /* Read from ex_get_side_set_node_list() */
1612: int *fs_vertex_count_list, *fs_vertex_list;
1613: /* Read side set labels */
1614: char fs_name[MAX_STR_LENGTH+1];
1616: /* Get side set ids */
1617: PetscMalloc1(num_fs, &fs_id);
1618: PetscStackCallStandard(ex_get_ids,(exoid, EX_SIDE_SET, fs_id));
1619: for (fs = 0; fs < num_fs; ++fs) {
1620: PetscStackCallStandard(ex_get_set_param,(exoid, EX_SIDE_SET, fs_id[fs], &num_side_in_set, NULL));
1621: PetscMalloc2(num_side_in_set,&fs_vertex_count_list,num_side_in_set*4,&fs_vertex_list);
1622: PetscStackCallStandard(ex_get_side_set_node_list,(exoid, fs_id[fs], fs_vertex_count_list, fs_vertex_list));
1623: /* Get the specific name associated with this side set ID. */
1624: int fs_name_err = ex_get_name(exoid, EX_SIDE_SET, fs_id[fs], fs_name);
1625: for (f = 0, voff = 0; f < num_side_in_set; ++f) {
1626: const PetscInt *faces = NULL;
1627: PetscInt faceSize = fs_vertex_count_list[f], numFaces;
1628: PetscInt faceVertices[4], v;
1630: if (faceSize > 4) SETERRQ1(comm, PETSC_ERR_ARG_WRONG, "ExodusII side cannot have %d > 4 vertices", faceSize);
1631: for (v = 0; v < faceSize; ++v, ++voff) {
1632: faceVertices[v] = fs_vertex_list[voff]+numCells-1;
1633: }
1634: DMPlexGetFullJoin(*dm, faceSize, faceVertices, &numFaces, &faces);
1635: if (numFaces != 1) SETERRQ3(comm, PETSC_ERR_ARG_WRONG, "Invalid ExodusII side %d in set %d maps to %d faces", f, fs, numFaces);
1636: DMSetLabelValue_Fast(*dm, &faceSets, "Face Sets", faces[0], fs_id[fs]);
1637: /* Only add the label if one has been detected for this side set. */
1638: if (!fs_name_err) {
1639: DMSetLabelValue(*dm, fs_name, faces[0], fs_id[fs]);
1640: }
1641: DMPlexRestoreJoin(*dm, faceSize, faceVertices, &numFaces, &faces);
1642: }
1643: PetscFree2(fs_vertex_count_list,fs_vertex_list);
1644: }
1645: PetscFree(fs_id);
1646: }
1648: { /* Create Cell/Face/Vertex Sets labels at all processes */
1649: enum {n = 3};
1650: PetscBool flag[n];
1652: flag[0] = cellSets ? PETSC_TRUE : PETSC_FALSE;
1653: flag[1] = faceSets ? PETSC_TRUE : PETSC_FALSE;
1654: flag[2] = vertSets ? PETSC_TRUE : PETSC_FALSE;
1655: MPI_Bcast(flag, n, MPIU_BOOL, 0, comm);
1656: if (flag[0]) {DMCreateLabel(*dm, "Cell Sets");}
1657: if (flag[1]) {DMCreateLabel(*dm, "Face Sets");}
1658: if (flag[2]) {DMCreateLabel(*dm, "Vertex Sets");}
1659: }
1660: return(0);
1661: #else
1662: SETERRQ(comm, PETSC_ERR_SUP, "This method requires ExodusII support. Reconfigure using --download-exodusii");
1663: #endif
1664: }