Actual source code: ex11.c

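This example builds a small distributed mesh as an MPIAdj matrix, converts it to its dual (cell-to-cell) graph with MatMeshToCellGraph(), and partitions the resulting graph.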

static char help[] = "Tests MatMeshToCellGraph()\n\n";

/*T
   Concepts: Mat^mesh partitioning
   Processors: 2
T*/

/*
  Include "petscmat.h" so that we can use matrices; it automatically
  includes:
     petscsys.h    - base PETSc routines   petscvec.h    - vectors
     petscis.h     - index sets            petscviewer.h - viewers
*/
#include <petscmat.h>

int main(int argc,char **args)
{
  Mat             mesh,dual;
  PetscErrorCode  ierr;
  PetscInt        Nvertices = 6;       /* total number of vertices */
  PetscInt        ncells    = 2;       /* number of cells on this process */
  PetscInt        *ii,*jj;
  PetscMPIInt     size,rank;
  MatPartitioning part;
  IS              is;

  ierr = PetscInitialize(&argc,&args,(char*)0,help);if (ierr) return ierr;
  ierr = MPI_Comm_size(MPI_COMM_WORLD,&size);CHKERRQ(ierr);
  if (size != 2) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_SUP,"This example is for exactly two processes");
  ierr = MPI_Comm_rank(MPI_COMM_WORLD,&rank);CHKERRQ(ierr);

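  /*
     Each of the two ranks owns ncells = 2 triangular cells of a mesh with
     Nvertices = 6 global vertices, stored in CSR form: entries
     jj[ii[c]]..jj[ii[c+1]-1] are the three vertex numbers of local cell c.
  */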
  ierr = PetscMalloc1(3,&ii);CHKERRQ(ierr);
  ierr = PetscMalloc1(6,&jj);CHKERRQ(ierr);
  ii[0] = 0; ii[1] = 3; ii[2] = 6;
  if (!rank) {
    jj[0] = 0; jj[1] = 1; jj[2] = 2; jj[3] = 1; jj[4] = 2; jj[5] = 3;
  } else {
    jj[0] = 1; jj[1] = 4; jj[2] = 5; jj[3] = 1; jj[4] = 3; jj[5] = 5;
  }
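  /*
     MatCreateMPIAdj() takes ownership of ii and jj; they are freed by
     MatDestroy() rather than by the caller.
  */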
  ierr = MatCreateMPIAdj(MPI_COMM_WORLD,ncells,Nvertices,ii,jj,NULL,&mesh);CHKERRQ(ierr);
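  /*
     Build the dual graph: cells become the graph's vertices, and two cells
     are adjacent when they share at least 2 mesh vertices (an edge of a
     triangle).  MatMeshToCellGraph() requires a PETSc build that includes
     ParMETIS.
  */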
  ierr = MatMeshToCellGraph(mesh,2,&dual);CHKERRQ(ierr);
  ierr = MatView(dual,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);

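  /*
     Partition the dual graph.  The partitioner can be chosen at run time,
     e.g. with -mat_partitioning_type parmetis; the resulting index set
     gives, for each local cell, the rank it is assigned to.
  */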
  ierr = MatPartitioningCreate(MPI_COMM_WORLD,&part);CHKERRQ(ierr);
  ierr = MatPartitioningSetAdjacency(part,dual);CHKERRQ(ierr);
  ierr = MatPartitioningSetFromOptions(part);CHKERRQ(ierr);
  ierr = MatPartitioningApply(part,&is);CHKERRQ(ierr);
  ierr = ISView(is,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  ierr = ISDestroy(&is);CHKERRQ(ierr);
  ierr = MatPartitioningDestroy(&part);CHKERRQ(ierr);

  ierr = MatDestroy(&mesh);CHKERRQ(ierr);
  ierr = MatDestroy(&dual);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return ierr;
}
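For reference, one possible way to run the example, assuming a PETSc build
that includes ParMETIS (required by MatMeshToCellGraph()) and noting that
the code insists on exactly two MPI ranks:

   mpiexec -n 2 ./ex11 -mat_partitioning_type parmetis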