Actual source code: ex11.c
petsc-3.7.3 2016-08-01

static char help[] = "Tests MatMeshToDual()\n\n";

/*T
   Concepts: Mat^mesh partitioning
   Processors: n
T*/

/*
  Include "petscmat.h" so that we can use matrices; it automatically includes:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscmat.h    - matrices
     petscis.h     - index sets
     petscviewer.h - viewers
*/
#include <petscmat.h>

int main(int argc,char **args)
{
  Mat             mesh,dual;
  PetscErrorCode  ierr;
  PetscInt        Nvertices = 6;        /* total number of vertices */
  PetscInt        ncells    = 2;        /* number of cells on this process */
  PetscInt        *ii,*jj;
  PetscMPIInt     size,rank;
  MatPartitioning part;
  IS              is;

  PetscInitialize(&argc,&args,(char*)0,help);
  MPI_Comm_size(MPI_COMM_WORLD,&size);
  if (size != 2) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_SUP,"This example is for exactly two processes");
  MPI_Comm_rank(MPI_COMM_WORLD,&rank);

  /* Each process owns ncells = 2 cells; ii[] and jj[] give the cells' vertex
     lists in CSR form, with 3 vertices per cell */
  ierr  = PetscMalloc1(3,&ii);CHKERRQ(ierr);
  ierr  = PetscMalloc1(6,&jj);CHKERRQ(ierr);
  ii[0] = 0; ii[1] = 3; ii[2] = 6;
  if (!rank) {
    jj[0] = 0; jj[1] = 1; jj[2] = 2; jj[3] = 1; jj[4] = 2; jj[5] = 3;
  } else {
    jj[0] = 1; jj[1] = 4; jj[2] = 5; jj[3] = 1; jj[4] = 3; jj[5] = 5;
  }
  /* The MPIAdj matrix takes ownership of ii and jj; they are freed when the matrix is destroyed */
  ierr = MatCreateMPIAdj(MPI_COMM_WORLD,ncells,Nvertices,ii,jj,NULL,&mesh);CHKERRQ(ierr);
  /* Cells sharing at least 2 vertices become neighbors in the dual (cell) graph */
  ierr = MatMeshToCellGraph(mesh,2,&dual);CHKERRQ(ierr);
  ierr = MatView(dual,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);

  /* Partition the dual graph; the resulting IS gives the process each local cell is assigned to */
  ierr = MatPartitioningCreate(MPI_COMM_WORLD,&part);CHKERRQ(ierr);
  ierr = MatPartitioningSetAdjacency(part,dual);CHKERRQ(ierr);
  ierr = MatPartitioningSetFromOptions(part);CHKERRQ(ierr);
  ierr = MatPartitioningApply(part,&is);CHKERRQ(ierr);
  ierr = ISView(is,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  ierr = ISDestroy(&is);CHKERRQ(ierr);
  ierr = MatPartitioningDestroy(&part);CHKERRQ(ierr);

  ierr = MatDestroy(&mesh);CHKERRQ(ierr);
  ierr = MatDestroy(&dual);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return 0;
}
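
To try the example, PETSc must be configured with ParMETIS, since MatMeshToCellGraph() uses ParMETIS to construct the dual graph, and the program must be launched on exactly two MPI ranks. A typical invocation (the binary name and the choice of partitioner option are illustrative, not part of the source) is

  mpiexec -n 2 ./ex11 -mat_partitioning_type parmetis

which prints the dual graph and the computed partitioning index set to standard output.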