Actual source code: ex83.c
petsc-3.11.4 2019-09-28
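This example builds the adjacency matrix of a tiny 4 x 4 grid distributed over 4 MPI processes, partitions it with the hierarchical partitioner (2 coarse parts, each refined into 2 fine parts), and then increases the overlap of the resulting row index sets on two sub-communicators using MatIncreaseOverlapSplit. It is normally run as "mpiexec -n 4 ./ex83"; the hierarchical partitioner generally requires PETSc to be configured with an external partitioning package such as ParMETIS.
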
static char help[] = "Partition tiny grid using hierarchical partitioning and increase overlap using MatIncreaseOverlapSplit.\n\n";

/*T
   Concepts: partitioning
   Processors: 4
T*/

/*
  Include "petscmat.h" so that we can use matrices. Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscmat.h    - matrices
     petscis.h     - index sets
     petscviewer.h - viewers
*/
#include <petscmat.h>

int main(int argc,char **args)
{
  Mat             A,B;
  PetscErrorCode  ierr;
  PetscMPIInt     rank,size,membershipKey;
  PetscInt        *ia,*ja,*indices_sc,isrows_localsize;
  const PetscInt  *indices;
  MatPartitioning part;
  IS              is,isrows,isrows_sc;
  IS              coarseparts,fineparts;
  MPI_Comm        comm,scomm;

  ierr = PetscInitialize(&argc,&args,(char*)0,help);if (ierr) return ierr;
  comm = PETSC_COMM_WORLD;
  MPI_Comm_size(comm,&size);
  if (size != 4) SETERRQ(comm,1,"Must run with 4 processors\n");
  MPI_Comm_rank(comm,&rank);
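  /*
     The adjacency graph below is a 4 x 4 grid with 16 vertices numbered row
     by row; each of the 4 processes supplies the 4 rows it owns in CSR form,
     with ia[] holding the row offsets and ja[] the global neighbor indices.
     membershipKey groups ranks 0-1 and ranks 2-3 and is used later to split
     PETSC_COMM_WORLD into two sub-communicators.
  */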
  /* set up a small matrix */
  PetscMalloc1(5,&ia);
  PetscMalloc1(16,&ja);
  if (!rank) {
    ja[0] = 1; ja[1] = 4; ja[2] = 0; ja[3] = 2; ja[4] = 5; ja[5] = 1; ja[6] = 3; ja[7] = 6;
    ja[8] = 2; ja[9] = 7;
    ia[0] = 0; ia[1] = 2; ia[2] = 5; ia[3] = 8; ia[4] = 10;
    membershipKey = 0;
  } else if (rank == 1) {
    ja[0] = 0; ja[1] = 5; ja[2] = 8; ja[3] = 1; ja[4] = 4; ja[5] = 6; ja[6] = 9; ja[7] = 2;
    ja[8] = 5; ja[9] = 7; ja[10] = 10; ja[11] = 3; ja[12] = 6; ja[13] = 11;
    ia[0] = 0; ia[1] = 3; ia[2] = 7; ia[3] = 11; ia[4] = 14;
    membershipKey = 0;
  } else if (rank == 2) {
    ja[0] = 4; ja[1] = 9; ja[2] = 12; ja[3] = 5; ja[4] = 8; ja[5] = 10; ja[6] = 13; ja[7] = 6;
    ja[8] = 9; ja[9] = 11; ja[10] = 14; ja[11] = 7; ja[12] = 10; ja[13] = 15;
    ia[0] = 0; ia[1] = 3; ia[2] = 7; ia[3] = 11; ia[4] = 14;
    membershipKey = 1;
  } else {
    ja[0] = 8; ja[1] = 13; ja[2] = 9; ja[3] = 12; ja[4] = 14; ja[5] = 10; ja[6] = 13; ja[7] = 15;
    ja[8] = 11; ja[9] = 14;
    ia[0] = 0; ia[1] = 2; ia[2] = 5; ia[3] = 8; ia[4] = 10;
    membershipKey = 1;
  }
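  /*
     MatCreateMPIAdj does not copy ia and ja: the matrix takes ownership of
     the arrays and frees them when A is destroyed, so they must not be
     freed by the caller.
  */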
  MatCreateMPIAdj(comm,4,16,ia,ja,NULL,&A);
  MatView(A,PETSC_VIEWER_STDOUT_WORLD);
  /*
     Partition the graph of the matrix
  */
  MatPartitioningCreate(comm,&part);
  MatPartitioningSetAdjacency(part,A);
  MatPartitioningSetType(part,MATPARTITIONINGHIERARCH);
  MatPartitioningHierarchicalSetNcoarseparts(part,2);
  MatPartitioningHierarchicalSetNfineparts(part,2);
  MatPartitioningSetFromOptions(part);
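  /*
     The hierarchical partitioner first splits the graph into 2 coarse parts
     and then splits each coarse part into 2 fine parts, giving 4 parts in
     total, one per process.
  */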
  /* get the new owning process of each vertex */
  MatPartitioningApply(part,&is);
  /* coarse parts */
  MatPartitioningHierarchicalGetCoarseparts(part,&coarseparts);
  ISView(coarseparts,PETSC_VIEWER_STDOUT_WORLD);
  /* fine parts */
  MatPartitioningHierarchicalGetFineparts(part,&fineparts);
  ISView(fineparts,PETSC_VIEWER_STDOUT_WORLD);
  /* the resulting partitioning */
  ISView(is,PETSC_VIEWER_STDOUT_WORLD);
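  /*
     "is" maps every locally owned vertex to the rank that will own it after
     repartitioning; ISBuildTwoSided below inverts this map and returns, on
     each process, the global indices of the vertices assigned to it.
  */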
  /* compute the incoming rows for this process */
  ISBuildTwoSided(is,NULL,&isrows);
  ISView(isrows,PETSC_VIEWER_STDOUT_WORLD);
  /* create a sub-communicator */
  MPI_Comm_split(comm,membershipKey,rank,&scomm);
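  /*
     membershipKey is 0 on ranks 0-1 and 1 on ranks 2-3, so the split produces
     two sub-communicators, {0,1} and {2,3}; isrows_sc and
     MatIncreaseOverlapSplit below operate on these sub-communicators.
  */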
  ISGetLocalSize(isrows,&isrows_localsize);
  PetscCalloc1(isrows_localsize,&indices_sc);
  ISGetIndices(isrows,&indices);
  PetscMemcpy(indices_sc,indices,sizeof(PetscInt)*isrows_localsize);
  ISRestoreIndices(isrows,&indices);
  ISDestroy(&is);
  ISDestroy(&coarseparts);
  ISDestroy(&fineparts);
  ISDestroy(&isrows);
  MatPartitioningDestroy(&part);
  /* create a sub-IS on the sub-communicator */
  ISCreateGeneral(scomm,isrows_localsize,indices_sc,PETSC_OWN_POINTER,&isrows_sc);
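  /*
     With PETSC_OWN_POINTER the new IS takes ownership of indices_sc and
     frees it when isrows_sc is destroyed; this is why the indices were
     copied out of isrows above rather than used in place.
  */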
  MatConvert(A,MATMPIAIJ,MAT_INITIAL_MATRIX,&B);
#if 1
  MatView(B,PETSC_VIEWER_STDOUT_WORLD);
#endif
  /* increase overlap */
  MatIncreaseOverlapSplit(B,1,&isrows_sc,1);
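  /*
     MatIncreaseOverlapSplit grows isrows_sc by one layer of overlap in the
     graph of B; unlike MatIncreaseOverlap, the index set is allowed to live
     on a sub-communicator of the matrix's communicator.
  */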
  ISView(isrows_sc,NULL);
  ISDestroy(&isrows_sc);
  /*
     Free work space. All PETSc objects should be destroyed when they
     are no longer needed.
  */
  MatDestroy(&A);
  MatDestroy(&B);
  ierr = PetscFinalize();
  return ierr;
}