Actual source code: mpb_aij.c
petsc-3.14.6 2021-03-30
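
In outline, MatGetMultiProcBlock_MPIAIJ() builds, on the processes of the given sub-communicator, an MPIAIJ matrix that keeps mat's local rows and only those columns owned by processes belonging to subComm: the diagonal ("A") block is reused by reference, while the off-diagonal ("B") block is re-assembled with its column indices remapped to the sub-communicator's layout.
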
#include <../src/mat/impls/aij/mpi/mpiaij.h>

PetscErrorCode MatGetMultiProcBlock_MPIAIJ(Mat mat, MPI_Comm subComm, MatReuse scall,Mat *subMat)
{
  Mat_MPIAIJ     *aij  = (Mat_MPIAIJ*)mat->data;
  Mat_SeqAIJ     *aijB = (Mat_SeqAIJ*)aij->B->data;
  PetscMPIInt    subCommSize,subCommRank;
  PetscMPIInt    *commRankMap,subRank,rank,commRank;
  PetscInt       *garrayCMap,col,i,j,*nnz,newRow,newCol;

  PetscFunctionBegin;
  MPI_Comm_size(subComm,&subCommSize);
  MPI_Comm_rank(subComm,&subCommRank);
  MPI_Comm_rank(PetscObjectComm((PetscObject)mat),&commRank);

  /* create subMat object with the relevant layout */
  if (scall == MAT_INITIAL_MATRIX) {
    MatCreate(subComm,subMat);
    MatSetType(*subMat,MATMPIAIJ);
    MatSetSizes(*subMat,mat->rmap->n,mat->cmap->n,PETSC_DECIDE,PETSC_DECIDE);
    MatSetBlockSizesFromMats(*subMat,mat,mat);

    /* need to set up rmap and cmap before preallocation */
    PetscLayoutSetUp((*subMat)->rmap);
    PetscLayoutSetUp((*subMat)->cmap);
  }

  /* create a map of comm_rank from subComm to comm - should commRankMap and garrayCMap be kept for reuse? */
  PetscMalloc1(subCommSize,&commRankMap);
  MPI_Allgather(&commRank,1,MPI_INT,commRankMap,1,MPI_INT,subComm);
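  /* commRankMap[r] is the rank, in mat's original communicator, of the process with rank r in subComm */
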
  /* Traverse garray and identify column indices [of offdiag mat] that
     should be discarded. For the ones not discarded, store the newCol+1
     value in garrayCMap */
  PetscCalloc1(aij->B->cmap->n,&garrayCMap);
  for (i=0; i<aij->B->cmap->n; i++) {
    col = aij->garray[i];
    for (subRank=0; subRank<subCommSize; subRank++) {
      rank = commRankMap[subRank];
      if ((col >= mat->cmap->range[rank]) && (col < mat->cmap->range[rank+1])) {
        garrayCMap[i] = (*subMat)->cmap->range[subRank] + col - mat->cmap->range[rank] + 1;
        break;
      }
    }
  }

  if (scall == MAT_INITIAL_MATRIX) {
    /* Compute preallocation for the offdiag mat */
    PetscCalloc1(aij->B->rmap->n,&nnz);
    for (i=0; i<aij->B->rmap->n; i++) {
      for (j=aijB->i[i]; j<aijB->i[i+1]; j++) {
        if (garrayCMap[aijB->j[j]]) nnz[i]++;
      }
    }
    MatMPIAIJSetPreallocation(*(subMat),0,NULL,0,nnz);
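    /* no diagonal-block preallocation is requested here: the diagonal block is replaced just below by a reference to aij->A */
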
    /* reuse diag block with the new submat */
    MatDestroy(&((Mat_MPIAIJ*)((*subMat)->data))->A);
    ((Mat_MPIAIJ*)((*subMat)->data))->A = aij->A;
    PetscObjectReference((PetscObject)aij->A);
  } else if (((Mat_MPIAIJ*)(*subMat)->data)->A != aij->A) {
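    /* MAT_REUSE_MATRIX with a different diagonal block: reference the previous block, then point subMat at the current aij->A (also referenced) */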
    PetscObject obj = (PetscObject)((Mat_MPIAIJ*)((*subMat)->data))->A;
    PetscObjectReference((PetscObject)obj);
    ((Mat_MPIAIJ*)((*subMat)->data))->A = aij->A;
    PetscObjectReference((PetscObject)aij->A);
  }

  /* Traverse aij->B and insert values into subMat */
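  /* in the MAT_REUSE_MATRIX case subMat is already assembled; mark it was_assembled so values can be inserted again */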
  if ((*subMat)->assembled) {
    (*subMat)->was_assembled = PETSC_TRUE;
    (*subMat)->assembled     = PETSC_FALSE;
  }
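  /* copy the surviving off-diagonal entries, translating row and column indices into subMat's global numbering */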
  for (i=0; i<aij->B->rmap->n; i++) {
    newRow = (*subMat)->rmap->range[subCommRank] + i;
    for (j=aijB->i[i]; j<aijB->i[i+1]; j++) {
      newCol = garrayCMap[aijB->j[j]];
      if (newCol) {
        newCol--; /* remove the increment */
        MatSetValues_MPIAIJ(*subMat,1,&newRow,1,&newCol,(aijB->a+j),INSERT_VALUES);
      }
    }
  }
  MatAssemblyBegin(*subMat,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(*subMat,MAT_FINAL_ASSEMBLY);

  /* deallocate temporary data */
  PetscFree(commRankMap);
  PetscFree(garrayCMap);
  if (scall == MAT_INITIAL_MATRIX) {
    PetscFree(nnz);
  }
  PetscFunctionReturn(0);
}
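
For context, here is a minimal caller sketch. It is not part of this file; the tridiagonal test matrix, the size N, and the split of PETSC_COMM_WORLD into two halves are illustrative assumptions, and error checking (ierr/CHKERRQ) is omitted for brevity. Applications normally reach MatGetMultiProcBlock_MPIAIJ() through the type-independent MatGetMultiProcBlock() interface, roughly as follows (run on several MPI ranks):

#include <petscmat.h>

int main(int argc,char **argv)
{
  Mat         A,Asub;
  MPI_Comm    subcomm;
  PetscMPIInt rank,size;
  PetscInt    i,rstart,rend,N = 100;

  PetscInitialize(&argc,&argv,NULL,NULL);
  MPI_Comm_rank(PETSC_COMM_WORLD,&rank);
  MPI_Comm_size(PETSC_COMM_WORLD,&size);

  /* a simple distributed tridiagonal MPIAIJ test matrix */
  MatCreate(PETSC_COMM_WORLD,&A);
  MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,N,N);
  MatSetType(A,MATMPIAIJ);
  MatMPIAIJSetPreallocation(A,3,NULL,2,NULL);
  MatGetOwnershipRange(A,&rstart,&rend);
  for (i=rstart; i<rend; i++) {
    MatSetValue(A,i,i,2.0,INSERT_VALUES);
    if (i > 0)   MatSetValue(A,i,i-1,-1.0,INSERT_VALUES);
    if (i < N-1) MatSetValue(A,i,i+1,-1.0,INSERT_VALUES);
  }
  MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);

  /* every rank joins one of two sub-communicators (lower half / upper half of PETSC_COMM_WORLD) */
  MPI_Comm_split(PETSC_COMM_WORLD,rank < size/2 ? 0 : 1,rank,&subcomm);

  /* dispatches to MatGetMultiProcBlock_MPIAIJ() because A is MATMPIAIJ */
  MatGetMultiProcBlock(A,subcomm,MAT_INITIAL_MATRIX,&Asub);

  /* if A's values change later, the extraction can be repeated with
     MatGetMultiProcBlock(A,subcomm,MAT_REUSE_MATRIX,&Asub); */

  MatDestroy(&Asub);
  MatDestroy(&A);
  MPI_Comm_free(&subcomm);
  PetscFinalize();
  return 0;
}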