Actual source code: mpiadj.c


  2: /*
  3:     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
  4: */
  5: #include <../src/mat/impls/adj/mpi/mpiadj.h>
  6: #include <petscsf.h>

  8: /*
  9:  * Extract the rows in irows and the columns in icols of adj into CSR arrays; serves both MatCreateSubMatrix (parallel sub-matrix) and MatCreateSubMatrices (sequential sub-matrices).
 10:  */
 11: static PetscErrorCode MatCreateSubMatrix_MPIAdj_data(Mat adj,IS irows, IS icols, PetscInt **sadj_xadj,PetscInt **sadj_adjncy,PetscInt **sadj_values)
 12: {
 13:   PetscInt           nlrows_is,icols_n,i,j,nroots,nleaves,rlocalindex,*ncols_send,*ncols_recv;
 14:   PetscInt           nlrows_mat,*adjncy_recv,Ncols_recv,Ncols_send,*xadj_recv,*values_recv;
 15:   PetscInt          *ncols_recv_offsets,loc,rnclos,*sadjncy,*sxadj,*svalues;
 16:   const PetscInt    *irows_indices,*icols_indices,*xadj, *adjncy;
 17:   PetscMPIInt        owner;
 18:   Mat_MPIAdj        *a = (Mat_MPIAdj*)adj->data;
 19:   PetscLayout        rmap;
 20:   MPI_Comm           comm;
 21:   PetscSF            sf;
 22:   PetscSFNode       *iremote;
 23:   PetscBool          done;

 25:   PetscObjectGetComm((PetscObject)adj,&comm);
 26:   MatGetLayouts(adj,&rmap,NULL);
 27:   ISGetLocalSize(irows,&nlrows_is);
 28:   ISGetIndices(irows,&irows_indices);
 29:   PetscMalloc1(nlrows_is,&iremote);
 30:   /* construct the SF graph: one leaf per requested row, rooted at the owning process */
 31:   nleaves = nlrows_is;
 32:   for (i=0; i<nlrows_is; i++) {
 33:     owner = -1;
 34:     rlocalindex = -1;
 35:     PetscLayoutFindOwnerIndex(rmap,irows_indices[i],&owner,&rlocalindex);
 36:     iremote[i].rank  = owner;
 37:     iremote[i].index = rlocalindex;
 38:   }
 39:   MatGetRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done);
 40:   PetscCalloc4(nlrows_mat,&ncols_send,nlrows_is,&xadj_recv,nlrows_is+1,&ncols_recv_offsets,nlrows_is,&ncols_recv);
 41:   nroots = nlrows_mat;
 42:   for (i=0; i<nlrows_mat; i++) {
 43:     ncols_send[i] = xadj[i+1]-xadj[i];
 44:   }
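      /* A first star forest (SF) brings back, for each requested row, its length (ncols) and its offset (xadj) on the owning process */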
 45:   PetscSFCreate(comm,&sf);
 46:   PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);
 47:   PetscSFSetType(sf,PETSCSFBASIC);
 48:   PetscSFSetFromOptions(sf);
 49:   PetscSFBcastBegin(sf,MPIU_INT,ncols_send,ncols_recv,MPI_REPLACE);
 50:   PetscSFBcastEnd(sf,MPIU_INT,ncols_send,ncols_recv,MPI_REPLACE);
 51:   PetscSFBcastBegin(sf,MPIU_INT,xadj,xadj_recv,MPI_REPLACE);
 52:   PetscSFBcastEnd(sf,MPIU_INT,xadj,xadj_recv,MPI_REPLACE);
 53:   PetscSFDestroy(&sf);
 54:   Ncols_recv =0;
 55:   for (i=0; i<nlrows_is; i++) {
 56:     Ncols_recv             += ncols_recv[i];
 57:     ncols_recv_offsets[i+1] = ncols_recv[i]+ncols_recv_offsets[i];
 58:   }
 59:   Ncols_send = 0;
 60:   for (i=0; i<nlrows_mat; i++) {
 61:     Ncols_send += ncols_send[i];
 62:   }
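      /* a second SF, with one leaf per incoming nonzero, will gather the column indices (and optional edge weights) */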
 63:   PetscCalloc1(Ncols_recv,&iremote);
 64:   PetscCalloc1(Ncols_recv,&adjncy_recv);
 65:   nleaves = Ncols_recv;
 66:   Ncols_recv = 0;
 67:   for (i=0; i<nlrows_is; i++) {
 68:     PetscLayoutFindOwner(rmap,irows_indices[i],&owner);
 69:     for (j=0; j<ncols_recv[i]; j++) {
 70:       iremote[Ncols_recv].rank    = owner;
 71:       iremote[Ncols_recv++].index = xadj_recv[i]+j;
 72:     }
 73:   }
 74:   ISRestoreIndices(irows,&irows_indices);
 75:   /* allocate receive space for the edge weights if they are in use */
 76:   if (a->useedgeweights) PetscCalloc1(Ncols_recv,&values_recv);
 77:   nroots = Ncols_send;
 78:   PetscSFCreate(comm,&sf);
 79:   PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);
 80:   PetscSFSetType(sf,PETSCSFBASIC);
 81:   PetscSFSetFromOptions(sf);
 82:   PetscSFBcastBegin(sf,MPIU_INT,adjncy,adjncy_recv,MPI_REPLACE);
 83:   PetscSFBcastEnd(sf,MPIU_INT,adjncy,adjncy_recv,MPI_REPLACE);
 84:   if (a->useedgeweights) {
 85:     PetscSFBcastBegin(sf,MPIU_INT,a->values,values_recv,MPI_REPLACE);
 86:     PetscSFBcastEnd(sf,MPIU_INT,a->values,values_recv,MPI_REPLACE);
 87:   }
 88:   PetscSFDestroy(&sf);
 89:   MatRestoreRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done);
 90:   ISGetLocalSize(icols,&icols_n);
 91:   ISGetIndices(icols,&icols_indices);
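      /* keep only the columns that appear in icols; dropped entries are marked with -1 and skipped during the compression below */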
 92:   rnclos = 0;
 93:   for (i=0; i<nlrows_is; i++) {
 94:     for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++) {
 95:       PetscFindInt(adjncy_recv[j], icols_n, icols_indices, &loc);
 96:       if (loc<0) {
 97:         adjncy_recv[j] = -1;
 98:         if (a->useedgeweights) values_recv[j] = -1;
 99:         ncols_recv[i]--;
100:       } else {
101:         rnclos++;
102:       }
103:     }
104:   }
105:   ISRestoreIndices(icols,&icols_indices);
106:   PetscCalloc1(rnclos,&sadjncy);
107:   if (a->useedgeweights) PetscCalloc1(rnclos,&svalues);
108:   PetscCalloc1(nlrows_is+1,&sxadj);
109:   rnclos = 0;
110:   for (i=0; i<nlrows_is; i++) {
111:     for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++) {
112:       if (adjncy_recv[j]<0) continue;
113:       sadjncy[rnclos] = adjncy_recv[j];
114:       if (a->useedgeweights) svalues[rnclos] = values_recv[j];
115:       rnclos++;
116:     }
117:   }
118:   for (i=0; i<nlrows_is; i++) {
119:     sxadj[i+1] = sxadj[i]+ncols_recv[i];
120:   }
121:   if (sadj_xadj)   { *sadj_xadj   = sxadj;   } else PetscFree(sxadj);
122:   if (sadj_adjncy) { *sadj_adjncy = sadjncy; } else PetscFree(sadjncy);
123:   if (sadj_values) {
124:     if (a->useedgeweights) *sadj_values = svalues; else *sadj_values=NULL;
125:   } else {
126:     if (a->useedgeweights) PetscFree(svalues);
127:   }
128:   PetscFree4(ncols_send,xadj_recv,ncols_recv_offsets,ncols_recv);
129:   PetscFree(adjncy_recv);
130:   if (a->useedgeweights) PetscFree(values_recv);
131:   return 0;
132: }

134: static PetscErrorCode MatCreateSubMatrices_MPIAdj_Private(Mat mat,PetscInt n,const IS irow[],const IS icol[],PetscBool subcomm,MatReuse scall,Mat *submat[])
135: {
136:   PetscInt           i,irow_n,icol_n,*sxadj,*sadjncy,*svalues;
137:   PetscInt          *indices,nindx,j,k,loc;
138:   PetscMPIInt        issame;
139:   const PetscInt    *irow_indices,*icol_indices;
140:   MPI_Comm           scomm_row,scomm_col,scomm_mat;

142:   nindx = 0;
143:   /*
144:    * Compute an upper bound on the scratch space needed for the merged row and column indices
145:    */
146:   for (i=0; i<n; i++) {
147:     ISGetLocalSize(irow[i],&irow_n);
148:     ISGetLocalSize(icol[i],&icol_n);
149:     nindx = PetscMax(nindx,irow_n+icol_n);
150:   }
151:   PetscMalloc1(nindx,&indices);
152:   /* construct a submat */
153:   for (i=0; i<n; i++) {
154:     if (subcomm) {
155:       PetscObjectGetComm((PetscObject)irow[i],&scomm_row);
156:       PetscObjectGetComm((PetscObject)icol[i],&scomm_col);
157:       MPI_Comm_compare(scomm_row,scomm_col,&issame);
158:       PetscCheck(issame == MPI_IDENT,PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Row index set and column index set must be on the same communicator");
159:       MPI_Comm_compare(scomm_row,PETSC_COMM_SELF,&issame);
160:       PetscCheck(issame != MPI_IDENT,PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Index sets should not be on PETSC_COMM_SELF when extracting parallel sub-matrices");
161:     } else {
162:       scomm_row = PETSC_COMM_SELF;
163:     }
164:     /*get sub-matrix data*/
165:     sxadj=NULL; sadjncy=NULL; svalues=NULL;
166:     MatCreateSubMatrix_MPIAdj_data(mat,irow[i],icol[i],&sxadj,&sadjncy,&svalues);
167:     ISGetLocalSize(irow[i],&irow_n);
168:     ISGetLocalSize(icol[i],&icol_n);
169:     ISGetIndices(irow[i],&irow_indices);
170:     PetscArraycpy(indices,irow_indices,irow_n);
171:     ISRestoreIndices(irow[i],&irow_indices);
172:     ISGetIndices(icol[i],&icol_indices);
173:     PetscArraycpy(indices+irow_n,icol_indices,icol_n);
174:     ISRestoreIndices(icol[i],&icol_indices);
175:     nindx = irow_n+icol_n;
176:     PetscSortRemoveDupsInt(&nindx,indices);
177:     /* renumber columns */
178:     for (j=0; j<irow_n; j++) {
179:       for (k=sxadj[j]; k<sxadj[j+1]; k++) {
180:         PetscFindInt(sadjncy[k],nindx,indices,&loc);
181:         PetscCheck(loc >= 0,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Column index not found in the merged row/column index list");
182:         sadjncy[k] = loc;
183:       }
184:     }
185:     if (scall==MAT_INITIAL_MATRIX) {
186:       MatCreateMPIAdj(scomm_row,irow_n,icol_n,sxadj,sadjncy,svalues,submat[i]);
187:     } else {
188:        Mat                sadj = *(submat[i]);
189:        Mat_MPIAdj         *sa  = (Mat_MPIAdj*)((sadj)->data);
190:        PetscObjectGetComm((PetscObject)sadj,&scomm_mat);
191:        MPI_Comm_compare(scomm_row,scomm_mat,&issame);
192:        PetscCheck(issame == MPI_IDENT,PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"The reused sub-matrix must be on the same communicator as the row index set");
193:        PetscArraycpy(sa->i,sxadj,irow_n+1);
194:        PetscArraycpy(sa->j,sadjncy,sxadj[irow_n]);
195:        if (svalues) PetscArraycpy(sa->values,svalues,sxadj[irow_n]);
196:        PetscFree(sxadj);
197:        PetscFree(sadjncy);
198:        if (svalues) PetscFree(svalues);
199:     }
200:   }
201:   PetscFree(indices);
202:   return 0;
203: }

205: static PetscErrorCode MatCreateSubMatricesMPI_MPIAdj(Mat mat,PetscInt n, const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
206: {
207:   /*get sub-matrices across a sub communicator */
208:   MatCreateSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_TRUE,scall,submat);
209:   return 0;
210: }

212: static PetscErrorCode MatCreateSubMatrices_MPIAdj(Mat mat,PetscInt n,const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
213: {
214:   /*get sub-matrices based on PETSC_COMM_SELF */
215:   MatCreateSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_FALSE,scall,submat);
216:   return 0;
217: }

219: static PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
220: {
221:   Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
222:   PetscInt          i,j,m = A->rmap->n;
223:   const char        *name;
224:   PetscViewerFormat format;

226:   PetscObjectGetName((PetscObject)A,&name);
227:   PetscViewerGetFormat(viewer,&format);
228:   if (format == PETSC_VIEWER_ASCII_INFO) {
229:     return 0;
230:   }
231:   else {
232:     PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);
233:     PetscViewerASCIIPushSynchronized(viewer);
234:     for (i=0; i<m; i++) {
235:       PetscViewerASCIISynchronizedPrintf(viewer,"row %" PetscInt_FMT ":",i+A->rmap->rstart);
236:       for (j=a->i[i]; j<a->i[i+1]; j++) {
237:         if (a->values) {
238:           PetscViewerASCIISynchronizedPrintf(viewer," (%" PetscInt_FMT ", %" PetscInt_FMT ") ",a->j[j], a->values[j]);
239:         } else {
240:           PetscViewerASCIISynchronizedPrintf(viewer," %" PetscInt_FMT " ",a->j[j]);
241:         }
242:       }
243:       PetscViewerASCIISynchronizedPrintf(viewer,"\n");
244:     }
245:     PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);
246:     PetscViewerFlush(viewer);
247:     PetscViewerASCIIPopSynchronized(viewer);
248:   }
249:   return 0;
250: }

252: static PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
253: {
254:   PetscBool      iascii;

256:   PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
257:   if (iascii) {
258:     MatView_MPIAdj_ASCII(A,viewer);
259:   }
260:   return 0;
261: }

263: static PetscErrorCode MatDestroy_MPIAdj(Mat mat)
264: {
265:   Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;

267: #if defined(PETSC_USE_LOG)
268:   PetscLogObjectState((PetscObject)mat,"Rows=%" PetscInt_FMT ", Cols=%" PetscInt_FMT ", NZ=%" PetscInt_FMT,mat->rmap->n,mat->cmap->n,a->nz);
269: #endif
270:   PetscFree(a->diag);
271:   if (a->freeaij) {
272:     if (a->freeaijwithfree) {
273:       if (a->i) free(a->i);
274:       if (a->j) free(a->j);
275:     } else {
276:       PetscFree(a->i);
277:       PetscFree(a->j);
278:       PetscFree(a->values);
279:     }
280:   }
281:   PetscFree(a->rowvalues);
282:   PetscFree(mat->data);
283:   PetscObjectChangeTypeName((PetscObject)mat,NULL);
284:   PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C",NULL);
285:   PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjCreateNonemptySubcommMat_C",NULL);
286:   return 0;
287: }

289: static PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscBool flg)
290: {
291:   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;

293:   switch (op) {
294:   case MAT_SYMMETRIC:
295:   case MAT_STRUCTURALLY_SYMMETRIC:
296:   case MAT_HERMITIAN:
297:     a->symmetric = flg;
298:     break;
299:   case MAT_SYMMETRY_ETERNAL:
300:     break;
301:   default:
302:     PetscInfo(A,"Option %s ignored\n",MatOptions[op]);
303:     break;
304:   }
305:   return 0;
306: }

308: static PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
309: {
310:   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;

312:   row -= A->rmap->rstart;
314:   *nz = a->i[row+1] - a->i[row];
315:   if (v) {
316:     PetscInt j;
317:     if (a->rowvalues_alloc < *nz) {
318:       PetscFree(a->rowvalues);
319:       a->rowvalues_alloc = PetscMax(a->rowvalues_alloc*2, *nz);
320:       PetscMalloc1(a->rowvalues_alloc,&a->rowvalues);
321:     }
322:     for (j=0; j<*nz; j++) {
323:       a->rowvalues[j] = a->values ? a->values[a->i[row]+j]:1.0;
324:     }
325:     *v = (*nz) ? a->rowvalues : NULL;
326:   }
327:   if (idx) *idx = (*nz) ? a->j + a->i[row] : NULL;
328:   return 0;
329: }

331: static PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
332: {
333:   return 0;
334: }

336: static PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscBool * flg)
337: {
338:   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data,*b = (Mat_MPIAdj*)B->data;
339:   PetscBool      flag;

341:   /* the matrices differ if the local dimensions or the number of nonzeros differ */
342:   if ((A->rmap->n != B->rmap->n) || (a->nz != b->nz)) {
343:     flag = PETSC_FALSE;
344:   } else {
345:     /* compare the row offsets ... */
346:     PetscArraycmp(a->i,b->i,A->rmap->n+1,&flag);
347:     /* ... and, if those match, the column indices */
348:     if (flag) PetscArraycmp(a->j,b->j,a->nz,&flag);
349:   }

352:   MPIU_Allreduce(&flag,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));
353:   return 0;
354: }

356: static PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
357: {
358:   PetscInt   i;
359:   Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
360:   PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

362:   *m    = A->rmap->n;
363:   *ia   = a->i;
364:   *ja   = a->j;
365:   *done = PETSC_TRUE;
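      /* a caller may request 1-based indexing (oshift = 1), in which case the arrays are shifted in place */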
366:   if (oshift) {
367:     for (i=0; i<(*ia)[*m]; i++) {
368:       (*ja)[i]++;
369:     }
370:     for (i=0; i<=(*m); i++) (*ia)[i]++;
371:   }
372:   return 0;
373: }

375: static PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
376: {
377:   PetscInt   i;
378:   Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
379:   PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

383:   if (oshift) {
386:     for (i=0; i<=(*m); i++) (*ia)[i]--;
387:     for (i=0; i<(*ia)[*m]; i++) {
388:       (*ja)[i]--;
389:     }
390:   }
391:   return 0;
392: }

394: PetscErrorCode  MatConvertFrom_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
395: {
396:   Mat               B;
397:   PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
398:   const PetscInt    *rj;
399:   const PetscScalar *ra;
400:   MPI_Comm          comm;

402:   MatGetSize(A,NULL,&N);
403:   MatGetLocalSize(A,&m,NULL);
404:   MatGetOwnershipRange(A,&rstart,NULL);

406:   /* count the number of nonzeros per row */
407:   for (i=0; i<m; i++) {
408:     MatGetRow(A,i+rstart,&len,&rj,NULL);
409:     for (j=0; j<len; j++) {
410:       if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
411:     }
412:     nzeros += len;
413:     MatRestoreRow(A,i+rstart,&len,&rj,NULL);
414:   }

416:   /* malloc space for nonzeros */
417:   PetscMalloc1(nzeros+1,&a);
418:   PetscMalloc1(N+1,&ia);
419:   PetscMalloc1(nzeros+1,&ja);

421:   nzeros = 0;
422:   ia[0]  = 0;
423:   for (i=0; i<m; i++) {
424:     MatGetRow(A,i+rstart,&len,&rj,&ra);
425:     cnt  = 0;
426:     for (j=0; j<len; j++) {
427:       if (rj[j] != i+rstart) { /* if not diagonal */
428:         a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
429:         ja[nzeros+cnt++] = rj[j];
430:       }
431:     }
432:     MatRestoreRow(A,i+rstart,&len,&rj,&ra);
433:     nzeros += cnt;
434:     ia[i+1] = nzeros;
435:   }

437:   PetscObjectGetComm((PetscObject)A,&comm);
438:   MatCreate(comm,&B);
439:   MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N);
440:   MatSetType(B,type);
441:   MatMPIAdjSetPreallocation(B,ia,ja,a);

443:   if (reuse == MAT_INPLACE_MATRIX) {
444:     MatHeaderReplace(A,&B);
445:   } else {
446:     *newmat = B;
447:   }
448:   return 0;
449: }

451: /* -------------------------------------------------------------------*/
452: static struct _MatOps MatOps_Values = {NULL,
453:                                        MatGetRow_MPIAdj,
454:                                        MatRestoreRow_MPIAdj,
455:                                        NULL,
456:                                 /* 4*/ NULL,
457:                                        NULL,
458:                                        NULL,
459:                                        NULL,
460:                                        NULL,
461:                                        NULL,
462:                                 /*10*/ NULL,
463:                                        NULL,
464:                                        NULL,
465:                                        NULL,
466:                                        NULL,
467:                                 /*15*/ NULL,
468:                                        MatEqual_MPIAdj,
469:                                        NULL,
470:                                        NULL,
471:                                        NULL,
472:                                 /*20*/ NULL,
473:                                        NULL,
474:                                        MatSetOption_MPIAdj,
475:                                        NULL,
476:                                 /*24*/ NULL,
477:                                        NULL,
478:                                        NULL,
479:                                        NULL,
480:                                        NULL,
481:                                 /*29*/ NULL,
482:                                        NULL,
483:                                        NULL,
484:                                        NULL,
485:                                        NULL,
486:                                 /*34*/ NULL,
487:                                        NULL,
488:                                        NULL,
489:                                        NULL,
490:                                        NULL,
491:                                 /*39*/ NULL,
492:                                        MatCreateSubMatrices_MPIAdj,
493:                                        NULL,
494:                                        NULL,
495:                                        NULL,
496:                                 /*44*/ NULL,
497:                                        NULL,
498:                                        MatShift_Basic,
499:                                        NULL,
500:                                        NULL,
501:                                 /*49*/ NULL,
502:                                        MatGetRowIJ_MPIAdj,
503:                                        MatRestoreRowIJ_MPIAdj,
504:                                        NULL,
505:                                        NULL,
506:                                 /*54*/ NULL,
507:                                        NULL,
508:                                        NULL,
509:                                        NULL,
510:                                        NULL,
511:                                 /*59*/ NULL,
512:                                        MatDestroy_MPIAdj,
513:                                        MatView_MPIAdj,
514:                                        MatConvertFrom_MPIAdj,
515:                                        NULL,
516:                                 /*64*/ NULL,
517:                                        NULL,
518:                                        NULL,
519:                                        NULL,
520:                                        NULL,
521:                                 /*69*/ NULL,
522:                                        NULL,
523:                                        NULL,
524:                                        NULL,
525:                                        NULL,
526:                                 /*74*/ NULL,
527:                                        NULL,
528:                                        NULL,
529:                                        NULL,
530:                                        NULL,
531:                                 /*79*/ NULL,
532:                                        NULL,
533:                                        NULL,
534:                                        NULL,
535:                                        NULL,
536:                                 /*84*/ NULL,
537:                                        NULL,
538:                                        NULL,
539:                                        NULL,
540:                                        NULL,
541:                                 /*89*/ NULL,
542:                                        NULL,
543:                                        NULL,
544:                                        NULL,
545:                                        NULL,
546:                                 /*94*/ NULL,
547:                                        NULL,
548:                                        NULL,
549:                                        NULL,
550:                                        NULL,
551:                                 /*99*/ NULL,
552:                                        NULL,
553:                                        NULL,
554:                                        NULL,
555:                                        NULL,
556:                                /*104*/ NULL,
557:                                        NULL,
558:                                        NULL,
559:                                        NULL,
560:                                        NULL,
561:                                /*109*/ NULL,
562:                                        NULL,
563:                                        NULL,
564:                                        NULL,
565:                                        NULL,
566:                                /*114*/ NULL,
567:                                        NULL,
568:                                        NULL,
569:                                        NULL,
570:                                        NULL,
571:                                /*119*/ NULL,
572:                                        NULL,
573:                                        NULL,
574:                                        NULL,
575:                                        NULL,
576:                                /*124*/ NULL,
577:                                        NULL,
578:                                        NULL,
579:                                        NULL,
580:                                        MatCreateSubMatricesMPI_MPIAdj,
581:                                /*129*/ NULL,
582:                                        NULL,
583:                                        NULL,
584:                                        NULL,
585:                                        NULL,
586:                                /*134*/ NULL,
587:                                        NULL,
588:                                        NULL,
589:                                        NULL,
590:                                        NULL,
591:                                /*139*/ NULL,
592:                                        NULL,
593:                                        NULL,
594:                                        NULL,
595:                                        NULL,
596:                                 /*144*/NULL,
597:                                        NULL,
598:                                        NULL,
599:                                        NULL
600: };

602: static PetscErrorCode  MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
603: {
604:   Mat_MPIAdj     *b = (Mat_MPIAdj*)B->data;
605:   PetscBool       useedgeweights;

607:   PetscLayoutSetUp(B->rmap);
608:   PetscLayoutSetUp(B->cmap);
609:   useedgeweights = values ? PETSC_TRUE : PETSC_FALSE;
610:   /* make sure every process agrees on whether edge weights are being used */
611:   MPIU_Allreduce((int*)&useedgeweights,(int*)&b->useedgeweights,1,MPI_INT,MPI_MAX,PetscObjectComm((PetscObject)B));

613:   if (PetscDefined(USE_DEBUG)) {
614:     PetscInt ii;

615:     /* validate the user-provided CSR structure */
617:     for (ii=1; ii<B->rmap->n; ii++) {
618:       PetscCheck(i[ii] >= 0 && i[ii] >= i[ii-1],PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row offsets must be nonnegative and nondecreasing");
619:     }
620:     for (ii=0; ii<i[B->rmap->n]; ii++) {
621:       PetscCheck(j[ii] >= 0 && j[ii] < B->cmap->N,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column index %" PetscInt_FMT " out of range",j[ii]);
622:     }
623:   }
624:   B->preallocated = PETSC_TRUE;

626:   b->j      = j;
627:   b->i      = i;
628:   b->values = values;

630:   b->nz        = i[B->rmap->n];
631:   b->diag      = NULL;
632:   b->symmetric = PETSC_FALSE;
633:   b->freeaij   = PETSC_TRUE;

635:   MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
636:   MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);
637:   return 0;
638: }

640: static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A,Mat *B)
641: {
642:   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
643:   const PetscInt *ranges;
644:   MPI_Comm       acomm,bcomm;
645:   MPI_Group      agroup,bgroup;
646:   PetscMPIInt    i,rank,size,nranks,*ranks;

648:   *B    = NULL;
649:   PetscObjectGetComm((PetscObject)A,&acomm);
650:   MPI_Comm_size(acomm,&size);
651:   MPI_Comm_rank(acomm,&rank);
652:   MatGetOwnershipRanges(A,&ranges);
653:   for (i=0,nranks=0; i<size; i++) {
654:     if (ranges[i+1] - ranges[i] > 0) nranks++;
655:   }
656:   if (nranks == size) {         /* All ranks have a positive number of rows, so we do not need to create a subcomm; */
657:     PetscObjectReference((PetscObject)A);
658:     *B   = A;
659:     return 0;
660:   }

662:   PetscMalloc1(nranks,&ranks);
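      /* collect the ranks that own at least one row and create the sub-communicator from that group */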
663:   for (i=0,nranks=0; i<size; i++) {
664:     if (ranges[i+1] - ranges[i] > 0) ranks[nranks++] = i;
665:   }
666:   MPI_Comm_group(acomm,&agroup);
667:   MPI_Group_incl(agroup,nranks,ranks,&bgroup);
668:   PetscFree(ranks);
669:   MPI_Comm_create(acomm,bgroup,&bcomm);
670:   MPI_Group_free(&agroup);
671:   MPI_Group_free(&bgroup);
672:   if (bcomm != MPI_COMM_NULL) {
673:     PetscInt   m,N;
674:     Mat_MPIAdj *b;
675:     MatGetLocalSize(A,&m,NULL);
676:     MatGetSize(A,NULL,&N);
677:     MatCreateMPIAdj(bcomm,m,N,a->i,a->j,a->values,B);
678:     b          = (Mat_MPIAdj*)(*B)->data;
679:     b->freeaij = PETSC_FALSE;
680:     MPI_Comm_free(&bcomm);
681:   }
682:   return 0;
683: }

685: PetscErrorCode  MatMPIAdjToSeq_MPIAdj(Mat A,Mat *B)
686: {
687:   PetscInt       M,N,*II,*J,NZ,nz,m,nzstart,i;
688:   PetscInt       *Values = NULL;
689:   Mat_MPIAdj     *adj = (Mat_MPIAdj*)A->data;
690:   PetscMPIInt    mnz,mm,*allnz,*allm,size,*dispnz,*dispm;

692:   MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);
693:   MatGetSize(A,&M,&N);
694:   MatGetLocalSize(A,&m,NULL);
695:   nz   = adj->nz;
697:   MPI_Allreduce(&nz,&NZ,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)A));

699:   PetscMPIIntCast(nz,&mnz);
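      /* gather every rank's nonzero count so that j[] (and the optional weights) can be concatenated with MPI_Allgatherv */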
700:   PetscMalloc2(size,&allnz,size,&dispnz);
701:   MPI_Allgather(&mnz,1,MPI_INT,allnz,1,MPI_INT,PetscObjectComm((PetscObject)A));
702:   dispnz[0] = 0; for (i=1; i<size; i++) dispnz[i] = dispnz[i-1]+ allnz[i-1];
703:   if (adj->values) {
704:     PetscMalloc1(NZ,&Values);
705:     MPI_Allgatherv(adj->values,mnz,MPIU_INT,Values,allnz,dispnz,MPIU_INT,PetscObjectComm((PetscObject)A));
706:   }
707:   PetscMalloc1(NZ,&J);
708:   MPI_Allgatherv(adj->j,mnz,MPIU_INT,J,allnz,dispnz,MPIU_INT,PetscObjectComm((PetscObject)A));
709:   PetscFree2(allnz,dispnz);
710:   MPI_Scan(&nz,&nzstart,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)A));
711:   nzstart -= nz;
712:   /* shift the i[] values so they will be correct after being received */
713:   for (i=0; i<m; i++) adj->i[i] += nzstart;
714:   PetscMalloc1(M+1,&II);
715:   PetscMPIIntCast(m,&mm);
716:   PetscMalloc2(size,&allm,size,&dispm);
717:   MPI_Allgather(&mm,1,MPI_INT,allm,1,MPI_INT,PetscObjectComm((PetscObject)A));
718:   dispm[0] = 0; for (i=1; i<size; i++) dispm[i] = dispm[i-1]+ allm[i-1];
719:   MPI_Allgatherv(adj->i,mm,MPIU_INT,II,allm,dispm,MPIU_INT,PetscObjectComm((PetscObject)A));
720:   PetscFree2(allm,dispm);
721:   II[M] = NZ;
722:   /* shift the i[] values back */
723:   for (i=0; i<m; i++) adj->i[i] -= nzstart;
724:   MatCreateMPIAdj(PETSC_COMM_SELF,M,N,II,J,Values,B);
725:   return 0;
726: }

728: /*@
729:    MatMPIAdjCreateNonemptySubcommMat - Creates the same MPIAdj matrix on a sub-communicator containing only the processes that own a positive number of rows

731:    Collective

733:    Input Parameter:
734: .  A - original MPIAdj matrix

736:    Output Parameter:
737: .  B - matrix on subcommunicator, NULL on ranks that owned zero rows of A

739:    Level: developer

741:    Note:
742:    This function is mostly useful for internal use by mesh partitioning packages that require that every process owns at least one row.

744:    The matrix B should be destroyed with MatDestroy(). The arrays are not copied, so B should be destroyed before A is destroyed.
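
   Example Usage:
   A minimal calling sketch (error checking omitted):
.vb
   Mat B;
   MatMPIAdjCreateNonemptySubcommMat(A,&B);
   if (B) {
     /* only the ranks that own rows of A receive a matrix; do the sub-communicator work here */
   }
   MatDestroy(&B);  /* a no-op on the ranks where B is NULL */
.ve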

746: .seealso: MatCreateMPIAdj()
747: @*/
748: PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A,Mat *B)
749: {
751:   PetscUseMethod(A,"MatMPIAdjCreateNonemptySubcommMat_C",(Mat,Mat*),(A,B));
752:   return 0;
753: }

755: /*MC
756:    MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
757:    intended for constructing orderings and partitionings.

759:   Level: beginner

761: .seealso: MatCreateMPIAdj
762: M*/

764: PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
765: {
766:   Mat_MPIAdj     *b;

768:   PetscNewLog(B,&b);
769:   B->data      = (void*)b;
770:   PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));
771:   B->assembled = PETSC_FALSE;

773:   PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",MatMPIAdjSetPreallocation_MPIAdj);
774:   PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjCreateNonemptySubcommMat_C",MatMPIAdjCreateNonemptySubcommMat_MPIAdj);
775:   PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjToSeq_C",MatMPIAdjToSeq_MPIAdj);
776:   PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ);
777:   return 0;
778: }

780: /*@C
781:    MatMPIAdjToSeq - Converts a parallel MPIAdj matrix to a complete (sequential) MPIAdj matrix on each process (needed by sequential preconditioners)

783:    Logically Collective

785:    Input Parameter:
786: .  A - the matrix

788:    Output Parameter:
789: .  B - the same matrix on all processes

791:    Level: intermediate
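
   Example Usage:
   A minimal calling sketch (error checking omitted):
.vb
   Mat Aseq;
   MatMPIAdjToSeq(A,&Aseq);   /* every process now holds the complete adjacency graph */
   /* ... run a sequential algorithm on Aseq ... */
   MatDestroy(&Aseq);
.ve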

793: .seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
794: @*/
795: PetscErrorCode  MatMPIAdjToSeq(Mat A,Mat *B)
796: {
797:   PetscUseMethod(A,"MatMPIAdjToSeq_C",(Mat,Mat*),(A,B));
798:   return 0;
799: }

801: /*@C
802:    MatMPIAdjSetPreallocation - Sets the arrays used to store the adjacency structure and optional edge weights of the matrix

804:    Logically Collective

806:    Input Parameters:
807: +  B - the matrix
808: .  i - the indices into j for the start of each row
809: .  j - the column indices for each row (sorted for each row).
810:        The indices in i and j start with zero (NOT with one).
811: -  values - [optional] edge weights

813:    Level: intermediate
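
   Example Usage:
   An illustrative sketch for a process that owns two rows, where row 0 is adjacent to columns 1 and 3 and row 1 is adjacent to column 0;
   the arrays must come from PetscMalloc() because the matrix takes ownership of them:
.vb
   PetscInt *ia,*ja;
   PetscMalloc1(3,&ia);
   ia[0] = 0; ia[1] = 2; ia[2] = 3;
   PetscMalloc1(3,&ja);
   ja[0] = 1; ja[1] = 3; ja[2] = 0;
   MatMPIAdjSetPreallocation(B,ia,ja,NULL);
.ve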

815: .seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
816: @*/
817: PetscErrorCode  MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
818: {
819:   PetscTryMethod(B,"MatMPIAdjSetPreallocation_C",(Mat,PetscInt*,PetscInt*,PetscInt*),(B,i,j,values));
820:   return 0;
821: }

823: /*@C
824:    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
825:    The matrix does not have numerical values associated with it, but is
826:    intended for ordering (to reduce bandwidth etc) and partitioning.

828:    Collective

830:    Input Parameters:
831: +  comm - MPI communicator
832: .  m - number of local rows
833: .  N - number of global columns
834: .  i - the indices into j for the start of each row
835: .  j - the column indices for each row (sorted for each row).
836:        The indices in i and j start with zero (NOT with one).
837: -  values -[optional] edge weights

839:    Output Parameter:
840: .  A - the matrix

842:    Level: intermediate

844:    Notes:
845:    This matrix object does not support most matrix operations, including
846:    MatSetValues().
847:    You must NOT free the i, j, and values arrays yourself. PETSc will free them
848:    when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
849:    call from Fortran you need not create the arrays with PetscMalloc().
850:    The arrays should not include the matrix diagonal entries.

852:    If you already have a matrix, you can create its adjacency matrix by a call
853:    to MatConvert, specifying a type of MATMPIADJ.

855:    Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC
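
   Example Usage:
   An illustrative sketch (m, nnz, N, and comm are placeholders; error checking omitted) in which each process contributes
   its m local rows of an N-vertex graph; ia and ja are allocated with PetscMalloc() since A takes ownership of them:
.vb
   Mat      A;
   PetscInt *ia,*ja;
   PetscMalloc1(m+1,&ia);
   PetscMalloc1(nnz,&ja);
   /* ... fill ia (row offsets starting at 0) and ja (sorted column indices, no diagonal entries) ... */
   MatCreateMPIAdj(comm,m,N,ia,ja,NULL,&A);
.ve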

857: .seealso: MatCreate(), MatConvert(), MatGetOrdering()
858: @*/
859: PetscErrorCode  MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
860: {
861:   MatCreate(comm,A);
862:   MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N);
863:   MatSetType(*A,MATMPIADJ);
864:   MatMPIAdjSetPreallocation(*A,i,j,values);
865:   return 0;
866: }