Actual source code: mpiadj.c

petsc-3.3-p7 2013-05-11
/*
    Defines the basic matrix operations for the ADJ adjacency list matrix data structure.
*/
#include <../src/mat/impls/adj/mpi/mpiadj.h>    /*I "petscmat.h" I*/
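
/*
   Editorial note: Mat_MPIAdj (defined in mpiadj.h) holds the local rows in a CSR-like
   form.  The fields used in this file are, roughly:

      i      - row offsets into j, length (local rows)+1, zero based
      j      - column indices, sorted within each row
      values - optional integer edge weights
      nz     - number of local nonzeros, i.e. i[local rows]
      diag   - offsets of the diagonal entries, filled by MatMarkDiagonal_MPIAdj()
      symmetric, freeaij, freeaijwithfree - flags recording symmetry and how the
                                            arrays are released in MatDestroy_MPIAdj()

   This is a sketch inferred from the code below; see mpiadj.h for the authoritative
   definition.
*/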

PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
{
  Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode    ierr;
  PetscInt          i,j,m = A->rmap->n;
  const char        *name;
  PetscViewerFormat format;

  PetscObjectGetName((PetscObject)A,&name);
  PetscViewerGetFormat(viewer,&format);
  if (format == PETSC_VIEWER_ASCII_INFO) {
    return(0);
  } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
    SETERRQ(((PetscObject)A)->comm,PETSC_ERR_SUP,"MATLAB format not supported");
  } else {
    PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);
    PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);
    for (i=0; i<m; i++) {
      PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+A->rmap->rstart);
      for (j=a->i[i]; j<a->i[i+1]; j++) {
        PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);
      }
      PetscViewerASCIISynchronizedPrintf(viewer,"\n");
    }
    PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);
    PetscViewerFlush(viewer);
    PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);
  }
  return(0);
}

PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
{
  PetscBool      iascii;

  PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
  if (iascii) {
    MatView_MPIAdj_ASCII(A,viewer);
  } else {
    SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
  }
  return(0);
}

PetscErrorCode MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;

#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->rmap->n,mat->cmap->n,a->nz);
#endif
  PetscFree(a->diag);
  if (a->freeaij) {
    if (a->freeaijwithfree) {
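      /* these arrays were obtained with plain malloc() rather than PetscMalloc(), so release them with free() */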
      if (a->i) free(a->i);
      if (a->j) free(a->j);
    } else {
      PetscFree(a->i);
      PetscFree(a->j);
      PetscFree(a->values);
    }
  }
  PetscFree(mat->data);
  PetscObjectChangeTypeName((PetscObject)mat,0);
  PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C","",PETSC_NULL);
  PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjCreateNonemptySubcommMat_C","",PETSC_NULL);
  return(0);
}

PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscBool  flg)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;

  switch (op) {
  case MAT_SYMMETRIC:
  case MAT_STRUCTURALLY_SYMMETRIC:
  case MAT_HERMITIAN:
    a->symmetric = flg;
    break;
  case MAT_SYMMETRY_ETERNAL:
    break;
  default:
    PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);
    break;
  }
  return(0);
}


/*
     Adds diagonal pointers to sparse matrix structure.
*/

PetscErrorCode MatMarkDiagonal_MPIAdj(Mat A)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscInt       i,j,m = A->rmap->n;

  PetscMalloc(m*sizeof(PetscInt),&a->diag);
  PetscLogObjectMemory(A,m*sizeof(PetscInt));
  for (i=0; i<A->rmap->n; i++) {
    for (j=a->i[i]; j<a->i[i+1]; j++) {
      if (a->j[j] == i) {
        a->diag[i] = j;
        break;
      }
    }
  }
  return(0);
}

PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
  PetscInt   *itmp;

  row -= A->rmap->rstart;

  if (row < 0 || row >= A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");

  *nz = a->i[row+1] - a->i[row];
  if (v) *v = PETSC_NULL;
  if (idx) {
    itmp = a->j + a->i[row];
    if (*nz) {
      *idx = itmp;
    }
    else *idx = 0;
  }
  return(0);
}
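
/*
   Illustrative caller pattern for the routine above (a sketch; error checking omitted).
   For MATMPIADJ only the column indices are returned; the value pointer is always set
   to PETSC_NULL, and the requested row must lie in this process' ownership range:

     PetscInt       ncols;
     const PetscInt *cols;
     MatGetRow(A,row,&ncols,&cols,PETSC_NULL);
     ...
     MatRestoreRow(A,row,&ncols,&cols,PETSC_NULL);
*/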

PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  return(0);
}

PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscBool * flg)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
  PetscBool      flag;

  /* if the local matrix dimensions or the numbers of nonzeros differ, the matrices cannot be equal */
  if ((A->rmap->n != B->rmap->n) || (a->nz != b->nz)) {
    flag = PETSC_FALSE;
  } else {
    /* otherwise compare the row offsets i ... */
    PetscMemcmp(a->i,b->i,(A->rmap->n+1)*sizeof(PetscInt),&flag);

    /* ... and, only if those match, the column indices j */
    if (flag) {PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);}
  }

  MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,((PetscObject)A)->comm);
  return(0);
}

PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool  symmetric,PetscBool  blockcompressed,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscBool  *done)
{
  PetscInt       i;
  Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data;

  *m    = A->rmap->n;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  return(0);
}

PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool  symmetric,PetscBool  blockcompressed,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscBool  *done)
{
  PetscInt   i;
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  if (ia && a->i != *ia) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
  if (ja && a->j != *ja) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  return(0);
}
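
/*
   Note (editorial): MatGetRowIJ_MPIAdj() above returns pointers directly into the
   matrix and applies any requested index shift (oshift) in place, so the matrix is
   temporarily modified until MatRestoreRowIJ_MPIAdj() undoes the shift.
*/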

PetscErrorCode  MatConvertFrom_MPIAdj(Mat A,const MatType type,MatReuse reuse,Mat *newmat)
{
  Mat               B;
  PetscErrorCode    ierr;
  PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
  const PetscInt    *rj;
  const PetscScalar *ra;
  MPI_Comm          comm;

  MatGetSize(A,PETSC_NULL,&N);
  MatGetLocalSize(A,&m,PETSC_NULL);
  MatGetOwnershipRange(A,&rstart,PETSC_NULL);

  /* count the number of nonzeros per row */
  for (i=0; i<m; i++) {
    MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);
    for (j=0; j<len; j++) {
      if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
    }
    MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);
    nzeros += len;
  }

  /* malloc space for nonzeros */
  PetscMalloc((nzeros+1)*sizeof(PetscInt),&a);
  PetscMalloc((N+1)*sizeof(PetscInt),&ia);
  PetscMalloc((nzeros+1)*sizeof(PetscInt),&ja);

  nzeros = 0;
  ia[0]  = 0;
  for (i=0; i<m; i++) {
    MatGetRow(A,i+rstart,&len,&rj,&ra);
    cnt     = 0;
    for (j=0; j<len; j++) {
      if (rj[j] != i+rstart) { /* if not diagonal */
        a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
        ja[nzeros+cnt++] = rj[j];
      }
    }
    MatRestoreRow(A,i+rstart,&len,&rj,&ra);
    nzeros += cnt;
    ia[i+1] = nzeros;
  }

  PetscObjectGetComm((PetscObject)A,&comm);
  MatCreate(comm,&B);
  MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N);
  MatSetType(B,type);
  MatMPIAdjSetPreallocation(B,ia,ja,a);

  if (reuse == MAT_REUSE_MATRIX) {
    MatHeaderReplace(A,B);
  } else {
    *newmat = B;
  }
  return(0);
}
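
/*
   Typical use of the conversion defined above (a sketch; error checking omitted):

     Mat adj;
     MatConvert(A,MATMPIADJ,MAT_INITIAL_MATRIX,&adj);
     ...                  (e.g. pass adj to MatGetOrdering() or a partitioning routine)
     MatDestroy(&adj);
*/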

/* -------------------------------------------------------------------*/
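/*
   Only the operations that make sense for a values-free adjacency matrix are filled in
   below: get/restore row, equality test, set option, get/restore row IJ, destroy, view,
   and convert-from.  The remaining function-table slots are left 0, so PETSc reports
   those operations as unsupported for MATMPIADJ.
*/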
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
/* 4*/ 0,
       0,
       0,
       0,
       0,
       0,
/*10*/ 0,
       0,
       0,
       0,
       0,
/*15*/ 0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
/*20*/ 0,
       0,
       MatSetOption_MPIAdj,
       0,
/*24*/ 0,
       0,
       0,
       0,
       0,
/*29*/ 0,
       0,
       0,
       0,
       0,
/*34*/ 0,
       0,
       0,
       0,
       0,
/*39*/ 0,
       0,
       0,
       0,
       0,
/*44*/ 0,
       0,
       0,
       0,
       0,
/*49*/ 0,
       MatGetRowIJ_MPIAdj,
       MatRestoreRowIJ_MPIAdj,
       0,
       0,
/*54*/ 0,
       0,
       0,
       0,
       0,
/*59*/ 0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       MatConvertFrom_MPIAdj,
       0,
/*64*/ 0,
       0,
       0,
       0,
       0,
/*69*/ 0,
       0,
       0,
       0,
       0,
/*74*/ 0,
       0,
       0,
       0,
       0,
/*79*/ 0,
       0,
       0,
       0,
       0,
/*84*/ 0,
       0,
       0,
       0,
       0,
/*89*/ 0,
       0,
       0,
       0,
       0,
/*94*/ 0,
       0,
       0,
       0};

EXTERN_C_BEGIN
PetscErrorCode  MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  Mat_MPIAdj     *b = (Mat_MPIAdj *)B->data;
#if defined(PETSC_USE_DEBUG)
  PetscInt       ii;
#endif

  PetscLayoutSetUp(B->rmap);
  PetscLayoutSetUp(B->cmap);

#if defined(PETSC_USE_DEBUG)
  if (i[0] != 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D\n",i[0]);
  for (ii=1; ii<B->rmap->n; ii++) {
    if (i[ii] < 0 || i[ii] < i[ii-1]) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D index is out of range: i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
  }
  for (ii=0; ii<i[B->rmap->n]; ii++) {
    if (j[ii] < 0 || j[ii] >= B->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column index %D at position %D is out of range",j[ii],ii);
  }
#endif
  B->preallocated = PETSC_TRUE;
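  /* the i, j, and values arrays passed in are adopted, not copied; they are freed in MatDestroy_MPIAdj() since freeaij is set below */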

  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz               = i[B->rmap->n];
  b->diag             = 0;
  b->symmetric        = PETSC_FALSE;
  b->freeaij          = PETSC_TRUE;

  MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);
  return(0);
}
EXTERN_C_END

PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A,Mat *B)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  const PetscInt *ranges;
  MPI_Comm       acomm,bcomm;
  MPI_Group      agroup,bgroup;
  PetscMPIInt    i,rank,size,nranks,*ranks;

  *B = PETSC_NULL;
  acomm = ((PetscObject)A)->comm;
  MPI_Comm_size(acomm,&size);
  MPI_Comm_rank(acomm,&rank);
  MatGetOwnershipRanges(A,&ranges);
  for (i=0,nranks=0; i<size; i++) {
    if (ranges[i+1] - ranges[i] > 0) nranks++;
  }
  if (nranks == size) {         /* All ranks have a positive number of rows, so we do not need to create a subcomm */
    PetscObjectReference((PetscObject)A);
    *B = A;
    return(0);
  }

  PetscMalloc(nranks*sizeof(PetscMPIInt),&ranks);
  for (i=0,nranks=0; i<size; i++) {
    if (ranges[i+1] - ranges[i] > 0) ranks[nranks++] = i;
  }
  MPI_Comm_group(acomm,&agroup);
  MPI_Group_incl(agroup,nranks,ranks,&bgroup);
  PetscFree(ranks);
  MPI_Comm_create(acomm,bgroup,&bcomm);
  MPI_Group_free(&agroup);
  MPI_Group_free(&bgroup);
  if (bcomm != MPI_COMM_NULL) {
    PetscInt   m,N;
    Mat_MPIAdj *b;
    MatGetLocalSize(A,&m,PETSC_NULL);
    MatGetSize(A,PETSC_NULL,&N);
    MatCreateMPIAdj(bcomm,m,N,a->i,a->j,a->values,B);
    b = (Mat_MPIAdj*)(*B)->data;
    b->freeaij = PETSC_FALSE;
    MPI_Comm_free(&bcomm);
  }
  return(0);
}

/*@
   MatMPIAdjCreateNonemptySubcommMat - create the same MPIAdj matrix on a subcommunicator containing only processes owning a positive number of rows

   Collective

   Input Arguments:
.  A - original MPIAdj matrix

   Output Arguments:
.  B - matrix on subcommunicator, PETSC_NULL on ranks that owned zero rows of A

   Level: developer

   Note:
   This function is mostly useful for internal use by mesh partitioning packages that require that every process owns at least one row.

   The matrix B should be destroyed with MatDestroy(). The arrays are not copied, so B should be destroyed before A is destroyed.
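
   Example usage (an illustrative sketch; error checking omitted).  Only ranks that own
   rows of A receive a matrix, so test the result for PETSC_NULL before using it:
.vb
   Mat Asub;
   MatMPIAdjCreateNonemptySubcommMat(A,&Asub);
   if (Asub) {
     ... use Asub, e.g. hand it to a partitioning package ...
     MatDestroy(&Asub);
   }
.ve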

.seealso: MatCreateMPIAdj()
@*/
PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A,Mat *B)
{

  PetscUseMethod(A,"MatMPIAdjCreateNonemptySubcommMat_C",(Mat,Mat*),(A,B));
  return(0);
}

/*MC
   MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
   intended for use in constructing orderings and partitionings.

  Level: beginner

.seealso: MatCreateMPIAdj()
M*/

EXTERN_C_BEGIN
PetscErrorCode  MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj     *b;

  PetscNewLog(B,Mat_MPIAdj,&b);
  B->data             = (void*)b;
  PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));
  B->assembled        = PETSC_FALSE;

  PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAdjSetPreallocation_C",
                                    "MatMPIAdjSetPreallocation_MPIAdj",
                                     MatMPIAdjSetPreallocation_MPIAdj);
  PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAdjCreateNonemptySubcommMat_C",
                                    "MatMPIAdjCreateNonemptySubcommMat_MPIAdj",
                                     MatMPIAdjCreateNonemptySubcommMat_MPIAdj);
  PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ);
  return(0);
}
EXTERN_C_END

/*@C
   MatMPIAdjSetPreallocation - Sets the arrays used for storing the matrix elements

   Logically Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Level: intermediate
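
   Example usage (an illustrative sketch for a process owning two rows with two edges each;
   the column indices are sorted within each row and exclude the diagonal, and the matrix
   takes ownership of the arrays; error checking omitted):
.vb
   PetscInt *ii,*jj;
   PetscMalloc(3*sizeof(PetscInt),&ii);
   PetscMalloc(4*sizeof(PetscInt),&jj);
   ii[0] = 0; ii[1] = 2; ii[2] = 4;
   jj[0] = 1; jj[1] = 3; jj[2] = 0; jj[3] = 2;
   MatMPIAdjSetPreallocation(B,ii,jj,PETSC_NULL);
.ve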

.seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
@*/
PetscErrorCode  MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{

  PetscTryMethod(B,"MatMPIAdjSetPreallocation_C",(Mat,PetscInt*,PetscInt*,PetscInt*),(B,i,j,values));
  return(0);
}

/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  N - number of global columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j, and values arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
   call from Fortran you need not create the arrays with PetscMalloc().
   The adjacency list should not include the matrix diagonal entries.

   If you already have a matrix, you can create its adjacency matrix by a call
   to MatConvert(), specifying a type of MATMPIADJ.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC
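
   Example usage (an illustrative sketch in which each process owns one row of a ring graph
   with a single edge to the next process; assumes at least two processes so the edge is
   never a diagonal entry; error checking omitted):
.vb
   Mat         A;
   PetscInt    *ii,*jj;
   PetscMPIInt rank,size;
   MPI_Comm_rank(comm,&rank);
   MPI_Comm_size(comm,&size);
   PetscMalloc(2*sizeof(PetscInt),&ii);
   PetscMalloc(sizeof(PetscInt),&jj);
   ii[0] = 0; ii[1] = 1;
   jj[0] = (rank+1)%size;
   MatCreateMPIAdj(comm,1,size,ii,jj,PETSC_NULL,&A);
.ve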

.seealso: MatCreate(), MatConvert(), MatGetOrdering()
@*/
PetscErrorCode  MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
{

  MatCreate(comm,A);
  MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N);
  MatSetType(*A,MATMPIADJ);
  MatMPIAdjSetPreallocation(*A,i,j,values);
  return(0);
}