Actual source code: mpiadj.c
petsc-3.6.1 2015-08-06
2: /*
3: Defines the basic matrix operations for the ADJ adjacency list matrix data structure.
4: */
5: #include <../src/mat/impls/adj/mpi/mpiadj.h> /*I "petscmat.h" I*/
9: PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
10: {
11: Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
12: PetscErrorCode ierr;
13: PetscInt i,j,m = A->rmap->n;
14: const char *name;
15: PetscViewerFormat format;
18: PetscObjectGetName((PetscObject)A,&name);
19: PetscViewerGetFormat(viewer,&format);
20: if (format == PETSC_VIEWER_ASCII_INFO) {
21: return(0);
22: } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
23: SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MATLAB format not supported");
24: } else {
25: PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);
26: PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);
27: for (i=0; i<m; i++) {
28: PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+A->rmap->rstart);
29: for (j=a->i[i]; j<a->i[i+1]; j++) {
30: PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);
31: }
32: PetscViewerASCIISynchronizedPrintf(viewer,"\n");
33: }
34: PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);
35: PetscViewerFlush(viewer);
36: PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);
37: }
38: return(0);
39: }
43: PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
44: {
46: PetscBool iascii;
49: PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);
50: if (iascii) {
51: MatView_MPIAdj_ASCII(A,viewer);
52: }
53: return(0);
54: }
58: PetscErrorCode MatDestroy_MPIAdj(Mat mat)
59: {
60: Mat_MPIAdj *a = (Mat_MPIAdj*)mat->data;
64: #if defined(PETSC_USE_LOG)
65: PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->rmap->n,mat->cmap->n,a->nz);
66: #endif
67: PetscFree(a->diag);
68: if (a->freeaij) {
69: if (a->freeaijwithfree) {
70: if (a->i) free(a->i);
71: if (a->j) free(a->j);
72: } else {
73: PetscFree(a->i);
74: PetscFree(a->j);
75: PetscFree(a->values);
76: }
77: }
78: PetscFree(a->rowvalues);
79: PetscFree(mat->data);
80: PetscObjectChangeTypeName((PetscObject)mat,0);
81: PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C",NULL);
82: PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjCreateNonemptySubcommMat_C",NULL);
83: return(0);
84: }
88: PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscBool flg)
89: {
90: Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
94: switch (op) {
95: case MAT_SYMMETRIC:
96: case MAT_STRUCTURALLY_SYMMETRIC:
97: case MAT_HERMITIAN:
98: a->symmetric = flg;
99: break;
100: case MAT_SYMMETRY_ETERNAL:
101: break;
102: default:
103: PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);
104: break;
105: }
106: return(0);
107: }
110: /*
111: Adds diagonal pointers to sparse matrix structure.
112: */
116: PetscErrorCode MatMarkDiagonal_MPIAdj(Mat A)
117: {
118: Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
120: PetscInt i,j,m = A->rmap->n;
123: PetscMalloc1(m,&a->diag);
124: PetscLogObjectMemory((PetscObject)A,m*sizeof(PetscInt));
125: for (i=0; i<A->rmap->n; i++) {
126: for (j=a->i[i]; j<a->i[i+1]; j++) {
127: if (a->j[j] == i) {
128: a->diag[i] = j;
129: break;
130: }
131: }
132: }
133: return(0);
134: }
138: PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
139: {
140: Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
144: row -= A->rmap->rstart;
146: if (row < 0 || row >= A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
148: *nz = a->i[row+1] - a->i[row];
149: if (v) {
150: PetscInt j;
151: if (a->rowvalues_alloc < *nz) {
152: PetscFree(a->rowvalues);
153: a->rowvalues_alloc = PetscMax(a->rowvalues_alloc*2, *nz);
154: PetscMalloc1(a->rowvalues_alloc,&a->rowvalues);
155: }
156: for (j=0; j<*nz; j++) a->rowvalues[j] = a->values[a->i[row]+j];
157: *v = (*nz) ? a->rowvalues : NULL;
158: }
159: if (idx) *idx = (*nz) ? a->j + a->i[row] : NULL;
160: return(0);
161: }
165: PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
166: {
168: return(0);
169: }
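/*
   Editorial usage sketch, not part of mpiadj.c: looping over the locally owned
   rows of an adjacency matrix with the MatGetRow()/MatRestoreRow() pair that the
   routines above implement for MATMPIADJ.  Column indices are global and 0-based;
   values are only meaningful if edge weights were supplied, so NULL is passed here.
   The function name and the Mat argument are illustrative, not part of the PETSc API.
*/
#include <petscmat.h>

static PetscErrorCode CountLocalEdgesSketch(Mat A,PetscInt *nedges)
{
  PetscErrorCode ierr;
  PetscInt       row,rstart,rend,ncols;
  const PetscInt *cols;

  PetscFunctionBegin;
  *nedges = 0;
  ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
  for (row=rstart; row<rend; row++) {
    ierr = MatGetRow(A,row,&ncols,&cols,NULL);CHKERRQ(ierr);   /* borrow the row */
    *nedges += ncols;                                          /* one entry per edge */
    ierr = MatRestoreRow(A,row,&ncols,&cols,NULL);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}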
173: PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscBool * flg)
174: {
175: Mat_MPIAdj *a = (Mat_MPIAdj*)A->data,*b = (Mat_MPIAdj*)B->data;
177: PetscBool flag;
180: /* If the matrix dimensions or the number of nonzeros differ, the matrices cannot be equal */
181: if ((A->rmap->n != B->rmap->n) || (a->nz != b->nz)) {
182: flag = PETSC_FALSE;
183: } else {
185: /* check whether the row offsets a->i are the same */
186: PetscMemcmp(a->i,b->i,(A->rmap->n+1)*sizeof(PetscInt),&flag);
188: /* check the column indices a->j only if the row offsets already matched */
189: if (flag) {PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);}
190: }
191: MPI_Allreduce(&flag,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));
192: return(0);
193: }
197: PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool *done)
198: {
199: PetscInt i;
200: Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
201: PetscInt **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;
204: *m = A->rmap->n;
205: *ia = a->i;
206: *ja = a->j;
207: *done = PETSC_TRUE;
208: if (oshift) {
209: for (i=0; i<(*ia)[*m]; i++) {
210: (*ja)[i]++;
211: }
212: for (i=0; i<=(*m); i++) (*ia)[i]++;
213: }
214: return(0);
215: }
219: PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool *done)
220: {
221: PetscInt i;
222: Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
223: PetscInt **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;
226: if (ia && a->i != *ia) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
227: if (ja && a->j != *ja) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
228: if (oshift) {
229: for (i=0; i<=(*m); i++) (*ia)[i]--;
230: for (i=0; i<(*ia)[*m]; i++) {
231: (*ja)[i]--;
232: }
233: }
234: return(0);
235: }
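/*
   Editorial usage sketch, not part of mpiadj.c: borrowing the local CSR arrays of
   a MATMPIADJ matrix through MatGetRowIJ(), here with a shift of 1 as an external
   graph package expecting Fortran-style indices might require.  The shift is applied
   in place by MatGetRowIJ_MPIAdj() above, so MatRestoreRowIJ() must be called with
   the same shift to undo it before the matrix is used again.  Names are illustrative.
*/
#include <petscmat.h>

static PetscErrorCode BorrowLocalCSRSketch(Mat A)
{
  PetscErrorCode ierr;
  PetscInt       nrows;
  const PetscInt *ia,*ja;
  PetscBool      done;

  PetscFunctionBegin;
  ierr = MatGetRowIJ(A,1,PETSC_FALSE,PETSC_FALSE,&nrows,&ia,&ja,&done);CHKERRQ(ierr);
  if (done) {
    /* ... hand nrows, ia, ja to a routine expecting 1-based CSR here ... */
    ierr = MatRestoreRowIJ(A,1,PETSC_FALSE,PETSC_FALSE,&nrows,&ia,&ja,&done);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}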
239: PetscErrorCode MatConvertFrom_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
240: {
241: Mat B;
242: PetscErrorCode ierr;
243: PetscInt i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
244: const PetscInt *rj;
245: const PetscScalar *ra;
246: MPI_Comm comm;
249: MatGetSize(A,NULL,&N);
250: MatGetLocalSize(A,&m,NULL);
251: MatGetOwnershipRange(A,&rstart,NULL);
253: /* count the number of nonzeros per row */
254: for (i=0; i<m; i++) {
255: MatGetRow(A,i+rstart,&len,&rj,NULL);
256: for (j=0; j<len; j++) {
257: if (rj[j] == i+rstart) {len--; break;} /* don't count diagonal */
258: }
259: nzeros += len;
260: MatRestoreRow(A,i+rstart,&len,&rj,NULL);
261: }
263: /* malloc space for nonzeros */
264: PetscMalloc1(nzeros+1,&a);
265: PetscMalloc1(m+1,&ia); /* m+1 row offsets are needed for the m local rows */
266: PetscMalloc1(nzeros+1,&ja);
268: nzeros = 0;
269: ia[0] = 0;
270: for (i=0; i<m; i++) {
271: MatGetRow(A,i+rstart,&len,&rj,&ra);
272: cnt = 0;
273: for (j=0; j<len; j++) {
274: if (rj[j] != i+rstart) { /* if not diagonal */
275: a[nzeros+cnt] = (PetscInt) PetscAbsScalar(ra[j]);
276: ja[nzeros+cnt++] = rj[j];
277: }
278: }
279: MatRestoreRow(A,i+rstart,&len,&rj,&ra);
280: nzeros += cnt;
281: ia[i+1] = nzeros;
282: }
284: PetscObjectGetComm((PetscObject)A,&comm);
285: MatCreate(comm,&B);
286: MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N);
287: MatSetType(B,type);
288: MatMPIAdjSetPreallocation(B,ia,ja,a);
290: if (reuse == MAT_REUSE_MATRIX) {
291: MatHeaderReplace(A,B);
292: } else {
293: *newmat = B;
294: }
295: return(0);
296: }
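/*
   Editorial usage sketch, not part of mpiadj.c: obtaining the adjacency graph of an
   existing assembled matrix (for example a MATMPIAIJ matrix) with MatConvert() and a
   target type of MATMPIADJ, the path mentioned in the MatCreateMPIAdj() manual page
   below and handled by MatConvertFrom_MPIAdj() above.  Diagonal entries are dropped
   and the absolute values of the entries become integer edge weights.  The function
   name and the variable Adj are illustrative.
*/
#include <petscmat.h>

static PetscErrorCode MatrixToAdjacencySketch(Mat A,Mat *Adj)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatConvert(A,MATMPIADJ,MAT_INITIAL_MATRIX,Adj);CHKERRQ(ierr);
  /* *Adj owns freshly allocated i/j/values arrays, so it is destroyed independently of A */
  PetscFunctionReturn(0);
}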
298: /* -------------------------------------------------------------------*/
299: static struct _MatOps MatOps_Values = {0,
300: MatGetRow_MPIAdj,
301: MatRestoreRow_MPIAdj,
302: 0,
303: /* 4*/ 0,
304: 0,
305: 0,
306: 0,
307: 0,
308: 0,
309: /*10*/ 0,
310: 0,
311: 0,
312: 0,
313: 0,
314: /*15*/ 0,
315: MatEqual_MPIAdj,
316: 0,
317: 0,
318: 0,
319: /*20*/ 0,
320: 0,
321: MatSetOption_MPIAdj,
322: 0,
323: /*24*/ 0,
324: 0,
325: 0,
326: 0,
327: 0,
328: /*29*/ 0,
329: 0,
330: 0,
331: 0,
332: 0,
333: /*34*/ 0,
334: 0,
335: 0,
336: 0,
337: 0,
338: /*39*/ 0,
339: 0,
340: 0,
341: 0,
342: 0,
343: /*44*/ 0,
344: 0,
345: MatShift_Basic,
346: 0,
347: 0,
348: /*49*/ 0,
349: MatGetRowIJ_MPIAdj,
350: MatRestoreRowIJ_MPIAdj,
351: 0,
352: 0,
353: /*54*/ 0,
354: 0,
355: 0,
356: 0,
357: 0,
358: /*59*/ 0,
359: MatDestroy_MPIAdj,
360: MatView_MPIAdj,
361: MatConvertFrom_MPIAdj,
362: 0,
363: /*64*/ 0,
364: 0,
365: 0,
366: 0,
367: 0,
368: /*69*/ 0,
369: 0,
370: 0,
371: 0,
372: 0,
373: /*74*/ 0,
374: 0,
375: 0,
376: 0,
377: 0,
378: /*79*/ 0,
379: 0,
380: 0,
381: 0,
382: 0,
383: /*84*/ 0,
384: 0,
385: 0,
386: 0,
387: 0,
388: /*89*/ 0,
389: 0,
390: 0,
391: 0,
392: 0,
393: /*94*/ 0,
394: 0,
395: 0,
396: 0,
397: 0,
398: /*99*/ 0,
399: 0,
400: 0,
401: 0,
402: 0,
403: /*104*/ 0,
404: 0,
405: 0,
406: 0,
407: 0,
408: /*109*/ 0,
409: 0,
410: 0,
411: 0,
412: 0,
413: /*114*/ 0,
414: 0,
415: 0,
416: 0,
417: 0,
418: /*119*/ 0,
419: 0,
420: 0,
421: 0,
422: 0,
423: /*124*/ 0,
424: 0,
425: 0,
426: 0,
427: 0,
428: /*129*/ 0,
429: 0,
430: 0,
431: 0,
432: 0,
433: /*134*/ 0,
434: 0,
435: 0,
436: 0,
437: 0,
438: /*139*/ 0,
439: 0,
440: 0
441: };
445: static PetscErrorCode MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
446: {
447: Mat_MPIAdj *b = (Mat_MPIAdj*)B->data;
449: #if defined(PETSC_USE_DEBUG)
450: PetscInt ii;
451: #endif
454: PetscLayoutSetUp(B->rmap);
455: PetscLayoutSetUp(B->cmap);
457: #if defined(PETSC_USE_DEBUG)
458: if (i[0] != 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D\n",i[0]);
459: for (ii=1; ii<B->rmap->n; ii++) {
460: if (i[ii] < 0 || i[ii] < i[ii-1]) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row offset i[%D]=%D is negative or less than i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
461: }
462: for (ii=0; ii<i[B->rmap->n]; ii++) {
463: if (j[ii] < 0 || j[ii] >= B->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column index j[%D]=%D is out of range\n",ii,j[ii]);
464: }
465: #endif
466: B->preallocated = PETSC_TRUE;
468: b->j = j;
469: b->i = i;
470: b->values = values;
472: b->nz = i[B->rmap->n];
473: b->diag = 0;
474: b->symmetric = PETSC_FALSE;
475: b->freeaij = PETSC_TRUE;
477: MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
478: MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);
479: return(0);
480: }
484: static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A,Mat *B)
485: {
486: Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
488: const PetscInt *ranges;
489: MPI_Comm acomm,bcomm;
490: MPI_Group agroup,bgroup;
491: PetscMPIInt i,size,nranks,*ranks;
494: *B = NULL;
495: PetscObjectGetComm((PetscObject)A,&acomm);
496: MPI_Comm_size(acomm,&size);
498: MatGetOwnershipRanges(A,&ranges);
499: for (i=0,nranks=0; i<size; i++) {
500: if (ranges[i+1] - ranges[i] > 0) nranks++;
501: }
502: if (nranks == size) { /* All ranks have a positive number of rows, so we do not need to create a subcomm; */
503: PetscObjectReference((PetscObject)A);
504: *B = A;
505: return(0);
506: }
508: PetscMalloc1(nranks,&ranks);
509: for (i=0,nranks=0; i<size; i++) {
510: if (ranges[i+1] - ranges[i] > 0) ranks[nranks++] = i;
511: }
512: MPI_Comm_group(acomm,&agroup);
513: MPI_Group_incl(agroup,nranks,ranks,&bgroup);
514: PetscFree(ranks);
515: MPI_Comm_create(acomm,bgroup,&bcomm);
516: MPI_Group_free(&agroup);
517: MPI_Group_free(&bgroup);
518: if (bcomm != MPI_COMM_NULL) {
519: PetscInt m,N;
520: Mat_MPIAdj *b;
521: MatGetLocalSize(A,&m,NULL);
522: MatGetSize(A,NULL,&N);
523: MatCreateMPIAdj(bcomm,m,N,a->i,a->j,a->values,B);
524: b = (Mat_MPIAdj*)(*B)->data;
525: b->freeaij = PETSC_FALSE;
526: MPI_Comm_free(&bcomm);
527: }
528: return(0);
529: }
533: /*@
534: MatMPIAdjCreateNonemptySubcommMat - Creates the same MPIAdj matrix on a subcommunicator containing only processes owning a positive number of rows
536: Collective
538: Input Arguments:
539: . A - original MPIAdj matrix
541: Output Arguments:
542: . B - matrix on subcommunicator, NULL on ranks that owned zero rows of A
544: Level: developer
546: Note:
547: This function is mostly for internal use by mesh partitioning packages that require every process to own at least one row.
549: The matrix B should be destroyed with MatDestroy(). The arrays are not copied, so B should be destroyed before A is destroyed.
551: .seealso: MatCreateMPIAdj()
552: @*/
553: PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A,Mat *B)
554: {
559: PetscUseMethod(A,"MatMPIAdjCreateNonemptySubcommMat_C",(Mat,Mat*),(A,B));
560: return(0);
561: }
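/*
   Editorial usage sketch, not part of mpiadj.c: shrinking an adjacency matrix onto
   the ranks that actually own rows, as a partitioning package that cannot handle
   empty ranks might require.  On ranks that owned zero rows of A, B comes back NULL.
   Because the i/j arrays are shared rather than copied, B must be destroyed before A,
   as the manual page above notes.  The function name is illustrative.
*/
#include <petscmat.h>

static PetscErrorCode PartitionOnNonemptyRanksSketch(Mat A)
{
  PetscErrorCode ierr;
  Mat            B;

  PetscFunctionBegin;
  ierr = MatMPIAdjCreateNonemptySubcommMat(A,&B);CHKERRQ(ierr);
  if (B) {
    /* ... run the partitioner on B's (possibly smaller) communicator here ... */
    ierr = MatDestroy(&B);CHKERRQ(ierr);   /* destroy B before destroying A */
  }
  PetscFunctionReturn(0);
}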
563: /*MC
564: MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
565: intended for use in constructing orderings and partitionings.
567: Level: beginner
569: .seealso: MatCreateMPIAdj()
570: M*/
574: PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
575: {
576: Mat_MPIAdj *b;
580: PetscNewLog(B,&b);
581: B->data = (void*)b;
582: PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));
583: B->assembled = PETSC_FALSE;
585: PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",MatMPIAdjSetPreallocation_MPIAdj);
586: PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjCreateNonemptySubcommMat_C",MatMPIAdjCreateNonemptySubcommMat_MPIAdj);
587: PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ);
588: return(0);
589: }
593: /*@C
594: MatMPIAdjSetPreallocation - Sets the arrays used for storing the matrix elements
596: Logically Collective on MPI_Comm
598: Input Parameters:
599: + B - the matrix
600: . i - the indices into j for the start of each row
601: . j - the column indices for each row (sorted for each row).
602: The indices in i and j start with zero (NOT with one).
603: - values - [optional] edge weights
605: Level: intermediate
607: .seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
608: @*/
609: PetscErrorCode MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
610: {
614: PetscTryMethod(B,"MatMPIAdjSetPreallocation_C",(Mat,PetscInt*,PetscInt*,PetscInt*),(B,i,j,values));
615: return(0);
616: }
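/*
   Editorial usage sketch, not part of mpiadj.c: the MatCreate()/MatSetType()/
   MatMPIAdjSetPreallocation() path for building an adjacency matrix, an alternative
   to MatCreateMPIAdj().  The i and j arrays must be allocated with PetscMalloc()
   because ownership passes to the matrix (freeaij is set above), so they are freed
   by MatDestroy(), not by the caller.  The tiny 2-row local graph and the function
   name are illustrative; the global column count N is assumed to be at least 3.
*/
#include <petscmat.h>

static PetscErrorCode BuildAdjacencySketch(MPI_Comm comm,PetscInt N,Mat *A)
{
  PetscErrorCode ierr;
  PetscInt       *i,*j;

  PetscFunctionBegin;
  /* two local rows: row 0 -> columns 1,2 and row 1 -> column 2 (0-based, sorted, no diagonal) */
  ierr = PetscMalloc1(3,&i);CHKERRQ(ierr);
  ierr = PetscMalloc1(3,&j);CHKERRQ(ierr);
  i[0] = 0; i[1] = 2; i[2] = 3;
  j[0] = 1; j[1] = 2; j[2] = 2;
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,2,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(*A,i,j,NULL);CHKERRQ(ierr);  /* also assembles the matrix */
  PetscFunctionReturn(0);
}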
620: /*@C
621: MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
622: The matrix does not have numerical values associated with it, but is
623: intended for ordering (to reduce bandwidth, etc.) and partitioning.
625: Collective on MPI_Comm
627: Input Parameters:
628: + comm - MPI communicator
629: . m - number of local rows
630: . N - number of global columns
631: . i - the indices into j for the start of each row
632: . j - the column indices for each row (sorted for each row).
633: The indices in i and j start with zero (NOT with one).
634: - values - [optional] edge weights
636: Output Parameter:
637: . A - the matrix
639: Level: intermediate
641: Notes: This matrix object does not support most matrix operations, including
642: MatSetValues().
643: You must NOT free the i, j, and values arrays yourself. PETSc will free them
644: when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
645: are calling from Fortran, you need not allocate the arrays with PetscMalloc().
646: The arrays should not include the matrix diagonal entries.
648: If you already have a matrix, you can create its adjacency matrix by a call
649: to MatConvert, specifying a type of MATMPIADJ.
651: Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC
653: .seealso: MatCreate(), MatConvert(), MatGetOrdering()
654: @*/
655: PetscErrorCode MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
656: {
660: MatCreate(comm,A);
661: MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N);
662: MatSetType(*A,MATMPIADJ);
663: MatMPIAdjSetPreallocation(*A,i,j,values);
664: return(0);
665: }
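/*
   Editorial usage sketch, not part of mpiadj.c: a minimal, self-contained program that
   builds the adjacency structure of the 4-vertex cycle 0-1-2-3-0 on exactly two MPI
   ranks with MatCreateMPIAdj() and views it.  Each rank owns two rows; the ia/ja arrays
   are 0-based, sorted per row, contain no diagonal entries, and are PetscMalloc()'d
   because the matrix takes ownership of them.  Passing NULL for values means the graph
   carries no edge weights.  This is an illustrative sketch, not a PETSc example program.
*/
#include <petscmat.h>

int main(int argc,char **argv)
{
  Mat            A;
  PetscInt       *ia,*ja;
  PetscMPIInt    size;
  PetscErrorCode ierr;

  ierr = PetscInitialize(&argc,&argv,NULL,NULL);if (ierr) return ierr;
  ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);CHKERRQ(ierr);
  if (size != 2) SETERRQ(PETSC_COMM_WORLD,PETSC_ERR_ARG_WRONG,"This sketch assumes exactly 2 MPI ranks");

  /* rank 0 owns rows 0,1 and rank 1 owns rows 2,3; by the symmetry of the cycle the
     local CSR data happens to be identical on the two ranks */
  ierr = PetscMalloc1(3,&ia);CHKERRQ(ierr);
  ierr = PetscMalloc1(4,&ja);CHKERRQ(ierr);
  ia[0] = 0; ia[1] = 2; ia[2] = 4;            /* two neighbours per vertex      */
  ja[0] = 1; ja[1] = 3; ja[2] = 0; ja[3] = 2; /* sorted, 0-based, no diagonal   */

  ierr = MatCreateMPIAdj(PETSC_COMM_WORLD,2,4,ia,ja,NULL,&A);CHKERRQ(ierr);
  ierr = MatView(A,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  ierr = MatDestroy(&A);CHKERRQ(ierr);        /* frees ia and ja as well        */
  ierr = PetscFinalize();
  return ierr;
}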