Actual source code: mpiadj.c
#define PETSCMAT_DLL

/*
    Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
*/
#include "../src/mat/impls/adj/mpi/mpiadj.h"

PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
{
  Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
  PetscInt          i,j,m = A->rmap->n;
  const char        *name;
  PetscViewerFormat format;

  PetscObjectGetName((PetscObject)A,&name);
  PetscViewerGetFormat(viewer,&format);
  if (format == PETSC_VIEWER_ASCII_INFO) {
    return(0);
  } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
    SETERRQ(PETSC_ERR_SUP,"Matlab format not supported");
  } else {
    PetscViewerASCIIUseTabs(viewer,PETSC_NO);
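    /* each process prints its own rows; the PetscViewerFlush() below causes the
       synchronized output to be emitted in rank order */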
    for (i=0; i<m; i++) {
      PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+A->rmap->rstart);
      for (j=a->i[i]; j<a->i[i+1]; j++) {
        PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);
      }
      PetscViewerASCIISynchronizedPrintf(viewer,"\n");
    }
    PetscViewerASCIIUseTabs(viewer,PETSC_YES);
  }
  PetscViewerFlush(viewer);
  return(0);
}

PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
{
  PetscTruth iascii;

  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);
  if (iascii) {
    MatView_MPIAdj_ASCII(A,viewer);
  } else {
    SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
  }
  return(0);
}

PetscErrorCode MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)mat->data;

#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->rmap->n,mat->cmap->n,a->nz);
#endif
  PetscFree(a->diag);
  if (a->freeaij) {
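    /* arrays flagged freeaijwithfree were allocated with plain malloc() rather
       than PetscMalloc(), so they must be released with free() */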
    if (a->freeaijwithfree) {
      if (a->i) free(a->i);
      if (a->j) free(a->j);
    } else {
      PetscFree(a->i);
      PetscFree(a->j);
      PetscFree(a->values);
    }
  }
  PetscFree(a);
  PetscObjectChangeTypeName((PetscObject)mat,0);
  PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C","",PETSC_NULL);
  return(0);
}

PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscTruth flg)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;

  switch (op) {
  case MAT_SYMMETRIC:
  case MAT_STRUCTURALLY_SYMMETRIC:
  case MAT_HERMITIAN:
    a->symmetric = flg;
    break;
  case MAT_SYMMETRY_ETERNAL:
    break;
  default:
    PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);
    break;
  }
  return(0);
}

/*
  Adds diagonal pointers to sparse matrix structure.
*/
PetscErrorCode MatMarkDiagonal_MPIAdj(Mat A)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
  PetscInt   i,j,m = A->rmap->n;

  PetscMalloc(m*sizeof(PetscInt),&a->diag);
  PetscLogObjectMemory(A,m*sizeof(PetscInt));
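  /* note: a->diag[i] is left unset for rows with no stored diagonal entry, the
     common case here since adjacency matrices exclude the diagonal */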
  for (i=0; i<m; i++) {
    for (j=a->i[i]; j<a->i[i+1]; j++) {
      if (a->j[j] == i+A->rmap->rstart) { /* column indices are global, so compare against the global row number */
        a->diag[i] = j;
        break;
      }
    }
  }
  return(0);
}

PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
  PetscInt   *itmp;

  row -= A->rmap->rstart;
  if (row < 0 || row >= A->rmap->n) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");

  *nz = a->i[row+1] - a->i[row];
  if (v) *v = PETSC_NULL;
  if (idx) {
    itmp = a->j + a->i[row];
    if (*nz) {
      *idx = itmp;
    } else *idx = 0;
  }
  return(0);
}

PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  return(0);
}

PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
{
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
  PetscTruth flag;

  /* if the matrix dimensions or number of nonzeros differ, the matrices cannot be equal */
  if ((A->rmap->n != B->rmap->n) || (a->nz != b->nz)) {
    flag = PETSC_FALSE;
  } else {
    /* check that the row offsets a->i agree */
    PetscMemcmp(a->i,b->i,(A->rmap->n+1)*sizeof(PetscInt),&flag);
    /* and, if so, that the column indices a->j agree */
    if (flag) {
      PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);
    }
  }

  /* the matrices are equal only if every process finds its local parts equal */
  MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,((PetscObject)A)->comm);
  return(0);
}

PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscTruth symmetric,PetscTruth blockcompressed,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscTruth *done)
{
  PetscInt   i;
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  *m    = A->rmap->n;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
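  /* if 1-based (e.g. Fortran-style) indexing was requested via oshift, shift the
     shared i/j arrays in place; MatRestoreRowIJ_MPIAdj() undoes this shift */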
  if (oshift) {
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  return(0);
}

PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscTruth symmetric,PetscTruth blockcompressed,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscTruth *done)
{
  PetscInt   i;
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  if (ia && a->i != *ia) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
  if (ja && a->j != *ja) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  return(0);
}

PetscErrorCode MatConvertFrom_MPIAdj(Mat A,const MatType type,MatReuse reuse,Mat *newmat)
{
  Mat               B;
  PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
  const PetscInt    *rj;
  const PetscScalar *ra;
  MPI_Comm          comm;

  MatGetSize(A,PETSC_NULL,&N);
  MatGetLocalSize(A,&m,PETSC_NULL);
  MatGetOwnershipRange(A,&rstart,PETSC_NULL);

  /* count the number of off-diagonal nonzeros in each local row */
  for (i=0; i<m; i++) {
    MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);
    for (j=0; j<len; j++) {
      if (rj[j] == i+rstart) {len--; break;} /* don't count the diagonal */
    }
    MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);
    nzeros += len;
  }

  /* allocate the adjacency structure; ia needs one entry per local row plus one */
  PetscMalloc((nzeros+1)*sizeof(PetscInt),&a);
  PetscMalloc((m+1)*sizeof(PetscInt),&ia);
  PetscMalloc((nzeros+1)*sizeof(PetscInt),&ja);

  nzeros = 0;
  ia[0]  = 0;
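  /* second pass: copy the off-diagonal column indices, keeping |value| of each
     retained entry as an integer edge weight */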
  for (i=0; i<m; i++) {
    MatGetRow(A,i+rstart,&len,&rj,&ra);
    cnt = 0;
    for (j=0; j<len; j++) {
      if (rj[j] != i+rstart) { /* if not the diagonal */
        a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
        ja[nzeros+cnt++] = rj[j];
      }
    }
    MatRestoreRow(A,i+rstart,&len,&rj,&ra);
    nzeros  += cnt;
    ia[i+1]  = nzeros;
  }

  PetscObjectGetComm((PetscObject)A,&comm);
  MatCreate(comm,&B);
  MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N);
  MatSetType(B,type);
  MatMPIAdjSetPreallocation(B,ia,ja,a);

  if (reuse == MAT_REUSE_MATRIX) {
    MatHeaderReplace(A,B);
  } else {
    *newmat = B;
  }
  return(0);
}

/* -------------------------------------------------------------------*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
/* 4*/ 0,
       0,
       0,
       0,
       0,
       0,
/*10*/ 0,
       0,
       0,
       0,
       0,
/*15*/ 0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
/*20*/ 0,
       0,
       MatSetOption_MPIAdj,
       0,
/*24*/ 0,
       0,
       0,
       0,
       0,
/*29*/ 0,
       0,
       0,
       0,
       0,
/*34*/ 0,
       0,
       0,
       0,
       0,
/*39*/ 0,
       0,
       0,
       0,
       0,
/*44*/ 0,
       0,
       0,
       0,
       0,
/*49*/ 0,
       MatGetRowIJ_MPIAdj,
       MatRestoreRowIJ_MPIAdj,
       0,
       0,
/*54*/ 0,
       0,
       0,
       0,
       0,
/*59*/ 0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       MatConvertFrom_MPIAdj,
       0,
/*64*/ 0,
       0,
       0,
       0,
       0,
/*69*/ 0,
       0,
       0,
       0,
       0,
/*74*/ 0,
       0,
       0,
       0,
       0,
/*79*/ 0,
       0,
       0,
       0,
       0,
/*84*/ 0,
       0,
       0,
       0,
       0,
/*89*/ 0,
       0,
       0,
       0,
       0,
/*94*/ 0,
       0,
       0,
       0};

PetscErrorCode MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  Mat_MPIAdj *b = (Mat_MPIAdj *)B->data;
#if defined(PETSC_USE_DEBUG)
  PetscInt   ii;
#endif

  PetscLayoutSetBlockSize(B->rmap,1);
  PetscLayoutSetBlockSize(B->cmap,1);
  PetscLayoutSetUp(B->rmap);
  PetscLayoutSetUp(B->cmap);

#if defined(PETSC_USE_DEBUG)
  if (i[0] != 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D\n",i[0]);
  for (ii=1; ii<=B->rmap->n; ii++) {
    if (i[ii] < 0 || i[ii] < i[ii-1]) {
      SETERRQ4(PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D cannot be negative or smaller than i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
    }
  }
  for (ii=0; ii<i[B->rmap->n]; ii++) {
    if (j[ii] < 0 || j[ii] >= B->cmap->N) {
      SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Column index j[%D]=%D out of range\n",ii,j[ii]);
    }
  }
#endif
  B->preallocated = PETSC_TRUE;
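  /* the matrix takes ownership of the caller's i/j/values arrays; b->freeaij set
     below causes them to be released in MatDestroy_MPIAdj() */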
  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz        = i[B->rmap->n];
  b->diag      = 0;
  b->symmetric = PETSC_FALSE;
  b->freeaij   = PETSC_TRUE;

  MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);
  return(0);
}

/*MC
   MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
   intended for use constructing orderings and partitionings.

   Level: beginner

.seealso: MatCreateMPIAdj()
M*/

PetscErrorCode MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj  *b;
  PetscMPIInt size,rank;

  MPI_Comm_size(((PetscObject)B)->comm,&size);
  MPI_Comm_rank(((PetscObject)B)->comm,&rank);

  PetscNewLog(B,Mat_MPIAdj,&b);
  B->data      = (void*)b;
  PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));
  B->mapping   = 0;
  B->assembled = PETSC_FALSE;

  PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAdjSetPreallocation_C",
                                    "MatMPIAdjSetPreallocation_MPIAdj",
                                    MatMPIAdjSetPreallocation_MPIAdj);
  PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ);
  return(0);
}

/*@C
   MatMPIAdjSetPreallocation - Sets the arrays used for storing the matrix structure and optional edge weights

   Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights
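
   Notes:
   The arrays are not copied; the matrix takes ownership of them and frees them
   when it is destroyed, so they must have been allocated with PetscMalloc() and
   must not be freed by the caller.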

   Level: intermediate

.seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
@*/
PetscErrorCode MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  PetscErrorCode (*f)(Mat,PetscInt*,PetscInt*,PetscInt*);

  PetscObjectQueryFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",(void (**)(void))&f);
  if (f) {
    (*f)(B,i,j,values);
  }
  return(0);
}

/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  N - number of global columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j, and values arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
   call from Fortran you need not create the arrays with PetscMalloc().
   The arrays should not include the matrix diagonal entries.

   If you already have a matrix, you can create its adjacency matrix by a call
   to MatConvert(), specifying a type of MATMPIADJ.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC
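
   Example:
   A minimal sketch (assuming a single process and a hypothetical 4-vertex path
   graph 0-1-2-3): the i/j arrays are allocated with PetscMalloc(), hold sorted
   zero-based neighbor lists, and exclude the diagonal:
.vb
   PetscInt *ii,*jj;
   Mat      A;
   PetscMalloc(5*sizeof(PetscInt),&ii);
   PetscMalloc(6*sizeof(PetscInt),&jj);
   ii[0] = 0; ii[1] = 1; ii[2] = 3; ii[3] = 5; ii[4] = 6;  /* row offsets */
   jj[0] = 1;                                              /* neighbors of vertex 0 */
   jj[1] = 0; jj[2] = 2;                                   /* neighbors of vertex 1 */
   jj[3] = 1; jj[4] = 3;                                   /* neighbors of vertex 2 */
   jj[5] = 2;                                              /* neighbors of vertex 3 */
   MatCreateMPIAdj(PETSC_COMM_SELF,4,4,ii,jj,PETSC_NULL,&A);
.ve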

.seealso: MatCreate(), MatConvert(), MatGetOrdering()
@*/
PetscErrorCode MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
{
  MatCreate(comm,A);
  MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N);
  MatSetType(*A,MATMPIADJ);
  MatMPIAdjSetPreallocation(*A,i,j,values);
  return(0);
}