Actual source code: chaco.c
#define PETSCMAT_DLL

#include "../src/mat/impls/adj/mpi/mpiadj.h"

#ifdef PETSC_HAVE_UNISTD_H
#include <unistd.h>
#endif

#ifdef PETSC_HAVE_STDLIB_H
#include <stdlib.h>
#endif
/* Chaco does not have an include file */
extern int interface(int nvtxs, int *start, int *adjacency, int *vwgts,
                     float *ewgts, float *x, float *y, float *z, char *outassignname,
                     char *outfilename, short *assignment, int architecture, int ndims_tot,
                     int mesh_dims[3], double *goal, int global_method, int local_method,
                     int rqi_flag, int vmax, int ndims, double eigtol, long seed);
/*
   int nvtxs;            number of vertices in full graph
   int *start;           start of edge list for each vertex
   int *adjacency;       edge list data
   int *vwgts;           weights for all vertices
   float *ewgts;         weights for all edges
   float *x, *y, *z;     coordinates for inertial method
   char *outassignname;  name of assignment output file
   char *outfilename;    output file name
   short *assignment;    set number of each vtx (length n)
   int architecture;     0 => hypercube, d => d-dimensional mesh
   int ndims_tot;        total number of cube dimensions to divide
   int mesh_dims[3];     dimensions of mesh of processors
   double *goal;         desired set sizes for each set
   int global_method;    global partitioning algorithm
   int local_method;     local partitioning algorithm
   int rqi_flag;         should I use RQI/Symmlq eigensolver?
   int vmax;             how many vertices to coarsen down to?
   int ndims;            number of eigenvectors (2^d sets)
   double eigtol;        tolerance on eigenvectors
   long seed;            for random graph mutations
*/

/* Chaco global controlling whether it frees the caller's adjacency arrays */
extern int FREE_GRAPH;
typedef struct {
  int    architecture;
  int    ndims_tot;
  int    mesh_dims[3];
  int    rqi_flag;
  int    numbereigen;
  double eigtol;
  int    global_method; /* global method */
  int    local_method;  /* local method */
  int    nbvtxcoarsed;  /* number of vertices for the coarse graph */
  char   *mesg_log;
} MatPartitioning_Chaco;

#define SIZE_LOG 10000 /* size of buffer for mesg_log */
static PetscErrorCode MatPartitioningApply_Chaco(MatPartitioning part, IS *partitioning)
{
  PetscErrorCode        ierr;
  int                   *parttab, *locals, i, size, rank;
  Mat                   mat = part->adj, matMPI, matSeq;
  int                   nb_locals;
  Mat_MPIAdj            *adj;
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco *) part->data;
  PetscTruth            flg;
#ifdef PETSC_HAVE_UNISTD_H
  int                   fd_stdout, fd_pipe[2], count, err;
#endif

  FREE_GRAPH = 0; /* otherwise Chaco will attempt to free memory for adjacency graph */

  MPI_Comm_size(((PetscObject)mat)->comm, &size);
  PetscTypeCompare((PetscObject) mat, MATMPIADJ, &flg);
  /* check if the matrix is sequential; use MatGetSubMatrices if necessary */
  if (size > 1) {
    int M, N;
    IS  isrow, iscol;
    Mat *A;

    if (flg) {
      SETERRQ(0, "Distributed matrix format MPIAdj is not supported for sequential partitioners");
    }
    PetscPrintf(((PetscObject)part)->comm, "Converting distributed matrix to sequential: this could be a performance loss\n");
    MatGetSize(mat, &M, &N);
    ISCreateStride(PETSC_COMM_SELF, M, 0, 1, &isrow);
    ISCreateStride(PETSC_COMM_SELF, N, 0, 1, &iscol);
    MatGetSubMatrices(mat, 1, &isrow, &iscol, MAT_INITIAL_MATRIX, &A);
    ISDestroy(isrow);
    ISDestroy(iscol);
    matSeq = *A;
    PetscFree(A);
  } else
    matSeq = mat;

  /* convert to the MPIAdj format if needed, so that matMPI holds the graph */
  if (!flg) {
    MatConvert(matSeq, MATMPIADJ, MAT_INITIAL_MATRIX, &matMPI);
  } else {
    matMPI = matSeq;
  }
  adj = (Mat_MPIAdj *) matMPI->data; /* finally adj contains the adjacency graph */
  {
    /* arguments for Chaco library */
    int    nvtxs = mat->rmap->N;                  /* number of vertices in full graph */
    int    *start = adj->i;                       /* start of edge list for each vertex */
    int    *adjacency;                            /* = adj->j; edge list data */
    int    *vwgts = NULL;                         /* weights for all vertices */
    float  *ewgts = NULL;                         /* weights for all edges */
    float  *x = NULL, *y = NULL, *z = NULL;       /* coordinates for inertial method */
    char   *outassignname = NULL;                 /* name of assignment output file */
    char   *outfilename = NULL;                   /* output file name */
    short  *assignment;                           /* set number of each vtx (length n) */
    int    architecture = chaco->architecture;    /* 0 => hypercube, d => d-dimensional mesh */
    int    ndims_tot = chaco->ndims_tot;          /* total number of cube dimensions to divide */
    int    *mesh_dims = chaco->mesh_dims;         /* dimensions of mesh of processors */
    double *goal = NULL;                          /* desired set sizes for each set */
    int    global_method = chaco->global_method;  /* global partitioning algorithm */
    int    local_method = chaco->local_method;    /* local partitioning algorithm */
    int    rqi_flag = chaco->rqi_flag;            /* should I use RQI/Symmlq eigensolver? */
    int    vmax = chaco->nbvtxcoarsed;            /* how many vertices to coarsen down to? */
    int    ndims = chaco->numbereigen;            /* number of eigenvectors (2^d sets) */
    double eigtol = chaco->eigtol;                /* tolerance on eigenvectors */
    long   seed = 123636512;                      /* for random graph mutations */

    /* buffer that receives Chaco's per-vertex set assignment */
    PetscMalloc((mat->rmap->N) * sizeof(short), &assignment);

    /* shift to 1-based indices, as expected by Chaco's Fortran-style numbering */
    PetscMalloc(sizeof(int) * start[nvtxs], &adjacency);
    for (i = 0; i < start[nvtxs]; i++)
      adjacency[i] = (adj->j)[i] + 1;
    /* redirect output to buffer: chaco -> mesg_log */
#ifdef PETSC_HAVE_UNISTD_H
    fd_stdout = dup(1);
    pipe(fd_pipe);
    close(1);
    dup2(fd_pipe[1], 1);
    PetscMalloc(SIZE_LOG * sizeof(char), &(chaco->mesg_log));
#endif
    /* library call; ierr picks up Chaco's return code */
    ierr = interface(nvtxs, start, adjacency, vwgts, ewgts, x, y, z,
                     outassignname, outfilename, assignment, architecture, ndims_tot,
                     mesh_dims, goal, global_method, local_method, rqi_flag, vmax, ndims,
                     eigtol, seed);
#ifdef PETSC_HAVE_UNISTD_H
    err = fflush(stdout);
    if (err) SETERRQ(PETSC_ERR_SYS, "fflush() failed on stdout");
    count = read(fd_pipe[0], chaco->mesg_log, (SIZE_LOG - 1) * sizeof(char));
    if (count < 0) count = 0;
    chaco->mesg_log[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
#endif
    if (ierr) { SETERRQ(PETSC_ERR_LIB, chaco->mesg_log); }

    PetscFree(adjacency);

    PetscMalloc((mat->rmap->N) * sizeof(int), &parttab);
    for (i = 0; i < nvtxs; i++) {
      parttab[i] = assignment[i];
    }
    PetscFree(assignment);
  }
  /* create the index set; each rank owns a contiguous block of parttab,
     with the first N % size ranks holding one extra entry */
  MPI_Comm_rank(((PetscObject)part)->comm, &rank);
  MPI_Comm_size(((PetscObject)part)->comm, &size);
  nb_locals = mat->rmap->N / size;
  locals = parttab + rank * nb_locals;
  if (rank < mat->rmap->N % size) {
    nb_locals++;
    locals += rank;
  } else
    locals += mat->rmap->N % size;
  ISCreateGeneral(((PetscObject)part)->comm, nb_locals, locals, partitioning);
  /* destroy temporary objects */
  PetscFree(parttab);
  if (matSeq != mat) {
    MatDestroy(matSeq);
  }
  if (matMPI != mat) {
    MatDestroy(matMPI);
  }
  return(0);
}
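/* A minimal standalone sketch of the block distribution used above when the
   index set is created: every rank owns N/size entries of parttab, and the
   first N % size ranks own one extra.  The helper name block_range is
   illustrative only; it is not part of PETSc or Chaco. */
static void block_range(int N, int size, int rank, int *offset, int *count)
{
  int base = N / size, rem = N % size;

  *count  = base + (rank < rem ? 1 : 0);             /* low ranks absorb the remainder */
  *offset = rank * base + (rank < rem ? rank : rem); /* start of this rank's slice */
}
/* For N = 10 and size = 3 this yields counts {4,3,3} at offsets {0,4,7},
   matching the nb_locals/locals computation in MatPartitioningApply_Chaco. */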
PetscErrorCode MatPartitioningView_Chaco(MatPartitioning part, PetscViewer viewer)
{
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco *) part->data;
  PetscErrorCode        ierr;
  PetscMPIInt           rank;
  PetscTruth            iascii;

  MPI_Comm_rank(((PetscObject)part)->comm, &rank);
  PetscTypeCompare((PetscObject) viewer, PETSC_VIEWER_ASCII, &iascii);
  if (iascii) {
    if (!rank && chaco->mesg_log) {
      PetscViewerASCIIPrintf(viewer, "%s\n", chaco->mesg_log);
    }
  } else {
    SETERRQ1(PETSC_ERR_SUP, "Viewer type %s not supported for this Chaco partitioner", ((PetscObject) viewer)->type_name);
  }
  return(0);
}
/*@
   MatPartitioningChacoSetGlobal - Set method for global partitioning.

   Input Parameters:
+  part - the partitioning context
-  method - MP_CHACO_MULTILEVEL_KL, MP_CHACO_SPECTRAL, MP_CHACO_LINEAR,
            MP_CHACO_RANDOM or MP_CHACO_SCATTERED

   Level: advanced

@*/
PetscErrorCode MatPartitioningChacoSetGlobal(MatPartitioning part, MPChacoGlobalType method)
{
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco *) part->data;

  switch (method) {
  case MP_CHACO_MULTILEVEL_KL:
    chaco->global_method = 1;
    break;
  case MP_CHACO_SPECTRAL:
    chaco->global_method = 2;
    break;
  case MP_CHACO_LINEAR:
    chaco->global_method = 4;
    break;
  case MP_CHACO_RANDOM:
    chaco->global_method = 5;
    break;
  case MP_CHACO_SCATTERED:
    chaco->global_method = 6;
    break;
  default:
    SETERRQ(PETSC_ERR_SUP, "Chaco: Unknown or unsupported option");
  }
  return(0);
}
/*@
   MatPartitioningChacoSetLocal - Set method for local partitioning.

   Input Parameters:
+  part - the partitioning context
-  method - MP_CHACO_KERNIGHAN_LIN or MP_CHACO_NONE

   Level: advanced

@*/
PetscErrorCode MatPartitioningChacoSetLocal(MatPartitioning part, MPChacoLocalType method)
{
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco *) part->data;

  switch (method) {
  case MP_CHACO_KERNIGHAN_LIN:
    chaco->local_method = 1;
    break;
  case MP_CHACO_NONE:
    chaco->local_method = 2;
    break;
  default:
    SETERRQ(PETSC_ERR_ARG_CORRUPT, "Chaco: Unknown or unsupported option");
  }
  return(0);
}
/*@
   MatPartitioningChacoSetCoarseLevel - Set the coarse level.

   Input Parameters:
+  part - the partitioning context
-  level - the coarse level in range [0.0,1.0]

   Level: advanced

@*/
PetscErrorCode MatPartitioningChacoSetCoarseLevel(MatPartitioning part, PetscReal level)
{
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco *) part->data;

  if (level < 0 || level > 1.0) {
    SETERRQ(PETSC_ERR_ARG_OUTOFRANGE, "Chaco: level of coarsening out of range [0.0,1.0]");
  } else
    chaco->nbvtxcoarsed = (int)(part->adj->cmap->N * level);

  if (chaco->nbvtxcoarsed < 20) chaco->nbvtxcoarsed = 20;
  return(0);
}
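/* Worked example of the coarsening arithmetic above, assuming a matrix with
   cmap->N = 2000: level = 0.1 gives nbvtxcoarsed = 2000 * 0.1 = 200, while
   level = 0.005 gives 10, which is then clamped to the minimum of 20. */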
/*@
   MatPartitioningChacoSetEigenSolver - Set method for eigensolver.

   Input Parameters:
+  part - the partitioning context
-  method - MP_CHACO_LANCZOS or MP_CHACO_RQI_SYMMLQ

   Level: advanced

@*/
PetscErrorCode MatPartitioningChacoSetEigenSolver(MatPartitioning part, MPChacoEigenType method)
{
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco *) part->data;

  switch (method) {
  case MP_CHACO_LANCZOS:
    chaco->rqi_flag = 0;
    break;
  case MP_CHACO_RQI_SYMMLQ:
    chaco->rqi_flag = 1;
    break;
  default:
    SETERRQ(PETSC_ERR_ARG_CORRUPT, "Chaco: Unknown or unsupported option");
  }
  return(0);
}
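/* A sketch of combining the setters: select the spectral global method and
   the RQI/Symmlq eigensolver together (error checking omitted; part is an
   existing MatPartitioning using this Chaco implementation):

     MatPartitioningChacoSetGlobal(part, MP_CHACO_SPECTRAL);
     MatPartitioningChacoSetEigenSolver(part, MP_CHACO_RQI_SYMMLQ);
     MatPartitioningChacoSetEigenTol(part, 1.e-4);
*/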
/*@
   MatPartitioningChacoSetEigenTol - Set tolerance for eigensolver.

   Input Parameters:
+  part - the partitioning context
-  tol - the tolerance requested

   Level: advanced

@*/
PetscErrorCode MatPartitioningChacoSetEigenTol(MatPartitioning part, PetscReal tol)
{
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco *) part->data;

  if (tol <= 0.0) {
    SETERRQ(PETSC_ERR_ARG_OUTOFRANGE, "Chaco: Eigensolver tolerance out of range");
  } else
    chaco->eigtol = tol;

  return(0);
}
/*@
   MatPartitioningChacoSetEigenNumber - Set number of eigenvectors for partitioning.

   Input Parameters:
+  part - the partitioning context
-  num - 1, 2 or 3, indicating partitioning by bisection, quadrisection, or octosection

   Level: advanced

@*/
PetscErrorCode MatPartitioningChacoSetEigenNumber(MatPartitioning part, int num)
{
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco *) part->data;

  if (num > 3 || num < 1) {
    SETERRQ(PETSC_ERR_ARG_OUTOFRANGE, "Chaco: number of eigenvectors out of range");
  } else
    chaco->numbereigen = num;

  return(0);
}
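/* For example, num = 2 requests quadrisection: two eigenvectors split each
   piece of the graph into 2^2 = 4 sets per division step. */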
PetscErrorCode MatPartitioningSetFromOptions_Chaco(MatPartitioning part)
{
  int        i;
  PetscReal  r;
  PetscTruth flag;
  const char *global[] = { "multilevel-kl", "spectral", "linear", "random", "scattered" };
  const char *local[]  = { "kernighan-lin", "none" };
  const char *eigen[]  = { "lanczos", "rqi_symmlq" };

  PetscOptionsHead("Set Chaco partitioning options");

  PetscOptionsEList("-mat_partitioning_chaco_global", "Global method to use",
                    "MatPartitioningChacoSetGlobal", global, 5, global[0], &i, &flag);
  if (flag) MatPartitioningChacoSetGlobal(part, (MPChacoGlobalType)i);

  PetscOptionsEList("-mat_partitioning_chaco_local", "Local method to use",
                    "MatPartitioningChacoSetLocal", local, 2, local[0], &i, &flag);
  if (flag) MatPartitioningChacoSetLocal(part, (MPChacoLocalType)i);

  PetscOptionsReal("-mat_partitioning_chaco_coarse_level", "Coarse level",
                   "MatPartitioningChacoSetCoarseLevel", 0, &r, &flag);
  if (flag) MatPartitioningChacoSetCoarseLevel(part, r);

  PetscOptionsEList("-mat_partitioning_chaco_eigen_solver", "Eigensolver to use in spectral method",
                    "MatPartitioningChacoSetEigenSolver", eigen, 2, eigen[0], &i, &flag);
  if (flag) MatPartitioningChacoSetEigenSolver(part, (MPChacoEigenType)i);

  PetscOptionsReal("-mat_partitioning_chaco_eigen_tol", "Tolerance for eigensolver",
                   "MatPartitioningChacoSetEigenTol", 0.001, &r, &flag);
  if (flag) MatPartitioningChacoSetEigenTol(part, r);

  PetscOptionsInt("-mat_partitioning_chaco_eigen_number",
                  "Number of eigenvectors: 1, 2, or 3 (bi-, quadri-, or octosection)",
                  "MatPartitioningChacoSetEigenNumber", 1, &i, &flag);
  if (flag) MatPartitioningChacoSetEigenNumber(part, i);

  PetscOptionsTail();
  return(0);
}
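/* With the options above, Chaco's behavior can be chosen entirely on the
   command line; a sketch, with a hypothetical executable name:

     mpiexec -n 4 ./ex -mat_partitioning_type chaco \
         -mat_partitioning_chaco_global spectral \
         -mat_partitioning_chaco_eigen_tol 1e-4
*/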
PetscErrorCode MatPartitioningDestroy_Chaco(MatPartitioning part)
{
  MatPartitioning_Chaco *chaco = (MatPartitioning_Chaco *) part->data;
  PetscErrorCode        ierr;

  PetscFree(chaco->mesg_log);
  PetscFree(chaco);
  return(0);
}
/*MC
   MAT_PARTITIONING_CHACO - Creates a partitioning context via the external package Chaco.

   Collective on MPI_Comm

   Input Parameter:
.  part - the partitioning context

   Options Database Keys:
+  -mat_partitioning_chaco_global <multilevel-kl> (one of) multilevel-kl spectral linear random scattered
.  -mat_partitioning_chaco_local <kernighan-lin> (one of) kernighan-lin none
.  -mat_partitioning_chaco_coarse_level <0>: Coarse level (MatPartitioningChacoSetCoarseLevel)
.  -mat_partitioning_chaco_eigen_solver <lanczos> (one of) lanczos rqi_symmlq
.  -mat_partitioning_chaco_eigen_tol <0.001>: Tolerance for eigensolver (MatPartitioningChacoSetEigenTol)
-  -mat_partitioning_chaco_eigen_number <1>: Number of eigenvectors: 1, 2, or 3 (bi-, quadri-, or octosection) (MatPartitioningChacoSetEigenNumber)

   Level: beginner

   Notes: See http://www.cs.sandia.gov/CRF/chac.html

.keywords: Partitioning, create, context

.seealso: MatPartitioningSetType(), MatPartitioningType
M*/
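/* A minimal usage sketch, assuming a square MPIAdj matrix adjmat already
   describes the graph to partition; MatPartitioningApply() dispatches to
   MatPartitioningApply_Chaco above (error checking omitted):

     MatPartitioning part;
     IS              is;

     MatPartitioningCreate(PETSC_COMM_WORLD, &part);
     MatPartitioningSetAdjacency(part, adjmat);
     MatPartitioningSetType(part, MAT_PARTITIONING_CHACO);
     MatPartitioningSetFromOptions(part);
     MatPartitioningApply(part, &is);
     ISDestroy(is);
     MatPartitioningDestroy(part);
*/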
PetscErrorCode MatPartitioningCreate_Chaco(MatPartitioning part)
{
  MatPartitioning_Chaco *chaco;

  PetscNewLog(part, MatPartitioning_Chaco, &chaco);
  part->data = (void*) chaco;

  chaco->architecture  = 1;
  chaco->ndims_tot     = 0;
  chaco->mesh_dims[0]  = part->n;
  chaco->global_method = 1;
  chaco->local_method  = 1;
  chaco->rqi_flag      = 0;
  chaco->nbvtxcoarsed  = 200;
  chaco->numbereigen   = 1;
  chaco->eigtol        = 0.001;
  chaco->mesg_log      = NULL;

  part->ops->apply          = MatPartitioningApply_Chaco;
  part->ops->view           = MatPartitioningView_Chaco;
  part->ops->destroy        = MatPartitioningDestroy_Chaco;
  part->ops->setfromoptions = MatPartitioningSetFromOptions_Chaco;

  return(0);
}