Actual source code: party.c
#define PETSCMAT_DLL

#include "../src/mat/impls/adj/mpi/mpiadj.h"

#ifdef PETSC_HAVE_UNISTD_H
#include <unistd.h>
#endif

#ifdef PETSC_HAVE_STDLIB_H
#include <stdlib.h>
#endif

/*
   Currently using Party-1.99
*/

#include "party_lib.h"

typedef struct {
  char redm[15];
  char redo[15];
  int  rec;
  int  output;
  char global_method[15];   /* global method */
  char local_method[15];    /* local method */
  int  nbvtxcoarsed;        /* number of vertices for the coarse graph */
  char *mesg_log;
} MatPartitioning_Party;

#define SIZE_LOG 10000      /* size of buffer for mesg_log */

static PetscErrorCode MatPartitioningApply_Party(MatPartitioning part, IS *partitioning)
{
  PetscErrorCode        ierr;
  int                   *locals, *parttab = NULL, rank, size;
  Mat                   mat = part->adj, matMPI, matSeq;
  int                   nb_locals;
  Mat_MPIAdj            *adj = (Mat_MPIAdj *) mat->data;
  MatPartitioning_Party *party = (MatPartitioning_Party *) part->data;
  PetscTruth            flg;
#ifdef PETSC_HAVE_UNISTD_H
  int                   fd_stdout, fd_pipe[2], count, err;
#endif

  /* check if the matrix is sequential, use MatGetSubMatrices if necessary */
  PetscTypeCompare((PetscObject) mat, MATMPIADJ, &flg);
  MPI_Comm_size(((PetscObject)mat)->comm, &size);
  MPI_Comm_rank(((PetscObject)part)->comm, &rank);
  if (size > 1) {
    int M, N;
    IS  isrow, iscol;
    Mat *A;

    if (flg) SETERRQ(PETSC_ERR_SUP,"Distributed matrix format MPIAdj is not supported for sequential partitioners");
    PetscPrintf(((PetscObject)part)->comm,"Converting distributed matrix to sequential: this could be a performance loss\n");
    MatGetSize(mat, &M, &N);
    ISCreateStride(PETSC_COMM_SELF, M, 0, 1, &isrow);
    ISCreateStride(PETSC_COMM_SELF, N, 0, 1, &iscol);
    MatGetSubMatrices(mat, 1, &isrow, &iscol, MAT_INITIAL_MATRIX, &A);
    ISDestroy(isrow);
    ISDestroy(iscol);
    matSeq = *A;
    PetscFree(A);
  } else {
    matSeq = mat;
  }
  /* the Party interface requires the MPIADJ format, so convert the matrix if
     necessary and keep the result in matMPI */
  if (!flg) {
    MatConvert(matSeq, MATMPIADJ, MAT_INITIAL_MATRIX, &matMPI);
  } else {
    matMPI = matSeq;
  }

  adj = (Mat_MPIAdj *) matMPI->data;  /* finally adj contains the adjacency graph */
  {
    /* Party library arguments definition */
    int   n = mat->rmap->N;                /* number of vertices in full graph */
    int   *edge_p = adj->i;                /* start of edge list for each vertex */
    int   *edge = adj->j;                  /* edge list data */
    int   *vertex_w = NULL;                /* weights for all vertices */
    int   *edge_w = NULL;                  /* weights for all edges */
    float *x = NULL, *y = NULL, *z = NULL; /* coordinates for inertial method */
    int   p = part->n;                     /* number of parts to create */
    int   *part_party;                     /* partition number of each vertex (length n) */
    int   cutsize;                         /* number of edges cut */
    char  *global = party->global_method;  /* global partitioning algorithm */
    char  *local = party->local_method;    /* local partitioning algorithm */
    int   redl = party->nbvtxcoarsed;      /* how many vertices to coarsen down to */
    char  *redm = party->redm;
    char  *redo = party->redo;
    int   rec = party->rec;
    int   output = party->output;

    PetscMalloc((mat->rmap->N) * sizeof(int), &part_party);

    /* redirect output to the buffer party->mesg_log */
#ifdef PETSC_HAVE_UNISTD_H
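    /* Duplicate the current stdout descriptor so it can be restored later, then make
       descriptor 1 point at the write end of a pipe; everything the Party library
       prints is read back from the pipe into party->mesg_log after the call. */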
    fd_stdout = dup(1);
    pipe(fd_pipe);
    close(1);
    dup2(fd_pipe[1], 1);
    PetscMalloc(SIZE_LOG * sizeof(char), &(party->mesg_log));
#endif

    /* library call */
    party_lib_times_start();
    ierr = party_lib(n, vertex_w, x, y, z, edge_p, edge, edge_w,
                     p, part_party, &cutsize, redl, redm, redo,
                     global, local, rec, output);

    party_lib_times_output(output);
    part_info(n, vertex_w, edge_p, edge, edge_w, p, part_party, output);

#ifdef PETSC_HAVE_UNISTD_H
    err = fflush(stdout);
    if (err) SETERRQ(PETSC_ERR_SYS,"fflush() failed on stdout");
    count = read(fd_pipe[0], party->mesg_log, (SIZE_LOG - 1) * sizeof(char));
    if (count < 0) count = 0;
    party->mesg_log[count] = 0;
    close(1);
    dup2(fd_stdout, 1);
    close(fd_stdout);
    close(fd_pipe[0]);
    close(fd_pipe[1]);
#endif

    /* if the library call returned an error, report it along with its output log */
    if (ierr) SETERRQ(PETSC_ERR_LIB, party->mesg_log);
    parttab = part_party;
  }

  /* Creation of the index set */
  MPI_Comm_rank(((PetscObject)part)->comm, &rank);
  MPI_Comm_size(((PetscObject)part)->comm, &size);
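  /* Each process takes a contiguous block of the global partition array parttab:
     mat->rmap->N / size entries per process, with the first mat->rmap->N % size
     processes receiving one extra entry. */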
  nb_locals = mat->rmap->N / size;
  locals = parttab + rank * nb_locals;
  if (rank < mat->rmap->N % size) {
    nb_locals++;
    locals += rank;
  } else {
    locals += mat->rmap->N % size;
  }
  ISCreateGeneral(((PetscObject)part)->comm, nb_locals, locals, partitioning);

  /* destroying old objects */
  PetscFree(parttab);
  if (matSeq != mat) {
    MatDestroy(matSeq);
  }
  if (matMPI != mat) {
    MatDestroy(matMPI);
  }
  return(0);
}

PetscErrorCode MatPartitioningView_Party(MatPartitioning part, PetscViewer viewer)
{
  MatPartitioning_Party *party = (MatPartitioning_Party *) part->data;
  PetscErrorCode        ierr;
  PetscMPIInt           rank;
  PetscTruth            iascii;

  MPI_Comm_rank(((PetscObject)part)->comm, &rank);
  PetscTypeCompare((PetscObject) viewer, PETSC_VIEWER_ASCII, &iascii);
  if (iascii) {
    if (!rank && party->mesg_log) {
      ierr = PetscViewerASCIIPrintf(viewer, "%s\n", party->mesg_log);CHKERRQ(ierr);
    }
  } else {
    SETERRQ1(PETSC_ERR_SUP, "Viewer type %s not supported for this Party partitioner", ((PetscObject) viewer)->type_name);
  }
  return(0);
}

/*@C
   MatPartitioningPartySetGlobal - Set the method for global partitioning.

   Input Parameters:
+  part   - the partitioning context
-  global - may be one of MP_PARTY_OPT, MP_PARTY_LIN, MP_PARTY_SCA,
            MP_PARTY_RAN, MP_PARTY_GBF, MP_PARTY_GCF, MP_PARTY_BUB or MP_PARTY_DEF, or
            alternatively a string describing the method. Two or more methods can be
            combined like "gbf,gcf". Check the Party Library Users Manual for details.

   Level: advanced

@*/
PetscErrorCode MatPartitioningPartySetGlobal(MatPartitioning part, const char *global)
{
  MatPartitioning_Party *party = (MatPartitioning_Party *) part->data;

  PetscStrcpy(party->global_method, global);
  return(0);
}
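
/*
   Example usage (a sketch, assuming a MatPartitioning object "part" already exists):

     MatPartitioningSetType(part, MAT_PARTITIONING_PARTY);
     MatPartitioningPartySetGlobal(part, "gbf,gcf");
*/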

/*@C
   MatPartitioningPartySetLocal - Set the method for local partitioning.

   Input Parameters:
+  part  - the partitioning context
-  local - one of MP_PARTY_HELPFUL_SETS, MP_PARTY_KERNIGHAN_LIN, or MP_PARTY_NONE.
           Check the Party Library Users Manual for details.

   Level: advanced

@*/
PetscErrorCode MatPartitioningPartySetLocal(MatPartitioning part, const char *local)
{
  MatPartitioning_Party *party = (MatPartitioning_Party *) part->data;

  PetscStrcpy(party->local_method, local);
  return(0);
}

/*@
   MatPartitioningPartySetCoarseLevel - Set the coarse level

   Input Parameters:
+  part  - the partitioning context
-  level - the coarse level in the range [0.0,1.0]

   Level: advanced

@*/
PetscErrorCode MatPartitioningPartySetCoarseLevel(MatPartitioning part, PetscReal level)
{
  MatPartitioning_Party *party = (MatPartitioning_Party *) part->data;

  if (level < 0.0 || level > 1.0) {
    SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Party: level of coarsening out of range [0.0,1.0]");
  } else {
    party->nbvtxcoarsed = (int)(part->adj->cmap->N * level);
  }
  if (party->nbvtxcoarsed < 20) party->nbvtxcoarsed = 20;
  return(0);
}
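
/*
   For example (a sketch, assuming the adjacency matrix has 1000 columns): a level of
   0.5 gives nbvtxcoarsed = (int)(1000 * 0.5) = 500 coarse vertices; any result below
   20 is raised to 20.
*/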

/*@
   MatPartitioningPartySetMatchOptimization - Activate matching optimization for graph reduction

   Input Parameters:
+  part - the partitioning context
-  opt  - activate optimization

   Level: advanced

@*/
PetscErrorCode MatPartitioningPartySetMatchOptimization(MatPartitioning part, PetscTruth opt)
{
  MatPartitioning_Party *party = (MatPartitioning_Party *) part->data;

  if (opt) {
    PetscStrcpy(party->redo, "w3");
  } else {
    PetscStrcpy(party->redo, "");
  }
  return(0);
}

/*@
   MatPartitioningPartySetBipart - Activate or deactivate recursive bisection.

   Input Parameters:
+  part - the partitioning context
-  bp   - PETSC_TRUE to activate recursive bisection

   Level: advanced

@*/
PetscErrorCode MatPartitioningPartySetBipart(MatPartitioning part, PetscTruth bp)
{
  MatPartitioning_Party *party = (MatPartitioning_Party *) part->data;

  if (bp) {
    party->rec = 1;
  } else {
    party->rec = 0;
  }
  return(0);
}
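
/*
   Example usage (a sketch, assuming a Party partitioning context "part" already exists):

     MatPartitioningPartySetMatchOptimization(part, PETSC_TRUE);
     MatPartitioningPartySetBipart(part, PETSC_FALSE);
*/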

PetscErrorCode MatPartitioningSetFromOptions_Party(MatPartitioning part)
{
  PetscTruth flag, b;
  char       value[15];
  PetscReal  r;

  PetscOptionsHead("Set Party partitioning options");

  PetscOptionsString("-mat_partitioning_party_global",
      "Global method to use", "MatPartitioningPartySetGlobal", "gcf,gbf",
      value, 15, &flag);
  if (flag) MatPartitioningPartySetGlobal(part, value);

  PetscOptionsString("-mat_partitioning_party_local",
      "Local method to use", "MatPartitioningPartySetLocal", "kl",
      value, 15, &flag);
  if (flag) MatPartitioningPartySetLocal(part, value);

  PetscOptionsReal("-mat_partitioning_party_coarse_level",
      "Coarse level", "MatPartitioningPartySetCoarseLevel", 0, &r, &flag);
  if (flag) MatPartitioningPartySetCoarseLevel(part, r);

  PetscOptionsTruth("-mat_partitioning_party_match_optimization",
      "Matching optimization on/off (boolean)",
      "MatPartitioningPartySetMatchOptimization", PETSC_TRUE, &b, &flag);
  if (flag) MatPartitioningPartySetMatchOptimization(part, b);

  PetscOptionsTruth("-mat_partitioning_party_bipart",
      "Bipartitioning option on/off (boolean)",
      "MatPartitioningPartySetBipart", PETSC_TRUE, &b, &flag);
  if (flag) MatPartitioningPartySetBipart(part, b);

  PetscOptionsTail();
  return(0);
}

PetscErrorCode MatPartitioningDestroy_Party(MatPartitioning part)
{
  MatPartitioning_Party *party = (MatPartitioning_Party *) part->data;
  PetscErrorCode        ierr;

  ierr = PetscFree(party->mesg_log);CHKERRQ(ierr);
  ierr = PetscFree(party);CHKERRQ(ierr);
  return(0);
}

/*MC
   MAT_PARTITIONING_PARTY - Creates a partitioning context via the external package Party.

   Collective on MPI_Comm

   Input Parameter:
.  part - the partitioning context

   Options Database Keys:
+  -mat_partitioning_party_global <gcf,gbf> - Global method to use (MatPartitioningPartySetGlobal)
.  -mat_partitioning_party_local <kl> - Local method to use (MatPartitioningPartySetLocal)
.  -mat_partitioning_party_coarse_level <0> - Coarse level (MatPartitioningPartySetCoarseLevel)
.  -mat_partitioning_party_match_optimization <true> - Matching optimization on/off (boolean) (MatPartitioningPartySetMatchOptimization)
-  -mat_partitioning_party_bipart <true> - Bipartitioning option on/off (boolean) (MatPartitioningPartySetBipart)

   Level: beginner

   Notes: See http://wwwcs.upb.de/fachbereich/AG/monien/RESEARCH/PART/party.html

.keywords: Partitioning, create, context

.seealso: MatPartitioningSetType(), MatPartitioningType

M*/
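
/*
   Example usage (a sketch; "A" and "nparts" are placeholder names for an existing
   connectivity matrix and the desired number of parts):

     MatPartitioning part;
     IS              is;

     MatPartitioningCreate(PETSC_COMM_WORLD, &part);
     MatPartitioningSetAdjacency(part, A);
     MatPartitioningSetType(part, MAT_PARTITIONING_PARTY);
     MatPartitioningSetNParts(part, nparts);
     MatPartitioningApply(part, &is);

   The options database keys listed above can then be given on the command line, e.g.
   -mat_partitioning_party_global gbf,gcf -mat_partitioning_party_bipart false
*/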

PetscErrorCode MatPartitioningCreate_Party(MatPartitioning part)
{
  MatPartitioning_Party *party;

  PetscNewLog(part, MatPartitioning_Party, &party);
  part->data = (void*) party;

  PetscStrcpy(party->global_method, "gcf,gbf");
  PetscStrcpy(party->local_method, "kl");
  PetscStrcpy(party->redm, "lam");
  PetscStrcpy(party->redo, "w3");

  party->nbvtxcoarsed = 200;
  party->rec          = 1;
  party->output       = 1;
  party->mesg_log     = NULL;

  part->ops->apply          = MatPartitioningApply_Party;
  part->ops->view           = MatPartitioningView_Party;
  part->ops->destroy        = MatPartitioningDestroy_Party;
  part->ops->setfromoptions = MatPartitioningSetFromOptions_Party;
  return(0);
}