Actual source code: pmetis.c

#define PETSCMAT_DLL

#include "../src/mat/impls/adj/mpi/mpiadj.h"

/*
   Currently using ParMetis-2.0. The following include file has
   to be changed to par_kmetis.h for ParMetis-1.0
*/
#include "parmetis.h"

/*
      The first 5 elements of this structure are the input control array to Metis
*/
typedef struct {
  int      cuts;         /* number of cuts made (output) */
  int      foldfactor;   /* folding factor */
  int      parallel;     /* use parallel partitioner for coarse problem */
  int      indexing;     /* 0 indicates C indexing, 1 Fortran */
  int      printout;     /* indicates if one wishes Metis to print info */
  MPI_Comm comm_pmetis;
} MatPartitioning_Parmetis;

/*
   Uses the ParMETIS parallel matrix partitioner to partition the matrix in parallel
*/
static PetscErrorCode MatPartitioningApply_Parmetis(MatPartitioning part,IS *partitioning)
{
  MatPartitioning_Parmetis *parmetis = (MatPartitioning_Parmetis*)part->data;
  PetscErrorCode           ierr;
  int                      *locals,size,rank;
  int                      *vtxdist,*xadj,*adjncy,itmp = 0;
  int                      wgtflag=0,numflag=0,ncon=1,nparts=part->n,options[3],i,j;
  Mat                      mat = part->adj;
  Mat_MPIAdj               *adj = (Mat_MPIAdj*)mat->data;
  PetscTruth               flg;
  float                    *tpwgts,*ubvec;
  PetscInt                 bs = 1,nold;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(((PetscObject)mat)->comm,&size);CHKERRQ(ierr);

  ierr = PetscTypeCompare((PetscObject)mat,MATMPIADJ,&flg);CHKERRQ(ierr);
  if (!flg) {
    /* bs indicates if the converted matrix is "reduced" from the original and hence the
       resulting partition results need to be stretched to match the original matrix */
    nold = mat->rmap->n;
    ierr = MatConvert(mat,MATMPIADJ,MAT_INITIAL_MATRIX,&mat);CHKERRQ(ierr);
    bs   = nold/mat->rmap->n;
    adj  = (Mat_MPIAdj*)mat->data;
  }

  vtxdist = mat->rmap->range;
  xadj    = adj->i;
  adjncy  = adj->j;
  ierr    = MPI_Comm_rank(((PetscObject)part)->comm,&rank);CHKERRQ(ierr);
#if 0
  if (!(vtxdist[rank+1] - vtxdist[rank])) {
    SETERRQ(PETSC_ERR_LIB,"Does not support any processor with no entries");
  }
#endif
#if defined(PETSC_USE_DEBUG)
  /* check that matrix has no diagonal entries */
  {
    int rstart;
    ierr = MatGetOwnershipRange(mat,&rstart,PETSC_NULL);CHKERRQ(ierr);
    for (i=0; i<mat->rmap->n; i++) {
      for (j=xadj[i]; j<xadj[i+1]; j++) {
        if (adjncy[j] == i+rstart) SETERRQ1(PETSC_ERR_ARG_WRONG,"Row %d has diagonal entry; Parmetis forbids diagonal entry",i+rstart);
      }
    }
  }
#endif

  ierr = PetscMalloc((mat->rmap->n+1)*sizeof(int),&locals);CHKERRQ(ierr);

  if (PetscLogPrintInfo) {itmp = parmetis->printout; parmetis->printout = 127;}
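  /* tpwgts holds the desired fraction of total vertex weight for each part
     (ncon weights per part); when the caller supplied no part_weights we
     default below to a uniform 1/nparts split */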
  ierr = PetscMalloc(ncon*nparts*sizeof(float),&tpwgts);CHKERRQ(ierr);
  for (i=0; i<ncon; i++) {
    for (j=0; j<nparts; j++) {
      if (part->part_weights) {
        tpwgts[i*nparts+j] = part->part_weights[i*nparts+j];
      } else {
        tpwgts[i*nparts+j] = 1./nparts;
      }
    }
  }
  ierr = PetscMalloc(ncon*sizeof(float),&ubvec);CHKERRQ(ierr);
  for (i=0; i<ncon; i++) {
    ubvec[i] = 1.05; /* allow 5 percent load imbalance per constraint */
  }
  options[0] = 0; /* use the ParMETIS default options */
  /* ParMETIS has no error conditions ??? */
  ParMETIS_V3_PartKway(vtxdist,xadj,adjncy,part->vertex_weights,adj->values,&wgtflag,&numflag,&ncon,&nparts,tpwgts,ubvec,options,&parmetis->cuts,locals,&parmetis->comm_pmetis);
  ierr = PetscFree(tpwgts);CHKERRQ(ierr);
  ierr = PetscFree(ubvec);CHKERRQ(ierr);
  if (PetscLogPrintInfo) {parmetis->printout = itmp;}

  if (bs > 1) {
    PetscInt *newlocals;
    ierr = PetscMalloc(bs*mat->rmap->n*sizeof(PetscInt),&newlocals);CHKERRQ(ierr);
    for (i=0; i<mat->rmap->n; i++) {
      for (j=0; j<bs; j++) {
        newlocals[bs*i + j] = locals[i];
      }
    }
    ierr = ISCreateGeneral(((PetscObject)part)->comm,bs*mat->rmap->n,newlocals,partitioning);CHKERRQ(ierr);
    ierr = PetscFree(newlocals);CHKERRQ(ierr);
  } else {
    ierr = ISCreateGeneral(((PetscObject)part)->comm,mat->rmap->n,locals,partitioning);CHKERRQ(ierr);
  }
  ierr = PetscFree(locals);CHKERRQ(ierr);

  if (!flg) {
    ierr = MatDestroy(mat);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
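/*
   A minimal usage sketch (hypothetical caller, not part of this file): driving
   the apply routine above through the public MatPartitioning interface. The
   MPIAdj matrix A and the communicator comm are assumed to already exist.

     MatPartitioning part;
     IS              is;

     ierr = MatPartitioningCreate(comm,&part);CHKERRQ(ierr);
     ierr = MatPartitioningSetAdjacency(part,A);CHKERRQ(ierr);
     ierr = MatPartitioningSetType(part,MAT_PARTITIONING_PARMETIS);CHKERRQ(ierr);
     ierr = MatPartitioningApply(part,&is);CHKERRQ(ierr);

   is then maps each local row of A to its assigned part; clean up with
   ISDestroy(is) and MatPartitioningDestroy(part) when done.
*/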


PetscErrorCode MatPartitioningView_Parmetis(MatPartitioning part,PetscViewer viewer)
{
  MatPartitioning_Parmetis *parmetis = (MatPartitioning_Parmetis*)part->data;
  PetscErrorCode           ierr;
  int                      rank;
  PetscTruth               iascii;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(((PetscObject)part)->comm,&rank);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    if (parmetis->parallel == 2) {
      ierr = PetscViewerASCIIPrintf(viewer,"  Using parallel coarse grid partitioner\n");CHKERRQ(ierr);
    } else {
      ierr = PetscViewerASCIIPrintf(viewer,"  Using sequential coarse grid partitioner\n");CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIPrintf(viewer,"  Using %d fold factor\n",parmetis->foldfactor);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedPrintf(viewer,"  [%d]Number of cuts found %d\n",rank,parmetis->cuts);CHKERRQ(ierr);
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for this Parmetis partitioner",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}

/*@
     MatPartitioningParmetisSetCoarseSequential - Use the sequential code to
         do the partitioning of the coarse grid.

  Collective on MatPartitioning

  Input Parameter:
.  part - the partitioning context

   Level: advanced

@*/
PetscErrorCode  MatPartitioningParmetisSetCoarseSequential(MatPartitioning part)
{
  MatPartitioning_Parmetis *parmetis = (MatPartitioning_Parmetis*)part->data;

  PetscFunctionBegin;
  parmetis->parallel = 1;
  PetscFunctionReturn(0);
}
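/*
   Usage note (a sketch, not part of this file): the sequential coarse
   partitioner can be requested either programmatically,

     ierr = MatPartitioningParmetisSetCoarseSequential(part);CHKERRQ(ierr);

   or through the options database key handled in
   MatPartitioningSetFromOptions_Parmetis() below:

     -mat_partitioning_parmetis_coarse_sequential
*/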

/*@
  MatPartitioningParmetisGetEdgeCut - Returns the number of edge cuts in the vertex partition.

  Input Parameter:
. part - the partitioning context

  Output Parameter:
. cut - the edge cut

   Level: advanced

@*/
PetscErrorCode  MatPartitioningParmetisGetEdgeCut(MatPartitioning part, PetscInt *cut)
{
  MatPartitioning_Parmetis *parmetis = (MatPartitioning_Parmetis*)part->data;

  PetscFunctionBegin;
  *cut = parmetis->cuts;
  PetscFunctionReturn(0);
}
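/*
   A minimal sketch of reading the edge cut back after MatPartitioningApply()
   has run (part as in the earlier usage sketch):

     PetscInt cut;
     ierr = MatPartitioningParmetisGetEdgeCut(part,&cut);CHKERRQ(ierr);
     ierr = PetscPrintf(PETSC_COMM_WORLD,"ParMETIS edge cut: %D\n",cut);CHKERRQ(ierr);
*/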

PetscErrorCode MatPartitioningSetFromOptions_Parmetis(MatPartitioning part)
{
  PetscErrorCode ierr;
  PetscTruth     flag = PETSC_FALSE;

  PetscFunctionBegin;
  ierr = PetscOptionsHead("Set ParMETIS partitioning options");CHKERRQ(ierr);
  ierr = PetscOptionsTruth("-mat_partitioning_parmetis_coarse_sequential","Use sequential coarse partitioner","MatPartitioningParmetisSetCoarseSequential",flag,&flag,PETSC_NULL);CHKERRQ(ierr);
  if (flag) {
    ierr = MatPartitioningParmetisSetCoarseSequential(part);CHKERRQ(ierr);
  }
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}


PetscErrorCode MatPartitioningDestroy_Parmetis(MatPartitioning part)
{
  MatPartitioning_Parmetis *parmetis = (MatPartitioning_Parmetis*)part->data;
  PetscErrorCode           ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_free(&(parmetis->comm_pmetis));CHKERRQ(ierr);
  ierr = PetscFree(parmetis);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}


/*MC
   MAT_PARTITIONING_PARMETIS - Creates a partitioning context via the external package ParMETIS.

   Collective on MPI_Comm

   Input Parameter:
.  part - the partitioning context

   Options Database Keys:
.  -mat_partitioning_parmetis_coarse_sequential - use sequential ParMETIS coarse partitioner

   Level: beginner

   Notes: See http://www-users.cs.umn.edu/~karypis/metis/

.keywords: Partitioning, create, context

.seealso: MatPartitioningSetType(), MatPartitioningType

M*/
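/*
   A hedged sketch of selecting this partitioner at runtime through the
   standard MatPartitioning options mechanism:

     ierr = MatPartitioningSetFromOptions(part);CHKERRQ(ierr);

   together with the command-line options

     -mat_partitioning_type parmetis -mat_partitioning_parmetis_coarse_sequential
*/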

PetscErrorCode  MatPartitioningCreate_Parmetis(MatPartitioning part)
{
  PetscErrorCode           ierr;
  MatPartitioning_Parmetis *parmetis;

  PetscFunctionBegin;
  ierr = PetscNewLog(part,MatPartitioning_Parmetis,&parmetis);CHKERRQ(ierr);
  part->data = (void*)parmetis;

  parmetis->cuts       = 0;   /* output variable */
  parmetis->foldfactor = 150; /* folding factor */
  parmetis->parallel   = 2;   /* use parallel partitioner for coarse grid */
  parmetis->indexing   = 0;   /* index numbering starts from 0 */
  parmetis->printout   = 0;   /* print no output while running */

  ierr = MPI_Comm_dup(((PetscObject)part)->comm,&(parmetis->comm_pmetis));CHKERRQ(ierr);

  part->ops->apply          = MatPartitioningApply_Parmetis;
  part->ops->view           = MatPartitioningView_Parmetis;
  part->ops->destroy        = MatPartitioningDestroy_Parmetis;
  part->ops->setfromoptions = MatPartitioningSetFromOptions_Parmetis;
  PetscFunctionReturn(0);
}

/*@
     MatMeshToVertexGraph - This routine does not exist because ParMETIS does not provide the functionality. It would use the ParMETIS package to
                       convert a Mat that represents a mesh to a Mat that represents the graph of the coupling
                       between vertices of the cells and is suitable for partitioning with the MatPartitioning object. Use this to partition
                       vertices of a mesh. More likely you should use MatMeshToCellGraph()

   Collective on Mat

   Input Parameters:
+     mesh - the graph that represents the mesh
-     ncommonnodes - mesh elements that share this number of common nodes are considered neighbors, use 2 for triangles and
                     quadrilaterals, 3 for tetrahedra and 4 for hexahedra

   Output Parameter:
.     dual - the dual graph

   Notes:
     Currently requires ParMETIS to be installed and uses ParMETIS_V3_Mesh2Dual()

     The columns of each row of the Mat mesh are the global vertex numbers of the vertices of that row's cell. The number of rows in mesh is
     the number of cells; the number of columns is the number of vertices.

   Level: advanced

.seealso: MatMeshToCellGraph(), MatCreateMPIAdj(), MatPartitioningCreate()

@*/
PetscErrorCode MatMeshToVertexGraph(Mat mesh,PetscInt ncommonnodes,Mat *dual)
{
  PetscFunctionBegin;
  SETERRQ(PETSC_ERR_SUP,"ParMETIS does not provide this functionality");
  PetscFunctionReturn(0);
}

/*@
     MatMeshToCellGraph - Uses the ParMETIS package to convert a Mat that represents a mesh to a Mat that represents the graph of the coupling
                       between cells (the "dual" graph) and is suitable for partitioning with the MatPartitioning object. Use this to partition
                       cells of a mesh.

   Collective on Mat

   Input Parameters:
+     mesh - the graph that represents the mesh
-     ncommonnodes - mesh elements that share this number of common nodes are considered neighbors, use 2 for triangles and
                     quadrilaterals, 3 for tetrahedra and 4 for hexahedra

   Output Parameter:
.     dual - the dual graph

   Notes:
     Currently requires ParMETIS to be installed and uses ParMETIS_V3_Mesh2Dual()

     The columns of each row of the Mat mesh are the global vertex numbers of the vertices of that row's cell. The number of rows in mesh is
     the number of cells; the number of columns is the number of vertices.

   Level: advanced

.seealso: MatMeshToVertexGraph(), MatCreateMPIAdj(), MatPartitioningCreate()

@*/
PetscErrorCode MatMeshToCellGraph(Mat mesh,PetscInt ncommonnodes,Mat *dual)
{
  PetscErrorCode ierr;
  int            *newxadj,*newadjncy;
  int            numflag=0;
  Mat_MPIAdj     *adj = (Mat_MPIAdj*)mesh->data,*newadj;
  PetscTruth     flg;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)mesh,MATMPIADJ,&flg);CHKERRQ(ierr);
  if (!flg) SETERRQ(PETSC_ERR_SUP,"Must use MPIAdj matrix type");

  /* ParMETIS has no error conditions ??? */
  CHKMEMQ;
  ParMETIS_V3_Mesh2Dual(mesh->rmap->range,adj->i,adj->j,&numflag,&ncommonnodes,&newxadj,&newadjncy,&((PetscObject)mesh)->comm);
  CHKMEMQ;
  ierr   = MatCreateMPIAdj(((PetscObject)mesh)->comm,mesh->rmap->n,mesh->rmap->N,newxadj,newadjncy,PETSC_NULL,dual);CHKERRQ(ierr);
  newadj = (Mat_MPIAdj*)(*dual)->data;

  newadj->freeaijwithfree = PETSC_TRUE; /* signal the matrix should be freed with system free since space was allocated by ParMETIS */
  PetscFunctionReturn(0);
}
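/*
   A minimal usage sketch (hypothetical caller, not part of this file): build
   the dual graph of a mesh and partition its cells. The MPIAdj matrix mesh is
   assumed to already list, row by row, the global vertex numbers of each cell;
   ncommonnodes of 2 suits triangles and quadrilaterals.

     Mat             dual;
     MatPartitioning part;
     IS              is;

     ierr = MatMeshToCellGraph(mesh,2,&dual);CHKERRQ(ierr);
     ierr = MatPartitioningCreate(PETSC_COMM_WORLD,&part);CHKERRQ(ierr);
     ierr = MatPartitioningSetAdjacency(part,dual);CHKERRQ(ierr);
     ierr = MatPartitioningSetFromOptions(part);CHKERRQ(ierr);
     ierr = MatPartitioningApply(part,&is);CHKERRQ(ierr);

   followed by ISDestroy(is), MatPartitioningDestroy(part) and MatDestroy(dual)
   once the partition has been used.
*/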