Actual source code: da1.c
#define PETSCDM_DLL
/*
   Code for manipulating distributed regular 1d arrays in parallel.
   This file was created by Peter Mell 6/30/95
*/
#include "private/daimpl.h"
const char *DAPeriodicTypes[] = {"NONPERIODIC","XPERIODIC","YPERIODIC","XYPERIODIC",
                                 "XYZPERIODIC","XZPERIODIC","YZPERIODIC","ZPERIODIC","XYZGHOSTED","DAPeriodicType","DA_",0};
PetscErrorCode DAView_1d(DA da,PetscViewer viewer)
{
  PetscMPIInt rank;
  PetscTruth  iascii,isdraw;

  MPI_Comm_rank(((PetscObject)da)->comm,&rank);

  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);
  PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_DRAW,&isdraw);
  if (iascii) {
    PetscViewerFormat format;

    PetscViewerGetFormat(viewer,&format);
    if (format != PETSC_VIEWER_ASCII_VTK && format != PETSC_VIEWER_ASCII_VTK_CELL) {
      DALocalInfo info;
      DAGetLocalInfo(da,&info);
      PetscViewerASCIISynchronizedPrintf(viewer,"Processor [%d] M %D m %D w %D s %D\n",rank,da->M,da->m,da->w,da->s);
      PetscViewerASCIISynchronizedPrintf(viewer,"X range of indices: %D %D\n",info.xs,info.xs+info.xm);
      PetscViewerFlush(viewer);
    }
  } else if (isdraw) {
    PetscDraw  draw;
    double     ymin = -1,ymax = 1,xmin = -1,xmax = da->M,x;
    PetscInt   base;
    char       node[10];
    PetscTruth isnull;

    PetscViewerDrawGetDraw(viewer,0,&draw);
    PetscDrawIsNull(draw,&isnull); if (isnull) return(0);

    PetscDrawSetCoordinates(draw,xmin,ymin,xmax,ymax);
    PetscDrawSynchronizedClear(draw);

    /* first processor draws all node lines */
    if (!rank) {
      PetscInt xmin_tmp;
      ymin = 0.0; ymax = 0.3;

      /* ADIC doesn't like doubles in a for loop */
      for (xmin_tmp=0; xmin_tmp < da->M; xmin_tmp++) {
        PetscDrawLine(draw,(double)xmin_tmp,ymin,(double)xmin_tmp,ymax,PETSC_DRAW_BLACK);
      }

      xmin = 0.0; xmax = da->M - 1;
      PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_BLACK);
      PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_BLACK);
    }

    PetscDrawSynchronizedFlush(draw);
    PetscDrawPause(draw);

    /* draw my box */
    ymin = 0; ymax = 0.3; xmin = da->xs / da->w; xmax = (da->xe / da->w) - 1;
    PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_RED);
    PetscDrawLine(draw,xmin,ymin,xmin,ymax,PETSC_DRAW_RED);
    PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_RED);
    PetscDrawLine(draw,xmax,ymin,xmax,ymax,PETSC_DRAW_RED);

    /* put in index numbers */
    base = da->base / da->w;
    for (x=xmin; x<=xmax; x++) {
      sprintf(node,"%d",(int)base++);
      PetscDrawString(draw,x,ymin,PETSC_DRAW_RED,node);
    }

    PetscDrawSynchronizedFlush(draw);
    PetscDrawPause(draw);
  } else {
    SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for DA 1d",((PetscObject)viewer)->type_name);
  }
  return(0);
}
/*
   Processes command line options to determine if/how a DA
   is to be viewed. Called by DACreateXX().
*/
PetscErrorCode DAView_Private(DA da)
{
  PetscTruth  flg1 = PETSC_FALSE;
  PetscViewer view;

  PetscOptionsBegin(((PetscObject)da)->comm,((PetscObject)da)->prefix,"DA viewing options","DA");
  PetscOptionsTruth("-da_view","Print information about the DA's distribution","DAView",PETSC_FALSE,&flg1,PETSC_NULL);
  if (flg1) {
    PetscViewerASCIIGetStdout(((PetscObject)da)->comm,&view);
    DAView(da,view);
  }
  flg1 = PETSC_FALSE;
  PetscOptionsTruth("-da_view_draw","Draw how the DA is distributed","DAView",PETSC_FALSE,&flg1,PETSC_NULL);
  if (flg1) {DAView(da,PETSC_VIEWER_DRAW_(((PetscObject)da)->comm));}
  PetscOptionsEnd();
  return(0);
}
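/*
   Illustrative usage (not from the original source): an application that
   creates a DA can be run with
       ./app -da_view -da_view_draw
   to print the DA's distribution to stdout and also draw it in a graphics
   window, exercising both option branches above.
*/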
PetscErrorCode DACreate_1D(DA da)
{
  const PetscInt       dim   = da->dim;
  const PetscInt       M     = da->M;
  const PetscInt       dof   = da->w;
  const PetscInt       s     = da->s;
  const PetscInt       sDist = s*dof;  /* absolute stencil distance */
  const PetscInt      *lx    = da->lx;
  const DAPeriodicType wrap  = da->wrap;
  MPI_Comm             comm;
  Vec                  local, global;
  VecScatter           ltog, gtol;
  IS                   to, from;
  PetscTruth           flg1 = PETSC_FALSE, flg2 = PETSC_FALSE;
  PetscMPIInt          rank, size;
  PetscInt             i,*idx,nn,left,xs,xe,x,Xs,Xe,start,end,m;
  PetscErrorCode       ierr;

  if (dim != PETSC_DECIDE && dim != 1) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Dimension should be 1: %D",dim);
  if (dof < 1) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Must have 1 or more degrees of freedom per node: %D",dof);
  if (s < 0)   SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Stencil width cannot be negative: %D",s);

  da->dim = 1;
  PetscMalloc(dof*sizeof(char*),&da->fieldname);
  PetscMemzero(da->fieldname,dof*sizeof(char*));
  PetscObjectGetComm((PetscObject)da,&comm);
  MPI_Comm_size(comm,&size);
  MPI_Comm_rank(comm,&rank);

  da->m = size;
  m     = da->m;

  if (M < m)     SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"More processors than data points! %D %D",m,M);
  if ((M-1) < s) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Array is too small for stencil! %D %D",M-1,s);

  /*
     Determine locally owned region
     xs is the first local node number, x is the number of local nodes
  */
  if (!lx) {
    PetscOptionsGetTruth(PETSC_NULL,"-da_partition_blockcomm",&flg1,PETSC_NULL);
    PetscOptionsGetTruth(PETSC_NULL,"-da_partition_nodes_at_end",&flg2,PETSC_NULL);
    if (flg1) {        /* block-comm style distribution */
      xs = rank*M/m;
      x  = (rank + 1)*M/m - xs;
    } else if (flg2) { /* the leftover nodes are evenly distributed across the last processors */
      x = (M + rank)/m;
      if (M/m == x) { xs = rank*x; }
      else          { xs = rank*(x-1) + (M+rank)%(x*m); }
    } else {           /* the leftover nodes are evenly distributed across the first M%m processors */
      /* Regular PETSc Distribution */
      x = M/m + ((M % m) > rank);
      if (rank >= (M % m)) {xs = (rank * (PetscInt)(M/m) + M % m);}
      else                 {xs = rank * (PetscInt)(M/m) + rank;}
    }
  } else {
    x  = lx[rank];
    xs = 0;
    for (i=0; i<rank; i++) {
      xs += lx[i];
    }
    /* verify that data user provided is consistent */
    left = xs;
    for (i=rank; i<size; i++) {
      left += lx[i];
    }
    if (left != M) {
      SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Sum of lx across processors not equal to M %D %D",left,M);
    }
  }
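  /*
     Illustrative example (not from the original source): with the default
     distribution, M = 10 nodes over m = 3 processors gives M/m = 3 with
     remainder 1, so rank 0 owns x = 4 nodes starting at xs = 0, while
     ranks 1 and 2 own x = 3 nodes starting at xs = 4 and xs = 7.
  */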
  /* From now on x,xs,xe,Xs,Xe are the exact location in the array */
  x  *= dof;
  xs *= dof;
  xe  = xs + x;

  /* determine ghost region */
  if (wrap == DA_XPERIODIC || wrap == DA_XYZGHOSTED) {
    Xs = xs - sDist;
    Xe = xe + sDist;
  } else {
    if ((xs-sDist) >= 0)     Xs = xs-sDist; else Xs = 0;
    if ((xe+sDist) <= M*dof) Xe = xe+sDist; else Xe = M*dof;
  }
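  /*
     Illustrative example (not from the original source): with dof = 2 and
     stencil width s = 1, sDist = 2 unknowns. An interior processor owning
     nodes 4..6 has xs = 8, xe = 14, so its ghosted region is Xs = 6, Xe = 16;
     in the non-periodic case the end processors clip Xs at 0 and Xe at M*dof.
  */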
  /* allocate the base parallel and sequential vectors */
  da->Nlocal = x;
  VecCreateMPIWithArray(comm,da->Nlocal,PETSC_DECIDE,0,&global);
  VecSetBlockSize(global,dof);
  da->nlocal = (Xe-Xs);
  VecCreateSeqWithArray(PETSC_COMM_SELF,da->nlocal,0,&local);
  VecSetBlockSize(local,dof);

  /* Create Local to Global Vector Scatter Context */
  /* local to global inserts non-ghost point region into global */
  VecGetOwnershipRange(global,&start,&end);
  ISCreateStride(comm,x,start,1,&to);
  ISCreateStride(comm,x,xs-Xs,1,&from);
  VecScatterCreate(local,from,global,to,&ltog);
  PetscLogObjectParent(da,ltog);
  ISDestroy(from);
  ISDestroy(to);
  /* Create Global to Local Vector Scatter Context */
  /* global to local must retrieve ghost points */
  if (wrap == DA_XYZGHOSTED) {
    if (size == 1) {
      ISCreateStride(comm,(xe-xs),sDist,1,&to);
    } else if (!rank) {
      ISCreateStride(comm,(Xe-xs),sDist,1,&to);
    } else if (rank == size-1) {
      ISCreateStride(comm,(xe-Xs),0,1,&to);
    } else {
      ISCreateStride(comm,(Xe-Xs),0,1,&to);
    }
  } else {
    ISCreateStride(comm,(Xe-Xs),0,1,&to);
  }

  PetscMalloc((x+2*sDist)*sizeof(PetscInt),&idx);
  PetscLogObjectMemory(da,(x+2*sDist)*sizeof(PetscInt));

  nn = 0;
  if (wrap == DA_XPERIODIC) {  /* Handle all cases with wrap first */

    for (i=0; i<sDist; i++) {  /* Left ghost points */
      if ((xs-sDist+i) >= 0) { idx[nn++] = xs-sDist+i;}
      else                   { idx[nn++] = M*dof+(xs-sDist+i);}
    }

    for (i=0; i<x; i++) { idx[nn++] = xs + i;}  /* Non-ghost points */

    for (i=0; i<sDist; i++) {  /* Right ghost points */
      if ((xe+i) < M*dof) { idx[nn++] = xe+i; }
      else                { idx[nn++] = (xe+i) - M*dof;}
    }
  } else if (wrap == DA_XYZGHOSTED) {

    if (sDist <= xs) {for (i=0; i<sDist; i++) {idx[nn++] = xs - sDist + i;}}

    for (i=0; i<x; i++) { idx[nn++] = xs + i;}

    if ((xe+sDist) <= M*dof) {for (i=0; i<sDist; i++) {idx[nn++] = xe+i;}}

  } else {  /* Now do all cases with no wrapping */

    if (sDist <= xs) {for (i=0; i<sDist; i++) {idx[nn++] = xs - sDist + i;}}
    else             {for (i=0; i<xs; i++)    {idx[nn++] = i;}}

    for (i=0; i<x; i++) { idx[nn++] = xs + i;}

    if ((xe+sDist) <= M*dof) {for (i=0; i<sDist; i++)     {idx[nn++] = xe+i;}}
    else                     {for (i=xe; i<(M*dof); i++)  {idx[nn++] = i;}}
  }
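  /*
     Illustrative example (not from the original source): for DA_XPERIODIC
     with M = 4, dof = 1, s = 1 on 2 processors, rank 0 owns entries 0..1 and
     its ghosted index list becomes idx = {3, 0, 1, 2}: the left ghost wraps
     around to the last global entry and the right ghost is the first entry
     owned by rank 1.
  */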
  ISCreateGeneral(comm,nn,idx,&from);
  VecScatterCreate(global,from,local,to,&gtol);
  PetscLogObjectParent(da,to);
  PetscLogObjectParent(da,from);
  PetscLogObjectParent(da,gtol);
  ISDestroy(to);
  ISDestroy(from);
  VecDestroy(local);
  VecDestroy(global);
  da->xs = xs; da->xe = xe; da->ys = 0; da->ye = 1; da->zs = 0; da->ze = 1;
  da->Xs = Xs; da->Xe = Xe; da->Ys = 0; da->Ye = 1; da->Zs = 0; da->Ze = 1;

  da->gtol      = gtol;
  da->ltog      = ltog;
  da->base      = xs;
  da->ops->view = DAView_1d;

  /*
     Set the local to global ordering in the global vector, this allows use
     of VecSetValuesLocal().
  */
  if (wrap == DA_XYZGHOSTED) {
    PetscInt *tmpidx;
    if (size == 1) {
      PetscMalloc((nn+2*sDist)*sizeof(PetscInt),&tmpidx);
      for (i=0; i<sDist; i++) tmpidx[i] = -1;
      PetscMemcpy(tmpidx+sDist,idx,nn*sizeof(PetscInt));
      for (i=nn+sDist; i<nn+2*sDist; i++) tmpidx[i] = -1;
      PetscFree(idx);
      idx = tmpidx;
      nn += 2*sDist;
    } else if (!rank) { /* must prepend -1 markers for ghost locations that have no global value */
      PetscMalloc((nn+sDist)*sizeof(PetscInt),&tmpidx);
      for (i=0; i<sDist; i++) tmpidx[i] = -1;
      PetscMemcpy(tmpidx+sDist,idx,nn*sizeof(PetscInt));
      PetscFree(idx);
      idx = tmpidx;
      nn += sDist;
    } else if (rank == size-1) { /* must append -1 markers for ghost locations that have no global value */
      PetscMalloc((nn+sDist)*sizeof(PetscInt),&tmpidx);
      PetscMemcpy(tmpidx,idx,nn*sizeof(PetscInt));
      for (i=nn; i<nn+sDist; i++) tmpidx[i] = -1;
      PetscFree(idx);
      idx = tmpidx;
      nn += sDist;
    }
  }
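  /*
     Illustrative example (not from the original source): for DA_XYZGHOSTED
     with sDist = 2 on rank 0, the two left ghost slots lie outside the global
     domain, so the local-to-global mapping becomes idx = {-1, -1, xs, xs+1, ...};
     values set at positions that map to a negative global index are ignored.
  */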
  ISLocalToGlobalMappingCreateNC(comm,nn,idx,&da->ltogmap);
  ISLocalToGlobalMappingBlock(da->ltogmap,da->w,&da->ltogmapb);
  PetscLogObjectParent(da,da->ltogmap);

  da->idx = idx;
  da->Nl  = nn;

  PetscPublishAll(da);
  return(0);
}
/*@C
   DACreate1d - Creates an object that will manage the communication of one-dimensional
   regular array data that is distributed across some processors.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  wrap - type of periodicity the array should have, if any. Use
          either DA_NONPERIODIC or DA_XPERIODIC
.  M - global dimension of the array (use -M to indicate that it may be set to a different value
      from the command line with -da_grid_x <M>)
.  dof - number of degrees of freedom per node
.  s - stencil width
-  lx - array containing the number of nodes in the X direction on each processor,
        or PETSC_NULL. If non-null, it must be of length equal to the number of processors in comm.

   Output Parameter:
.  da - the resulting distributed array object

   Options Database Keys:
+  -da_view - calls DAView() at the conclusion of DACreate1d()
.  -da_grid_x <nx> - number of grid points in the x direction; can be set if M < 0
-  -da_refine_x - refinement factor

   Level: beginner

   Notes:
   The array data itself is NOT stored in the DA; it is stored in Vec objects.
   The appropriate vector objects can be obtained with calls to DACreateGlobalVector()
   and DACreateLocalVector(), and calls to VecDuplicate() if more are needed.

.keywords: distributed array, create, one-dimensional

.seealso: DADestroy(), DAView(), DACreate2d(), DACreate3d(), DAGlobalToLocalBegin(), DASetRefinementFactor(),
          DAGlobalToLocalEnd(), DALocalToGlobal(), DALocalToLocalBegin(), DALocalToLocalEnd(), DAGetRefinementFactor(),
          DAGetInfo(), DACreateGlobalVector(), DACreateLocalVector(), DACreateNaturalVector(), DALoad(), DAView(), DAGetOwnershipRanges()

@*/
PetscErrorCode DACreate1d(MPI_Comm comm, DAPeriodicType wrap, PetscInt M, PetscInt dof, PetscInt s, const PetscInt lx[], DA *da)
{
  PetscMPIInt size;

  DACreate(comm, da);
  DASetDim(*da, 1);
  DASetSizes(*da, M, PETSC_DECIDE, PETSC_DECIDE);
  MPI_Comm_size(comm, &size);
  DASetNumProcs(*da, size, PETSC_DECIDE, PETSC_DECIDE);
  DASetPeriodicity(*da, wrap);
  DASetDof(*da, dof);
  DASetStencilWidth(*da, s);
  DASetVertexDivision(*da, lx, PETSC_NULL, PETSC_NULL);
  /* This violates the behavior for other classes, but right now users expect negative dimensions to be handled this way */
  DASetFromOptions(*da);
  DASetType(*da, DA1D);
  return(0);
}
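
/*
   Minimal usage sketch (not part of the original source): creates a
   non-periodic 1d DA with 100 nodes, 1 degree of freedom per node, and
   stencil width 1, then obtains its global vector as described in the
   manual page above. Error checking via ierr/CHKERRQ is elided, as it is
   throughout this listing.

       DA  da;
       Vec g;

       DACreate1d(PETSC_COMM_WORLD,DA_NONPERIODIC,100,1,1,PETSC_NULL,&da);
       DACreateGlobalVector(da,&g);
       ... use g, e.g. with VecSetValuesLocal() ...
       VecDestroy(g);
       DADestroy(da);
*/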