Actual source code: dacorn.c
#define PETSCDM_DLL

/*
  Code for manipulating distributed regular arrays in parallel.
*/

#include "private/daimpl.h"
/*@
   DASetCoordinates - Sets into the DA a vector holding the coordinates
      of the local nodes (NOT including ghost nodes).

   Collective on DA

   Input Parameters:
+  da - the distributed array
-  c - coordinate vector

   Note:
    The coordinate vector should NOT include entries for ghost points.

   Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates

.seealso: DAGetGhostCorners(), DAGetCoordinates(), DASetUniformCoordinates(), DAGetGhostedCoordinates(), DAGetCoordinateDA()
@*/
PetscErrorCode DASetCoordinates(DA da,Vec c)
{
  PetscObjectReference((PetscObject)c);
  if (da->coordinates) {VecDestroy(da->coordinates);}
  da->coordinates = c;
  VecSetBlockSize(c,da->dim);
  if (da->ghosted_coordinates) { /* The ghosted coordinates are no longer valid */
    VecDestroy(da->ghosted_coordinates);
    da->ghosted_coordinates = PETSC_NULL;
  }
  return(0);
}
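/*
   Illustrative usage sketch (assumes an existing DA named da; error checking
   with ierr/CHKERRQ omitted for brevity): a coordinate vector is normally
   built on the coordinate DA, filled with the physical node positions, and
   then handed to DASetCoordinates(); for simple tensor-product grids
   DASetUniformCoordinates() does all of this internally.

      DA  cda;
      Vec c;
      DAGetCoordinateDA(da,&cda);
      DACreateGlobalVector(cda,&c);
        ... fill c with the coordinates of the local nodes ...
      DASetCoordinates(da,c);
      VecDestroy(c);        DASetCoordinates() keeps its own reference
      DADestroy(cda);       drop the reference obtained from DAGetCoordinateDA()
*/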
/*@
   DAGetCoordinates - Gets the node coordinates associated with a DA.

   Collective on DA

   Input Parameter:
.  da - the distributed array

   Output Parameter:
.  c - coordinate vector

   Note:
    Each process has only the coordinates for its local nodes (it does NOT have
    the coordinates for the ghost nodes).

    For two and three dimensions the coordinates are interlaced (x_0,y_0,x_1,y_1,...)
    and (x_0,y_0,z_0,x_1,y_1,z_1,...), respectively.

    The user is responsible for destroying this vector.

   Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates

.seealso: DAGetGhostCorners(), DASetCoordinates(), DASetUniformCoordinates(), DAGetGhostedCoordinates(), DAGetCoordinateDA()
@*/
PetscErrorCode DAGetCoordinates(DA da,Vec *c)
{
  if (da->coordinates) {PetscObjectReference((PetscObject) da->coordinates);}
  *c = da->coordinates;
  return(0);
}
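/*
   Illustrative sketch (assumes a 2d DA named da; error checking omitted):
   the interlaced layout described above can be read directly with
   VecGetArray().

      Vec         c;
      PetscInt    n,i;
      PetscScalar *coors;
      DAGetCoordinates(da,&c);
      VecGetLocalSize(c,&n);
      VecGetArray(c,&coors);
      for (i=0; i<n; i+=2) {
         coors[i] is the x coordinate of a node, coors[i+1] its y coordinate
      }
      VecRestoreArray(c,&coors);
      VecDestroy(c);
*/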
/*@
   DAGetCoordinateDA - Gets the DA that scatters between global and local DA coordinates.

   Collective on DA

   Input Parameter:
.  da - the distributed array

   Output Parameter:
.  cda - coordinate DA

   Note: The user is responsible for destroying this DA when finished with it.

   Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates

.seealso: DAGetGhostCorners(), DASetCoordinates(), DASetUniformCoordinates(), DAGetCoordinates(), DAGetGhostedCoordinates()
@*/
PetscErrorCode DAGetCoordinateDA(DA da,DA *cda)
{
  PetscMPIInt size;

  if (!da->da_coordinates) {
    MPI_Comm_size(((PetscObject)da)->comm,&size);
    if (da->dim == 1) {
      PetscInt       s,m,*lc,l;
      DAPeriodicType pt;
      DAGetInfo(da,0,&m,0,0,0,0,0,0,&s,&pt,0);
      DAGetCorners(da,0,0,0,&l,0,0);
      PetscMalloc(size*sizeof(PetscInt),&lc);
      MPI_Allgather(&l,1,MPIU_INT,lc,1,MPIU_INT,((PetscObject)da)->comm);
      DACreate1d(((PetscObject)da)->comm,pt,m,1,s,lc,&da->da_coordinates);
      PetscFree(lc);
    } else if (da->dim == 2) {
      PetscInt       i,s,m,*lc,*ld,l,k,n,M,N;
      DAPeriodicType pt;
      DAGetInfo(da,0,&m,&n,0,&M,&N,0,0,&s,&pt,0);
      DAGetCorners(da,0,0,0,&l,&k,0);
      PetscMalloc2(size,PetscInt,&lc,size,PetscInt,&ld);
      /* only first M values in lc matter */
      MPI_Allgather(&l,1,MPIU_INT,lc,1,MPIU_INT,((PetscObject)da)->comm);
      /* every Mth value in ld matters */
      MPI_Allgather(&k,1,MPIU_INT,ld,1,MPIU_INT,((PetscObject)da)->comm);
      for (i=0; i<N; i++) {
        ld[i] = ld[M*i];
      }
      DACreate2d(((PetscObject)da)->comm,pt,DA_STENCIL_BOX,m,n,M,N,2,s,lc,ld,&da->da_coordinates);
      PetscFree2(lc,ld);
    } else if (da->dim == 3) {
      PetscInt       i,s,m,*lc,*ld,*le,l,k,q,n,M,N,P,p;
      DAPeriodicType pt;
      DAGetInfo(da,0,&m,&n,&p,&M,&N,&P,0,&s,&pt,0);
      DAGetCorners(da,0,0,0,&l,&k,&q);
      PetscMalloc3(size,PetscInt,&lc,size,PetscInt,&ld,size,PetscInt,&le);
      /* only first M values in lc matter */
      MPI_Allgather(&l,1,MPIU_INT,lc,1,MPIU_INT,((PetscObject)da)->comm);
      /* every Mth value in ld matters */
      MPI_Allgather(&k,1,MPIU_INT,ld,1,MPIU_INT,((PetscObject)da)->comm);
      for (i=0; i<N; i++) {
        ld[i] = ld[M*i];
      }
      MPI_Allgather(&q,1,MPIU_INT,le,1,MPIU_INT,((PetscObject)da)->comm);
      for (i=0; i<P; i++) {
        le[i] = le[M*N*i];
      }
      DACreate3d(((PetscObject)da)->comm,pt,DA_STENCIL_BOX,m,n,p,M,N,P,3,s,lc,ld,le,&da->da_coordinates);
      PetscFree3(lc,ld,le);
    }
  }
  PetscObjectReference((PetscObject) da->da_coordinates);
  *cda = da->da_coordinates;
  return(0);
}
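/*
   Illustrative sketch (assumes a 2d DA named da; error checking omitted):
   with the coordinate DA, DAVecGetArray() lets the coordinate vector be
   indexed with the usual (i,j) grid indices through the DACoor2d struct.

      DA       cda;
      Vec      c;
      DACoor2d **coors;
      PetscInt xs,ys,xm,ym,i,j;
      DAGetCoordinateDA(da,&cda);
      DAGetCoordinates(da,&c);
      DAVecGetArray(cda,c,&coors);
      DAGetCorners(da,&xs,&ys,PETSC_NULL,&xm,&ym,PETSC_NULL);
      for (j=ys; j<ys+ym; j++) {
        for (i=xs; i<xs+xm; i++) {
           use coors[j][i].x and coors[j][i].y
        }
      }
      DAVecRestoreArray(cda,c,&coors);
      VecDestroy(c);
      DADestroy(cda);
*/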
/*@
   DAGetGhostedCoordinates - Gets the node coordinates associated with a DA, including ghost nodes.

   Collective on DA

   Input Parameter:
.  da - the distributed array

   Output Parameter:
.  c - coordinate vector

   Note:
    Each process has the coordinates for its local AND ghost nodes.

    For two and three dimensions the coordinates are interlaced (x_0,y_0,x_1,y_1,...)
    and (x_0,y_0,z_0,x_1,y_1,z_1,...), respectively.

    The user is responsible for destroying this vector.

   Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates

.seealso: DAGetGhostCorners(), DASetCoordinates(), DASetUniformCoordinates(), DAGetCoordinates(), DAGetCoordinateDA()
@*/
PetscErrorCode DAGetGhostedCoordinates(DA da,Vec *c)
{
  if (!da->coordinates) SETERRQ(PETSC_ERR_ORDER,"You must call DASetCoordinates() before this call");
  if (!da->ghosted_coordinates) {
    DA dac;
    DAGetCoordinateDA(da,&dac);
    DACreateLocalVector(dac,&da->ghosted_coordinates);
    if (dac == da) {PetscObjectDereference((PetscObject)dac);}
    DAGlobalToLocalBegin(dac,da->coordinates,INSERT_VALUES,da->ghosted_coordinates);
    DAGlobalToLocalEnd(dac,da->coordinates,INSERT_VALUES,da->ghosted_coordinates);
    DADestroy(dac);
  }
  PetscObjectReference((PetscObject) da->ghosted_coordinates);
  *c = da->ghosted_coordinates;
  return(0);
}
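/*
   Illustrative sketch (error checking omitted): the same DAVecGetArray()
   pattern shown above applies to the ghosted vector, except the loop runs
   over the ranges returned by DAGetGhostCorners().

      Vec      gc;
      PetscInt gxs,gys,gxm,gym;
      DAGetGhostedCoordinates(da,&gc);
      DAGetGhostCorners(da,&gxs,&gys,PETSC_NULL,&gxm,&gym,PETSC_NULL);
        ... loop j = gys..gys+gym-1, i = gxs..gxs+gxm-1 over coors[j][i] ...
      VecDestroy(gc);
*/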
/*@C
   DASetFieldName - Sets the name of an individual field component in multicomponent
   vectors associated with a DA.

   Not Collective

   Input Parameters:
+  da - the distributed array
.  nf - field number for the DA (0, 1, ... dof-1), where dof indicates the
        number of degrees of freedom per node within the DA
-  name - the name of the field (component)

   Level: intermediate

.keywords: distributed array, get, component name

.seealso: DAGetFieldName()
@*/
PetscErrorCode DASetFieldName(DA da,PetscInt nf,const char name[])
{
  if (nf < 0 || nf >= da->w) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Invalid field number: %D",nf);
  if (da->fieldname[nf]) {PetscFree(da->fieldname[nf]);}
  PetscStrallocpy(name,&da->fieldname[nf]);
  return(0);
}
/*@C
   DAGetFieldName - Gets the name of an individual field component in multicomponent
   vectors associated with a DA.

   Not Collective

   Input Parameters:
+  da - the distributed array
-  nf - field number for the DA (0, 1, ... dof-1), where dof indicates the
        number of degrees of freedom per node within the DA

   Output Parameter:
.  name - the name of the field (component)

   Level: intermediate

.keywords: distributed array, get, component name

.seealso: DASetFieldName()
@*/
PetscErrorCode DAGetFieldName(DA da,PetscInt nf,char **name)
{
  if (nf < 0 || nf >= da->w) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Invalid field number: %D",nf);
  *name = da->fieldname[nf];
  return(0);
}
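/*
   Illustrative sketch (assumes a DA with dof = 2; the field names below are
   made up for the example; error checking omitted): the names set here are
   used, for instance, when vectors built on the DA are viewed.

      char *fname;
      DASetFieldName(da,0,"pressure");
      DASetFieldName(da,1,"temperature");
      DAGetFieldName(da,0,&fname);       fname is owned by the DA; do not free it
*/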
/*@
   DAGetCorners - Returns the global (x,y,z) indices of the lower left
   corner of the local region, excluding ghost points.

   Not Collective

   Input Parameter:
.  da - the distributed array

   Output Parameters:
+  x,y,z - the corner indices (where y and z are optional; these are used
           for 2D and 3D problems)
-  m,n,p - widths in the corresponding directions (where n and p are optional;
           these are used for 2D and 3D problems)

   Note:
   The corner information is independent of the number of degrees of
   freedom per node set with the DACreateXX() routine. Thus the x, y, z, and
   m, n, p can be thought of as coordinates on a logical grid, where each
   grid point has (potentially) several degrees of freedom.
   Any of y, z, n, and p can be passed in as PETSC_NULL if not needed.

   Level: beginner

.keywords: distributed array, get, corners, nodes, local indices

.seealso: DAGetGhostCorners(), DAGetOwnershipRanges()
@*/
PetscErrorCode DAGetCorners(DA da,PetscInt *x,PetscInt *y,PetscInt *z,PetscInt *m,PetscInt *n,PetscInt *p)
{
  PetscInt w;

  /* since the xs, xe ... have all been multiplied by the number of degrees
     of freedom per cell, w = da->w, we divide that out before returning. */
  w = da->w;
  if (x) *x = da->xs/w; if (m) *m = (da->xe - da->xs)/w;
  /* the y and z have NOT been multiplied by w */
  if (y) *y = da->ys; if (n) *n = (da->ye - da->ys);
  if (z) *z = da->zs; if (p) *p = (da->ze - da->zs);
  return(0);
}
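/*
   Illustrative sketch (2d case; error checking omitted): the standard loop
   over the locally owned portion of the grid, passing PETSC_NULL for the
   unused third-dimension arguments.

      PetscInt xs,ys,xm,ym,i,j;
      DAGetCorners(da,&xs,&ys,PETSC_NULL,&xm,&ym,PETSC_NULL);
      for (j=ys; j<ys+ym; j++) {
        for (i=xs; i<xs+xm; i++) {
           operate on grid point (i,j)
        }
      }
*/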
/*@
   DAGetLocalBoundingBox - Returns the local bounding box for the DA.

   Not Collective

   Input Parameter:
.  da - the distributed array

   Output Parameters:
+  lmin - local minimum coordinates (length dim, optional)
-  lmax - local maximum coordinates (length dim, optional)

   Level: beginner

.keywords: distributed array, get, coordinates

.seealso: DAGetCoordinateDA(), DAGetCoordinates(), DAGetBoundingBox()
@*/
PetscErrorCode DAGetLocalBoundingBox(DA da,PetscReal lmin[],PetscReal lmax[])
{
  Vec         coords = PETSC_NULL;
  PetscInt    dim,i,j;
  PetscScalar *local_coords;
  PetscReal   min[3]={PETSC_MAX,PETSC_MAX,PETSC_MAX},max[3]={PETSC_MIN,PETSC_MIN,PETSC_MIN};
  PetscInt    N,Ni;

  dim = da->dim;
  DAGetCoordinates(da,&coords);
  VecGetArray(coords,&local_coords);
  VecGetLocalSize(coords,&N);
  Ni = N/dim;
  for (i=0; i<Ni; i++) {
    for (j=0; j<dim; j++) {
      min[j] = PetscMin(min[j],PetscRealPart(local_coords[i*dim+j]));
      max[j] = PetscMax(max[j],PetscRealPart(local_coords[i*dim+j]));
    }
  }
  VecRestoreArray(coords,&local_coords);
  VecDestroy(coords);
  if (lmin) {PetscMemcpy(lmin,min,dim*sizeof(PetscReal));}
  if (lmax) {PetscMemcpy(lmax,max,dim*sizeof(PetscReal));}
  return(0);
}
/*@
   DAGetBoundingBox - Returns the global bounding box for the DA.

   Collective on DA

   Input Parameter:
.  da - the distributed array

   Output Parameters:
+  gmin - global minimum coordinates (length dim, optional)
-  gmax - global maximum coordinates (length dim, optional)

   Level: beginner

.keywords: distributed array, get, coordinates

.seealso: DAGetCoordinateDA(), DAGetCoordinates(), DAGetLocalBoundingBox()
@*/
PetscErrorCode DAGetBoundingBox(DA da,PetscReal gmin[],PetscReal gmax[])
{
  PetscMPIInt count;
  PetscReal   lmin[3],lmax[3];

  count = PetscMPIIntCast(da->dim);
  DAGetLocalBoundingBox(da,lmin,lmax);
  if (gmin) {MPI_Allreduce(lmin,gmin,count,MPIU_REAL,MPI_MIN,((PetscObject)da)->comm);}
  if (gmax) {MPI_Allreduce(lmax,gmax,count,MPIU_REAL,MPI_MAX,((PetscObject)da)->comm);}
  return(0);
}
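/*
   Illustrative sketch (error checking omitted): comparing the local and
   global extents of the grid; the arrays are sized 3 so they work for any
   dimension.

      PetscReal lmin[3],lmax[3],gmin[3],gmax[3];
      DAGetLocalBoundingBox(da,lmin,lmax);
      DAGetBoundingBox(da,gmin,gmax);
*/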