Actual source code: daindex.c
#define PETSCDM_DLL
/*
   Code for manipulating distributed regular arrays in parallel.
*/
#include "private/daimpl.h"
/*@C
   DAGetGlobalIndices - Returns the global node number of all local nodes,
   including ghost nodes.

   Not Collective

   Input Parameter:
.  da - the distributed array

   Output Parameters:
+  n - the number of local elements, including ghost nodes (or PETSC_NULL)
-  idx - the global indices

   Level: intermediate

   Note:
   For DA_STENCIL_STAR stencils the inactive corner ghost nodes are also included
   in the list of local indices (even though those nodes are not updated
   during calls to DAXXXToXXX()).

   Essentially the same data is returned in the form of a local-to-global mapping
   by the routine DAGetISLocalToGlobalMapping().

   Fortran Note:
   This routine is used differently from Fortran:
.vb
    DA          da
    integer     n,da_array(1)
    PetscOffset i_da
    integer     ierr
    call DAGetGlobalIndices(da,n,da_array,i_da,ierr)

 C  Access first local entry in list
    value = da_array(i_da + 1)
.ve

   See the Fortran chapter of the users manual for details.

.keywords: distributed array, get, global, indices, local-to-global

.seealso: DACreate2d(), DAGetGhostCorners(), DAGetCorners(), DALocalToGlobal(),
          DAGlobalToLocalBegin(), DAGlobalToLocalEnd(), DALocalToLocalBegin(), DAGetAO(), DAGetGlobalIndicesF90(),
          DAGetISLocalToGlobalMapping(), DACreate3d(), DACreate1d(), DALocalToLocalEnd(), DAGetOwnershipRanges()
@*/
PetscErrorCode DAGetGlobalIndices(DA da,PetscInt *n,PetscInt **idx)
{
  PetscFunctionBegin;
  if (n)   *n   = da->Nl;
  if (idx) *idx = da->idx;
  PetscFunctionReturn(0);
}
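/*
   Illustrative sketch (not part of the original daindex.c): one way a caller
   might use DAGetGlobalIndices() to print the global number of each local
   node, including ghosts. The idx array is owned by the DA and must not be
   freed by the caller. The helper name PrintGlobalIndices is made up for
   this example.
*/
static PetscErrorCode PrintGlobalIndices(DA da)
{
  PetscErrorCode ierr;
  PetscInt       n,i,*idx;

  PetscFunctionBegin;
  ierr = DAGetGlobalIndices(da,&n,&idx);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    ierr = PetscSynchronizedPrintf(((PetscObject)da)->comm,"local %D -> global %D\n",i,idx[i]);CHKERRQ(ierr);
  }
  ierr = PetscSynchronizedFlush(((PetscObject)da)->comm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}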
/*
   Gets the natural number for each global number on the process.

   Used by DAGetAO() and DAGlobalToNatural_Create().
*/
PetscErrorCode DAGetNatural_Private(DA da,PetscInt *outNlocal,IS *isnatural)
{
  PetscErrorCode ierr;
  PetscInt       Nlocal,i,j,k,*lidx,lict = 0;

  PetscFunctionBegin;
  Nlocal = (da->xe-da->xs);
  if (da->dim > 1) {
    Nlocal *= (da->ye-da->ys);
  }
  if (da->dim > 2) {
    Nlocal *= (da->ze-da->zs);
  }

  ierr = PetscMalloc(Nlocal*sizeof(PetscInt),&lidx);CHKERRQ(ierr);

  if (da->dim == 1) {
    for (i=da->xs; i<da->xe; i++) {
      /* global number in natural ordering */
      lidx[lict++] = i;
    }
  } else if (da->dim == 2) {
    for (j=da->ys; j<da->ye; j++) {
      for (i=da->xs; i<da->xe; i++) {
        /* global number in natural ordering */
        lidx[lict++] = i + j*da->M*da->w;
      }
    }
  } else if (da->dim == 3) {
    for (k=da->zs; k<da->ze; k++) {
      for (j=da->ys; j<da->ye; j++) {
        for (i=da->xs; i<da->xe; i++) {
          /* global number in natural ordering */
          lidx[lict++] = i + j*da->M*da->w + k*da->M*da->N*da->w;
        }
      }
    }
  }
  *outNlocal = Nlocal;
  ierr = ISCreateGeneral(((PetscObject)da)->comm,Nlocal,lidx,isnatural);CHKERRQ(ierr);
  ierr = PetscFree(lidx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
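/*
   Worked example (added for illustration, not in the original source): on a
   2d DA with M = 4 nodes in x, N = 3 nodes in y, and one degree of freedom
   per node (w = 1), the node at (i,j) = (2,1) receives the natural number

       i + j*M*w = 2 + 1*4*1 = 6

   so the natural ordering numbers the bottom row 0..3, the middle row 4..7,
   and the top row 8..11, independent of how the grid is partitioned across
   processes.
*/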
/*@
   DAGetAO - Gets the application ordering context for a distributed array.

   Collective on DA

   Input Parameter:
.  da - the distributed array

   Output Parameter:
.  ao - the application ordering context for DAs

   Level: intermediate

   Notes:
   In this case, the AO maps to the natural grid ordering that would be used
   for the DA if only 1 processor were employed (ordering most rapidly in the
   x-direction, then y, then z). Multiple degrees of freedom are numbered
   for each node (rather than 1 component for the whole grid, then the next
   component, etc.)

.keywords: distributed array, get, global, indices, local-to-global

.seealso: DACreate2d(), DAGetGhostCorners(), DAGetCorners(), DALocalToGlobal(),
          DAGlobalToLocalBegin(), DAGlobalToLocalEnd(), DALocalToLocalBegin(), DALocalToLocalEnd(), DAGetGlobalIndices(), DAGetOwnershipRanges(),
          AO, AOPetscToApplication(), AOApplicationToPetsc()
@*/
PetscErrorCode DAGetAO(DA da,AO *ao)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /*
     Build the natural ordering to PETSc ordering mappings.
  */
  if (!da->ao) {
    IS       ispetsc,isnatural;
    PetscInt Nlocal;

    ierr = DAGetNatural_Private(da,&Nlocal,&isnatural);CHKERRQ(ierr);
    ierr = ISCreateStride(((PetscObject)da)->comm,Nlocal,da->base,1,&ispetsc);CHKERRQ(ierr);
    ierr = AOCreateBasicIS(isnatural,ispetsc,&da->ao);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(da,da->ao);CHKERRQ(ierr);
    ierr = ISDestroy(ispetsc);CHKERRQ(ierr);
    ierr = ISDestroy(isnatural);CHKERRQ(ierr);
  }
  *ao = da->ao;
  PetscFunctionReturn(0);
}
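/*
   Illustrative sketch (not part of the original daindex.c): converting a
   single natural-ordering index into the PETSc ordering using the AO from
   DAGetAO(). The AO belongs to the DA, so it is not destroyed here. The
   helper name NaturalToPetscIndex is made up for this example.
*/
static PetscErrorCode NaturalToPetscIndex(DA da,PetscInt natural,PetscInt *petsc)
{
  PetscErrorCode ierr;
  AO             ao;

  PetscFunctionBegin;
  ierr = DAGetAO(da,&ao);CHKERRQ(ierr);
  *petsc = natural;
  ierr = AOApplicationToPetsc(ao,1,petsc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}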
/*MC
   DAGetGlobalIndicesF90 - Returns a Fortran90 pointer to the list of
   global indices (global node number of all local nodes, including
   ghost nodes).

   Synopsis:
   DAGetGlobalIndicesF90(DA da,integer n,{integer, pointer :: idx(:)},integer ierr)

   Input Parameter:
.  da - the distributed array

   Output Parameters:
+  n - the number of local elements, including ghost nodes (or PETSC_NULL)
.  idx - the Fortran90 pointer to the global indices
-  ierr - error code

   Level: intermediate

   Notes:
   Not yet supported for all F90 compilers.

.keywords: distributed array, get, global, indices, local-to-global, f90

.seealso: DAGetGlobalIndices()
M*/