Actual source code: mmsbaij.c
1: #define PETSCMAT_DLL
3: /*
4: Support for the parallel SBAIJ matrix vector multiply
5: */
6: #include "../src/mat/impls/sbaij/mpi/mpisbaij.h"
8: EXTERN PetscErrorCode MatSetValues_SeqSBAIJ(Mat,PetscInt,const PetscInt [],PetscInt,const PetscInt [],const PetscScalar [],InsertMode);
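/*
   MatSetUpMultiply_MPISBAIJ - Builds the communication data structures for the parallel
   SBAIJ matrix-vector product: the local work vector lvec and scatter Mvctx (kept for
   applications that want them), plus the extended work vectors slvec0/slvec1 and the
   scatter sMvctx that the symmetric multiply itself relies on.
*/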
13: PetscErrorCode MatSetUpMultiply_MPISBAIJ(Mat mat)
14: {
15: Mat_MPISBAIJ *sbaij = (Mat_MPISBAIJ*)mat->data;
16: Mat_SeqBAIJ *B = (Mat_SeqBAIJ*)(sbaij->B->data);
18: PetscInt Nbs = sbaij->Nbs,i,j,*indices,*aj = B->j,ec = 0,*garray,*sgarray;
19: PetscInt bs = mat->rmap->bs,*stmp,mbs=sbaij->mbs, vec_size,nt;
20: IS from,to;
21: Vec gvec;
22: PetscMPIInt rank=sbaij->rank,lsize,size=sbaij->size;
23: PetscInt *owners=sbaij->rangebs,*sowners,*ec_owner,k;
24: PetscScalar *ptr;
27: if (sbaij->sMvctx) {
28: /* These two lines should be in DisAssemble_MPISBAIJ(); it is not clear why they cause a crash there */
29: VecScatterDestroy(sbaij->sMvctx);
30: sbaij->sMvctx = 0;
31: }
32:
33: /* For the first stab we make an array as long as the number of columns */
34: /* mark those columns that are in sbaij->B */
35: PetscMalloc(Nbs*sizeof(PetscInt),&indices);
36: PetscMemzero(indices,Nbs*sizeof(PetscInt));
37: for (i=0; i<mbs; i++) {
38: for (j=0; j<B->ilen[i]; j++) {
39: if (!indices[aj[B->i[i] + j]]) ec++;
40: indices[aj[B->i[i] + j]] = 1;
41: }
42: }
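/* ec now counts the distinct nonzero block columns of B (the off-diagonal part) */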
44: /* form arrays of columns we need */
45: PetscMalloc(ec*sizeof(PetscInt),&garray);
46: PetscMalloc2(2*ec,PetscInt,&sgarray,ec,PetscInt,&ec_owner);
47:
48: ec = 0;
49: for (j=0; j<size; j++){
50: for (i=owners[j]; i<owners[j+1]; i++){
51: if (indices[i]) {
52: garray[ec] = i;
53: ec_owner[ec] = j;
54: ec++;
55: }
56: }
57: }
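/* garray[] lists the needed global block columns in increasing order; ec_owner[] records the process that owns each of them */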
59: /* make indices now point into garray */
60: for (i=0; i<ec; i++) indices[garray[i]] = i;
62: /* compact out the extra columns in B */
63: for (i=0; i<mbs; i++) {
64: for (j=0; j<B->ilen[i]; j++) aj[B->i[i] + j] = indices[aj[B->i[i] + j]];
65: }
66: B->nbs = ec;
67: sbaij->B->cmap->n = sbaij->B->cmap->N = ec*mat->rmap->bs;
68: PetscLayoutSetUp((sbaij->B->cmap));
69: PetscFree(indices);
71: /* create local vector that is used to scatter into */
72: VecCreateSeq(PETSC_COMM_SELF,ec*bs,&sbaij->lvec);
74: /* create two temporary index sets for building scatter-gather */
75: PetscMalloc(2*ec*sizeof(PetscInt),&stmp);
76: for (i=0; i<ec; i++) stmp[i] = bs*garray[i];
77: ISCreateBlock(PETSC_COMM_SELF,bs,ec,stmp,&from);
78:
79: for (i=0; i<ec; i++) { stmp[i] = bs*i; }
80: ISCreateBlock(PETSC_COMM_SELF,bs,ec,stmp,&to);
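/* "from" addresses the needed blocks of the global vector, "to" the corresponding consecutive blocks of lvec */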
82: /* generate the scatter context
83: -- Mvctx and lvec are not used by MatMult_MPISBAIJ(), but are useful for some applications */
84: VecCreateMPIWithArray(((PetscObject)mat)->comm,mat->cmap->n,mat->cmap->N,PETSC_NULL,&gvec);
85: VecScatterCreate(gvec,from,sbaij->lvec,to,&sbaij->Mvctx);
86: VecDestroy(gvec);
88: sbaij->garray = garray;
89: PetscLogObjectParent(mat,sbaij->Mvctx);
90: PetscLogObjectParent(mat,sbaij->lvec);
91: PetscLogObjectParent(mat,from);
92: PetscLogObjectParent(mat,to);
94: ISDestroy(from);
95: ISDestroy(to);
97: /* create parallel vector that is used by SBAIJ matrix to scatter from/into */
98: lsize = (mbs + ec)*bs;
99: VecCreateMPI(((PetscObject)mat)->comm,lsize,PETSC_DETERMINE,&sbaij->slvec0);
100: VecDuplicate(sbaij->slvec0,&sbaij->slvec1);
101: VecGetSize(sbaij->slvec0,&vec_size);
103: sowners = sbaij->slvec0->map->range;
104:
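/* each process's part of slvec0/slvec1 holds its mbs owned blocks followed by its ec ghost blocks;
   sgarray[] translates the needed block columns into this extended numbering */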
105: /* x index in the IS sfrom */
106: for (i=0; i<ec; i++) {
107: j = ec_owner[i];
108: sgarray[i] = garray[i] + (sowners[j]/bs - owners[j]);
109: }
110: /* b index in the IS sfrom */
111: k = sowners[rank]/bs + mbs;
112: for (i=ec,j=0; i< 2*ec; i++,j++) sgarray[i] = k + j;
113:
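/* one scatter handles both communication tasks of the symmetric multiply: the first ec blocks
   bring the needed x values from their owners into the ghost part of slvec1, and the last ec
   blocks return the locally accumulated products (ghost part of slvec0) to the owning process */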
114: for (i=0; i<2*ec; i++) stmp[i] = bs*sgarray[i];
115: ISCreateBlock(PETSC_COMM_SELF,bs,2*ec,stmp,&from);
116:
117: /* x index in the IS sto */
118: k = sowners[rank]/bs + mbs;
119: for (i=0; i<ec; i++) stmp[i] = bs*(k + i);
120: /* b index in the IS sto */
121: for (i=ec; i<2*ec; i++) stmp[i] = bs*sgarray[i-ec];
123: ISCreateBlock(PETSC_COMM_SELF,bs,2*ec,stmp,&to);
125: VecScatterCreate(sbaij->slvec0,from,sbaij->slvec1,to,&sbaij->sMvctx);
126:
127: VecGetLocalSize(sbaij->slvec1,&nt);
128: VecGetArray(sbaij->slvec1,&ptr);
129: VecCreateSeqWithArray(PETSC_COMM_SELF,bs*mbs,ptr,&sbaij->slvec1a);
130: VecCreateSeqWithArray(PETSC_COMM_SELF,nt-bs*mbs,ptr+bs*mbs,&sbaij->slvec1b);
131: VecRestoreArray(sbaij->slvec1,&ptr);
133: VecGetArray(sbaij->slvec0,&ptr);
134: VecCreateSeqWithArray(PETSC_COMM_SELF,nt-bs*mbs,ptr+bs*mbs,&sbaij->slvec0b);
135: VecRestoreArray(sbaij->slvec0,&ptr);
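/* slvec1a/slvec1b and slvec0b are sequential views sharing storage with slvec1 and slvec0:
   the "a" part covers the owned blocks, the "b" parts cover the ghost blocks */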
137: PetscFree(stmp);
138: MPI_Barrier(((PetscObject)mat)->comm);
139:
140: PetscLogObjectParent(mat,sbaij->sMvctx);
141: PetscLogObjectParent(mat,sbaij->slvec0);
142: PetscLogObjectParent(mat,sbaij->slvec1);
143: PetscLogObjectParent(mat,sbaij->slvec0b);
144: PetscLogObjectParent(mat,sbaij->slvec1a);
145: PetscLogObjectParent(mat,sbaij->slvec1b);
146: PetscLogObjectParent(mat,from);
147: PetscLogObjectParent(mat,to);
148:
149: PetscLogObjectMemory(mat,(ec+1)*sizeof(PetscInt));
150: ISDestroy(from);
151: ISDestroy(to);
152: PetscFree2(sgarray,ec_owner);
153: return(0);
154: }
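/*
   MatSetUpMultiply_MPISBAIJ_2comm - Alternative setup that only builds lvec and the scatter
   Mvctx (mirroring the BAIJ case), without the extended slvec0/slvec1 work vectors; it can
   use a PetscTable to map global block columns to local ones.
*/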
158: PetscErrorCode MatSetUpMultiply_MPISBAIJ_2comm(Mat mat)
159: {
160: Mat_MPISBAIJ *baij = (Mat_MPISBAIJ*)mat->data;
161: Mat_SeqBAIJ *B = (Mat_SeqBAIJ*)(baij->B->data);
162: PetscErrorCode ierr;
163: PetscInt i,j,*aj = B->j,ec = 0,*garray;
164: PetscInt bs = mat->rmap->bs,*stmp;
165: IS from,to;
166: Vec gvec;
167: #if defined (PETSC_USE_CTABLE)
168: PetscTable gid1_lid1;
169: PetscTablePosition tpos;
170: PetscInt gid,lid;
171: #else
172: PetscInt Nbs = baij->Nbs,*indices;
173: #endif
176: #if defined (PETSC_USE_CTABLE)
177: /* use a table - Mark Adams */
178: PetscTableCreate(B->mbs,&gid1_lid1);
179: for (i=0; i<B->mbs; i++) {
180: for (j=0; j<B->ilen[i]; j++) {
181: PetscInt data,gid1 = aj[B->i[i]+j] + 1;
182: PetscTableFind(gid1_lid1,gid1,&data);
183: if (!data) {
184: /* one based table */
185: PetscTableAdd(gid1_lid1,gid1,++ec);
186: }
187: }
188: }
189: /* form array of columns we need */
190: PetscMalloc(ec*sizeof(PetscInt),&garray);
191: PetscTableGetHeadPosition(gid1_lid1,&tpos);
192: while (tpos) {
193: PetscTableGetNext(gid1_lid1,&tpos,&gid,&lid);
194: gid--; lid--;
195: garray[lid] = gid;
196: }
197: PetscSortInt(ec,garray);
198: PetscTableRemoveAll(gid1_lid1);
199: for (i=0; i<ec; i++) {
200: PetscTableAdd(gid1_lid1,garray[i]+1,i+1);
201: }
202: /* compact out the extra columns in B */
203: for (i=0; i<B->mbs; i++) {
204: for (j=0; j<B->ilen[i]; j++) {
205: PetscInt gid1 = aj[B->i[i] + j] + 1;
206: PetscTableFind(gid1_lid1,gid1,&lid);
207: lid--;
208: aj[B->i[i]+j] = lid;
209: }
210: }
211: B->nbs = ec;
212: baij->B->cmap->n = baij->B->cmap->N = ec*mat->rmap->bs;
213: PetscLayoutSetUp((baij->B->cmap));
214: PetscTableDestroy(gid1_lid1);
215: /* Mark Adams */
216: #else
217: /* For the first stab we make an array as long as the number of columns */
218: /* mark those columns that are in baij->B */
219: PetscMalloc(Nbs*sizeof(PetscInt),&indices);
220: PetscMemzero(indices,Nbs*sizeof(PetscInt));
221: for (i=0; i<B->mbs; i++) {
222: for (j=0; j<B->ilen[i]; j++) {
223: if (!indices[aj[B->i[i] + j]]) ec++;
224: indices[aj[B->i[i] + j]] = 1;
225: }
226: }
228: /* form array of columns we need */
229: PetscMalloc(ec*sizeof(PetscInt),&garray);
230: ec = 0;
231: for (i=0; i<Nbs; i++) {
232: if (indices[i]) {
233: garray[ec++] = i;
234: }
235: }
237: /* make indices now point into garray */
238: for (i=0; i<ec; i++) {
239: indices[garray[i]] = i;
240: }
242: /* compact out the extra columns in B */
243: for (i=0; i<B->mbs; i++) {
244: for (j=0; j<B->ilen[i]; j++) {
245: aj[B->i[i] + j] = indices[aj[B->i[i] + j]];
246: }
247: }
248: B->nbs = ec;
249: baij->B->cmap->n = baij->B->cmap->N = ec*mat->rmap->bs;
250: PetscFree(indices);
251: #endif
252:
253: /* create local vector that is used to scatter into */
254: VecCreateSeq(PETSC_COMM_SELF,ec*bs,&baij->lvec);
256: /* create two temporary index sets for building scatter-gather */
257: for (i=0; i<ec; i++) {
258: garray[i] = bs*garray[i];
259: }
260: ISCreateBlock(PETSC_COMM_SELF,bs,ec,garray,&from);
261: for (i=0; i<ec; i++) {
262: garray[i] = garray[i]/bs;
263: }
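/* restore garray to block-column numbering; it is stored in baij->garray below */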
265: PetscMalloc(ec*sizeof(PetscInt),&stmp);
266: for (i=0; i<ec; i++) { stmp[i] = bs*i; }
267: ISCreateBlock(PETSC_COMM_SELF,bs,ec,stmp,&to);
268: PetscFree(stmp);
270: /* create temporary global vector to generate scatter context */
271: VecCreateMPIWithArray(((PetscObject)mat)->comm,mat->cmap->n,mat->cmap->N,PETSC_NULL,&gvec);
272: VecScatterCreate(gvec,from,baij->lvec,to,&baij->Mvctx);
273: VecDestroy(gvec);
275: PetscLogObjectParent(mat,baij->Mvctx);
276: PetscLogObjectParent(mat,baij->lvec);
277: PetscLogObjectParent(mat,from);
278: PetscLogObjectParent(mat,to);
279: baij->garray = garray;
280: PetscLogObjectMemory(mat,(ec+1)*sizeof(PetscInt));
281: ISDestroy(from);
282: ISDestroy(to);
283: return(0);
284: }
286: /*
287: Takes the local part of an already assembled MPISBAIJ matrix
288: and disassembles it. This is to allow new nonzeros into the matrix
289: that require more communication in the matrix vector multiply.
290: Thus certain data-structures must be rebuilt.
292: Kind of slow! But that's what application programmers get when
293: they are sloppy.
294: */
297: PetscErrorCode DisAssemble_MPISBAIJ(Mat A)
298: {
299: Mat_MPISBAIJ *baij = (Mat_MPISBAIJ*)A->data;
300: Mat B = baij->B,Bnew;
301: Mat_SeqBAIJ *Bbaij = (Mat_SeqBAIJ*)B->data;
303: PetscInt i,j,mbs=Bbaij->mbs,n = A->cmap->N,col,*garray=baij->garray;
304: PetscInt k,bs=A->rmap->bs,bs2=baij->bs2,*rvals,*nz,ec,m=A->rmap->n;
305: MatScalar *a = Bbaij->a;
306: PetscScalar *atmp;
307: #if defined(PETSC_USE_SCALAR_MAT_SINGLE)
308: PetscInt l;
309: #endif
312: #if defined(PETSC_USE_SCALAR_MAT_SINGLE)
313: PetscMalloc(A->rmap->bs*sizeof(PetscScalar),&atmp);
314: #endif
315: /* free stuff related to matrix-vec multiply */
316: VecGetSize(baij->lvec,&ec); /* needed for PetscLogObjectMemory below */
317: VecDestroy(baij->lvec);
318: baij->lvec = 0;
319: VecScatterDestroy(baij->Mvctx);
320: baij->Mvctx = 0;
322: VecDestroy(baij->slvec0);
323: VecDestroy(baij->slvec0b);
324: baij->slvec0 = 0;
325: VecDestroy(baij->slvec1);
326: VecDestroy(baij->slvec1a);
327: VecDestroy(baij->slvec1b);
328: baij->slvec1 = 0;
330: if (baij->colmap) {
331: #if defined (PETSC_USE_CTABLE)
332: PetscTableDestroy(baij->colmap); baij->colmap = 0;
333: #else
334: PetscFree(baij->colmap);
335: baij->colmap = 0;
336: PetscLogObjectMemory(A,-Bbaij->nbs*sizeof(PetscInt));
337: #endif
338: }
340: /* make sure that B is assembled so we can access its values */
341: MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);
342: MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);
344: /* invent new B and copy stuff over */
345: PetscMalloc(mbs*sizeof(PetscInt),&nz);
346: for (i=0; i<mbs; i++) {
347: nz[i] = Bbaij->i[i+1]-Bbaij->i[i];
348: }
349: MatCreate(PETSC_COMM_SELF,&Bnew);
350: MatSetSizes(Bnew,m,n,m,n);
351: MatSetType(Bnew,((PetscObject)B)->type_name);
352: MatSeqBAIJSetPreallocation(Bnew,B->rmap->bs,0,nz);
353: PetscFree(nz);
354:
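/* copy B's values into Bnew using global block-column numbering; garray[] maps each local
   block column of B back to its global block column */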
355: PetscMalloc(bs*sizeof(PetscInt),&rvals);
356: for (i=0; i<mbs; i++) {
357: rvals[0] = bs*i;
358: for (j=1; j<bs; j++) { rvals[j] = rvals[j-1] + 1; }
359: for (j=Bbaij->i[i]; j<Bbaij->i[i+1]; j++) {
360: col = garray[Bbaij->j[j]]*bs;
361: for (k=0; k<bs; k++) {
362: #if defined(PETSC_USE_SCALAR_MAT_SINGLE)
363: for (l=0; l<bs; l++) atmp[l] = a[j*bs2 + k*bs + l];
364: #else
365: atmp = a+j*bs2 + k*bs;
366: #endif
367: MatSetValues_SeqSBAIJ(Bnew,bs,rvals,1,&col,atmp,B->insertmode);
368: col++;
369: }
370: }
371: }
372: #if defined(PETSC_USE_SCALAR_MAT_SINGLE)
373: PetscFree(atmp);
374: #endif
375: PetscFree(baij->garray);
376: baij->garray = 0;
377: PetscFree(rvals);
378: PetscLogObjectMemory(A,-ec*sizeof(PetscInt));
379: MatDestroy(B);
380: PetscLogObjectParent(A,Bnew);
381: baij->B = Bnew;
382: A->was_assembled = PETSC_FALSE;
383: return(0);
384: }