Actual source code: pack.c
1: #define PETSCDM_DLL
2:
3: #include "petscda.h"
4: #include "private/dmimpl.h"
5: #include "petscmat.h"
7: typedef struct _DMCompositeOps *DMCompositeOps;
8: struct _DMCompositeOps {
9: DMOPS(DMComposite)
10: };
12: /*
13: rstart is where an array/subvector starts in the global parallel vector; for arrays the
14: rstart is meaningful only on the process where the array lives (elsewhere it is set to that of the previous link)
15: */
17: typedef enum {DMCOMPOSITE_ARRAY, DMCOMPOSITE_DM} DMCompositeLinkType;
19: struct DMCompositeLink {
20: DMCompositeLinkType type;
21: struct DMCompositeLink *next;
22: PetscInt n,rstart; /* rstart is relative to this process */
23: PetscInt grstart; /* grstart is relative to all processes */
25: /* only used for DMCOMPOSITE_DM */
26: PetscInt *grstarts; /* global row for first unknown of this DM on each process */
27: DM dm;
29: /* only used for DMCOMPOSITE_ARRAY */
30: PetscMPIInt rank; /* process where array unknowns live */
31: };
33: struct _p_DMComposite {
34: PETSCHEADER(struct _DMCompositeOps);
35: DMHEADER
36: PetscInt n,N,rstart; /* rstart is relative to all processes; n is the number of unknowns owned by this process; N is the total number of unknowns */
37: PetscInt nghost; /* number of all local entries, including DA ghost points and any shared redundant arrays */
38: PetscInt nDM,nredundant,nmine; /* how many DMs and separate redundant arrays were used to build the DMComposite (nmine is the number on this process) */
39: PetscTruth setup; /* after this is set, cannot add new links to the DMComposite */
40: struct DMCompositeLink *next;
42: PetscErrorCode (*FormCoupleLocations)(DMComposite,Mat,PetscInt*,PetscInt*,PetscInt,PetscInt,PetscInt,PetscInt);
43: void *ctx; /* place for the user to set information they may need in FormCoupleLocations */
44: };
48: /*@C
49: DMCompositeSetCoupling - Sets a user-provided routine that computes the coupling between the
50: separate components (DMs and arrays) in a DMComposite, used to build the correct matrix nonzero structure.
53: Collective on MPI_Comm
55: Input Parameters:
56: + dmcomposite - the composite object
57: - formcouplelocations - routine to set the nonzero locations in the matrix
59: Level: advanced
61: Notes: See DMCompositeSetContext() and DMCompositeGetContext() for how to get user information into
62: this routine
64: .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeScatter(),
65: DMCompositeGather(), DMCompositeCreateGlobalVector(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess()
66: DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries(), DMCompositeSetContext(),
67: DMCompositeGetContext()
69: @*/
70: PetscErrorCode DMCompositeSetCoupling(DMComposite dmcomposite,PetscErrorCode (*FormCoupleLocations)(DMComposite,Mat,PetscInt*,PetscInt*,PetscInt,PetscInt,PetscInt,PetscInt))
71: {
73: dmcomposite->FormCoupleLocations = FormCoupleLocations;
74: return(0);
75: }
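/*
   Example usage, a minimal sketch: the callback name MyCoupleLocations and its body are
   hypothetical, but the signature matches the FormCoupleLocations typedef in _p_DMComposite
   above.  DMCompositeGetMatrix() calls the routine twice: once with a PETSC_NULL Mat, when it
   should only fill the preallocation counts dnz/onz, and once with the real matrix, when it
   should insert (zero) values at the coupling locations.

     PetscErrorCode MyCoupleLocations(DMComposite pack,Mat J,PetscInt *dnz,PetscInt *onz,
                                      PetscInt rstart,PetscInt nrows,PetscInt start,PetscInt end)
     {
       ... fill dnz/onz, or MatSetValues() zeros into J at the coupling locations ...
       return(0);
     }

     DMCompositeSetCoupling(pack,MyCoupleLocations);
*/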
79: /*@
80: DMCompositeSetContext - Allows the user to stash data needed within the coupling routine
81: set with DMCompositeSetCoupling()
84: Not Collective
86: Input Parameters:
87: + dmcomposite - the composite object
88: - ctx - the user supplied context
90: Level: advanced
92: Notes: Use DMCompositeGetContext() to retrieve the context when needed.
94: .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeScatter(),
95: DMCompositeGather(), DMCompositeCreateGlobalVector(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess()
96: DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries(), DMCompositeSetCoupling(),
97: DMCompositeGetContext()
99: @*/
100: PetscErrorCode DMCompositeSetContext(DMComposite dmcomposite,void *ctx)
101: {
103: dmcomposite->ctx = ctx;
104: return(0);
105: }
109: /*@
110: DMCompositeGetContext - Access the context set with DMCompositeSetContext()
113: Not Collective
115: Input Parameter:
116: . dmcomposite - the composite object
118: Output Parameter:
119: . ctx - the user supplied context
121: Level: advanced
123: Notes: This retrieves the context set earlier with DMCompositeSetContext().
125: .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeScatter(),
126: DMCompositeGather(), DMCompositeCreateGlobalVector(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess()
127: DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries(), DMCompositeSetCoupling(),
128: DMCompositeSetContext()
130: @*/
131: PetscErrorCode DMCompositeGetContext(DMComposite dmcomposite,void **ctx)
132: {
134: *ctx = dmcomposite->ctx;
135: return(0);
136: }
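/*
   Sketch of stashing and retrieving a user context; the struct MyCtx is hypothetical:

     typedef struct { PetscReal coupling_strength; } MyCtx;

     MyCtx ctx,*actx;
     DMCompositeSetContext(pack,(void*)&ctx);
     ...
     DMCompositeGetContext(pack,(void**)&actx);     typically called inside the coupling routine
*/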
141: /*@C
142: DMCompositeCreate - Creates a vector packer, used to generate "composite"
143: vectors made up of several subvectors.
145: Collective on MPI_Comm
147: Input Parameter:
148: . comm - the processors that will share the global vector
150: Output Parameter:
151: . packer - the packer object
153: Level: advanced
155: .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeScatter(),
156: DMCompositeGather(), DMCompositeCreateGlobalVector(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess()
157: DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries()
159: @*/
160: PetscErrorCode DMCompositeCreate(MPI_Comm comm,DMComposite *packer)
161: {
163: DMComposite p;
167: *packer = PETSC_NULL;
168: #ifndef PETSC_USE_DYNAMIC_LIBRARIES
169: DMInitializePackage(PETSC_NULL);
170: #endif
172: PetscHeaderCreate(p,_p_DMComposite,struct _DMCompositeOps,DM_COOKIE,0,"DM",comm,DMCompositeDestroy,DMCompositeView);
173: PetscObjectChangeTypeName((PetscObject)p,"DMComposite");
174: p->n = 0;
175: p->next = PETSC_NULL;
176: p->nredundant = 0;
177: p->nDM = 0;
179: p->ops->createglobalvector = DMCompositeCreateGlobalVector;
180: p->ops->createlocalvector = DMCompositeCreateLocalVector;
181: p->ops->refine = DMCompositeRefine;
182: p->ops->getinterpolation = DMCompositeGetInterpolation;
183: p->ops->getmatrix = DMCompositeGetMatrix;
184: p->ops->getcoloring = DMCompositeGetColoring;
185: p->ops->globaltolocalbegin = DMCompositeGlobalToLocalBegin;
186: p->ops->globaltolocalend = DMCompositeGlobalToLocalEnd;
187: p->ops->destroy = DMCompositeDestroy;
189: *packer = p;
190: return(0);
191: }
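/*
   Typical assembly, a sketch (the sizes are arbitrary and PETSc is assumed to be initialized):
   one distributed 1d DA plus one redundant array of 2 parameters stored on rank 0.  Later
   sketches in this file reuse this pack and U.

     DMComposite pack;
     DA          da;
     Vec         U;

     DMCompositeCreate(PETSC_COMM_WORLD,&pack);
     DACreate1d(PETSC_COMM_WORLD,DA_NONPERIODIC,64,1,1,PETSC_NULL,&da);
     DMCompositeAddDM(pack,(DM)da);
     DMCompositeAddArray(pack,0,2);
     DMCompositeCreateGlobalVector(pack,&U);
     ...
     VecDestroy(U);
     DADestroy(da);
     DMCompositeDestroy(pack);
*/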
197: /*@C
198: DMCompositeDestroy - Destroys a vector packer.
200: Collective on DMComposite
202: Input Parameter:
203: . packer - the packer object
205: Level: advanced
207: .seealso DMCompositeCreate(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeScatter(),DMCompositeGetEntries()
208: DMCompositeGather(), DMCompositeCreateGlobalVector(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess()
210: @*/
211: PetscErrorCode DMCompositeDestroy(DMComposite packer)
212: {
213: PetscErrorCode ierr;
214: struct DMCompositeLink *next, *prev;
215: PetscTruth done;
219: DMDestroy_Private((DM)packer,&done);
220: if (!done) return(0);
222: next = packer->next;
223: while (next) {
224: prev = next;
225: next = next->next;
226: if (prev->type == DMCOMPOSITE_DM) {
227: DMDestroy(prev->dm);
228: }
229: if (prev->grstarts) {
230: PetscFree(prev->grstarts);
231: }
232: PetscFree(prev);
233: }
234: PetscHeaderDestroy(packer);
235: return(0);
236: }
240: /*@
241: DMCompositeView - Views a composite DM
243: Collective on DMComposite
245: Input Parameter:
246: + packer - the DMComposite object to view
247: - v - the viewer
249: Level: intermediate
251: .seealso DMCompositeCreate()
253: @*/
254: PetscErrorCode DMCompositeView(DMComposite packer,PetscViewer v)
255: {
257: PetscTruth iascii;
260: PetscTypeCompare((PetscObject)v,PETSC_VIEWER_ASCII,&iascii);
261: if (iascii) {
262: struct DMCompositeLink *lnk = packer->next;
263: PetscInt i;
265: PetscViewerASCIIPrintf(v,"DMComposite (%s)\n",((PetscObject)packer)->prefix?((PetscObject)packer)->prefix:"no prefix");
266: PetscViewerASCIIPrintf(v," contains %d DMs and %d redundant arrays\n",packer->nDM,packer->nredundant);
267: PetscViewerASCIIPushTab(v);
268: for (i=0; lnk; lnk=lnk->next,i++) {
269: if (lnk->dm) {
270: PetscViewerASCIIPrintf(v,"Link %d: DM of type %s\n",i,((PetscObject)lnk->dm)->type_name);
271: PetscViewerASCIIPushTab(v);
272: DMView(lnk->dm,v);
273: PetscViewerASCIIPopTab(v);
274: } else {
275: PetscViewerASCIIPrintf(v,"Link %d: Redundant array of size %d owned by rank %d\n",i,lnk->n,lnk->rank);
276: }
277: }
278: PetscViewerASCIIPopTab(v);
279: }
280: return(0);
281: }
283: /* --------------------------------------------------------------------------------------*/
286: PetscErrorCode DMCompositeSetUp(DMComposite packer)
287: {
288: PetscErrorCode ierr;
289: PetscInt nprev = 0;
290: PetscMPIInt rank,size;
291: struct DMCompositeLink *next = packer->next;
292: PetscLayout map;
295: if (packer->setup) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Packer has already been setup");
296: PetscLayoutCreate(((PetscObject)packer)->comm,&map);
297: PetscLayoutSetLocalSize(map,packer->n);
298: PetscLayoutSetSize(map,PETSC_DETERMINE);
299: PetscLayoutSetBlockSize(map,1);
300: PetscLayoutSetUp(map);
301: PetscLayoutGetSize(map,&packer->N);
302: PetscLayoutGetRange(map,&packer->rstart,PETSC_NULL);
303: PetscLayoutDestroy(map);
304:
305: /* now set the rstart for each linked array/vector */
306: MPI_Comm_rank(((PetscObject)packer)->comm,&rank);
307: MPI_Comm_size(((PetscObject)packer)->comm,&size);
308: while (next) {
309: next->rstart = nprev;
310: if ((rank == next->rank) || next->type != DMCOMPOSITE_ARRAY) nprev += next->n;
311: next->grstart = packer->rstart + next->rstart;
312: if (next->type == DMCOMPOSITE_ARRAY) {
313: MPI_Bcast(&next->grstart,1,MPIU_INT,next->rank,((PetscObject)packer)->comm);
314: } else {
315: PetscMalloc(size*sizeof(PetscInt),&next->grstarts);
316: MPI_Allgather(&next->grstart,1,MPIU_INT,next->grstarts,1,MPIU_INT,((PetscObject)packer)->comm);
317: }
318: next = next->next;
319: }
320: packer->setup = PETSC_TRUE;
321: return(0);
322: }
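/*
   Worked illustration of the layout computed above, using the pack sketched after
   DMCompositeCreate(): suppose 2 processes and the DA has 3 unknowns local to each process,
   followed by the redundant array of 2 entries owned by rank 0.  Links are laid out in the
   order they were added, so

     rank 0 owns global rows 0..4:  DA piece 0..2, array piece 3..4   (packer->rstart = 0)
     rank 1 owns global rows 5..7:  DA piece 5..7                     (packer->rstart = 5)

   giving grstarts = {0,5} for the DA link, while the array link has grstart = 3 on every
   process after the broadcast from its owner (rank 0).
*/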
327: PetscErrorCode DMCompositeGetAccess_Array(DMComposite packer,struct DMCompositeLink *mine,Vec vec,PetscScalar **array)
328: {
330: PetscScalar *varray;
331: PetscMPIInt rank;
334: MPI_Comm_rank(((PetscObject)packer)->comm,&rank);
335: if (array) {
336: if (rank == mine->rank) {
337: VecGetArray(vec,&varray);
338: *array = varray + mine->rstart;
339: VecRestoreArray(vec,&varray);
340: } else {
341: *array = 0;
342: }
343: }
344: return(0);
345: }
349: PetscErrorCode DMCompositeGetAccess_DM(DMComposite packer,struct DMCompositeLink *mine,Vec vec,Vec *global)
350: {
352: PetscScalar *array;
355: if (global) {
356: DMGetGlobalVector(mine->dm,global);
357: VecGetArray(vec,&array);
358: VecPlaceArray(*global,array+mine->rstart);
359: VecRestoreArray(vec,&array);
360: }
361: return(0);
362: }
366: PetscErrorCode DMCompositeRestoreAccess_Array(DMComposite packer,struct DMCompositeLink *mine,Vec vec,PetscScalar **array)
367: {
369: return(0);
370: }
374: PetscErrorCode DMCompositeRestoreAccess_DM(DMComposite packer,struct DMCompositeLink *mine,Vec vec,Vec *global)
375: {
379: if (global) {
380: VecResetArray(*global);
381: DMRestoreGlobalVector(mine->dm,global);
382: }
383: return(0);
384: }
388: PetscErrorCode DMCompositeScatter_Array(DMComposite packer,struct DMCompositeLink *mine,Vec vec,PetscScalar *array)
389: {
391: PetscScalar *varray;
392: PetscMPIInt rank;
395: MPI_Comm_rank(((PetscObject)packer)->comm,&rank);
396: if (rank == mine->rank) {
397: VecGetArray(vec,&varray);
398: PetscMemcpy(array,varray+mine->rstart,mine->n*sizeof(PetscScalar));
399: VecRestoreArray(vec,&varray);
400: }
401: MPI_Bcast(array,mine->n,MPIU_SCALAR,mine->rank,((PetscObject)packer)->comm);
402: return(0);
403: }
407: PetscErrorCode DMCompositeScatter_DM(DMComposite packer,struct DMCompositeLink *mine,Vec vec,Vec local)
408: {
410: PetscScalar *array;
411: Vec global;
414: DMGetGlobalVector(mine->dm,&global);
415: VecGetArray(vec,&array);
416: VecPlaceArray(global,array+mine->rstart);
417: DMGlobalToLocalBegin(mine->dm,global,INSERT_VALUES,local);
418: DMGlobalToLocalEnd(mine->dm,global,INSERT_VALUES,local);
419: VecRestoreArray(vec,&array);
420: VecResetArray(global);
421: DMRestoreGlobalVector(mine->dm,&global);
422: return(0);
423: }
427: PetscErrorCode DMCompositeGather_Array(DMComposite packer,struct DMCompositeLink *mine,Vec vec,PetscScalar *array)
428: {
430: PetscScalar *varray;
431: PetscMPIInt rank;
434: MPI_Comm_rank(((PetscObject)packer)->comm,&rank);
435: if (rank == mine->rank) {
436: VecGetArray(vec,&varray);
437: if (varray+mine->rstart == array) SETERRQ(PETSC_ERR_ARG_WRONG,"You need not DMCompositeGather() into objects obtained via DMCompositeGetAccess()");
438: PetscMemcpy(varray+mine->rstart,array,mine->n*sizeof(PetscScalar));
439: VecRestoreArray(vec,&varray);
440: }
441: return(0);
442: }
446: PetscErrorCode DMCompositeGather_DM(DMComposite packer,struct DMCompositeLink *mine,Vec vec,Vec local)
447: {
449: PetscScalar *array;
450: Vec global;
453: DMGetGlobalVector(mine->dm,&global);
454: VecGetArray(vec,&array);
455: VecPlaceArray(global,array+mine->rstart);
456: DMLocalToGlobal(mine->dm,local,INSERT_VALUES,global);
457: VecRestoreArray(vec,&array);
458: VecResetArray(global);
459: DMRestoreGlobalVector(mine->dm,&global);
460: return(0);
461: }
463: /* ----------------------------------------------------------------------------------*/
465: #include <stdarg.h>
469: /*@C
470: DMCompositeGetNumberDM - Gets the number of DM objects in the DMComposite
471: representation.
473: Collective on DMComposite
475: Input Parameter:
476: . packer - the packer object
478: Output Parameter:
479: . nDM - the number of DMs
481: Level: beginner
483: .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeCreateGlobalVector(),
484: DMCompositeGather(), DMCompositeCreate(), DMCompositeGetGlobalIndices(), DMCompositeScatter(),
485: DMCompositeRestoreAccess(), DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(),
486: DMCompositeGetEntries()
488: @*/
489: PetscErrorCode DMCompositeGetNumberDM(DMComposite packer,PetscInt *nDM)
490: {
493: *nDM = packer->nDM;
494: return(0);
495: }
500: /*@C
501: DMCompositeGetAccess - Allows one to access the individual packed vectors in their global
502: representation.
504: Collective on DMComposite
506: Input Parameters:
507: + packer - the packer object
508: . gvec - the global vector
509: - ... - the individual sequential or parallel objects (arrays or vectors)
511: Notes: Use DMCompositeRestoreAccess() to return the vectors when you no longer need them
512:
513: Level: advanced
515: .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeCreateGlobalVector(),
516: DMCompositeGather(), DMCompositeCreate(), DMCompositeGetGlobalIndices(), DMCompositeScatter(),
517: DMCompositeRestoreAccess(), DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(),
518: DMCompositeGetEntries()
520: @*/
521: PetscErrorCode DMCompositeGetAccess(DMComposite packer,Vec gvec,...)
522: {
523: va_list Argp;
524: PetscErrorCode ierr;
525: struct DMCompositeLink *next;
530: next = packer->next;
531: if (!packer->setup) {
532: DMCompositeSetUp(packer);
533: }
535: /* loop over packed objects, handling one at a time */
536: va_start(Argp,gvec);
537: while (next) {
538: if (next->type == DMCOMPOSITE_ARRAY) {
539: PetscScalar **array;
540: array = va_arg(Argp, PetscScalar**);
541: DMCompositeGetAccess_Array(packer,next,gvec,array);
542: } else if (next->type == DMCOMPOSITE_DM) {
543: Vec *vec;
544: vec = va_arg(Argp, Vec*);
545: DMCompositeGetAccess_DM(packer,next,gvec,vec);
546: } else {
547: SETERRQ(PETSC_ERR_SUP,"Cannot handle that object type yet");
548: }
549: next = next->next;
550: }
551: va_end(Argp);
552: return(0);
553: }
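/*
   Access sketch, matching the pack assembled after DMCompositeCreate() above; the pieces come
   back in the order the links were added:

     Vec         Uda;
     PetscScalar *uarray;

     DMCompositeGetAccess(pack,U,&Uda,&uarray);
     ... Uda aliases the DA portion of U as a global DA vector; uarray points at the
         redundant-array portion (it is 0 on the processes that do not own the array) ...
     DMCompositeRestoreAccess(pack,U,&Uda,&uarray);
*/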
557: /*@C
558: DMCompositeRestoreAccess - Returns the vectors obtained with DMCompositeGetAccess()
561: Collective on DMComposite
563: Input Parameters:
564: + packer - the packer object
565: . gvec - the global vector
566: - ... - the individual sequential or parallel objects (arrays or vectors)
567:
568: Level: advanced
570: .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeCreateGlobalVector(),
571: DMCompositeGather(), DMCompositeCreate(), DMCompositeGetGlobalIndices(), DMCompositeScatter(),
572: DMCompositeRestoreAccess(), DMCompositeGetAccess()
574: @*/
575: PetscErrorCode DMCompositeRestoreAccess(DMComposite packer,Vec gvec,...)
576: {
577: va_list Argp;
578: PetscErrorCode ierr;
579: struct DMCompositeLink *next;
584: next = packer->next;
585: if (!packer->setup) {
586: DMCompositeSetUp(packer);
587: }
589: /* loop over packed objects, handling one at a time */
590: va_start(Argp,gvec);
591: while (next) {
592: if (next->type == DMCOMPOSITE_ARRAY) {
593: PetscScalar **array;
594: array = va_arg(Argp, PetscScalar**);
595: DMCompositeRestoreAccess_Array(packer,next,gvec,array);
596: } else if (next->type == DMCOMPOSITE_DM) {
597: Vec *vec;
598: vec = va_arg(Argp, Vec*);
599: DMCompositeRestoreAccess_DM(packer,next,gvec,vec);
600: } else {
601: SETERRQ(PETSC_ERR_SUP,"Cannot handle that object type yet");
602: }
603: next = next->next;
604: }
605: va_end(Argp);
606: return(0);
607: }
611: /*@C
612: DMCompositeScatter - Scatters from a global packed vector into its individual local vectors
614: Collective on DMComposite
616: Input Parameters:
617: + packer - the packer object
618: . gvec - the global vector
619: - ... - the individual sequential objects (arrays or vectors)
620:
621: Level: advanced
623: .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeCreateGlobalVector(),
624: DMCompositeGather(), DMCompositeCreate(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess(),
625: DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries()
627: @*/
628: PetscErrorCode DMCompositeScatter(DMComposite packer,Vec gvec,...)
629: {
630: va_list Argp;
631: PetscErrorCode ierr;
632: struct DMCompositeLink *next;
638: next = packer->next;
639: if (!packer->setup) {
640: DMCompositeSetUp(packer);
641: }
643: /* loop over packed objects, handling one at a time */
644: va_start(Argp,gvec);
645: while (next) {
646: if (next->type == DMCOMPOSITE_ARRAY) {
647: PetscScalar *array;
648: array = va_arg(Argp, PetscScalar*);
649: DMCompositeScatter_Array(packer,next,gvec,array);
650: } else if (next->type == DMCOMPOSITE_DM) {
651: Vec vec;
652: vec = va_arg(Argp, Vec);
654: DMCompositeScatter_DM(packer,next,gvec,vec);
655: } else {
656: SETERRQ(PETSC_ERR_SUP,"Cannot handle that object type yet");
657: }
659: next = next->next;
660: }
661: va_end(Argp);
662: return(0);
663: }
667: /*@C
668: DMCompositeGather - Gathers into a global packed vector from its individual local vectors
670: Collective on DMComposite
672: Input Parameters:
673: + packer - the packer object
674: . gvec - the global vector
675: - ... - the individual sequential objects (arrays or vectors)
676:
677: Level: advanced
679: .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeCreateGlobalVector(),
680: DMCompositeScatter(), DMCompositeCreate(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess(),
681: DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries()
683: @*/
684: PetscErrorCode DMCompositeGather(DMComposite packer,Vec gvec,...)
685: {
686: va_list Argp;
687: PetscErrorCode ierr;
688: struct DMCompositeLink *next;
693: next = packer->next;
694: if (!packer->setup) {
695: DMCompositeSetUp(packer);
696: }
698: /* loop over packed objects, handling one at a time */
699: va_start(Argp,gvec);
700: while (next) {
701: if (next->type == DMCOMPOSITE_ARRAY) {
702: PetscScalar *array;
703: array = va_arg(Argp, PetscScalar*);
704: DMCompositeGather_Array(packer,next,gvec,array);
705: } else if (next->type == DMCOMPOSITE_DM) {
706: Vec vec;
707: vec = va_arg(Argp, Vec);
709: DMCompositeGather_DM(packer,next,gvec,vec);
710: } else {
711: SETERRQ(PETSC_ERR_SUP,"Cannot handle that object type yet");
712: }
713: next = next->next;
714: }
715: va_end(Argp);
716: return(0);
717: }
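/*
   Scatter/gather round trip, a sketch matching the pack assembled after DMCompositeCreate()
   above; the local objects come from DMCompositeGetLocalVectors(), defined further below:

     Vec         Uloc;
     PetscScalar *redundant;

     DMCompositeGetLocalVectors(pack,&Uloc,&redundant);
     DMCompositeScatter(pack,U,Uloc,redundant);
     ... work on the ghosted Uloc and the redundant array ...
     DMCompositeGather(pack,U,Uloc,redundant);
     DMCompositeRestoreLocalVectors(pack,&Uloc,&redundant);
*/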
721: /*@C
722: DMCompositeAddArray - adds a "redundant" array to a DMComposite. The array values will
723: be stored as part of the global vector on process orank.
725: Collective on DMComposite
727: Input Parameters:
728: + packer - the packer object
729: . orank - the process on which the array entries officially live; this number must be
730: the same on all processes.
731: - n - the length of the array
732:
733: Level: advanced
735: .seealso DMCompositeDestroy(), DMCompositeGather(), DMCompositeAddDM(), DMCompositeCreateGlobalVector(),
736: DMCompositeScatter(), DMCompositeCreate(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess(),
737: DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries()
739: @*/
740: PetscErrorCode DMCompositeAddArray(DMComposite packer,PetscMPIInt orank,PetscInt n)
741: {
742: struct DMCompositeLink *mine,*next;
743: PetscErrorCode ierr;
744: PetscMPIInt rank;
748: next = packer->next;
749: if (packer->setup) {
750: SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Cannot add an array once you have used the DMComposite");
751: }
752: #if defined(PETSC_USE_DEBUG)
753: {
754: PetscMPIInt orankmax;
755: MPI_Allreduce(&orank,&orankmax,1,MPI_INT,MPI_MAX,((PetscObject)packer)->comm);
756: if (orank != orankmax) SETERRQ2(PETSC_ERR_ARG_INCOMP,"orank %d must be equal on all processes, another process has value %d",orank,orankmax);
757: }
758: #endif
760: MPI_Comm_rank(((PetscObject)packer)->comm,&rank);
761: /* create new link */
762: PetscNew(struct DMCompositeLink,&mine);
763: mine->n = n;
764: mine->rank = orank;
765: mine->dm = PETSC_NULL;
766: mine->type = DMCOMPOSITE_ARRAY;
767: mine->next = PETSC_NULL;
768: if (rank == mine->rank) {packer->n += n;packer->nmine++;}
770: /* add to end of list */
771: if (!next) {
772: packer->next = mine;
773: } else {
774: while (next->next) next = next->next;
775: next->next = mine;
776: }
777: packer->nredundant++;
778: return(0);
779: }
783: /*@C
784: DMCompositeAddDM - adds a DM (includes DA) vector to a DMComposite
786: Collective on DMComposite
788: Input Parameters:
789: + packer - the packer object
790: - dm - the DM object; if the DM is a DA you will need to cast it with (DM)
791:
792: Level: advanced
794: .seealso DMCompositeDestroy(), DMCompositeGather(), DMCompositeAddDM(), DMCompositeCreateGlobalVector(),
795: DMCompositeScatter(), DMCompositeCreate(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess(),
796: DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries()
798: @*/
799: PetscErrorCode DMCompositeAddDM(DMComposite packer,DM dm)
800: {
801: PetscErrorCode ierr;
802: PetscInt n;
803: struct DMCompositeLink *mine,*next;
804: Vec global;
809: next = packer->next;
810: if (packer->setup) {
811: SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Cannot add a DA once you have used the DMComposite");
812: }
814: /* create new link */
815: PetscNew(struct DMCompositeLink,&mine);
816: PetscObjectReference((PetscObject)dm);
817: DMGetGlobalVector(dm,&global);
818: VecGetLocalSize(global,&n);
819: DMRestoreGlobalVector(dm,&global);
820: mine->n = n;
821: mine->dm = dm;
822: mine->type = DMCOMPOSITE_DM;
823: mine->next = PETSC_NULL;
824: packer->n += n;
826: /* add to end of list */
827: if (!next) {
828: packer->next = mine;
829: } else {
830: while (next->next) next = next->next;
831: next->next = mine;
832: }
833: packer->nDM++;
834: packer->nmine++;
835: return(0);
836: }
842: PetscErrorCode VecView_DMComposite(Vec gvec,PetscViewer viewer)
843: {
844: DMComposite packer;
845: PetscErrorCode ierr;
846: struct DMCompositeLink *next;
847: PetscTruth isdraw;
850: PetscObjectQuery((PetscObject)gvec,"DMComposite",(PetscObject*)&packer);
851: if (!packer) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector not generated from a DMComposite");
852: next = packer->next;
854: PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_DRAW,&isdraw);
855: if (!isdraw) {
856: /* do I really want to call this? */
857: VecView_MPI(gvec,viewer);
858: } else {
859: PetscInt cnt = 0;
861: /* loop over packed objects, handling one at a time */
862: while (next) {
863: if (next->type == DMCOMPOSITE_ARRAY) {
864: PetscScalar *array;
865: DMCompositeGetAccess_Array(packer,next,gvec,&array);
867: /*skip it for now */
868: } else if (next->type == DMCOMPOSITE_DM) {
869: Vec vec;
870: PetscInt bs;
872: DMCompositeGetAccess_DM(packer,next,gvec,&vec);
873: VecView(vec,viewer);
874: VecGetBlockSize(vec,&bs);
875: DMCompositeRestoreAccess_DM(packer,next,gvec,&vec);
876: PetscViewerDrawBaseAdd(viewer,bs);
877: cnt += bs;
878: } else {
879: SETERRQ(PETSC_ERR_SUP,"Cannot handle that object type yet");
880: }
881: next = next->next;
882: }
883: PetscViewerDrawBaseAdd(viewer,-cnt);
884: }
885: return(0);
886: }
892: /*@C
893: DMCompositeCreateGlobalVector - Creates a vector of the correct size to be gathered into
894: by the packer.
896: Collective on DMComposite
898: Input Parameter:
899: . packer - the packer object
901: Output Parameter:
902: . gvec - the global vector
904: Level: advanced
906: Notes: Once this has been created you cannot add additional arrays or vectors to be packed.
908: .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeScatter(),
909: DMCompositeGather(), DMCompositeCreate(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess(),
910: DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries(),
911: DMCompositeCreateLocalVector()
913: @*/
914: PetscErrorCode DMCompositeCreateGlobalVector(DMComposite packer,Vec *gvec)
915: {
916: PetscErrorCode ierr;
920: if (!packer->setup) {
921: DMCompositeSetUp(packer);
922: }
923: VecCreateMPI(((PetscObject)packer)->comm,packer->n,packer->N,gvec);
924: PetscObjectCompose((PetscObject)*gvec,"DMComposite",(PetscObject)packer);
925: VecSetOperation(*gvec,VECOP_VIEW,(void(*)(void))VecView_DMComposite);
926: return(0);
927: }
931: /*@C
932: DMCompositeCreateLocalVector - Creates a vector of the correct size to contain all ghost points
933: and redundant arrays.
935: Collective on DMComposite
937: Input Parameter:
938: . packer - the packer object
940: Output Parameter:
941: . lvec - the local vector
943: Level: advanced
945: Notes: Once this has been created you cannot add additional arrays or vectors to be packed.
947: .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeScatter(),
948: DMCompositeGather(), DMCompositeCreate(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess(),
949: DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries(),
950: DMCompositeCreateGlobalVector()
952: @*/
953: PetscErrorCode DMCompositeCreateLocalVector(DMComposite packer,Vec *lvec)
954: {
955: PetscErrorCode ierr;
959: if (!packer->setup) {
960: DMCompositeSetUp(packer);
961: }
962: VecCreateSeq(PETSC_COMM_SELF,packer->nghost,lvec); /* a local vector is sequential, so it lives on PETSC_COMM_SELF */
963: PetscObjectCompose((PetscObject)*lvec,"DMComposite",(PetscObject)packer);
964: return(0);
965: }
969: /*@C
970: DMCompositeGetGlobalIndices - Gets an IS of the global indices for each DM/array in the DMComposite, including ghost points
972: Collective on DMComposite
974: Input Parameter:
975: . packer - the packer object
977: Output Parameter:
978: . is - the individual indices for each packed vector/array. Note that this includes
979: all the ghost points that individual ghosted DAs may have. Also, each process gets an
980: is for EACH redundant array (not just the redundant arrays it owns).
981:
982: Level: advanced
984: Notes:
985: The is entries should be destroyed with ISDestroy(); the is array should be freed with PetscFree()
987: Use DMCompositeGetGlobalISs() for non-ghosted ISs.
989: .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeCreateGlobalVector(),
990: DMCompositeGather(), DMCompositeCreate(), DMCompositeGetAccess(), DMCompositeScatter(),
991: DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(),DMCompositeGetEntries()
993: @*/
994: PetscErrorCode DMCompositeGetGlobalIndices(DMComposite packer,IS *is[])
995: {
996: PetscErrorCode ierr;
997: PetscInt i,*idx,n,cnt;
998: struct DMCompositeLink *next;
999: Vec global,dglobal;
1000: PF pf;
1001: PetscScalar *array;
1002: PetscMPIInt rank;
1006: PetscMalloc((packer->nDM+packer->nredundant)*sizeof(IS),is); /* one IS is created below for every link, even redundant arrays owned by another process */
1007: next = packer->next;
1008: DMCompositeCreateGlobalVector(packer,&global);
1009: MPI_Comm_rank(((PetscObject)packer)->comm,&rank);
1011: /* put 0 to N-1 into the global vector */
1012: PFCreate(((PetscObject)packer)->comm,1,1,&pf);
1013: PFSetType(pf,PFIDENTITY,PETSC_NULL);
1014: PFApplyVec(pf,PETSC_NULL,global);
1015: PFDestroy(pf);
1017: /* loop over packed objects, handling one at a time */
1018: cnt = 0;
1019: while (next) {
1021: if (next->type == DMCOMPOSITE_ARRAY) {
1022:
1023: PetscMalloc(next->n*sizeof(PetscInt),&idx);
1024: if (rank == next->rank) {
1025: VecGetArray(global,&array);
1026: array += next->rstart;
1027: for (i=0; i<next->n; i++) idx[i] = (PetscInt)PetscRealPart(array[i]);
1028: array -= next->rstart;
1029: VecRestoreArray(global,&array);
1030: }
1031: MPI_Bcast(idx,next->n,MPIU_INT,next->rank,((PetscObject)packer)->comm);
1032: ISCreateGeneral(((PetscObject)packer)->comm,next->n,idx,&(*is)[cnt]);
1033: PetscFree(idx);
1034: } else if (next->type == DMCOMPOSITE_DM) {
1035: Vec local;
1037: DMCreateLocalVector(next->dm,&local);
1038: VecGetArray(global,&array);
1039: array += next->rstart;
1040: DMGetGlobalVector(next->dm,&dglobal);
1041: VecPlaceArray(dglobal,array);
1042: DMGlobalToLocalBegin(next->dm,dglobal,INSERT_VALUES,local);
1043: DMGlobalToLocalEnd(next->dm,dglobal,INSERT_VALUES,local);
1044: array -= next->rstart;
1045: VecRestoreArray(global,&array);
1046: VecResetArray(dglobal);
1047: DMRestoreGlobalVector(next->dm,&dglobal);
1049: VecGetArray(local,&array);
1050: VecGetSize(local,&n);
1051: PetscMalloc(n*sizeof(PetscInt),&idx);
1052: for (i=0; i<n; i++) idx[i] = (PetscInt)PetscRealPart(array[i]);
1053: VecRestoreArray(local,&array);
1054: VecDestroy(local);
1055: ISCreateGeneral(((PetscObject)packer)->comm,n,idx,&(*is)[cnt]); /* n is the ghosted local size, matching the length of idx */
1056: PetscFree(idx);
1058: } else {
1059: SETERRQ(PETSC_ERR_SUP,"Cannot handle that object type yet");
1060: }
1061: next = next->next;
1062: cnt++;
1063: }
1064: VecDestroy(global);
1065: return(0);
1066: }
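/*
   Usage sketch for the pack assembled after DMCompositeCreate() above: one IS is returned per
   link (here 2: one DM plus one redundant array) and the caller frees both the ISs and the
   array:

     IS       *is;
     PetscInt i;

     DMCompositeGetGlobalIndices(pack,&is);
     ... inspect the indices with ISView() or ISGetIndices() ...
     for (i=0; i<2; i++) ISDestroy(is[i]);
     PetscFree(is);
*/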
1070: /*@C
1071: DMCompositeGetGlobalISs - Gets the index sets for each composed object
1073: Collective on DMComposite
1075: Input Parameter:
1076: . packer - the packer object
1078: Output Parameters:
1079: . is - the array of index sets
1080:
1081: Level: advanced
1083: Notes:
1084: The is entries should be destroyed with ISDestroy(); the is array should be freed with PetscFree()
1086: The number of ISs on each process may differ when redundant arrays are used
1088: These could be used to extract a subset of vector entries for a "multi-physics" preconditioner
1090: Use DMCompositeGetLocalISs() for index sets that include ghost points
1092: .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeCreateGlobalVector(),
1093: DMCompositeGather(), DMCompositeCreate(), DMCompositeGetAccess(), DMCompositeScatter(),
1094: DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(),DMCompositeGetEntries()
1096: @*/
1097: PetscErrorCode DMCompositeGetGlobalISs(DMComposite packer,IS *is[])
1098: {
1099: PetscErrorCode ierr;
1100: PetscInt cnt = 0;
1101: struct DMCompositeLink *next;
1102: PetscMPIInt rank;
1106: PetscMalloc(packer->nmine*sizeof(IS),is);
1107: next = packer->next;
1108: MPI_Comm_rank(((PetscObject)packer)->comm,&rank);
1110: /* loop over packed objects, handling one at a time */
1111: while (next) {
1113: if (next->type == DMCOMPOSITE_ARRAY) {
1114:
1115: if (rank == next->rank) {
1116: ISCreateBlock(((PetscObject)packer)->comm,next->n,1,&next->grstart,&(*is)[cnt]);
1117: cnt++;
1118: }
1120: } else if (next->type == DMCOMPOSITE_DM) {
1122: ISCreateBlock(((PetscObject)packer)->comm,next->n,1,&next->grstart,&(*is)[cnt]);
1123: cnt++;
1125: } else {
1126: SETERRQ(PETSC_ERR_SUP,"Cannot handle that object type yet");
1127: }
1128: next = next->next;
1129: }
1130: return(0);
1131: }
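/*
   Sketch of extracting one component of the packed vector with the non-ghosted ISs, e.g. to
   feed a "multi-physics" preconditioner.  In the pack assembled after DMCompositeCreate()
   above, is[0] belongs to the DA piece on every process; the sketch assumes ISGetLocalSize()
   reports the number of local indices:

     IS         *is;
     Vec        Usub;
     VecScatter scat;
     PetscInt   nloc;

     DMCompositeGetGlobalISs(pack,&is);
     ISGetLocalSize(is[0],&nloc);
     VecCreateMPI(PETSC_COMM_WORLD,nloc,PETSC_DETERMINE,&Usub);
     VecScatterCreate(U,is[0],Usub,PETSC_NULL,&scat);
     VecScatterBegin(scat,U,Usub,INSERT_VALUES,SCATTER_FORWARD);
     VecScatterEnd(scat,U,Usub,INSERT_VALUES,SCATTER_FORWARD);
*/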
1133: /* -------------------------------------------------------------------------------------*/
1136: PetscErrorCode DMCompositeGetLocalVectors_Array(DMComposite packer,struct DMCompositeLink *mine,PetscScalar **array)
1137: {
1140: if (array) {
1141: PetscMalloc(mine->n*sizeof(PetscScalar),array);
1142: }
1143: return(0);
1144: }
1148: PetscErrorCode DMCompositeGetLocalVectors_DM(DMComposite packer,struct DMCompositeLink *mine,Vec *local)
1149: {
1152: if (local) {
1153: DMGetLocalVector(mine->dm,local);
1154: }
1155: return(0);
1156: }
1160: PetscErrorCode DMCompositeRestoreLocalVectors_Array(DMComposite packer,struct DMCompositeLink *mine,PetscScalar **array)
1161: {
1164: if (array) {
1165: PetscFree(*array);
1166: }
1167: return(0);
1168: }
1172: PetscErrorCode DMCompositeRestoreLocalVectors_DM(DMComposite packer,struct DMCompositeLink *mine,Vec *local)
1173: {
1176: if (local) {
1177: DMRestoreLocalVector(mine->dm,local);
1178: }
1179: return(0);
1180: }
1184: /*@C
1185: DMCompositeGetLocalVectors - Gets local vectors and arrays for each part of a DMComposite.
1186: Use DMCompositeRestoreLocalVectors() to return them.
1188: Collective on DMComposite
1190: Input Parameter:
1191: . packer - the packer object
1192:
1193: Output Parameter:
1194: . ... - the individual sequential objects (arrays or vectors)
1195:
1196: Level: advanced
1198: .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeCreateGlobalVector(),
1199: DMCompositeGather(), DMCompositeCreate(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess(),
1200: DMCompositeRestoreLocalVectors(), DMCompositeScatter(), DMCompositeGetEntries()
1202: @*/
1203: PetscErrorCode DMCompositeGetLocalVectors(DMComposite packer,...)
1204: {
1205: va_list Argp;
1206: PetscErrorCode ierr;
1207: struct DMCompositeLink *next;
1211: next = packer->next;
1212: /* loop over packed objects, handling one at a time */
1213: va_start(Argp,packer);
1214: while (next) {
1215: if (next->type == DMCOMPOSITE_ARRAY) {
1216: PetscScalar **array;
1217: array = va_arg(Argp, PetscScalar**);
1218: DMCompositeGetLocalVectors_Array(packer,next,array);
1219: } else if (next->type == DMCOMPOSITE_DM) {
1220: Vec *vec;
1221: vec = va_arg(Argp, Vec*);
1222: DMCompositeGetLocalVectors_DM(packer,next,vec);
1223: } else {
1224: SETERRQ(PETSC_ERR_SUP,"Cannot handle that object type yet");
1225: }
1226: next = next->next;
1227: }
1228: va_end(Argp);
1229: return(0);
1230: }
1234: /*@C
1235: DMCompositeRestoreLocalVectors - Restores the local vectors and arrays for each part of a DMComposite
1236: obtained with DMCompositeGetLocalVectors().
1238: Collective on DMComposite
1240: Input Parameter:
1241: . packer - the packer object
1242:
1243: Output Parameter:
1244: . ... - the individual sequential objects (arrays or vectors)
1245:
1246: Level: advanced
1248: .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeCreateGlobalVector(),
1249: DMCompositeGather(), DMCompositeCreate(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess(),
1250: DMCompositeGetLocalVectors(), DMCompositeScatter(), DMCompositeGetEntries()
1252: @*/
1253: PetscErrorCode DMCompositeRestoreLocalVectors(DMComposite packer,...)
1254: {
1255: va_list Argp;
1256: PetscErrorCode ierr;
1257: struct DMCompositeLink *next;
1261: next = packer->next;
1262: /* loop over packed objects, handling one at a time */
1263: va_start(Argp,packer);
1264: while (next) {
1265: if (next->type == DMCOMPOSITE_ARRAY) {
1266: PetscScalar **array;
1267: array = va_arg(Argp, PetscScalar**);
1268: DMCompositeRestoreLocalVectors_Array(packer,next,array);
1269: } else if (next->type == DMCOMPOSITE_DM) {
1270: Vec *vec;
1271: vec = va_arg(Argp, Vec*);
1272: DMCompositeRestoreLocalVectors_DM(packer,next,vec);
1273: } else {
1274: SETERRQ(PETSC_ERR_SUP,"Cannot handle that object type yet");
1275: }
1276: next = next->next;
1277: }
1278: va_end(Argp);
1279: return(0);
1280: }
1282: /* -------------------------------------------------------------------------------------*/
1285: PetscErrorCode DMCompositeGetEntries_Array(DMComposite packer,struct DMCompositeLink *mine,PetscInt *n)
1286: {
1288: if (n) *n = mine->n;
1289: return(0);
1290: }
1294: PetscErrorCode DMCompositeGetEntries_DM(DMComposite packer,struct DMCompositeLink *mine,DM *dm)
1295: {
1297: if (dm) *dm = mine->dm;
1298: return(0);
1299: }
1303: /*@C
1304: DMCompositeGetEntries - Gets the DM or redundant array size for each entry in a DMComposite.
1306: Collective on DMComposite
1308: Input Parameter:
1309: . packer - the packer object
1310:
1311: Output Parameter:
1312: . ... - the individual entries (DMs or integer sizes)
1313:
1314: Level: advanced
1316: .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeCreateGlobalVector(),
1317: DMCompositeGather(), DMCompositeCreate(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess(),
1318: DMCompositeRestoreLocalVectors(), DMCompositeGetLocalVectors(), DMCompositeScatter()
1321: @*/
1322: PetscErrorCode DMCompositeGetEntries(DMComposite packer,...)
1323: {
1324: va_list Argp;
1325: PetscErrorCode ierr;
1326: struct DMCompositeLink *next;
1330: next = packer->next;
1331: /* loop over packed objects, handling one at a time */
1332: va_start(Argp,packer);
1333: while (next) {
1334: if (next->type == DMCOMPOSITE_ARRAY) {
1335: PetscInt *n;
1336: n = va_arg(Argp, PetscInt*);
1337: DMCompositeGetEntries_Array(packer,next,n);
1338: } else if (next->type == DMCOMPOSITE_DM) {
1339: DM *dm;
1340: dm = va_arg(Argp, DM*);
1341: DMCompositeGetEntries_DM(packer,next,dm);
1342: } else {
1343: SETERRQ(PETSC_ERR_SUP,"Cannot handle that object type yet");
1344: }
1345: next = next->next;
1346: }
1347: va_end(Argp);
1348: return(0);
1349: }
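/*
   Sketch matching the pack assembled after DMCompositeCreate() above; a DM link returns its
   DM, an array link returns its length:

     DM       dm0;
     PetscInt n1;

     DMCompositeGetEntries(pack,&dm0,&n1);
*/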
1353: /*@C
1354: DMCompositeRefine - Refines a DMComposite by refining all of its DAs
1356: Collective on DMComposite
1358: Input Parameters:
1359: + packer - the packer object
1360: - comm - communicator to contain the new DM object, usually PETSC_NULL
1362: Output Parameter:
1363: . fine - new packer
1364:
1365: Level: advanced
1367: .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeCreateGlobalVector(),
1368: DMCompositeGather(), DMCompositeCreate(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess(),
1369: DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeScatter(),
1370: DMCompositeGetEntries()
1372: @*/
1373: PetscErrorCode DMCompositeRefine(DMComposite packer,MPI_Comm comm,DMComposite *fine)
1374: {
1375: PetscErrorCode ierr;
1376: struct DMCompositeLink *next;
1377: DM dm;
1381: next = packer->next;
1382: DMCompositeCreate(comm,fine);
1384: /* loop over packed objects, handling one at a time */
1385: while (next) {
1386: if (next->type == DMCOMPOSITE_ARRAY) {
1387: DMCompositeAddArray(*fine,next->rank,next->n);
1388: } else if (next->type == DMCOMPOSITE_DM) {
1389: DMRefine(next->dm,comm,&dm);
1390: DMCompositeAddDM(*fine,dm);
1391: PetscObjectDereference((PetscObject)dm);
1392: } else {
1393: SETERRQ(PETSC_ERR_SUP,"Cannot handle that object type yet");
1394: }
1395: next = next->next;
1396: }
1397: return(0);
1398: }
1400: #include "petscmat.h"
1402: struct MatPackLink {
1403: Mat A;
1404: struct MatPackLink *next;
1405: };
1407: struct MatPack {
1408: DMComposite right,left;
1409: struct MatPackLink *next;
1410: };
1414: PetscErrorCode MatMultBoth_Shell_Pack(Mat A,Vec x,Vec y,PetscTruth add)
1415: {
1416: struct MatPack *mpack;
1417: struct DMCompositeLink *xnext,*ynext;
1418: struct MatPackLink *anext;
1419: PetscScalar *xarray,*yarray;
1420: PetscErrorCode ierr;
1421: PetscInt i;
1422: Vec xglobal,yglobal;
1423: PetscMPIInt rank;
1426: MatShellGetContext(A,(void**)&mpack);
1427: MPI_Comm_rank(((PetscObject)mpack->right)->comm,&rank);
1428: xnext = mpack->right->next;
1429: ynext = mpack->left->next;
1430: anext = mpack->next;
1432: while (xnext) {
1433: if (xnext->type == DMCOMPOSITE_ARRAY) {
1434: if (rank == xnext->rank) {
1435: VecGetArray(x,&xarray);
1436: VecGetArray(y,&yarray);
1437: if (add) {
1438: for (i=0; i<xnext->n; i++) {
1439: yarray[ynext->rstart+i] += xarray[xnext->rstart+i];
1440: }
1441: } else {
1442: PetscMemcpy(yarray+ynext->rstart,xarray+xnext->rstart,xnext->n*sizeof(PetscScalar));
1443: }
1444: VecRestoreArray(x,&xarray);
1445: VecRestoreArray(y,&yarray);
1446: }
1447: } else if (xnext->type == DMCOMPOSITE_DM) {
1448: VecGetArray(x,&xarray);
1449: VecGetArray(y,&yarray);
1450: DMGetGlobalVector(xnext->dm,&xglobal);
1451: DMGetGlobalVector(ynext->dm,&yglobal);
1452: VecPlaceArray(xglobal,xarray+xnext->rstart);
1453: VecPlaceArray(yglobal,yarray+ynext->rstart);
1454: if (add) {
1455: MatMultAdd(anext->A,xglobal,yglobal,yglobal);
1456: } else {
1457: MatMult(anext->A,xglobal,yglobal);
1458: }
1459: VecRestoreArray(x,&xarray);
1460: VecRestoreArray(y,&yarray);
1461: VecResetArray(xglobal);
1462: VecResetArray(yglobal);
1463: DMRestoreGlobalVector(xnext->dm,&xglobal);
1464: DMRestoreGlobalVector(ynext->dm,&yglobal);
1465: anext = anext->next;
1466: } else {
1467: SETERRQ(PETSC_ERR_SUP,"Cannot handle that object type yet");
1468: }
1469: xnext = xnext->next;
1470: ynext = ynext->next;
1471: }
1472: return(0);
1473: }
1477: PetscErrorCode MatMultAdd_Shell_Pack(Mat A,Vec x,Vec y,Vec z)
1478: {
1481: if (z != y) SETERRQ(PETSC_ERR_SUP,"Handles y == z only");
1482: MatMultBoth_Shell_Pack(A,x,y,PETSC_TRUE);
1483: return(0);
1484: }
1488: PetscErrorCode MatMult_Shell_Pack(Mat A,Vec x,Vec y)
1489: {
1492: MatMultBoth_Shell_Pack(A,x,y,PETSC_FALSE);
1493: return(0);
1494: }
1498: PetscErrorCode MatMultTranspose_Shell_Pack(Mat A,Vec x,Vec y)
1499: {
1500: struct MatPack *mpack;
1501: struct DMCompositeLink *xnext,*ynext;
1502: struct MatPackLink *anext;
1503: PetscScalar *xarray,*yarray;
1504: PetscErrorCode ierr;
1505: Vec xglobal,yglobal;
1506: PetscMPIInt rank;
1509: MatShellGetContext(A,(void**)&mpack);
1510: MPI_Comm_rank(((PetscObject)mpack->right)->comm,&rank);
1511: xnext = mpack->left->next;
1512: ynext = mpack->right->next;
1513: anext = mpack->next;
1515: while (xnext) {
1516: if (xnext->type == DMCOMPOSITE_ARRAY) {
1517: if (rank == ynext->rank) {
1518: VecGetArray(x,&xarray);
1519: VecGetArray(y,&yarray);
1520: PetscMemcpy(yarray+ynext->rstart,xarray+xnext->rstart,xnext->n*sizeof(PetscScalar));
1521: VecRestoreArray(x,&xarray);
1522: VecRestoreArray(y,&yarray);
1523: }
1524: } else if (xnext->type == DMCOMPOSITE_DM) {
1525: VecGetArray(x,&xarray);
1526: VecGetArray(y,&yarray);
1527: DMGetGlobalVector(xnext->dm,&xglobal);
1528: DMGetGlobalVector(ynext->dm,&yglobal);
1529: VecPlaceArray(xglobal,xarray+xnext->rstart);
1530: VecPlaceArray(yglobal,yarray+ynext->rstart);
1531: MatMultTranspose(anext->A,xglobal,yglobal);
1532: VecRestoreArray(x,&xarray);
1533: VecRestoreArray(y,&yarray);
1534: VecResetArray(xglobal);
1535: VecResetArray(yglobal);
1536: DMRestoreGlobalVector(xnext->dm,&xglobal);
1537: DMRestoreGlobalVector(ynext->dm,&yglobal);
1538: anext = anext->next;
1539: } else {
1540: SETERRQ(PETSC_ERR_SUP,"Cannot handle that object type yet");
1541: }
1542: xnext = xnext->next;
1543: ynext = ynext->next;
1544: }
1545: return(0);
1546: }
1550: PetscErrorCode MatDestroy_Shell_Pack(Mat A)
1551: {
1552: struct MatPack *mpack;
1553: struct MatPackLink *anext,*oldanext;
1554: PetscErrorCode ierr;
1557: MatShellGetContext(A,(void**)&mpack);
1558: anext = mpack->next;
1560: while (anext) {
1561: MatDestroy(anext->A);
1562: oldanext = anext;
1563: anext = anext->next;
1564: PetscFree(oldanext);
1565: }
1566: PetscFree(mpack);
1567: PetscObjectChangeTypeName((PetscObject)A,0);
1568: return(0);
1569: }
1573: /*@C
1574: DMCompositeGetInterpolation - Gets the interpolation from a coarse DMComposite to a fine one built by refining all of its DMs
1576: Collective on DMComposite
1578: Input Parameters:
1579: + coarse - coarse grid packer
1580: - fine - fine grid packer
1582: Output Parameters:
1583: + A - interpolation matrix
1584: - v - scaling vector
1585:
1586: Level: advanced
1588: .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeCreateGlobalVector(),
1589: DMCompositeGather(), DMCompositeCreate(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess(),
1590: DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeScatter(),DMCompositeGetEntries()
1592: @*/
1593: PetscErrorCode DMCompositeGetInterpolation(DMComposite coarse,DMComposite fine,Mat *A,Vec *v)
1594: {
1595: PetscErrorCode ierr;
1596: PetscInt m,n,M,N;
1597: struct DMCompositeLink *nextc;
1598: struct DMCompositeLink *nextf;
1599: struct MatPackLink *nextmat,*pnextmat = 0;
1600: struct MatPack *mpack;
1601: Vec gcoarse,gfine;
1606: nextc = coarse->next;
1607: nextf = fine->next;
1608: /* use global vectors only for determining matrix layout */
1609: DMCompositeCreateGlobalVector(coarse,&gcoarse);
1610: DMCompositeCreateGlobalVector(fine,&gfine);
1611: VecGetLocalSize(gcoarse,&n);
1612: VecGetLocalSize(gfine,&m);
1613: VecGetSize(gcoarse,&N);
1614: VecGetSize(gfine,&M);
1615: VecDestroy(gcoarse);
1616: VecDestroy(gfine);
1618: PetscNew(struct MatPack,&mpack);
1619: mpack->right = coarse;
1620: mpack->left = fine;
1621: MatCreate(((PetscObject)fine)->comm,A);
1622: MatSetSizes(*A,m,n,M,N);
1623: MatSetType(*A,MATSHELL);
1624: MatShellSetContext(*A,mpack);
1625: MatShellSetOperation(*A,MATOP_MULT,(void(*)(void))MatMult_Shell_Pack);
1626: MatShellSetOperation(*A,MATOP_MULT_TRANSPOSE,(void(*)(void))MatMultTranspose_Shell_Pack);
1627: MatShellSetOperation(*A,MATOP_MULT_ADD,(void(*)(void))MatMultAdd_Shell_Pack);
1628: MatShellSetOperation(*A,MATOP_DESTROY,(void(*)(void))MatDestroy_Shell_Pack);
1630: /* loop over packed objects, handling one at a time */
1631: while (nextc) {
1632: if (nextc->type != nextf->type) SETERRQ(PETSC_ERR_ARG_INCOMP,"The two DMComposite objects have different layouts");
1634: if (nextc->type == DMCOMPOSITE_ARRAY) {
1635: ;
1636: } else if (nextc->type == DMCOMPOSITE_DM) {
1637: PetscNew(struct MatPackLink,&nextmat);
1638: nextmat->next = 0;
1639: if (pnextmat) {
1640: pnextmat->next = nextmat;
1641: pnextmat = nextmat;
1642: } else {
1643: pnextmat = nextmat;
1644: mpack->next = nextmat;
1645: }
1646: DMGetInterpolation(nextc->dm,nextf->dm,&nextmat->A,PETSC_NULL);
1647: } else {
1648: SETERRQ(PETSC_ERR_SUP,"Cannot handle that object type yet");
1649: }
1650: nextc = nextc->next;
1651: nextf = nextf->next;
1652: }
1653: return(0);
1654: }
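/*
   Hierarchy sketch (assumes every DM in the pack supports DMRefine(), e.g. DAs; the scaling
   vector v is never set by this implementation, so PETSC_NULL is passed):

     DMComposite fine;
     Mat         Interp;

     DMCompositeRefine(pack,PETSC_COMM_WORLD,&fine);
     DMCompositeGetInterpolation(pack,fine,&Interp,PETSC_NULL);
     ... MatMult(Interp,Ucoarse,Ufine) interpolates from the coarse pack to the fine one ...
     MatDestroy(Interp);
     DMCompositeDestroy(fine);
*/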
1658: /*@C
1659: DMCompositeGetMatrix - Creates a matrix with the correct parallel layout and nonzero structure required for
1660: computing the Jacobian of a function defined using the stencils set in the DMs and the coupling in the array variables
1662: Collective on DMComposite
1664: Input Parameters:
1665: + packer - the DMComposite object
1666: - mtype - Supported types are MATSEQAIJ, MATMPIAIJ
1668: Output Parameter:
1669: . J - matrix with the correct nonzero structure
1670: (obviously without the correct Jacobian values)
1672: Level: advanced
1674: Notes: This properly preallocates the number of nonzeros in the sparse matrix so you
1675: do not need to do it yourself.
1678: .seealso DAGetMatrix(), DMCompositeCreate()
1680: @*/
1681: PetscErrorCode DMCompositeGetMatrix(DMComposite packer, const MatType mtype,Mat *J)
1682: {
1683: PetscErrorCode ierr;
1684: struct DMCompositeLink *next = packer->next;
1685: PetscInt m,*dnz,*onz,i,j,mA;
1686: Mat Atmp;
1687: PetscMPIInt rank;
1688: PetscScalar zero = 0.0;
1689: PetscTruth dense = PETSC_FALSE;
1694: /* use global vector to determine layout needed for matrix */
1695: m = packer->n;
1696: MPI_Comm_rank(((PetscObject)packer)->comm,&rank);
1697: MatCreate(((PetscObject)packer)->comm,J);
1698: MatSetSizes(*J,m,m,PETSC_DETERMINE,PETSC_DETERMINE);
1699: MatSetType(*J,MATAIJ);
1701: /*
1702: Extremely inefficient but will compute entire Jacobian for testing
1703: */
1704: PetscOptionsGetTruth(PETSC_NULL,"-dmcomposite_dense_jacobian",&dense,PETSC_NULL);
1705: if (dense) {
1706: PetscInt rstart,rend,*indices;
1707: PetscScalar *values;
1709: mA = packer->N;
1710: MatMPIAIJSetPreallocation(*J,mA,PETSC_NULL,mA-m,PETSC_NULL);
1711: MatSeqAIJSetPreallocation(*J,mA,PETSC_NULL);
1713: MatGetOwnershipRange(*J,&rstart,&rend);
1714: PetscMalloc2(mA,PetscScalar,&values,mA,PetscInt,&indices);
1715: PetscMemzero(values,mA*sizeof(PetscScalar));
1716: for (i=0; i<mA; i++) indices[i] = i;
1717: for (i=rstart; i<rend; i++) {
1718: MatSetValues(*J,1,&i,mA,indices,values,INSERT_VALUES);
1719: }
1720: PetscFree2(values,indices);
1721: MatAssemblyBegin(*J,MAT_FINAL_ASSEMBLY);
1722: MatAssemblyEnd(*J,MAT_FINAL_ASSEMBLY);
1723: return(0);
1724: }
1726: MatPreallocateInitialize(((PetscObject)packer)->comm,m,m,dnz,onz);
1727: /* loop over packed objects, handling one at a time */
1728: next = packer->next;
1729: while (next) {
1730: if (next->type == DMCOMPOSITE_ARRAY) {
1731: if (rank == next->rank) { /* zero the "little" block */
1732: for (j=packer->rstart+next->rstart; j<packer->rstart+next->rstart+next->n; j++) {
1733: for (i=packer->rstart+next->rstart; i<packer->rstart+next->rstart+next->n; i++) {
1734: MatPreallocateSet(j,1,&i,dnz,onz);
1735: }
1736: }
1737: }
1738: } else if (next->type == DMCOMPOSITE_DM) {
1739: PetscInt nc,rstart,*ccols,maxnc;
1740: const PetscInt *cols,*rstarts;
1741: PetscMPIInt proc;
1743: DMGetMatrix(next->dm,mtype,&Atmp);
1744: MatGetOwnershipRange(Atmp,&rstart,PETSC_NULL);
1745: MatGetOwnershipRanges(Atmp,&rstarts);
1746: MatGetLocalSize(Atmp,&mA,PETSC_NULL);
1748: maxnc = 0;
1749: for (i=0; i<mA; i++) {
1750: MatGetRow(Atmp,rstart+i,&nc,PETSC_NULL,PETSC_NULL);
1751: MatRestoreRow(Atmp,rstart+i,&nc,PETSC_NULL,PETSC_NULL);
1752: maxnc = PetscMax(nc,maxnc);
1753: }
1754: PetscMalloc(maxnc*sizeof(PetscInt),&ccols);
1755: for (i=0; i<mA; i++) {
1756: MatGetRow(Atmp,rstart+i,&nc,&cols,PETSC_NULL);
1757: /* remap the columns, taking into account how much they are shifted on each process */
1758: for (j=0; j<nc; j++) {
1759: proc = 0;
1760: while (cols[j] >= rstarts[proc+1]) proc++;
1761: ccols[j] = cols[j] + next->grstarts[proc] - rstarts[proc];
1762: }
1763: MatPreallocateSet(packer->rstart+next->rstart+i,nc,ccols,dnz,onz);
1764: MatRestoreRow(Atmp,rstart+i,&nc,&cols,PETSC_NULL);
1765: }
1766: PetscFree(ccols);
1767: MatDestroy(Atmp);
1768: } else {
1769: SETERRQ(PETSC_ERR_SUP,"Cannot handle that object type yet");
1770: }
1771: next = next->next;
1772: }
1773: if (packer->FormCoupleLocations) {
1774: (*packer->FormCoupleLocations)(packer,PETSC_NULL,dnz,onz,__rstart,__nrows,__start,__end); /* the __ variables are defined by the MatPreallocateInitialize() macro */
1775: }
1776: MatMPIAIJSetPreallocation(*J,0,dnz,0,onz);
1777: MatSeqAIJSetPreallocation(*J,0,dnz);
1778: MatPreallocateFinalize(dnz,onz);
1780: next = packer->next;
1781: while (next) {
1782: if (next->type == DMCOMPOSITE_ARRAY) {
1783: if (rank == next->rank) {
1784: for (j=packer->rstart+next->rstart; j<packer->rstart+next->rstart+next->n; j++) {
1785: for (i=packer->rstart+next->rstart; i<packer->rstart+next->rstart+next->n; i++) {
1786: MatSetValues(*J,1,&j,1,&i,&zero,INSERT_VALUES);
1787: }
1788: }
1789: }
1790: } else if (next->type == DMCOMPOSITE_DM) {
1791: PetscInt nc,rstart,row,maxnc,*ccols;
1792: const PetscInt *cols,*rstarts;
1793: const PetscScalar *values;
1794: PetscMPIInt proc;
1796: DMGetMatrix(next->dm,mtype,&Atmp);
1797: MatGetOwnershipRange(Atmp,&rstart,PETSC_NULL);
1798: MatGetOwnershipRanges(Atmp,&rstarts);
1799: MatGetLocalSize(Atmp,&mA,PETSC_NULL);
1800: maxnc = 0;
1801: for (i=0; i<mA; i++) {
1802: MatGetRow(Atmp,rstart+i,&nc,PETSC_NULL,PETSC_NULL);
1803: MatRestoreRow(Atmp,rstart+i,&nc,PETSC_NULL,PETSC_NULL);
1804: maxnc = PetscMax(nc,maxnc);
1805: }
1806: PetscMalloc(maxnc*sizeof(PetscInt),&ccols);
1807: for (i=0; i<mA; i++) {
1808: MatGetRow(Atmp,rstart+i,&nc,(const PetscInt **)&cols,&values);
1809: for (j=0; j<nc; j++) {
1810: proc = 0;
1811: while (cols[j] >= rstarts[proc+1]) proc++;
1812: ccols[j] = cols[j] + next->grstarts[proc] - rstarts[proc];
1813: }
1814: row = packer->rstart+next->rstart+i;
1815: MatSetValues(*J,1,&row,nc,ccols,values,INSERT_VALUES);
1816: MatRestoreRow(Atmp,rstart+i,&nc,(const PetscInt **)&cols,&values);
1817: }
1818: PetscFree(ccols);
1819: MatDestroy(Atmp);
1820: } else {
1821: SETERRQ(PETSC_ERR_SUP,"Cannot handle that object type yet");
1822: }
1823: next = next->next;
1824: }
1825: if (packer->FormCoupleLocations) {
1826: PetscInt __rstart;
1827: MatGetOwnershipRange(*J,&__rstart,PETSC_NULL);
1828: (*packer->FormCoupleLocations)(packer,*J,PETSC_NULL,PETSC_NULL,__rstart,0,0,0);
1829: }
1830: MatAssemblyBegin(*J,MAT_FINAL_ASSEMBLY);
1831: MatAssemblyEnd(*J,MAT_FINAL_ASSEMBLY);
1832: return(0);
1833: }
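/*
   Jacobian-matrix sketch for the pack assembled after DMCompositeCreate() above:

     Mat J;

     DMCompositeGetMatrix(pack,MATAIJ,&J);
     ... insert entries with MatSetValues() using global indices (e.g. from
         DMCompositeGetGlobalIndices()) and assemble as usual ...
     MatDestroy(J);
*/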
1837: /*@
1838: DMCompositeGetColoring - Gets the coloring required for computing the Jacobian via
1839: finite differences on a function defined using a DMComposite "grid"
1841: Collective on DMComposite
1843: Input Parameters:
1844: + dmcomposite - the DMComposite object
1845: . ctype - IS_COLORING_GLOBAL or IS_COLORING_GHOSTED
1846: - mtype - MATAIJ or MATBAIJ
1848: Output Parameter:
1849: . coloring - matrix coloring for use in computing Jacobians (or PETSC_NULL if not needed)
1851: Level: advanced
1853: Notes: This colors each diagonal block (associated with a single DM) with a different set of colors;
1854: thus it will compute the diagonal blocks of the Jacobian correctly. The off-diagonal blocks are
1855: not computed, hence the Jacobian computed is not the entire Jacobian. If -dmcomposite_dense_jacobian
1856: is used then each column of the Jacobian is given a different color so the full Jacobian is computed
1857: correctly.
1859: These compute the graph coloring of the graph of A^{T}A. The coloring used
1860: for efficient (parallel or thread based) triangular solves etc is NOT yet
1861: available.
1864: .seealso ISColoringView(), ISColoringGetIS(), MatFDColoringCreate(), ISColoringType, ISColoring, DAGetColoring()
1866: @*/
1867: PetscErrorCode DMCompositeGetColoring(DMComposite dmcomposite,ISColoringType ctype,const MatType mtype,ISColoring *coloring)
1868: {
1869: PetscErrorCode ierr;
1870: PetscInt n,i,cnt;
1871: ISColoringValue *colors;
1872: PetscTruth dense = PETSC_FALSE;
1873: ISColoringValue maxcol = 0;
1877: if (ctype == IS_COLORING_GHOSTED) {
1878: SETERRQ(PETSC_ERR_SUP,"Currently you must use -dmmg_iscoloring_type global");
1879: } else if (ctype == IS_COLORING_GLOBAL) {
1880: n = dmcomposite->n;
1881: } else SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Unknown ISColoringType");
1882: PetscMalloc(n*sizeof(ISColoringValue),&colors); /* freed in ISColoringDestroy() */
1884: PetscOptionsGetTruth(PETSC_NULL,"-dmcomposite_dense_jacobian",&dense,PETSC_NULL);
1885: if (dense) {
1886: for (i=0; i<n; i++) {
1887: colors[i] = (ISColoringValue)(dmcomposite->rstart + i);
1888: }
1889: maxcol = dmcomposite->N;
1890: } else {
1891: struct DMCompositeLink *next = dmcomposite->next;
1892: PetscMPIInt rank;
1893:
1894: MPI_Comm_rank(((PetscObject)dmcomposite)->comm,&rank);
1895: cnt = 0;
1896: while (next) {
1897: if (next->type == DMCOMPOSITE_ARRAY) {
1898: if (rank == next->rank) { /* each column gets its own color */
1899: for (i=dmcomposite->rstart+next->rstart; i<dmcomposite->rstart+next->rstart+next->n; i++) {
1900: colors[cnt++] = maxcol++;
1901: }
1902: }
1903: MPI_Bcast(&maxcol,1,MPIU_COLORING_VALUE,next->rank,((PetscObject)dmcomposite)->comm);
1904: } else if (next->type == DMCOMPOSITE_DM) {
1905: ISColoring lcoloring;
1907: DMGetColoring(next->dm,IS_COLORING_GLOBAL,mtype,&lcoloring);
1908: for (i=0; i<lcoloring->N; i++) {
1909: colors[cnt++] = maxcol + lcoloring->colors[i];
1910: }
1911: maxcol += lcoloring->n;
1912: ISColoringDestroy(lcoloring);
1913: } else {
1914: SETERRQ(PETSC_ERR_SUP,"Cannot handle that object type yet");
1915: }
1916: next = next->next;
1917: }
1918: }
1919: ISColoringCreate(((PetscObject)dmcomposite)->comm,maxcol,n,colors,coloring);
1920: return(0);
1921: }
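/*
   Finite-difference Jacobian sketch combining DMCompositeGetMatrix() and
   DMCompositeGetColoring(); the MatFDColoring set-up is abbreviated:

     Mat           J;
     ISColoring    coloring;
     MatFDColoring fd;

     DMCompositeGetMatrix(pack,MATAIJ,&J);
     DMCompositeGetColoring(pack,IS_COLORING_GLOBAL,MATAIJ,&coloring);
     MatFDColoringCreate(J,coloring,&fd);
     ISColoringDestroy(coloring);
     ... MatFDColoringSetFunction(), then MatFDColoringApply() to compute J ...
*/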
1925: /*@C
1926: DMCompositeGlobalToLocalBegin - Begins the update of a single local vector from the global vector
1928: Collective on DMComposite
1930: Input Parameters:
1931: + packer - the packer object
1932: . gvec - the global vector
1933: . mode - INSERT_VALUES or ADD_VALUES (the redundant-array pieces currently handle only INSERT_VALUES)
1934: - lvec - single local vector
1935: Level: advanced
1937: .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeCreateGlobalVector(),
1938: DMCompositeGather(), DMCompositeCreate(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess(),
1939: DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries()
1941: @*/
1942: PetscErrorCode DMCompositeGlobalToLocalBegin(DMComposite packer,Vec gvec,InsertMode mode,Vec lvec)
1943: {
1944: PetscErrorCode ierr;
1945: struct DMCompositeLink *next;
1947: PetscMPIInt rank;
1948: PetscScalar *garray,*larray;
1953: next = packer->next;
1954: if (!packer->setup) {
1955: DMCompositeSetUp(packer);
1956: }
1957: MPI_Comm_rank(((PetscObject)packer)->comm,&rank);
1958: VecGetArray(gvec,&garray);
1959: VecGetArray(lvec,&larray);
1961: /* loop over packed objects, handling one at a time */
1962: while (next) {
1963: if (next->type == DMCOMPOSITE_ARRAY) {
1964: if (rank == next->rank) {
1965: PetscMemcpy(larray,garray,next->n*sizeof(PetscScalar));
1966: garray += next->n;
1967: }
1968: /* does not handle ADD_VALUES */
1969: MPI_Bcast(larray,next->n,MPIU_SCALAR,next->rank,((PetscObject)packer)->comm);
1970: larray += next->n;
1971: } else if (next->type == DMCOMPOSITE_DM) {
1972: Vec local,global;
1973: PetscInt N;
1975: DMGetGlobalVector(next->dm,&global);
1976: VecGetLocalSize(global,&N);
1977: VecPlaceArray(global,garray);
1978: DMGetLocalVector(next->dm,&local);
1979: VecPlaceArray(local,larray);
1980: DMGlobalToLocalBegin(next->dm,global,mode,local);
1981: DMGlobalToLocalEnd(next->dm,global,mode,local);
1982: VecResetArray(global);
1983: VecResetArray(local);
1984: DMRestoreGlobalVector(next->dm,&global);
1985: DMRestoreLocalVector(next->dm,&local); /* the vector came from DMGetLocalVector(), not DMGetGlobalVector() */
1986: garray += N; larray += next->n; /* advance past this DM's piece of both packed arrays (N equals next->n) */
1987: } else {
1988: SETERRQ(PETSC_ERR_SUP,"Cannot handle that object type yet");
1989: }
1991: next = next->next;
1992: }
1994: VecRestoreArray(gvec,PETSC_NULL);
1995: VecRestoreArray(lvec,PETSC_NULL);
1996: return(0);
1997: }
2001: /*@C
2002: DMCompositeGlobalToLocalEnd - Completes the update begun with DMCompositeGlobalToLocalBegin(); all communication is handled in the Begin phase
2004: Collective on DMComposite
2006: Input Parameters:
2007: + packer - the packer object
2008: . gvec - the global vector
2009: . mode - INSERT_VALUES or ADD_VALUES
2010: - lvec - single local vector
2011: Level: advanced
2013: .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeCreateGlobalVector(),
2014: DMCompositeGather(), DMCompositeCreate(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess(),
2015: DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries()
2017: @*/
2018: PetscErrorCode DMCompositeGlobalToLocalEnd(DMComposite packer,Vec gvec,InsertMode mode,Vec lvec)
2019: {
2021: return(0);
2022: }
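/*
   Single-local-vector sketch (contrast with DMCompositeScatter(), which fills one local
   object per link), reusing the pack assembled after DMCompositeCreate() above:

     Vec lvec;

     DMCompositeCreateLocalVector(pack,&lvec);
     DMCompositeGlobalToLocalBegin(pack,U,INSERT_VALUES,lvec);
     DMCompositeGlobalToLocalEnd(pack,U,INSERT_VALUES,lvec);
     ...
     VecDestroy(lvec);
*/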