Actual source code: pbvec.c
#define PETSCVEC_DLL
/*
   This file contains routines for Parallel vector operations.
 */
#include "../src/vec/vec/impls/mpi/pvecimpl.h"

#if 0
static PetscErrorCode VecPublish_MPI(PetscObject obj)
{
  return(0);
}
#endif
PetscErrorCode VecDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar sum,work;

  VecDot_Seq(xin,yin,&work);
  MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,((PetscObject)xin)->comm);
  *z = sum;
  return(0);
}
PetscErrorCode VecTDot_MPI(Vec xin,Vec yin,PetscScalar *z)
{
  PetscScalar sum,work;

  VecTDot_Seq(xin,yin,&work);
  MPI_Allreduce(&work,&sum,1,MPIU_SCALAR,MPIU_SUM,((PetscObject)xin)->comm);
  *z = sum;
  return(0);
}
PetscErrorCode VecSetOption_MPI(Vec v,VecOption op,PetscTruth flag)
{
  if (op == VEC_IGNORE_OFF_PROC_ENTRIES) {
    v->stash.donotstash = flag;
  } else if (op == VEC_IGNORE_NEGATIVE_INDICES) {
    v->stash.ignorenegidx = flag;
  }
  return(0);
}

EXTERN PetscErrorCode VecDuplicate_MPI(Vec,Vec *);
EXTERN PetscErrorCode VecView_MPI_Draw(Vec,PetscViewer);
PetscErrorCode VecPlaceArray_MPI(Vec vin,const PetscScalar *a)
{
  Vec_MPI *v = (Vec_MPI *)vin->data;

  if (v->unplacedarray) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"VecPlaceArray() was already called on this vector, without a call to VecResetArray()");
  v->unplacedarray = v->array;  /* save previous array so reset can bring it back */
  v->array         = (PetscScalar *)a;
  if (v->localrep) {
    VecPlaceArray(v->localrep,a);
  }
  return(0);
}
PetscErrorCode VecResetArray_MPI(Vec vin)
{
  Vec_MPI *v = (Vec_MPI *)vin->data;

  v->array         = v->unplacedarray;
  v->unplacedarray = 0;
  if (v->localrep) {
    VecResetArray(v->localrep);
  }
  return(0);
}

EXTERN PetscErrorCode VecLoad_Binary(PetscViewer, const VecType, Vec*);
EXTERN PetscErrorCode VecGetValues_MPI(Vec,PetscInt,const PetscInt [],PetscScalar []);
static struct _VecOps DvOps = { VecDuplicate_MPI,          /* 1 */
                                VecDuplicateVecs_Default,
                                VecDestroyVecs_Default,
                                VecDot_MPI,
                                VecMDot_MPI,
                                VecNorm_MPI,
                                VecTDot_MPI,
                                VecMTDot_MPI,
                                VecScale_Seq,
                                VecCopy_Seq,               /* 10 */
                                VecSet_Seq,
                                VecSwap_Seq,
                                VecAXPY_Seq,
                                VecAXPBY_Seq,
                                VecMAXPY_Seq,
                                VecAYPX_Seq,
                                VecWAXPY_Seq,
                                VecAXPBYPCZ_Seq,
                                VecPointwiseMult_Seq,
                                VecPointwiseDivide_Seq,
                                VecSetValues_MPI,          /* 20 */
                                VecAssemblyBegin_MPI,
                                VecAssemblyEnd_MPI,
                                VecGetArray_Seq,
                                VecGetSize_MPI,
                                VecGetSize_Seq,
                                VecRestoreArray_Seq,
                                VecMax_MPI,
                                VecMin_MPI,
                                VecSetRandom_Seq,
                                VecSetOption_MPI,
                                VecSetValuesBlocked_MPI,
                                VecDestroy_MPI,
                                VecView_MPI,
                                VecPlaceArray_MPI,
                                VecReplaceArray_Seq,
                                VecDot_Seq,
                                VecTDot_Seq,
                                VecNorm_Seq,
                                VecMDot_Seq,
                                VecMTDot_Seq,
                                VecLoadIntoVector_Default,
                                0,                         /* VecLoadIntoVectorNative */
                                VecReciprocal_Default,
                                0,                         /* VecViewNative... */
                                VecConjugate_Seq,
                                0,
                                0,
                                VecResetArray_MPI,
                                0,
                                VecMaxPointwiseDivide_Seq,
                                VecLoad_Binary,
                                VecPointwiseMax_Seq,
                                VecPointwiseMaxAbs_Seq,
                                VecPointwiseMin_Seq,
                                VecGetValues_MPI};
/*
    VecCreate_MPI_Private - Basic create routine called by VecCreate_MPI() (i.e. VecCreateMPI()),
    VecCreateMPIWithArray(), VecCreate_Shared() (i.e. VecCreateShared()), VecCreateGhost(),
    VecDuplicate_MPI(), VecCreateGhostWithArray(), and VecDuplicate_Shared()

    If alloc is true and array is PETSC_NULL then this routine allocates the space, otherwise
    no space is allocated.
*/
PetscErrorCode VecCreate_MPI_Private(Vec v,PetscTruth alloc,PetscInt nghost,const PetscScalar array[])
{
  Vec_MPI *s;

  PetscNewLog(v,Vec_MPI,&s);
  v->data        = (void*)s;
  PetscMemcpy(v->ops,&DvOps,sizeof(DvOps));
  s->nghost      = nghost;
  v->mapping     = 0;
  v->bmapping    = 0;
  v->petscnative = PETSC_TRUE;

  if (v->map->bs == -1) v->map->bs = 1;
  PetscLayoutSetUp(v->map);
  s->array           = (PetscScalar *)array;
  s->array_allocated = 0;
  if (alloc && !array) {
    PetscInt n = v->map->n+nghost;
    PetscMalloc(n*sizeof(PetscScalar),&s->array);
    PetscLogObjectMemory(v,n*sizeof(PetscScalar));
    PetscMemzero(s->array,v->map->n*sizeof(PetscScalar));
    s->array_allocated = s->array;
  }

  /* By default parallel vectors do not have local representation */
  s->localrep    = 0;
  s->localupdate = 0;

  v->stash.insertmode = NOT_SET_VALUES;
  /* create the stashes. The block-size for bstash is set later when
     VecSetValuesBlocked is called.
  */
  VecStashCreate_Private(((PetscObject)v)->comm,1,&v->stash);
  VecStashCreate_Private(((PetscObject)v)->comm,v->map->bs,&v->bstash);

#if defined(PETSC_HAVE_MATLAB_ENGINE)
  PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEnginePut_C","VecMatlabEnginePut_Default",VecMatlabEnginePut_Default);
  PetscObjectComposeFunctionDynamic((PetscObject)v,"PetscMatlabEngineGet_C","VecMatlabEngineGet_Default",VecMatlabEngineGet_Default);
#endif
  PetscObjectChangeTypeName((PetscObject)v,VECMPI);
  PetscPublishAll(v);
  return(0);
}
/*MC
   VECMPI - VECMPI = "mpi" - The basic parallel vector

   Options Database Keys:
.  -vec_type mpi - sets the vector type to VECMPI during a call to VecSetFromOptions()

   Level: beginner

.seealso: VecCreate(), VecSetType(), VecSetFromOptions(), VecCreateMPIWithArray(), VECMPI, VecType, VecCreateMPI()
M*/
PetscErrorCode VecCreate_MPI(Vec vv)
{
  VecCreate_MPI_Private(vv,PETSC_TRUE,0,0);
  return(0);
}
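
/*
   Usage sketch (illustrative only, not taken from the library source): one common way the
   VECMPI type documented above is selected through the public API, either explicitly with
   VecSetType() or at runtime with -vec_type mpi via VecSetFromOptions(). The program layout,
   sizes and values below are assumptions made for the example.
*/
#if 0
#include "petscvec.h"

int main(int argc,char **argv)
{
  Vec            x;
  PetscErrorCode ierr;

  ierr = PetscInitialize(&argc,&argv,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr);
  ierr = VecCreate(PETSC_COMM_WORLD,&x);CHKERRQ(ierr);
  ierr = VecSetSizes(x,PETSC_DECIDE,100);CHKERRQ(ierr);  /* 100 global entries, local split chosen by PETSc */
  ierr = VecSetType(x,VECMPI);CHKERRQ(ierr);             /* or VecSetFromOptions(x) together with -vec_type mpi */
  ierr = VecSet(x,1.0);CHKERRQ(ierr);
  ierr = VecDestroy(x);CHKERRQ(ierr);                    /* pre-3.2 calling sequence, matching this file */
  ierr = PetscFinalize();CHKERRQ(ierr);
  return 0;
}
#endif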
/*@C
   VecCreateMPIWithArray - Creates a parallel, array-style vector,
   where the user provides the array space to store the vector values.

   Collective on MPI_Comm

   Input Parameters:
+  comm  - the MPI communicator to use
.  n     - local vector length, cannot be PETSC_DECIDE
.  N     - global vector length (or PETSC_DECIDE to have it calculated)
-  array - the user provided array to store the vector values

   Output Parameter:
.  vv - the vector

   Notes:
   Use VecDuplicate() or VecDuplicateVecs() to form additional vectors of the
   same type as an existing vector.

   If the user-provided array is PETSC_NULL, then VecPlaceArray() can be used
   at a later stage to SET the array for storing the vector values.

   PETSc does NOT free the array when the vector is destroyed via VecDestroy().
   The user should not free the array until the vector is destroyed.

   Level: intermediate

   Concepts: vectors^creating with array

.seealso: VecCreateSeqWithArray(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateGhost(),
          VecCreateMPI(), VecCreateGhostWithArray(), VecPlaceArray()

@*/
PetscErrorCode VecCreateMPIWithArray(MPI_Comm comm,PetscInt n,PetscInt N,const PetscScalar array[],Vec *vv)
{
  if (n == PETSC_DECIDE) {
    SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size of vector");
  }
  PetscSplitOwnership(comm,&n,&N);
  VecCreate(comm,vv);
  VecSetSizes(*vv,n,N);
  VecCreate_MPI_Private(*vv,PETSC_FALSE,0,array);
  return(0);
}
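
/*
   Usage sketch (illustrative only, not taken from the library source): creating a parallel
   vector on top of caller-owned storage with VecCreateMPIWithArray(). The helper name, the
   local length n and the array contents are assumptions made for the example; note that the
   vector never frees the user-provided array.
*/
#if 0
static PetscErrorCode ExampleMPIWithArray(void)
{
  Vec            x;
  PetscScalar    *array;
  PetscInt       i,n = 4;                    /* local length; cannot be PETSC_DECIDE here */
  PetscErrorCode ierr;

  ierr = PetscMalloc(n*sizeof(PetscScalar),&array);CHKERRQ(ierr);
  for (i=0; i<n; i++) array[i] = (PetscScalar)i;

  ierr = VecCreateMPIWithArray(PETSC_COMM_WORLD,n,PETSC_DECIDE,array,&x);CHKERRQ(ierr);
  ierr = VecView(x,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  ierr = VecDestroy(x);CHKERRQ(ierr);
  ierr = PetscFree(array);CHKERRQ(ierr);     /* freed by the caller, only after the vector is destroyed */
  return 0;
}
#endif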
/*
  This is used in VecGhostGetLocalForm and VecGhostRestoreLocalForm to ensure
  that the state is updated if either vector has changed since the last time
  one of these functions was called. It could apply to any PetscObject, but
  VecGhost is quite different from other objects in that two separate vectors
  look at the same memory.

  In principle, we could propagate state only to the local vector on
  GetLocalForm and to the global vector on RestoreLocalForm, but this version is
  more conservative (i.e. robust against misuse) and simpler.

  Note that this function is correct and changes nothing if both arguments are the
  same, which is the case in serial.
*/
static PetscErrorCode VecGhostStateSync_Private(Vec g,Vec l)
{
  PetscInt gstate,lstate;

  PetscObjectStateQuery((PetscObject)g,&gstate);
  PetscObjectStateQuery((PetscObject)l,&lstate);
  PetscObjectSetState((PetscObject)g,PetscMax(gstate,lstate));
  PetscObjectSetState((PetscObject)l,PetscMax(gstate,lstate));
  return(0);
}
/*@
    VecGhostGetLocalForm - Obtains the local ghosted representation of
    a parallel vector created with VecCreateGhost().

    Not Collective

    Input Parameter:
.   g - the global vector. The vector must have been obtained with either
        VecCreateGhost(), VecCreateGhostWithArray() or VecCreateSeq().

    Output Parameter:
.   l - the local (ghosted) representation

    Notes:
    This routine does not actually update the ghost values, but rather it
    returns a sequential vector that includes the locations for the ghost
    values and their current values. The returned vector and the original
    vector passed in share the same array that contains the actual vector data.

    One should call VecGhostRestoreLocalForm() or VecDestroy() once one is
    finished using the object.

    Level: advanced

    Concepts: vectors^ghost point access

.seealso: VecCreateGhost(), VecGhostRestoreLocalForm(), VecCreateGhostWithArray()

@*/
PetscErrorCode VecGhostGetLocalForm(Vec g,Vec *l)
{
  PetscTruth isseq,ismpi;

  PetscTypeCompare((PetscObject)g,VECSEQ,&isseq);
  PetscTypeCompare((PetscObject)g,VECMPI,&ismpi);
  if (ismpi) {
    Vec_MPI *v = (Vec_MPI*)g->data;
    if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
    *l = v->localrep;
  } else if (isseq) {
    *l = g;
  } else {
    SETERRQ1(PETSC_ERR_ARG_WRONG,"Vector type %s does not have local representation",((PetscObject)g)->type_name);
  }
  VecGhostStateSync_Private(g,*l);
  PetscObjectReference((PetscObject)*l);
  return(0);
}
/*@
    VecGhostRestoreLocalForm - Restores the local ghosted representation of
    a parallel vector obtained with VecGhostGetLocalForm().

    Not Collective

    Input Parameters:
+   g - the global vector
-   l - the local (ghosted) representation

    Notes:
    This routine does not update the ghost values; it simply gives back the
    local (ghosted) representation obtained with VecGhostGetLocalForm().

    Level: advanced

.seealso: VecCreateGhost(), VecGhostGetLocalForm(), VecCreateGhostWithArray()
@*/
PetscErrorCode VecGhostRestoreLocalForm(Vec g,Vec *l)
{
  VecGhostStateSync_Private(g,*l);
  PetscObjectDereference((PetscObject)*l);
  return(0);
}
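
/*
   Usage sketch (illustrative only, not taken from the library source): reading and writing
   owned and ghost entries through the local form of a ghosted vector. The helper name is an
   assumption; g is assumed to have been created with VecCreateGhost() or
   VecCreateGhostWithArray().
*/
#if 0
static PetscErrorCode ExampleLocalForm(Vec g)
{
  Vec            l;
  PetscScalar    *a;
  PetscInt       i,nloc;
  PetscErrorCode ierr;

  ierr = VecGhostGetLocalForm(g,&l);CHKERRQ(ierr);  /* l shares its array with g and appends the ghost slots */
  ierr = VecGetLocalSize(l,&nloc);CHKERRQ(ierr);    /* owned entries first, then the ghost entries */
  ierr = VecGetArray(l,&a);CHKERRQ(ierr);
  for (i=0; i<nloc; i++) a[i] = 2.0*a[i];           /* touches owned and ghost values alike */
  ierr = VecRestoreArray(l,&a);CHKERRQ(ierr);
  ierr = VecGhostRestoreLocalForm(g,&l);CHKERRQ(ierr);
  return 0;
}
#endif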
/*@
   VecGhostUpdateBegin - Begins the vector scatter to update the vector from
   local representation to global or global representation to local.

   Collective on Vec

   Input Parameters:
+  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
.  insertmode - one of ADD_VALUES or INSERT_VALUES
-  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE

   Notes:
   Use the following to update the ghost regions with correct values from the owning process
.vb
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Use the following to accumulate the ghost region values onto the owning processors
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
.ve

   To accumulate the ghost region values onto the owning processors and then update
   the ghost regions correctly, call the latter followed by the former, i.e.,
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Level: advanced

.seealso: VecCreateGhost(), VecGhostUpdateEnd(), VecGhostGetLocalForm(),
          VecGhostRestoreLocalForm(),VecCreateGhostWithArray()

@*/
PetscErrorCode VecGhostUpdateBegin(Vec g,InsertMode insertmode,ScatterMode scattermode)
{
  Vec_MPI *v;

  v = (Vec_MPI*)g->data;
  if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
  if (!v->localupdate) return(0);

  if (scattermode == SCATTER_REVERSE) {
    VecScatterBegin(v->localupdate,v->localrep,g,insertmode,scattermode);
  } else {
    VecScatterBegin(v->localupdate,g,v->localrep,insertmode,scattermode);
  }
  return(0);
}
/*@
   VecGhostUpdateEnd - Ends the vector scatter to update the vector from
   local representation to global or global representation to local.

   Collective on Vec

   Input Parameters:
+  g - the vector (obtained with VecCreateGhost() or VecDuplicate())
.  insertmode - one of ADD_VALUES or INSERT_VALUES
-  scattermode - one of SCATTER_FORWARD or SCATTER_REVERSE

   Notes:
   Use the following to update the ghost regions with correct values from the owning process
.vb
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Use the following to accumulate the ghost region values onto the owning processors
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
.ve

   To accumulate the ghost region values onto the owning processors and then update
   the ghost regions correctly, call the latter followed by the former, i.e.,
.vb
       VecGhostUpdateBegin(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateEnd(v,ADD_VALUES,SCATTER_REVERSE);
       VecGhostUpdateBegin(v,INSERT_VALUES,SCATTER_FORWARD);
       VecGhostUpdateEnd(v,INSERT_VALUES,SCATTER_FORWARD);
.ve

   Level: advanced

.seealso: VecCreateGhost(), VecGhostUpdateBegin(), VecGhostGetLocalForm(),
          VecGhostRestoreLocalForm(),VecCreateGhostWithArray()

@*/
PetscErrorCode VecGhostUpdateEnd(Vec g,InsertMode insertmode,ScatterMode scattermode)
{
  Vec_MPI *v;

  v = (Vec_MPI*)g->data;
  if (!v->localrep) SETERRQ(PETSC_ERR_ARG_WRONG,"Vector is not ghosted");
  if (!v->localupdate) return(0);

  if (scattermode == SCATTER_REVERSE) {
    VecScatterEnd(v->localupdate,v->localrep,g,insertmode,scattermode);
  } else {
    VecScatterEnd(v->localupdate,g,v->localrep,insertmode,scattermode);
  }
  return(0);
}
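
/*
   Usage sketch (illustrative only, not taken from the library source): the combined scatter
   pattern described in the manual pages above, first accumulating ghost-region contributions
   onto the owning processes and then refreshing the ghost regions from the owned values.
   The helper name is an assumption; g is assumed to be a ghosted vector.
*/
#if 0
static PetscErrorCode ExampleGhostUpdate(Vec g)
{
  PetscErrorCode ierr;

  /* accumulate ghost-region contributions onto the owning processes */
  ierr = VecGhostUpdateBegin(g,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecGhostUpdateEnd(g,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);

  /* refresh the ghost regions with the now-correct owned values */
  ierr = VecGhostUpdateBegin(g,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecGhostUpdateEnd(g,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  return 0;
}
#endif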
/*@C
   VecCreateGhostWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space.

   Collective on MPI_Comm

   Input Parameters:
+  comm   - the MPI communicator to use
.  n      - local vector length
.  N      - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
.  ghosts - global indices of ghost points (or PETSC_NULL if not needed)
-  array  - the space to store the vector values (as long as n + nghost)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^creating with array
   Concepts: vectors^ghosted

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode VecCreateGhostWithArray(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  PetscErrorCode         ierr;
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,*indices;

  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  PetscSplitOwnership(comm,&n,&N);
  /* Create global representation */
  VecCreate(comm,vv);
  VecSetSizes(*vv,n,N);
  VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost,array);
  w = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  VecGetArray(*vv,&larray);
  VecCreateSeqWithArray(PETSC_COMM_SELF,n+nghost,larray,&w->localrep);
  PetscLogObjectParent(*vv,w->localrep);
  VecRestoreArray(*vv,&larray);

  /*
     Create scatter context for scattering (updating) ghost values
  */
  ISCreateGeneral(comm,nghost,ghosts,&from);
  ISCreateStride(PETSC_COMM_SELF,nghost,n,1,&to);
  VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);
  PetscLogObjectParent(*vv,w->localupdate);
  ISDestroy(to);
  ISDestroy(from);

  /* set local to global mapping for ghosted vector */
  PetscMalloc((n+nghost)*sizeof(PetscInt),&indices);
  VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);
  for (i=0; i<n; i++) {
    indices[i] = rstart + i;
  }
  for (i=0; i<nghost; i++) {
    indices[n+i] = ghosts[i];
  }
  ISLocalToGlobalMappingCreate(comm,n+nghost,indices,&ltog);
  PetscFree(indices);
  VecSetLocalToGlobalMapping(*vv,ltog);
  ISLocalToGlobalMappingDestroy(ltog);
  PetscFree(indices);
  return(0);
}
/*@
   VecCreateGhost - Creates a parallel vector with ghost padding on each processor.

   Collective on MPI_Comm

   Input Parameters:
+  comm   - the MPI communicator to use
.  n      - local vector length
.  N      - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost points
-  ghosts - global indices of ghost points

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   This also automatically sets the ISLocalToGlobalMapping() for this vector.

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(), VecGhostUpdateBegin(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecGhostUpdateEnd(),
          VecCreateGhostBlock(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode VecCreateGhost(MPI_Comm comm,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  VecCreateGhostWithArray(comm,n,N,nghost,ghosts,0,vv);
  return(0);
}
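
/*
   Usage sketch (illustrative only, not taken from the library source): creating a ghosted
   vector, filling the owned entries, and bringing the ghost regions up to date. The helper
   name, local length and choice of ghost indices (the first entry of each neighbouring
   process, assuming at least two processes) are assumptions made for the example.
*/
#if 0
static PetscErrorCode ExampleCreateGhost(void)
{
  Vec            g;
  PetscMPIInt    rank,size;
  PetscInt       n = 4,rstart,i,ghosts[2];
  PetscScalar    one = 1.0;
  PetscErrorCode ierr;

  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);CHKERRQ(ierr);
  ghosts[0] = n*((rank+1)%size);         /* first entry owned by the next process */
  ghosts[1] = n*((rank+size-1)%size);    /* first entry owned by the previous process */

  ierr = VecCreateGhost(PETSC_COMM_WORLD,n,PETSC_DECIDE,2,ghosts,&g);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(g,&rstart,PETSC_NULL);CHKERRQ(ierr);
  for (i=rstart; i<rstart+n; i++) {
    ierr = VecSetValues(g,1,&i,&one,INSERT_VALUES);CHKERRQ(ierr);
  }
  ierr = VecAssemblyBegin(g);CHKERRQ(ierr);
  ierr = VecAssemblyEnd(g);CHKERRQ(ierr);

  /* copy the owned values into the ghost regions of the neighbouring processes */
  ierr = VecGhostUpdateBegin(g,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecGhostUpdateEnd(g,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);

  ierr = VecDestroy(g);CHKERRQ(ierr);
  return 0;
}
#endif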
PetscErrorCode VecDuplicate_MPI(Vec win,Vec *v)
{
  Vec_MPI     *vw,*w = (Vec_MPI *)win->data;
  PetscScalar *array;

  VecCreate(((PetscObject)win)->comm,v);

  /* use the map that already exists in win */
  PetscLayoutDestroy((*v)->map);
  (*v)->map = win->map;
  win->map->refcnt++;

  VecCreate_MPI_Private(*v,PETSC_TRUE,w->nghost,0);
  vw = (Vec_MPI *)(*v)->data;
  PetscMemcpy((*v)->ops,win->ops,sizeof(struct _VecOps));

  /* save local representation of the parallel vector (and scatter) if it exists */
  if (w->localrep) {
    VecGetArray(*v,&array);
    VecCreateSeqWithArray(PETSC_COMM_SELF,win->map->n+w->nghost,array,&vw->localrep);
    PetscMemcpy(vw->localrep->ops,w->localrep->ops,sizeof(struct _VecOps));
    VecRestoreArray(*v,&array);
    PetscLogObjectParent(*v,vw->localrep);
    vw->localupdate = w->localupdate;
    if (vw->localupdate) {
      PetscObjectReference((PetscObject)vw->localupdate);
    }
  }

  /* New vector should inherit stashing property of parent */
  (*v)->stash.donotstash   = win->stash.donotstash;
  (*v)->stash.ignorenegidx = win->stash.ignorenegidx;

  PetscOListDuplicate(((PetscObject)win)->olist,&((PetscObject)(*v))->olist);
  PetscFListDuplicate(((PetscObject)win)->qlist,&((PetscObject)(*v))->qlist);
  if (win->mapping) {
    PetscObjectReference((PetscObject)win->mapping);
    (*v)->mapping = win->mapping;
  }
  if (win->bmapping) {
    PetscObjectReference((PetscObject)win->bmapping);
    (*v)->bmapping = win->bmapping;
  }
  (*v)->map->bs   = win->map->bs;
  (*v)->bstash.bs = win->bstash.bs;

  return(0);
}
/* ------------------------------------------------------------------------------------------*/
/*@C
   VecCreateGhostBlockWithArray - Creates a parallel vector with ghost padding on each processor;
   the caller allocates the array space. Indices in the ghost region are based on blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm   - the MPI communicator to use
.  bs     - block size
.  n      - local vector length
.  N      - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
.  ghosts - global indices of ghost blocks (or PETSC_NULL if not needed)
-  array  - the space to store the vector values (as long as n + nghost*bs)

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size, not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^creating ghosted
   Concepts: vectors^creating with array

.seealso: VecCreate(), VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhost(), VecCreateSeqWithArray(), VecCreateMPIWithArray(),
          VecCreateGhostWithArray(), VecCreateGhostBlock()

@*/
PetscErrorCode VecCreateGhostBlockWithArray(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],const PetscScalar array[],Vec *vv)
{
  Vec_MPI                *w;
  PetscScalar            *larray;
  IS                     from,to;
  ISLocalToGlobalMapping ltog;
  PetscInt               rstart,i,nb,*indices;

  *vv = 0;

  if (n == PETSC_DECIDE)      SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local size");
  if (nghost == PETSC_DECIDE) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Must set local ghost size");
  if (nghost < 0)             SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Ghost length must be >= 0");
  if (n % bs)                 SETERRQ(PETSC_ERR_ARG_INCOMP,"Local size must be a multiple of block size");
  PetscSplitOwnership(comm,&n,&N);
  /* Create global representation */
  VecCreate(comm,vv);
  VecSetSizes(*vv,n,N);
  VecCreate_MPI_Private(*vv,PETSC_TRUE,nghost*bs,array);
  VecSetBlockSize(*vv,bs);
  w = (Vec_MPI *)(*vv)->data;
  /* Create local representation */
  VecGetArray(*vv,&larray);
  VecCreateSeqWithArray(PETSC_COMM_SELF,n+bs*nghost,larray,&w->localrep);
  VecSetBlockSize(w->localrep,bs);
  PetscLogObjectParent(*vv,w->localrep);
  VecRestoreArray(*vv,&larray);

  /*
     Create scatter context for scattering (updating) ghost values
  */
  ISCreateBlock(comm,bs,nghost,ghosts,&from);
  ISCreateStride(PETSC_COMM_SELF,bs*nghost,n,1,&to);
  VecScatterCreate(*vv,from,w->localrep,to,&w->localupdate);
  PetscLogObjectParent(*vv,w->localupdate);
  ISDestroy(to);
  ISDestroy(from);

  /* set local to global mapping for ghosted vector */
  nb = n/bs;
  PetscMalloc((nb+nghost)*sizeof(PetscInt),&indices);
  VecGetOwnershipRange(*vv,&rstart,PETSC_NULL);
  for (i=0; i<nb; i++) {
    indices[i] = rstart + i*bs;
  }
  for (i=0; i<nghost; i++) {
    indices[nb+i] = ghosts[i];
  }
  ISLocalToGlobalMappingCreate(comm,nb+nghost,indices,&ltog);
  PetscFree(indices);
  VecSetLocalToGlobalMappingBlock(*vv,ltog);
  ISLocalToGlobalMappingDestroy(ltog);
  PetscFree(indices);

  return(0);
}
/*@
   VecCreateGhostBlock - Creates a parallel vector with ghost padding on each processor.
   The indexing of the ghost points is done with blocks.

   Collective on MPI_Comm

   Input Parameters:
+  comm   - the MPI communicator to use
.  bs     - the block size
.  n      - local vector length
.  N      - global vector length (or PETSC_DECIDE to have it calculated if n is given)
.  nghost - number of local ghost blocks
-  ghosts - global indices of ghost blocks

   Output Parameter:
.  vv - the global vector representation (without ghost points as part of vector)

   Notes:
   Use VecGhostGetLocalForm() to access the local, ghosted representation
   of the vector.

   n is the local vector size (total local size, not the number of blocks) while nghost
   is the number of blocks in the ghost portion, i.e. the number of elements in the ghost
   portion is bs*nghost

   Level: advanced

   Concepts: vectors^ghosted

.seealso: VecCreateSeq(), VecCreate(), VecDuplicate(), VecDuplicateVecs(), VecCreateMPI(),
          VecGhostGetLocalForm(), VecGhostRestoreLocalForm(),
          VecCreateGhostWithArray(), VecCreateMPIWithArray(), VecCreateGhostBlockWithArray()

@*/
PetscErrorCode VecCreateGhostBlock(MPI_Comm comm,PetscInt bs,PetscInt n,PetscInt N,PetscInt nghost,const PetscInt ghosts[],Vec *vv)
{
  VecCreateGhostBlockWithArray(comm,bs,n,N,nghost,ghosts,0,vv);
  return(0);
}
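
/*
   Usage sketch (illustrative only, not taken from the library source): a block-ghosted
   vector. Note that n counts scalar entries while nghost counts ghost BLOCKS, so the local
   form holds n + bs*nghost scalars. The helper name, sizes and ghost block index are
   assumptions made for the example.
*/
#if 0
static PetscErrorCode ExampleCreateGhostBlock(void)
{
  Vec            g,l;
  PetscInt       bs = 2,n = 6,nghost = 1,ghosts[1],nloc;
  PetscErrorCode ierr;

  ghosts[0] = 0;  /* every process ghosts global block 0 (purely illustrative) */
  ierr = VecCreateGhostBlock(PETSC_COMM_WORLD,bs,n,PETSC_DECIDE,nghost,ghosts,&g);CHKERRQ(ierr);

  ierr = VecGhostGetLocalForm(g,&l);CHKERRQ(ierr);
  ierr = VecGetLocalSize(l,&nloc);CHKERRQ(ierr);  /* nloc == n + bs*nghost == 8 here */
  ierr = VecGhostRestoreLocalForm(g,&l);CHKERRQ(ierr);
  ierr = VecDestroy(g);CHKERRQ(ierr);
  return 0;
}
#endif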
/*
   These introduce a ghosted vector where the ghosting is determined by the call to
   VecSetLocalToGlobalMapping()
*/
PetscErrorCode VecSetLocalToGlobalMapping_FETI(Vec vv,ISLocalToGlobalMapping map)
{
  Vec_MPI *v = (Vec_MPI *)vv->data;

  v->nghost = map->n - vv->map->n;

  /* we need to enlarge the array space that was allocated when the vector was created */
  PetscFree(v->array_allocated);
  PetscMalloc(map->n*sizeof(PetscScalar),&v->array_allocated);
  v->array = v->array_allocated;

  /* Create local representation */
  VecCreateSeqWithArray(PETSC_COMM_SELF,map->n,v->array,&v->localrep);
  PetscLogObjectParent(vv,v->localrep);
  return(0);
}

PetscErrorCode VecSetValuesLocal_FETI(Vec vv,PetscInt n,const PetscInt *ix,const PetscScalar *values,InsertMode mode)
{
  Vec_MPI *v = (Vec_MPI *)vv->data;

  VecSetValues(v->localrep,n,ix,values,mode);
  return(0);
}
PetscErrorCode VecCreate_FETI(Vec vv)
{
  VecSetType(vv,VECMPI);

  /* overwrite the functions to handle setting values locally */
  vv->ops->setlocaltoglobalmapping = VecSetLocalToGlobalMapping_FETI;
  vv->ops->setvalueslocal          = VecSetValuesLocal_FETI;
  vv->ops->assemblybegin           = 0;
  vv->ops->assemblyend             = 0;
  vv->ops->setvaluesblocked        = 0;

  return(0);
}