Actual source code: pdvec.c

  1: #define PETSCVEC_DLL
  2: /*
  3:      Code for some of the parallel vector primitives.
  4: */
  5: #include "../src/vec/vec/impls/mpi/pvecimpl.h"
  6: #if defined(PETSC_HAVE_PNETCDF)
  8: #include "pnetcdf.h"
 10: #endif

 14: PetscErrorCode VecDestroy_MPI(Vec v)
 15: {
 16:   Vec_MPI        *x = (Vec_MPI*)v->data;

 20: #if defined(PETSC_USE_LOG)
 21:   PetscLogObjectState((PetscObject)v,"Length=%D",v->map->N);
 22: #endif  
 23:   PetscFree(x->array_allocated);

 25:   /* Destroy local representation of vector if it exists */
 26:   if (x->localrep) {
 27:     VecDestroy(x->localrep);
 28:     if (x->localupdate) {VecScatterDestroy(x->localupdate);}
 29:   }
 30:   /* Destroy the stashes: note the order - so that the tags are freed properly */
 31:   VecStashDestroy_Private(&v->bstash);
 32:   VecStashDestroy_Private(&v->stash);
 33:   PetscFree(x);
 34:   return(0);
 35: }

 39: PetscErrorCode VecView_MPI_ASCII(Vec xin,PetscViewer viewer)
 40: {
 41:   PetscErrorCode    ierr;
 42:   PetscInt          i,work = xin->map->n,cnt,len,nLen;
 43:   PetscMPIInt       j,n = 0,size,rank,tag = ((PetscObject)viewer)->tag;
 44:   MPI_Status        status;
 45:   PetscScalar       *values,*xarray;
 46:   const char        *name;
 47:   PetscViewerFormat format;

 50:   VecGetArray(xin,&xarray);
 51:   /* determine maximum message to arrive */
 52:   MPI_Comm_rank(((PetscObject)xin)->comm,&rank);
 53:   MPI_Reduce(&work,&len,1,MPIU_INT,MPI_MAX,0,((PetscObject)xin)->comm);
 54:   MPI_Comm_size(((PetscObject)xin)->comm,&size);

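  /* Printing is funneled through rank 0: it prints its own local values first,
     then receives each remote process's values in rank order and prints them,
     while every other rank simply sends its local array (see the else branch
     at the end of this routine). */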
 56:   if (!rank) {
 57:     PetscMalloc((len+1)*sizeof(PetscScalar),&values);
 58:     PetscViewerGetFormat(viewer,&format);
 59:     /*
 60:         Matlab format and ASCII format are very similar except 
 61:         Matlab uses %18.16e format while ASCII uses %g
 62:     */
 63:     if (format == PETSC_VIEWER_ASCII_MATLAB) {
 64:       PetscObjectGetName((PetscObject)xin,&name);
 65:       PetscViewerASCIIPrintf(viewer,"%s = [\n",name);
 66:       for (i=0; i<xin->map->n; i++) {
 67: #if defined(PETSC_USE_COMPLEX)
 68:         if (PetscImaginaryPart(xarray[i]) > 0.0) {
 69:           PetscViewerASCIIPrintf(viewer,"%18.16e + %18.16ei\n",PetscRealPart(xarray[i]),PetscImaginaryPart(xarray[i]));
 70:         } else if (PetscImaginaryPart(xarray[i]) < 0.0) {
 71:           PetscViewerASCIIPrintf(viewer,"%18.16e - %18.16ei\n",PetscRealPart(xarray[i]),-PetscImaginaryPart(xarray[i]));
 72:         } else {
 73:           PetscViewerASCIIPrintf(viewer,"%18.16e\n",PetscRealPart(xarray[i]));
 74:         }
 75: #else
 76:         PetscViewerASCIIPrintf(viewer,"%18.16e\n",xarray[i]);
 77: #endif
 78:       }
 79:       /* receive and print messages */
 80:       for (j=1; j<size; j++) {
 81:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,((PetscObject)xin)->comm,&status);
 82:         MPI_Get_count(&status,MPIU_SCALAR,&n);
 83:         for (i=0; i<n; i++) {
 84: #if defined(PETSC_USE_COMPLEX)
 85:           if (PetscImaginaryPart(values[i]) > 0.0) {
 86:             PetscViewerASCIIPrintf(viewer,"%18.16e + %18.16e i\n",PetscRealPart(values[i]),PetscImaginaryPart(values[i]));
 87:           } else if (PetscImaginaryPart(values[i]) < 0.0) {
 88:             PetscViewerASCIIPrintf(viewer,"%18.16e - %18.16e i\n",PetscRealPart(values[i]),-PetscImaginaryPart(values[i]));
 89:           } else {
 90:             PetscViewerASCIIPrintf(viewer,"%18.16e\n",PetscRealPart(values[i]));
 91:           }
 92: #else
 93:           PetscViewerASCIIPrintf(viewer,"%18.16e\n",values[i]);
 94: #endif
 95:         }
 96:       }
 97:       PetscViewerASCIIPrintf(viewer,"];\n");

 99:     } else if (format == PETSC_VIEWER_ASCII_SYMMODU) {
100:       for (i=0; i<xin->map->n; i++) {
101: #if defined(PETSC_USE_COMPLEX)
102:         PetscViewerASCIIPrintf(viewer,"%18.16e %18.16e\n",PetscRealPart(xarray[i]),PetscImaginaryPart(xarray[i]));
103: #else
104:         PetscViewerASCIIPrintf(viewer,"%18.16e\n",xarray[i]);
105: #endif
106:       }
107:       /* receive and print messages */
108:       for (j=1; j<size; j++) {
109:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,((PetscObject)xin)->comm,&status);
110:         MPI_Get_count(&status,MPIU_SCALAR,&n);
111:         for (i=0; i<n; i++) {
112: #if defined(PETSC_USE_COMPLEX)
113:           PetscViewerASCIIPrintf(viewer,"%18.16e %18.16e\n",PetscRealPart(values[i]),PetscImaginaryPart(values[i]));
114: #else
115:           PetscViewerASCIIPrintf(viewer,"%18.16e\n",values[i]);
116: #endif
117:         }
118:       }
119:     } else if (format == PETSC_VIEWER_ASCII_VTK || format == PETSC_VIEWER_ASCII_VTK_CELL) {
120:       /*
121:         state 0: No header has been output
122:         state 1: Only POINT_DATA has been output
123:         state 2: Only CELL_DATA has been output
124:         state 3: Output both, POINT_DATA last
125:         state 4: Output both, CELL_DATA last
126:       */
127:       static PetscInt stateId = -1;
128:       int outputState = 0;
129:       PetscTruth hasState;
130:       int doOutput = 0;
131:       PetscInt bs, b;

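      /* The VTK output state is cached on the viewer with PetscObjectComposedData
         so that successive VecView() calls on the same viewer agree on whether a
         POINT_DATA or CELL_DATA header has already been written. */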
133:       if (stateId < 0) {
134:         PetscObjectComposedDataRegister(&stateId);
135:       }
136:       PetscObjectComposedDataGetInt((PetscObject) viewer, stateId, outputState, hasState);
137:       if (!hasState) {
138:         outputState = 0;
139:       }
140:       PetscObjectGetName((PetscObject) xin, &name);
141:       VecGetLocalSize(xin, &nLen);
142:       n    = PetscMPIIntCast(nLen);
143:       VecGetBlockSize(xin, &bs);
144:       if (format == PETSC_VIEWER_ASCII_VTK) {
145:         if (outputState == 0) {
146:           outputState = 1;
147:           doOutput = 1;
148:         } else if (outputState == 1) {
149:           doOutput = 0;
150:         } else if (outputState == 2) {
151:           outputState = 3;
152:           doOutput = 1;
153:         } else if (outputState == 3) {
154:           doOutput = 0;
155:         } else if (outputState == 4) {
156:           SETERRQ(PETSC_ERR_ARG_WRONGSTATE, "Tried to output POINT_DATA again after intervening CELL_DATA");
157:         }
158:         if (doOutput) {
159:           PetscViewerASCIIPrintf(viewer, "POINT_DATA %d\n", xin->map->N/bs);
160:         }
161:       } else {
162:         if (outputState == 0) {
163:           outputState = 2;
164:           doOutput = 1;
165:         } else if (outputState == 1) {
166:           outputState = 4;
167:           doOutput = 1;
168:         } else if (outputState == 2) {
169:           doOutput = 0;
170:         } else if (outputState == 3) {
171:           SETERRQ(PETSC_ERR_ARG_WRONGSTATE, "Tried to output CELL_DATA again after intervening POINT_DATA");
172:         } else if (outputState == 4) {
173:           doOutput = 0;
174:         }
175:         if (doOutput) {
176:           PetscViewerASCIIPrintf(viewer, "CELL_DATA %d\n", xin->map->N/bs);
177:         }
178:       }
179:       PetscObjectComposedDataSetInt((PetscObject) viewer, stateId, outputState);
180:       if (name) {
181:         PetscViewerASCIIPrintf(viewer, "SCALARS %s double %d\n", name, bs);
182:       } else {
183:         PetscViewerASCIIPrintf(viewer, "SCALARS scalars double %d\n", bs);
184:       }
185:       PetscViewerASCIIPrintf(viewer, "LOOKUP_TABLE default\n");
186:       for (i=0; i<n/bs; i++) {
187:         for (b=0; b<bs; b++) {
188:           if (b > 0) {
189:             PetscViewerASCIIPrintf(viewer," ");
190:           }
191:           PetscViewerASCIIPrintf(viewer,"%G",PetscRealPart(xarray[i*bs+b]));
192:         }
193:         PetscViewerASCIIPrintf(viewer,"\n");
194:       }
195:       for (j=1; j<size; j++) {
196:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,((PetscObject)xin)->comm,&status);
197:         MPI_Get_count(&status,MPIU_SCALAR,&n);
198:         for (i=0; i<n/bs; i++) {
199:           for (b=0; b<bs; b++) {
200:             if (b > 0) {
201:               PetscViewerASCIIPrintf(viewer," ");
202:             }
203:             PetscViewerASCIIPrintf(viewer,"%G",PetscRealPart(values[i*bs+b]));
204:           }
205:           PetscViewerASCIIPrintf(viewer,"\n");
206:         }
207:       }
208:     } else if (format == PETSC_VIEWER_ASCII_VTK_COORDS) {
209:       PetscInt bs, b;

211:       VecGetLocalSize(xin, &nLen);
212:       n    = PetscMPIIntCast(nLen);
213:       VecGetBlockSize(xin, &bs);
214:       if ((bs < 1) || (bs > 3)) {
215:         SETERRQ1(PETSC_ERR_ARG_WRONGSTATE, "VTK can only handle 3D objects, but vector dimension is %d", bs);
216:       }
217:       for (i=0; i<n/bs; i++) {
218:         for (b=0; b<bs; b++) {
219:           if (b > 0) {
220:             PetscViewerASCIIPrintf(viewer," ");
221:           }
222:           PetscViewerASCIIPrintf(viewer,"%G",PetscRealPart(xarray[i*bs+b]));
223:         }
224:         for (b=bs; b<3; b++) {
225:           PetscViewerASCIIPrintf(viewer," 0.0");
226:         }
227:         PetscViewerASCIIPrintf(viewer,"\n");
228:       }
229:       for (j=1; j<size; j++) {
230:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,((PetscObject)xin)->comm,&status);
231:         MPI_Get_count(&status,MPIU_SCALAR,&n);
232:         for (i=0; i<n/bs; i++) {
233:           for (b=0; b<bs; b++) {
234:             if (b > 0) {
235:               PetscViewerASCIIPrintf(viewer," ");
236:             }
237:             PetscViewerASCIIPrintf(viewer,"%G",PetscRealPart(values[i*bs+b]));
238:           }
239:           for (b=bs; b<3; b++) {
240:             PetscViewerASCIIPrintf(viewer," 0.0");
241:           }
242:           PetscViewerASCIIPrintf(viewer,"\n");
243:         }
244:       }
245:     } else if (format == PETSC_VIEWER_ASCII_PCICE) {
246:       PetscInt bs, b, vertexCount = 1;

248:       VecGetLocalSize(xin, &nLen);
249:       n    = PetscMPIIntCast(nLen);
250:       VecGetBlockSize(xin, &bs);
251:       if ((bs < 1) || (bs > 3)) {
252:         SETERRQ1(PETSC_ERR_ARG_WRONGSTATE, "PCICE can only handle up to 3D objects, but vector dimension is %d", bs);
253:       }
254:       PetscViewerASCIIPrintf(viewer,"%D\n", xin->map->N/bs);
255:       for (i=0; i<n/bs; i++) {
256:         PetscViewerASCIIPrintf(viewer,"%7D   ", vertexCount++);
257:         for (b=0; b<bs; b++) {
258:           if (b > 0) {
259:             PetscViewerASCIIPrintf(viewer," ");
260:           }
261: #if !defined(PETSC_USE_COMPLEX)
262:           PetscViewerASCIIPrintf(viewer,"% 12.5E",xarray[i*bs+b]);
263: #endif
264:         }
265:         PetscViewerASCIIPrintf(viewer,"\n");
266:       }
267:       for (j=1; j<size; j++) {
268:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,((PetscObject)xin)->comm,&status);
269:         MPI_Get_count(&status,MPIU_SCALAR,&n);
270:         for (i=0; i<n/bs; i++) {
271:           PetscViewerASCIIPrintf(viewer,"%7D   ", vertexCount++);
272:           for (b=0; b<bs; b++) {
273:             if (b > 0) {
274:               PetscViewerASCIIPrintf(viewer," ");
275:             }
276: #if !defined(PETSC_USE_COMPLEX)
277:             PetscViewerASCIIPrintf(viewer,"% 12.5E",values[i*bs+b]);
278: #endif
279:           }
280:           PetscViewerASCIIPrintf(viewer,"\n");
281:         }
282:       }
283:     } else if (format == PETSC_VIEWER_ASCII_PYLITH) {
284:       PetscInt bs, b, vertexCount = 1;

286:       VecGetLocalSize(xin, &nLen);
287:       n    = PetscMPIIntCast(nLen);
288:       VecGetBlockSize(xin, &bs);
289:       if (bs != 3) {
290:         SETERRQ1(PETSC_ERR_ARG_WRONGSTATE, "PyLith can only handle 3D objects, but vector dimension is %d", bs);
291:       }
292:       PetscViewerASCIIPrintf(viewer,"#\n");
293:       PetscViewerASCIIPrintf(viewer,"#  Node      X-coord           Y-coord           Z-coord\n");
294:       PetscViewerASCIIPrintf(viewer,"#\n");
295:       for (i=0; i<n/bs; i++) {
296:         PetscViewerASCIIPrintf(viewer,"%7D ", vertexCount++);
297:         for (b=0; b<bs; b++) {
298:           if (b > 0) {
299:             PetscViewerASCIIPrintf(viewer," ");
300:           }
301: #if !defined(PETSC_USE_COMPLEX)
302:           PetscViewerASCIIPrintf(viewer,"% 16.8E",xarray[i*bs+b]);
303: #endif
304:         }
305:         PetscViewerASCIIPrintf(viewer,"\n");
306:       }
307:       for (j=1; j<size; j++) {
308:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,((PetscObject)xin)->comm,&status);
309:         MPI_Get_count(&status,MPIU_SCALAR,&n);
310:         for (i=0; i<n/bs; i++) {
311:           PetscViewerASCIIPrintf(viewer,"%7D ", vertexCount++);
312:           for (b=0; b<bs; b++) {
313:             if (b > 0) {
314:               PetscViewerASCIIPrintf(viewer," ");
315:             }
316: #if !defined(PETSC_USE_COMPLEX)
317:             PetscViewerASCIIPrintf(viewer,"% 16.8E",values[i*bs+b]);
318: #endif
319:           }
320:           PetscViewerASCIIPrintf(viewer,"\n");
321:         }
322:       }
323:     } else {
324:       if (format != PETSC_VIEWER_ASCII_COMMON) {PetscViewerASCIIPrintf(viewer,"Process [%d]\n",rank);}
325:       cnt = 0;
326:       for (i=0; i<xin->map->n; i++) {
327:         if (format == PETSC_VIEWER_ASCII_INDEX) {
328:           PetscViewerASCIIPrintf(viewer,"%D: ",cnt++);
329:         }
330: #if defined(PETSC_USE_COMPLEX)
331:         if (PetscImaginaryPart(xarray[i]) > 0.0) {
332:           PetscViewerASCIIPrintf(viewer,"%G + %G i\n",PetscRealPart(xarray[i]),PetscImaginaryPart(xarray[i]));
333:         } else if (PetscImaginaryPart(xarray[i]) < 0.0) {
334:           PetscViewerASCIIPrintf(viewer,"%G - %G i\n",PetscRealPart(xarray[i]),-PetscImaginaryPart(xarray[i]));
335:         } else {
336:           PetscViewerASCIIPrintf(viewer,"%G\n",PetscRealPart(xarray[i]));
337:         }
338: #else
339:         PetscViewerASCIIPrintf(viewer,"%G\n",xarray[i]);
340: #endif
341:       }
342:       /* receive and print messages */
343:       for (j=1; j<size; j++) {
344:         MPI_Recv(values,(PetscMPIInt)len,MPIU_SCALAR,j,tag,((PetscObject)xin)->comm,&status);
345:         MPI_Get_count(&status,MPIU_SCALAR,&n);
346:         if (format != PETSC_VIEWER_ASCII_COMMON) {
347:           PetscViewerASCIIPrintf(viewer,"Process [%d]\n",j);
348:         }
349:         for (i=0; i<n; i++) {
350:           if (format == PETSC_VIEWER_ASCII_INDEX) {
351:             PetscViewerASCIIPrintf(viewer,"%D: ",cnt++);
352:           }
353: #if defined(PETSC_USE_COMPLEX)
354:           if (PetscImaginaryPart(values[i]) > 0.0) {
355:             PetscViewerASCIIPrintf(viewer,"%G + %G i\n",PetscRealPart(values[i]),PetscImaginaryPart(values[i]));
356:           } else if (PetscImaginaryPart(values[i]) < 0.0) {
357:             PetscViewerASCIIPrintf(viewer,"%G - %G i\n",PetscRealPart(values[i]),-PetscImaginaryPart(values[i]));
358:           } else {
359:             PetscViewerASCIIPrintf(viewer,"%G\n",PetscRealPart(values[i]));
360:           }
361: #else
362:           PetscViewerASCIIPrintf(viewer,"%G\n",values[i]);
363: #endif
364:         }
365:       }
366:     }
367:     PetscFree(values);
368:   } else {
369:     /* send values */
370:     MPI_Send(xarray,xin->map->n,MPIU_SCALAR,0,tag,((PetscObject)xin)->comm);
371:   }
372:   PetscViewerFlush(viewer);
373:   VecRestoreArray(xin,&xarray);
374:   return(0);
375: }

379: PetscErrorCode VecView_MPI_Binary(Vec xin,PetscViewer viewer)
380: {
382:   PetscMPIInt    rank,size,mesgsize,tag = ((PetscObject)viewer)->tag, mesglen;
383:   PetscInt       len,n = xin->map->n,j,tr[2];
384:   int            fdes;
385:   MPI_Status     status;
386:   PetscScalar    *values,*xarray;
387:   FILE           *file;
388: #if defined(PETSC_HAVE_MPIIO)
389:   PetscTruth     isMPIIO;
390: #endif
391:   PetscInt message_count;

394:   VecGetArray(xin,&xarray);
395:   PetscViewerBinaryGetDescriptor(viewer,&fdes);

397:   /* determine maximum message to arrive */
398:   MPI_Comm_rank(((PetscObject)xin)->comm,&rank);
399:   MPI_Comm_size(((PetscObject)xin)->comm,&size);

401:   tr[0] = VEC_FILE_COOKIE;
402:   tr[1] = xin->map->N;
403:   PetscViewerBinaryWrite(viewer,tr,2,PETSC_INT,PETSC_FALSE);

405: #if defined(PETSC_HAVE_MPIIO)
406:   PetscViewerBinaryGetMPIIO(viewer,&isMPIIO);
407:   if (!isMPIIO) {
408: #endif
409:     PetscViewerBinaryGetFlowControl(viewer,&message_count);
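    /* Flow control: rank 0 allows at most message_count processes to send at a
       time, raising the limit in steps of 256 and broadcasting the new value;
       the other ranks wait until their rank is below the current limit before
       sending their local values. */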
410:     if (!rank) {
411:       PetscBinaryWrite(fdes,xarray,xin->map->n,PETSC_SCALAR,PETSC_FALSE);
412: 
413:       len = 0;
414:       for (j=1; j<size; j++) len = PetscMax(len,xin->map->range[j+1]-xin->map->range[j]);
415:       PetscMalloc((len+1)*sizeof(PetscScalar),&values);
416:       mesgsize = PetscMPIIntCast(len);
417:       /* receive and save messages */
418:       for (j=1; j<size; j++) {
419:         if (j >= message_count) {
420:           message_count += 256;
421:           MPI_Bcast(&message_count,1,MPIU_INT,0,((PetscObject)xin)->comm);
422:         }
423:         MPI_Recv(values,mesgsize,MPIU_SCALAR,j,tag,((PetscObject)xin)->comm,&status);
424:         MPI_Get_count(&status,MPIU_SCALAR,&mesglen);
425:         n = (PetscInt)mesglen;
426:         PetscBinaryWrite(fdes,values,n,PETSC_SCALAR,PETSC_FALSE);
427:       }
428:       message_count = 0;
429:       MPI_Bcast(&message_count,1,MPIU_INT,0,((PetscObject)xin)->comm);
430:       PetscFree(values);
431:     } else {
432:       while (1) {
433:         if (rank < message_count) break;
434:         MPI_Bcast(&message_count,1,MPIU_INT,0,((PetscObject)xin)->comm);
435:       }
436:       /* send values */
437:       mesgsize = PetscMPIIntCast(xin->map->n);
438:       MPI_Send(xarray,mesgsize,MPIU_SCALAR,0,tag,((PetscObject)xin)->comm);
439:       while (1) {
440:         if (message_count == 0) break;
441:         MPI_Bcast(&message_count,1,MPIU_INT,0,((PetscObject)xin)->comm);
442:       }
443:     }
444: #if defined(PETSC_HAVE_MPIIO)
445:   } else {
446:     MPI_Offset   off;
447:     MPI_File     mfdes;
448:     PetscMPIInt  gsizes[1],lsizes[1],lstarts[1];
449:     MPI_Datatype view;

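    /* With MPI-IO every process writes its own contiguous slice collectively:
       a 1-D subarray type describes the [rstart, rstart+n) portion of the
       global vector, and the local array is written with MPIU_File_write_all(). */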
451:     gsizes[0]  = PetscMPIIntCast(xin->map->N);
452:     lsizes[0]  = PetscMPIIntCast(n);
453:     lstarts[0] = PetscMPIIntCast(xin->map->rstart);
454:     MPI_Type_create_subarray(1,gsizes,lsizes,lstarts,MPI_ORDER_FORTRAN,MPIU_SCALAR,&view);
455:     MPI_Type_commit(&view);

457:     PetscViewerBinaryGetMPIIODescriptor(viewer,&mfdes);
458:     PetscViewerBinaryGetMPIIOOffset(viewer,&off);
459:     MPIU_File_write_all(mfdes,xarray,lsizes[0],MPIU_SCALAR,MPI_STATUS_IGNORE);
460:     PetscViewerBinaryAddMPIIOOffset(viewer,xin->map->N*sizeof(PetscScalar));
461:     MPI_Type_free(&view);
462:   }
463: #endif

465:   VecRestoreArray(xin,&xarray);
466:   if (!rank) {
467:     PetscViewerBinaryGetInfoPointer(viewer,&file);
468:     if (file && xin->map->bs > 1) {
469:       if (((PetscObject)xin)->prefix) {
470:         PetscFPrintf(PETSC_COMM_SELF,file,"-%svecload_block_size %D\n",((PetscObject)xin)->prefix,xin->map->bs);
471:       } else {
472:         PetscFPrintf(PETSC_COMM_SELF,file,"-vecload_block_size %D\n",xin->map->bs);
473:       }
474:     }
475:   }
476:   return(0);
477: }

481: PetscErrorCode VecView_MPI_Draw_LG(Vec xin,PetscViewer viewer)
482: {
483:   PetscDraw      draw;
484:   PetscTruth     isnull;

487: #if defined(PETSC_USE_64BIT_INDICES)
489:   PetscViewerDrawGetDraw(viewer,0,&draw);
490:   PetscDrawIsNull(draw,&isnull);
491:   if (isnull) return(0);
492:   SETERRQ(PETSC_ERR_SUP,"Not supported with 64 bit integers");
493: #else
494:   PetscMPIInt    size,rank;
495:   int            i,N = xin->map->N,*lens;
496:   PetscReal      *xx,*yy;
497:   PetscDrawLG    lg;
498:   PetscScalar    *xarray;

501:   PetscViewerDrawGetDraw(viewer,0,&draw);
502:   PetscDrawIsNull(draw,&isnull);
503:   if (isnull) return(0);

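  /* Rank 0 gathers the whole vector (real parts only for complex scalars) with
     MPI_Gatherv, using the ownership ranges as receive counts and displacements,
     and adds the points to the line graph. */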
505:   VecGetArray(xin,&xarray);
506:   PetscViewerDrawGetDrawLG(viewer,0,&lg);
507:   PetscDrawCheckResizedWindow(draw);
508:   MPI_Comm_rank(((PetscObject)xin)->comm,&rank);
509:   MPI_Comm_size(((PetscObject)xin)->comm,&size);
510:   if (!rank) {
511:     PetscDrawLGReset(lg);
512:     PetscMalloc(2*(N+1)*sizeof(PetscReal),&xx);
513:     for (i=0; i<N; i++) {xx[i] = (PetscReal) i;}
514:     yy   = xx + N;
515:     PetscMalloc(size*sizeof(PetscInt),&lens);
516:     for (i=0; i<size; i++) {
517:       lens[i] = xin->map->range[i+1] - xin->map->range[i];
518:     }
519: #if !defined(PETSC_USE_COMPLEX)
520:     MPI_Gatherv(xarray,xin->map->n,MPIU_REAL,yy,lens,xin->map->range,MPIU_REAL,0,((PetscObject)xin)->comm);
521: #else
522:     {
523:       PetscReal *xr;
524:       PetscMalloc((xin->map->n+1)*sizeof(PetscReal),&xr);
525:       for (i=0; i<xin->map->n; i++) {
526:         xr[i] = PetscRealPart(xarray[i]);
527:       }
528:       MPI_Gatherv(xr,xin->map->n,MPIU_REAL,yy,lens,xin->map->range,MPIU_REAL,0,((PetscObject)xin)->comm);
529:       PetscFree(xr);
530:     }
531: #endif
532:     PetscFree(lens);
533:     PetscDrawLGAddPoints(lg,N,&xx,&yy);
534:     PetscFree(xx);
535:   } else {
536: #if !defined(PETSC_USE_COMPLEX)
537:     MPI_Gatherv(xarray,xin->map->n,MPIU_REAL,0,0,0,MPIU_REAL,0,((PetscObject)xin)->comm);
538: #else
539:     {
540:       PetscReal *xr;
541:       PetscMalloc((xin->map->n+1)*sizeof(PetscReal),&xr);
542:       for (i=0; i<xin->map->n; i++) {
543:         xr[i] = PetscRealPart(xarray[i]);
544:       }
545:       MPI_Gatherv(xr,xin->map->n,MPIU_REAL,0,0,0,MPIU_REAL,0,((PetscObject)xin)->comm);
546:       PetscFree(xr);
547:     }
548: #endif
549:   }
550:   PetscDrawLGDraw(lg);
551:   PetscDrawSynchronizedFlush(draw);
552:   VecRestoreArray(xin,&xarray);
553: #endif
554:   return(0);
555: }

558: /* I am assuming this is Extern 'C' because it is dynamically loaded.  If not, we can remove the DLLEXPORT tag */
561: PetscErrorCode  VecView_MPI_Draw(Vec xin,PetscViewer viewer)
562: {
564:   PetscMPIInt    rank,size,tag = ((PetscObject)viewer)->tag;
565:   PetscInt       i,start,end;
566:   MPI_Status     status;
567:   PetscReal      coors[4],ymin,ymax,xmin,xmax,tmp;
568:   PetscDraw      draw;
569:   PetscTruth     isnull;
570:   PetscDrawAxis  axis;
571:   PetscScalar    *xarray;

574:   PetscViewerDrawGetDraw(viewer,0,&draw);
575:   PetscDrawIsNull(draw,&isnull); if (isnull) return(0);

577:   VecGetArray(xin,&xarray);
578:   PetscDrawCheckResizedWindow(draw);
579:   xmin = 1.e20; xmax = -1.e20;
580:   for (i=0; i<xin->map->n; i++) {
581: #if defined(PETSC_USE_COMPLEX)
582:     if (PetscRealPart(xarray[i]) < xmin) xmin = PetscRealPart(xarray[i]);
583:     if (PetscRealPart(xarray[i]) > xmax) xmax = PetscRealPart(xarray[i]);
584: #else
585:     if (xarray[i] < xmin) xmin = xarray[i];
586:     if (xarray[i] > xmax) xmax = xarray[i];
587: #endif
588:   }
589:   if (xmin + 1.e-10 > xmax) {
590:     xmin -= 1.e-5;
591:     xmax += 1.e-5;
592:   }
593:   MPI_Reduce(&xmin,&ymin,1,MPIU_REAL,MPI_MIN,0,((PetscObject)xin)->comm);
594:   MPI_Reduce(&xmax,&ymax,1,MPIU_REAL,MPI_MAX,0,((PetscObject)xin)->comm);
595:   MPI_Comm_size(((PetscObject)xin)->comm,&size);
596:   MPI_Comm_rank(((PetscObject)xin)->comm,&rank);
597:   PetscDrawAxisCreate(draw,&axis);
598:   PetscLogObjectParent(draw,axis);
599:   if (!rank) {
600:     PetscDrawClear(draw);
601:     PetscDrawFlush(draw);
602:     PetscDrawAxisSetLimits(axis,0.0,(double)xin->map->N,ymin,ymax);
603:     PetscDrawAxisDraw(axis);
604:     PetscDrawGetCoordinates(draw,coors,coors+1,coors+2,coors+3);
605:   }
606:   PetscDrawAxisDestroy(axis);
607:   MPI_Bcast(coors,4,MPIU_REAL,0,((PetscObject)xin)->comm);
608:   if (rank) {PetscDrawSetCoordinates(draw,coors[0],coors[1],coors[2],coors[3]);}
609:   /* draw local part of vector */
610:   VecGetOwnershipRange(xin,&start,&end);
611:   if (rank < size-1) { /* send value to right */
612:     MPI_Send(&xarray[xin->map->n-1],1,MPIU_REAL,rank+1,tag,((PetscObject)xin)->comm);
613:   }
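  /* Each process draws the line segments between its own entries; the single
     value sent to the next rank above lets that rank draw the connecting
     segment across the ownership boundary. */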
614:   for (i=1; i<xin->map->n; i++) {
615: #if !defined(PETSC_USE_COMPLEX)
616:     PetscDrawLine(draw,(PetscReal)(i-1+start),xarray[i-1],(PetscReal)(i+start),
617:                    xarray[i],PETSC_DRAW_RED);
618: #else
619:     PetscDrawLine(draw,(PetscReal)(i-1+start),PetscRealPart(xarray[i-1]),(PetscReal)(i+start),
620:                    PetscRealPart(xarray[i]),PETSC_DRAW_RED);
621: #endif
622:   }
623:   if (rank) { /* receive value from left */
624:     MPI_Recv(&tmp,1,MPIU_REAL,rank-1,tag,((PetscObject)xin)->comm,&status);
625: #if !defined(PETSC_USE_COMPLEX)
626:     PetscDrawLine(draw,(PetscReal)start-1,tmp,(PetscReal)start,xarray[0],PETSC_DRAW_RED);
627: #else
628:     PetscDrawLine(draw,(PetscReal)start-1,tmp,(PetscReal)start,PetscRealPart(xarray[0]),PETSC_DRAW_RED);
629: #endif
630:   }
631:   PetscDrawSynchronizedFlush(draw);
632:   PetscDrawPause(draw);
633:   VecRestoreArray(xin,&xarray);
634:   return(0);
635: }

638: #if defined(PETSC_HAVE_MATLAB_ENGINE)
641: PetscErrorCode VecView_MPI_Matlab(Vec xin,PetscViewer viewer)
642: {
644:   PetscMPIInt    rank,size,*lens;
645:   PetscInt       i,N = xin->map->N;
646:   PetscScalar    *xx,*xarray;

649:   VecGetArray(xin,&xarray);
650:   MPI_Comm_rank(((PetscObject)xin)->comm,&rank);
651:   MPI_Comm_size(((PetscObject)xin)->comm,&size);
652:   if (!rank) {
653:     PetscMalloc((N+1)*sizeof(PetscScalar),&xx);
654:     PetscMalloc(size*sizeof(PetscMPIInt),&lens);
655:     for (i=0; i<size; i++) {
656:       lens[i] = xin->map->range[i+1] - xin->map->range[i];
657:     }
658:     MPI_Gatherv(xarray,xin->map->n,MPIU_SCALAR,xx,lens,xin->map->range,MPIU_SCALAR,0,((PetscObject)xin)->comm);
659:     PetscFree(lens);

661:     PetscObjectName((PetscObject)xin);
662:     PetscViewerMatlabPutArray(viewer,N,1,xx,((PetscObject)xin)->name);

664:     PetscFree(xx);
665:   } else {
666:     MPI_Gatherv(xarray,xin->map->n,MPIU_SCALAR,0,0,0,MPIU_SCALAR,0,((PetscObject)xin)->comm);
667:   }
668:   VecRestoreArray(xin,&xarray);
669:   return(0);
670: }
671: #endif

673: #if defined(PETSC_HAVE_PNETCDF)
676: PetscErrorCode VecView_MPI_Netcdf(Vec xin,PetscViewer v)
677: {
679:   int         n = xin->map->n,ncid,xdim,xdim_num=1,xin_id,xstart;
680:   PetscScalar *xarray;

683:   VecGetArray(xin,&xarray);
684:   PetscViewerNetcdfGetID(v,&ncid);
685:   if (ncid < 0) SETERRQ(PETSC_ERR_ORDER,"First call PetscViewerNetcdfOpen to create NetCDF dataset");
686:   /* define dimensions */
687:   ncmpi_def_dim(ncid,"PETSc_Vector_Global_Size",xin->map->N,&xdim);
688:   /* define variables */
689:   ncmpi_def_var(ncid,"PETSc_Vector_MPI",NC_DOUBLE,xdim_num,&xdim,&xin_id);
690:   /* leave define mode */
691:   ncmpi_enddef(ncid);
692:   /* store the vector */
693:   VecGetOwnershipRange(xin,&xstart,PETSC_NULL);
694:   ncmpi_put_vara_double_all(ncid,xin_id,(const MPI_Offset*)&xstart,(const MPI_Offset*)&n,xarray);
695:   VecRestoreArray(xin,&xarray);
696:   return(0);
697: }
698: #endif

700: #if defined(PETSC_HAVE_HDF5)
703: PetscErrorCode VecView_MPI_HDF5(Vec xin, PetscViewer viewer)
704: {
705:   hid_t         filespace; /* file dataspace identifier */
706:   hid_t         plist_id;  /* property list identifier */
707:   hid_t         dset_id;   /* dataset identifier */
708:   hid_t         memspace;  /* memory dataspace identifier */
709:   hid_t         file_id;
710:   herr_t        status;
711:   /* PetscInt       bs        = xin->map->bs > 0 ? xin->map->bs : 1; */
712:   hsize_t        dim      = 1; /* Could have dim 2 for blocked vectors */
713:   hsize_t        dims[2],count[2],offset[2];
714:   PetscInt       low;
715:   PetscScalar    *x;
717:   const char     *vecname;


721:   PetscViewerHDF5GetFileId(viewer, &file_id);

723:   /* Create the dataspace for the dataset */
724:   dims[0]  = PetscHDF5IntCast(xin->map->N);
725: #if defined(PETSC_USE_COMPLEX)
726:   dim++;
727:   dims[1] = 2;
728: #endif
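  /* For complex scalars a second dimension of length 2 holds the real and
     imaginary parts, so the data can be written as native doubles in either
     configuration. */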
729:   filespace = H5Screate_simple(dim, dims, NULL);
730:   if (filespace == -1) SETERRQ(PETSC_ERR_LIB,"Cannot H5Screate_simple()");

732:   /* Create the dataset with default properties and close filespace */
733:   PetscObjectGetName((PetscObject)xin,&vecname);
734: #if (H5_VERS_MAJOR * 10000 + H5_VERS_MINOR * 100 + H5_VERS_RELEASE >= 10800)
735:   dset_id = H5Dcreate2(file_id, vecname, H5T_NATIVE_DOUBLE, filespace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
736: #else
737:   dset_id = H5Dcreate(file_id, vecname, H5T_NATIVE_DOUBLE, filespace, H5P_DEFAULT);
738: #endif
739:   if (dset_id == -1) SETERRQ(PETSC_ERR_LIB,"Cannot H5Dcreate2()");
740:   status = H5Sclose(filespace);CHKERRQ(status);

742:   /* Each process defines a dataset and writes it to the hyperslab in the file */
743:   count[0] = PetscHDF5IntCast(xin->map->n);
744: #if defined(PETSC_USE_COMPLEX)
745:   count[1] = 2;
746: #endif
747:   memspace = H5Screate_simple(dim, count, NULL);
748:   if (memspace == -1) SETERRQ(PETSC_ERR_LIB,"Cannot H5Screate_simple()");

750:   /* Select hyperslab in the file */
751:   VecGetOwnershipRange(xin, &low, PETSC_NULL);
752:   offset[0] = PetscHDF5IntCast(low);
753: #if defined(PETSC_USE_COMPLEX)
754:   offset[1] = 0;
755: #endif
756:   filespace = H5Dget_space(dset_id);
757:   if (filespace == -1) SETERRQ(PETSC_ERR_LIB,"Cannot H5Dget_space()");
758:   status = H5Sselect_hyperslab(filespace, H5S_SELECT_SET, offset, NULL, count, NULL);CHKERRQ(status);

760:   /* Create property list for collective dataset write */
761:   plist_id = H5Pcreate(H5P_DATASET_XFER);
762:   if (plist_id == -1) SETERRQ(PETSC_ERR_LIB,"Cannot H5Pcreate()");
763: #if defined(PETSC_HAVE_H5PSET_FAPL_MPIO)
764:   status = H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_COLLECTIVE);CHKERRQ(status);
765: #endif
766:   /* To write dataset independently use H5Pset_dxpl_mpio(plist_id, H5FD_MPIO_INDEPENDENT) */

768:   VecGetArray(xin, &x);
769:   status = H5Dwrite(dset_id, H5T_NATIVE_DOUBLE, memspace, filespace, plist_id, x);CHKERRQ(status);
770:   status = H5Fflush(file_id, H5F_SCOPE_GLOBAL);CHKERRQ(status);
771:   VecRestoreArray(xin, &x);

773:   /* Close/release resources */
774:   status = H5Pclose(plist_id);CHKERRQ(status);
775:   status = H5Sclose(filespace);CHKERRQ(status);
776:   status = H5Sclose(memspace);CHKERRQ(status);
777:   status = H5Dclose(dset_id);CHKERRQ(status);
778:   return(0);
779: }
780: #endif

784: PetscErrorCode VecView_MPI(Vec xin,PetscViewer viewer)
785: {
787:   PetscTruth     iascii,isbinary,isdraw;
788: #if defined(PETSC_HAVE_MATHEMATICA)
789:   PetscTruth     ismathematica;
790: #endif
791: #if defined(PETSC_HAVE_PNETCDF)
792:   PetscTruth     isnetcdf;
793: #endif
794: #if defined(PETSC_HAVE_HDF5)
795:   PetscTruth     ishdf5;
796: #endif
797: #if defined(PETSC_HAVE_MATLAB_ENGINE) && !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_SCALAR_SINGLE) && !defined(PETSC_USE_SCALAR_MAT_SINGLE)
798:   PetscTruth     ismatlab;
799: #endif

802:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);
803:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);
804:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_DRAW,&isdraw);
805: #if defined(PETSC_HAVE_MATHEMATICA)
806:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_MATHEMATICA,&ismathematica);
807: #endif
808: #if defined(PETSC_HAVE_PNETCDF)
809:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_NETCDF,&isnetcdf);
810: #endif
811: #if defined(PETSC_HAVE_HDF5)
812:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_HDF5,&ishdf5);
813: #endif
814: #if defined(PETSC_HAVE_MATLAB_ENGINE) && !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_SCALAR_SINGLE) && !defined(PETSC_USE_SCALAR_MAT_SINGLE)
815:   PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_MATLAB,&ismatlab);
816: #endif
817:   if (iascii){
818:     VecView_MPI_ASCII(xin,viewer);
819:   } else if (isbinary) {
820:     VecView_MPI_Binary(xin,viewer);
821:   } else if (isdraw) {
822:     PetscViewerFormat format;

824:     PetscViewerGetFormat(viewer,&format);
825:     if (format == PETSC_VIEWER_DRAW_LG) {
826:       VecView_MPI_Draw_LG(xin,viewer);
827:     } else {
828:       VecView_MPI_Draw(xin,viewer);
829:     }
830: #if defined(PETSC_HAVE_MATHEMATICA)
831:   } else if (ismathematica) {
832:     PetscViewerMathematicaPutVector(viewer,xin);
833: #endif
834: #if defined(PETSC_HAVE_PNETCDF)
835:   } else if (isnetcdf) {
836:     VecView_MPI_Netcdf(xin,viewer);
837: #endif
838: #if defined(PETSC_HAVE_HDF5)
839:   } else if (ishdf5) {
840:     VecView_MPI_HDF5(xin,viewer);
841: #endif
842: #if defined(PETSC_HAVE_MATLAB_ENGINE) && !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_SCALAR_SINGLE) && !defined(PETSC_USE_SCALAR_MAT_SINGLE)
843:   } else if (ismatlab) {
844:     VecView_MPI_Matlab(xin,viewer);
845: #endif
846:   } else {
847:     SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for this object",((PetscObject)viewer)->type_name);
848:   }
849:   return(0);
850: }

854: PetscErrorCode VecGetSize_MPI(Vec xin,PetscInt *N)
855: {
857:   *N = xin->map->N;
858:   return(0);
859: }

863: PetscErrorCode VecGetValues_MPI(Vec xin,PetscInt ni,const PetscInt ix[],PetscScalar y[])
864: {
865:   Vec_MPI     *x = (Vec_MPI *)xin->data;
866:   PetscScalar *xx = x->array;
867:   PetscInt    i,tmp,start = xin->map->range[xin->stash.rank];

870:   for (i=0; i<ni; i++) {
871:     if (xin->stash.ignorenegidx && ix[i] < 0) continue;
872:     tmp = ix[i] - start;
873: #if defined(PETSC_USE_DEBUG)
874:     if (tmp < 0 || tmp >= xin->map->n) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Can only get local values, trying %D",ix[i]);
875: #endif
876:     y[i] = xx[tmp];
877:   }
878:   return(0);
879: }

883: PetscErrorCode VecSetValues_MPI(Vec xin,PetscInt ni,const PetscInt ix[],const PetscScalar y[],InsertMode addv)
884: {
886:   PetscMPIInt    rank = xin->stash.rank;
887:   PetscInt       *owners = xin->map->range,start = owners[rank];
888:   PetscInt       end = owners[rank+1],i,row;
889:   PetscScalar    *xx;

892: #if defined(PETSC_USE_DEBUG)
893:   if (xin->stash.insertmode == INSERT_VALUES && addv == ADD_VALUES) {
894:    SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"You have already inserted values; you cannot now add");
895:   } else if (xin->stash.insertmode == ADD_VALUES && addv == INSERT_VALUES) {
896:    SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"You have already added values; you cannot now insert");
897:   }
898: #endif
899:   VecGetArray(xin,&xx);
900:   xin->stash.insertmode = addv;

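  /* Entries owned by this process are written directly into the local array;
     off-process entries are put in the stash and communicated later by
     VecAssemblyBegin()/VecAssemblyEnd(). */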
902:   if (addv == INSERT_VALUES) {
903:     for (i=0; i<ni; i++) {
904:       if (xin->stash.ignorenegidx && ix[i] < 0) continue;
905: #if defined(PETSC_USE_DEBUG)
906:       if (ix[i] < 0) {
907:         SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D cannot be negative",ix[i]);
908:       }
909: #endif 
910:       if ((row = ix[i]) >= start && row < end) {
911:         xx[row-start] = y[i];
912:       } else if (!xin->stash.donotstash) {
913: #if defined(PETSC_USE_DEBUG)
914:         if (ix[i] >= xin->map->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D maximum %D",ix[i],xin->map->N);
915: #endif
916:         VecStashValue_Private(&xin->stash,row,y[i]);
917:       }
918:     }
919:   } else {
920:     for (i=0; i<ni; i++) {
921:       if (xin->stash.ignorenegidx && ix[i] < 0) continue;
922: #if defined(PETSC_USE_DEBUG)
923:       if (ix[i] < 0) {
924:         SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D cannot be negative",ix[i]);
925:       }
926: #endif 
927:       if ((row = ix[i]) >= start && row < end) {
928:         xx[row-start] += y[i];
929:       } else if (!xin->stash.donotstash) {
930: #if defined(PETSC_USE_DEBUG)
931:         if (ix[i] >= xin->map->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D maximum %D",ix[i],xin->map->N);
932: #endif        
933:         VecStashValue_Private(&xin->stash,row,y[i]);
934:       }
935:     }
936:   }
937:   VecRestoreArray(xin,&xx);
938:   return(0);
939: }

943: PetscErrorCode VecSetValuesBlocked_MPI(Vec xin,PetscInt ni,const PetscInt ix[],const PetscScalar yin[],InsertMode addv)
944: {
945:   PetscMPIInt    rank = xin->stash.rank;
946:   PetscInt       *owners = xin->map->range,start = owners[rank];
948:   PetscInt       end = owners[rank+1],i,row,bs = xin->map->bs,j;
949:   PetscScalar    *xx,*y = (PetscScalar*)yin;

952:   VecGetArray(xin,&xx);
953: #if defined(PETSC_USE_DEBUG)
954:   if (xin->stash.insertmode == INSERT_VALUES && addv == ADD_VALUES) {
955:    SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"You have already inserted values; you cannot now add");
956:   }
957:   else if (xin->stash.insertmode == ADD_VALUES && addv == INSERT_VALUES) {
958:    SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"You have already added values; you cannot now insert");
959:   }
960: #endif
961:   xin->stash.insertmode = addv;

963:   if (addv == INSERT_VALUES) {
964:     for (i=0; i<ni; i++) {
965:       if ((row = bs*ix[i]) >= start && row < end) {
966:         for (j=0; j<bs; j++) {
967:           xx[row-start+j] = y[j];
968:         }
969:       } else if (!xin->stash.donotstash) {
970:         if (ix[i] < 0) continue;
971: #if defined(PETSC_USE_DEBUG)
972:         if (ix[i] >= xin->map->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D max %D",ix[i],xin->map->N);
973: #endif
974:         VecStashValuesBlocked_Private(&xin->bstash,ix[i],y);
975:       }
976:       y += bs;
977:     }
978:   } else {
979:     for (i=0; i<ni; i++) {
980:       if ((row = bs*ix[i]) >= start && row < end) {
981:         for (j=0; j<bs; j++) {
982:           xx[row-start+j] += y[j];
983:         }
984:       } else if (!xin->stash.donotstash) {
985:         if (ix[i] < 0) continue;
986: #if defined(PETSC_USE_DEBUG)
987:         if (ix[i] >= xin->map->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Out of range index value %D max %D",ix[i],xin->map->N);
988: #endif
989:         VecStashValuesBlocked_Private(&xin->bstash,ix[i],y);
990:       }
991:       y += bs;
992:     }
993:   }
994:   VecRestoreArray(xin,&xx);
995:   return(0);
996: }

998: /*
999:    Since nsends or nreceives may be zero we add 1 in certain mallocs
1000:    to make sure we never malloc an empty one.
1001: */
1004: PetscErrorCode VecAssemblyBegin_MPI(Vec xin)
1005: {
1007:   PetscInt       *owners = xin->map->range,*bowners,i,bs,nstash,reallocs;
1008:   PetscMPIInt    size;
1009:   InsertMode     addv;
1010:   MPI_Comm       comm = ((PetscObject)xin)->comm;

1013:   if (xin->stash.donotstash) {
1014:     return(0);
1015:   }

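  /* A bitwise-OR reduction of the insert modes detects the illegal case where
     some processes called VecSetValues() with INSERT_VALUES while others used
     ADD_VALUES. */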
1017:   MPI_Allreduce(&xin->stash.insertmode,&addv,1,MPI_INT,MPI_BOR,comm);
1018:   if (addv == (ADD_VALUES|INSERT_VALUES)) {
1019:     SETERRQ(PETSC_ERR_ARG_NOTSAMETYPE,"Some processors inserted values while others added");
1020:   }
1021:   xin->stash.insertmode = addv; /* in case this processor had no cache */
1022: 
1023:   bs = xin->map->bs;
1024:   MPI_Comm_size(((PetscObject)xin)->comm,&size);
1025:   if (!xin->bstash.bowners && xin->map->bs != -1) {
1026:     PetscMalloc((size+1)*sizeof(PetscInt),&bowners);
1027:     for (i=0; i<size+1; i++){ bowners[i] = owners[i]/bs;}
1028:     xin->bstash.bowners = bowners;
1029:   } else {
1030:     bowners = xin->bstash.bowners;
1031:   }
1032:   VecStashScatterBegin_Private(&xin->stash,owners);
1033:   VecStashScatterBegin_Private(&xin->bstash,bowners);
1034:   VecStashGetInfo_Private(&xin->stash,&nstash,&reallocs);
1035:   PetscInfo2(xin,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);
1036:   VecStashGetInfo_Private(&xin->bstash,&nstash,&reallocs);
1037:   PetscInfo2(xin,"Block-Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);

1039:   return(0);
1040: }

1044: PetscErrorCode VecAssemblyEnd_MPI(Vec vec)
1045: {
1047:   PetscInt       base,i,j,*row,flg,bs;
1048:   PetscMPIInt    n;
1049:   PetscScalar    *val,*vv,*array,*xarray;

1052:   if (!vec->stash.donotstash) {
1053:     VecGetArray(vec,&xarray);
1054:     base = vec->map->range[vec->stash.rank];
1055:     bs   = vec->map->bs;

1057:     /* Process the stash */
1058:     while (1) {
1059:       VecStashScatterGetMesg_Private(&vec->stash,&n,&row,&val,&flg);
1060:       if (!flg) break;
1061:       if (vec->stash.insertmode == ADD_VALUES) {
1062:         for (i=0; i<n; i++) { xarray[row[i] - base] += val[i]; }
1063:       } else if (vec->stash.insertmode == INSERT_VALUES) {
1064:         for (i=0; i<n; i++) { xarray[row[i] - base] = val[i]; }
1065:       } else {
1066:         SETERRQ(PETSC_ERR_ARG_CORRUPT,"Insert mode is not set correctly; corrupted vector");
1067:       }
1068:     }
1069:     VecStashScatterEnd_Private(&vec->stash);

1071:     /* now process the block-stash */
1072:     while (1) {
1073:       VecStashScatterGetMesg_Private(&vec->bstash,&n,&row,&val,&flg);
1074:       if (!flg) break;
1075:       for (i=0; i<n; i++) {
1076:         array = xarray+row[i]*bs-base;
1077:         vv    = val+i*bs;
1078:         if (vec->stash.insertmode == ADD_VALUES) {
1079:           for (j=0; j<bs; j++) { array[j] += vv[j];}
1080:         } else if (vec->stash.insertmode == INSERT_VALUES) {
1081:           for (j=0; j<bs; j++) { array[j] = vv[j]; }
1082:         } else {
1083:           SETERRQ(PETSC_ERR_ARG_CORRUPT,"Insert mode is not set correctly; corrupted vector");
1084:         }
1085:       }
1086:     }
1087:     VecStashScatterEnd_Private(&vec->bstash);
1088:     VecRestoreArray(vec,&xarray);
1089:   }
1090:   vec->stash.insertmode = NOT_SET_VALUES;
1091:   return(0);
1092: }
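
For orientation only (this is not part of pdvec.c): the routines above are reached
through the public VecView(), VecSetValues(), and VecAssemblyBegin/End() interfaces,
which dispatch to the _MPI implementations for parallel vectors. A minimal sketch of
a driver, assuming the PETSc 3.x-era conventions visible in this listing (vectors and
viewers destroyed by value, ierr/CHKERRQ error handling):

#include "petscvec.h"

int main(int argc,char **argv)
{
  Vec            x;
  PetscViewer    viewer;
  PetscErrorCode ierr;
  PetscInt       i,rstart,rend;
  PetscScalar    v;

  ierr = PetscInitialize(&argc,&argv,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr);
  /* parallel vector; PETSc chooses the local sizes */
  ierr = VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,100,&x);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(x,&rstart,&rend);CHKERRQ(ierr);
  for (i=rstart; i<rend; i++) {
    v    = (PetscScalar)i;
    ierr = VecSetValues(x,1,&i,&v,INSERT_VALUES);CHKERRQ(ierr); /* VecSetValues_MPI */
  }
  ierr = VecAssemblyBegin(x);CHKERRQ(ierr);                     /* VecAssemblyBegin_MPI */
  ierr = VecAssemblyEnd(x);CHKERRQ(ierr);                       /* VecAssemblyEnd_MPI */

  ierr = VecView(x,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);    /* VecView_MPI_ASCII */

  ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD,"x.dat",FILE_MODE_WRITE,&viewer);CHKERRQ(ierr);
  ierr = VecView(x,viewer);CHKERRQ(ierr);                       /* VecView_MPI_Binary */
  ierr = PetscViewerDestroy(viewer);CHKERRQ(ierr);

  ierr = VecDestroy(x);CHKERRQ(ierr);                           /* VecDestroy_MPI */
  ierr = PetscFinalize();
  return 0;
}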